@@ -362,7 +362,8 @@ def test_create_temporary_table_from_schema(con_no_data, new_schema):
             == column_type
         )
 
-    con_no_data.disconnect()
+    if con_no_data.name != "pyspark":
+        con_no_data.disconnect()
     con_no_data.reconnect()
     # verify table no longer exist after reconnect
     assert temp_table not in con_no_data.list_tables()
@@ -822,7 +823,8 @@ def test_connect_url(url):
     try:
         assert con.execute(ibis.literal(1)) == 1
     finally:
-        con.disconnect()
+        if con.name != "pyspark":
+            con.disconnect()
 
 
 @pytest.mark.parametrize(
@@ -1256,7 +1258,7 @@ def test_set_backend(con, monkeypatch):
     "name",
     [
         param(name, marks=getattr(mark, name), id=name)
-        for name in ("datafusion", "duckdb", "polars", "sqlite")
+        for name in ("datafusion", "duckdb", "polars", "sqlite", "pyspark")
     ],
 )
 def test_set_backend_name(name, monkeypatch):
@@ -1268,7 +1270,8 @@ def test_set_backend_name(name, monkeypatch):
         con = ibis.get_backend()
         assert con.name == name
     finally:
-        con.disconnect()
+        if con.name != "pyspark":
+            con.disconnect()
 
 
 @pytest.mark.parametrize(