Skip to content

Commit c7d1d38

Browse files
gforsyth and cpcloud authored
fix(pyspark): generate IF NOT EXISTS if force=True (#10782)
Co-authored-by: Phillip Cloud <[email protected]>
1 parent 3e86e2d commit c7d1d38

File tree

2 files changed

+35
-2
lines changed

2 files changed

+35
-2
lines changed

ibis/backends/pyspark/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -507,7 +507,7 @@ def create_database(
507507

508508
sql = sge.Create(
509509
kind="DATABASE",
510-
exist=force,
510+
exists=force,
511511
this=sg.to_identifier(name, quoted=self.compiler.quoted),
512512
properties=properties,
513513
)
@@ -533,7 +533,7 @@ def drop_database(
533533
"""
534534
sql = sge.Drop(
535535
kind="DATABASE",
536-
exist=force,
536+
exists=force,
537537
this=sg.to_identifier(name, quoted=self.compiler.quoted),
538538
cascade=force,
539539
)

ibis/backends/pyspark/tests/test_ddl.py

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99

1010
import ibis
1111
from ibis import util
12+
from ibis.backends.tests.errors import PySparkAnalysisException
1213
from ibis.tests.util import assert_equal
1314

1415
pyspark = pytest.importorskip("pyspark")
@@ -177,3 +178,35 @@ def test_create_table_reserved_identifier(con, alltypes, keyword_t):
177178
t = con.create_table(keyword_t, expr)
178179
result = t.count().execute()
179180
assert result == expected
181+
182+
183+
@pytest.mark.xfail_version(
184+
pyspark=["pyspark<3.5"],
185+
raises=ValueError,
186+
reason="PySparkAnalysisException is not available in PySpark <3.5",
187+
)
188+
def test_create_database_exists(con):
189+
con.create_database(dbname := util.gen_name("dbname"))
190+
191+
with pytest.raises(PySparkAnalysisException):
192+
con.create_database(dbname)
193+
194+
con.create_database(dbname, force=True)
195+
196+
con.drop_database(dbname, force=True)
197+
198+
199+
@pytest.mark.xfail_version(
200+
pyspark=["pyspark<3.5"],
201+
raises=ValueError,
202+
reason="PySparkAnalysisException is not available in PySpark <3.5",
203+
)
204+
def test_drop_database_exists(con):
205+
con.create_database(dbname := util.gen_name("dbname"))
206+
207+
con.drop_database(dbname)
208+
209+
with pytest.raises(PySparkAnalysisException):
210+
con.drop_database(dbname)
211+
212+
con.drop_database(dbname, force=True)

0 commit comments

Comments (0)