|
9 | 9 |
|
10 | 10 | import ibis
|
11 | 11 | from ibis import util
|
| 12 | +from ibis.backends.tests.errors import PySparkAnalysisException |
12 | 13 | from ibis.tests.util import assert_equal
|
13 | 14 |
|
14 | 15 | pyspark = pytest.importorskip("pyspark")
|
@@ -177,3 +178,35 @@ def test_create_table_reserved_identifier(con, alltypes, keyword_t):
|
177 | 178 | t = con.create_table(keyword_t, expr)
|
178 | 179 | result = t.count().execute()
|
179 | 180 | assert result == expected
|
| 181 | + |
| 182 | + |
@pytest.mark.xfail_version(
    pyspark=["pyspark<3.5"],
    raises=ValueError,
    reason="PySparkAnalysisException is not available in PySpark <3.5",
)
def test_create_database_exists(con):
    """Creating a database that already exists raises unless ``force=True``."""
    name = util.gen_name("dbname")
    con.create_database(name)

    # A second create on the same name must fail loudly by default.
    with pytest.raises(PySparkAnalysisException):
        con.create_database(name)

    # With force=True the duplicate create is tolerated (IF NOT EXISTS).
    con.create_database(name, force=True)

    # Clean up the database created by this test.
    con.drop_database(name, force=True)
| 197 | + |
| 198 | + |
@pytest.mark.xfail_version(
    pyspark=["pyspark<3.5"],
    raises=ValueError,
    reason="PySparkAnalysisException is not available in PySpark <3.5",
)
def test_drop_database_exists(con):
    """Dropping a nonexistent database raises unless ``force=True``."""
    name = util.gen_name("dbname")
    con.create_database(name)

    # First drop succeeds: the database exists.
    con.drop_database(name)

    # Dropping again must fail loudly by default.
    with pytest.raises(PySparkAnalysisException):
        con.drop_database(name)

    # With force=True the missing database is tolerated (IF EXISTS).
    con.drop_database(name, force=True)
0 commit comments