Skip to content

Commit c051617

Browse files
committed
feat(pyspark): implement percent_rank and enable testing
1 parent 77652a4 commit c051617

File tree

2 files changed: +7 additions, -16 deletions

ibis/backends/pyspark/compiler.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1313,11 +1313,7 @@ def compile_dense_rank(t, expr, scope, timecontext, **kwargs):
13131313

13141314
@compiles(ops.PercentRank)
13151315
def compile_percent_rank(t, expr, scope, timecontext, **kwargs):
1316-
raise com.UnsupportedOperationError(
1317-
'Pyspark percent_rank() function indexes from 0 '
1318-
'instead of 1, and does not match expected '
1319-
'output of ibis expressions.'
1320-
)
1316+
return F.percent_rank()
13211317

13221318

13231319
@compiles(ops.NTile)

ibis/backends/tests/test_window.py

Lines changed: 6 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -49,9 +49,6 @@ def calc_zscore(s):
4949
lambda t, win: t.id.percent_rank().over(win),
5050
lambda t: t.id.rank(pct=True),
5151
id='percent_rank',
52-
marks=pytest.mark.notimpl(
53-
["duckdb", "impala", "mysql", "postgres", "pyspark", "sqlite"]
54-
),
5552
),
5653
param(
5754
lambda t, win: t.float_col.ntile(buckets=7).over(win),
@@ -70,7 +67,7 @@ def calc_zscore(s):
7067
id='last',
7168
),
7269
param(
73-
lambda t, win: ibis.row_number().over(win),
70+
lambda _, win: ibis.row_number().over(win),
7471
lambda t: t.cumcount(),
7572
id='row_number',
7673
marks=pytest.mark.notimpl(["pandas"]),
@@ -180,7 +177,7 @@ def calc_zscore(s):
180177
)
181178
@pytest.mark.notimpl(["clickhouse", "dask", "datafusion"])
182179
def test_grouped_bounded_expanding_window(
183-
backend, alltypes, df, con, result_fn, expected_fn
180+
backend, alltypes, df, result_fn, expected_fn
184181
):
185182

186183
expr = alltypes.mutate(
@@ -228,7 +225,7 @@ def test_grouped_bounded_expanding_window(
228225
# Some backends do not support non-grouped window specs
229226
@pytest.mark.notimpl(["clickhouse", "dask", "datafusion"])
230227
def test_ungrouped_bounded_expanding_window(
231-
backend, alltypes, df, con, result_fn, expected_fn
228+
backend, alltypes, df, result_fn, expected_fn
232229
):
233230

234231
expr = alltypes.mutate(
@@ -248,7 +245,7 @@ def test_ungrouped_bounded_expanding_window(
248245

249246

250247
@pytest.mark.notimpl(["clickhouse", "dask", "datafusion", "pandas"])
251-
def test_grouped_bounded_following_window(backend, alltypes, df, con):
248+
def test_grouped_bounded_following_window(backend, alltypes, df):
252249

253250
window = ibis.window(
254251
preceding=0,
@@ -304,9 +301,7 @@ def test_grouped_bounded_following_window(backend, alltypes, df, con):
304301
],
305302
)
306303
@pytest.mark.notimpl(["clickhouse", "dask", "datafusion"])
307-
def test_grouped_bounded_preceding_window(
308-
backend, alltypes, df, con, window_fn
309-
):
304+
def test_grouped_bounded_preceding_window(backend, alltypes, df, window_fn):
310305
window = window_fn(alltypes)
311306

312307
expr = alltypes.mutate(val=alltypes.double_col.sum().over(window))
@@ -358,7 +353,7 @@ def test_grouped_bounded_preceding_window(
358353
)
359354
@pytest.mark.notimpl(["clickhouse", "datafusion"])
360355
def test_grouped_unbounded_window(
361-
backend, alltypes, df, con, result_fn, expected_fn, ordered
356+
backend, alltypes, df, result_fn, expected_fn, ordered
362357
):
363358
# Define a window that is
364359
# 1) Grouped

Comments (0)