Skip to content

Commit e0afb7a

Browse files
authored
chre: add tpch q19-22 and benchmark readme (#929)
1 parent 5f9a801 commit e0afb7a

File tree

9 files changed

+351
-0
lines changed

9 files changed

+351
-0
lines changed

tests/benchmark/README.md

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
# Benchmark Tests
2+
3+
## Overview
4+
5+
This directory contains scripts for performance benchmarking of various components of BigFrames.
6+
7+
## Execution Details
8+
9+
Scripts in this directory can be executed as part of the benchmarking session or independently from the command line. This allows for quick, standalone runs for immediate debugging and validation without the overhead of initiating full benchmark sessions.
10+
11+
## Why Separate Processes?
12+
13+
Each benchmark is executed in a separate process to mitigate the effects of any residual caching or settings that may persist in BigFrames, ensuring that each test is conducted in a clean state.

tests/benchmark/tpch/q19.py

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
# Copyright 2024 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# https://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
import pathlib
15+
16+
import benchmark.utils as utils
17+
import bigframes_vendored.tpch.queries.q19 as vendored_tpch_q19
18+
19+
if __name__ == "__main__":
    # Standalone entry point: resolve the shared TPC-H benchmark
    # configuration once, then time the vendored Q19 implementation.
    dataset_id, session, suffix = utils.get_tpch_configuration()

    utils.get_execution_time(
        vendored_tpch_q19.q,
        pathlib.Path(__file__).absolute(),
        suffix,
        dataset_id,
        session,
    )

tests/benchmark/tpch/q20.py

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
# Copyright 2024 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# https://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
import pathlib
15+
16+
import benchmark.utils as utils
17+
import bigframes_vendored.tpch.queries.q20 as vendored_tpch_q20
18+
19+
if __name__ == "__main__":
    # Standalone entry point for the Q20 benchmark: fetch the shared
    # TPC-H configuration, then measure the vendored query's runtime.
    dataset_id, session, suffix = utils.get_tpch_configuration()
    script_path = pathlib.Path(__file__).absolute()

    utils.get_execution_time(
        vendored_tpch_q20.q, script_path, suffix, dataset_id, session
    )

tests/benchmark/tpch/q21.py

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
# Copyright 2024 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# https://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
import pathlib
15+
16+
import benchmark.utils as utils
17+
import bigframes_vendored.tpch.queries.q21 as vendored_tpch_q21
18+
19+
if __name__ == "__main__":
    # Standalone entry point: time the vendored TPC-H Q21 query against
    # the configured benchmark dataset/session.
    dataset_id, session, suffix = utils.get_tpch_configuration()

    utils.get_execution_time(
        vendored_tpch_q21.q,
        pathlib.Path(__file__).absolute(),
        suffix,
        dataset_id,
        session,
    )

tests/benchmark/tpch/q22.py

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
# Copyright 2024 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# https://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
import pathlib
15+
16+
import benchmark.utils as utils
17+
import bigframes_vendored.tpch.queries.q22 as vendored_tpch_q22
18+
19+
if __name__ == "__main__":
    # Standalone entry point for the Q22 benchmark run.
    dataset_id, session, suffix = utils.get_tpch_configuration()
    script_path = pathlib.Path(__file__).absolute()

    utils.get_execution_time(
        vendored_tpch_q22.q, script_path, suffix, dataset_id, session
    )
Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
# Contains code from https://github.com/pola-rs/tpch/blob/main/queries/polars/q19.py
2+
3+
import bigframes
4+
import bigframes.pandas as bpd
5+
6+
7+
def q(dataset_id: str, session: bigframes.Session):
    """TPC-H Q19 (Discounted Revenue) implemented with BigQuery DataFrames.

    Joins PART with LINEITEM and sums ``L_EXTENDEDPRICE * (1 - L_DISCOUNT)``
    over rows matching one of three brand/container/quantity/size predicate
    groups, all of which also require air shipment and in-person delivery.

    Args:
        dataset_id: BigQuery dataset (inside ``bigframes-dev-perf``) holding
            the TPC-H tables.
        session: BigFrames session used to read the tables and run the query.
    """
    lineitem = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.LINEITEM",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )
    part = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.PART",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )

    merged = bpd.merge(part, lineitem, left_on="P_PARTKEY", right_on="L_PARTKEY")

    # Shared predicates (ship mode, delivery instruction) AND-ed with a
    # disjunction of three brand-specific container/quantity/size windows.
    filtered = merged[
        (merged["L_SHIPMODE"].isin(["AIR", "AIR REG"]))
        & (merged["L_SHIPINSTRUCT"] == "DELIVER IN PERSON")
        & (
            (
                (merged["P_BRAND"] == "Brand#12")
                & (
                    merged["P_CONTAINER"].isin(
                        ["SM CASE", "SM BOX", "SM PACK", "SM PKG"]
                    )
                )
                & (merged["L_QUANTITY"].between(1, 11, inclusive="both"))
                & (merged["P_SIZE"].between(1, 5, inclusive="both"))
            )
            | (
                (merged["P_BRAND"] == "Brand#23")
                & (
                    merged["P_CONTAINER"].isin(
                        ["MED BAG", "MED BOX", "MED PKG", "MED PACK"]
                    )
                )
                & (merged["L_QUANTITY"].between(10, 20, inclusive="both"))
                & (merged["P_SIZE"].between(1, 10, inclusive="both"))
            )
            | (
                (merged["P_BRAND"] == "Brand#34")
                & (
                    merged["P_CONTAINER"].isin(
                        ["LG CASE", "LG BOX", "LG PACK", "LG PKG"]
                    )
                )
                & (merged["L_QUANTITY"].between(20, 30, inclusive="both"))
                & (merged["P_SIZE"].between(1, 15, inclusive="both"))
            )
        )
    ]

    # Aggregate discounted revenue; the result is discarded.
    # NOTE(review): presumably round() on the scalar forces the query to
    # execute for benchmarking purposes — confirm against utils' timing setup.
    revenue = (filtered["L_EXTENDEDPRICE"] * (1 - filtered["L_DISCOUNT"])).sum()
    _ = round(revenue, 2)
Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
# Contains code from https://github.com/pola-rs/tpch/blob/main/queries/polars/q20.py
2+
3+
from datetime import date
4+
5+
import bigframes
6+
import bigframes.pandas as bpd
7+
8+
9+
def q(dataset_id: str, session: bigframes.Session):
    """TPC-H Q20 (Potential Part Promotion) implemented with BigQuery DataFrames.

    Finds suppliers in a given nation with excess stock of parts whose names
    start with a given prefix — i.e. suppliers whose available quantity
    exceeds half the quantity shipped for that part/supplier during 1994 —
    and writes (S_NAME, S_ADDRESS) ordered by supplier name to BigQuery.

    Args:
        dataset_id: BigQuery dataset (inside ``bigframes-dev-perf``) holding
            the TPC-H tables.
        session: BigFrames session used to read the tables and run the query.
    """
    lineitem = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.LINEITEM",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )
    nation = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.NATION",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )
    part = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.PART",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )
    partsupp = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.PARTSUPP",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )
    supplier = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.SUPPLIER",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )

    # Query parameters (TPC-H Q20 substitution values).
    var1 = date(1994, 1, 1)  # shipdate lower bound (inclusive)
    var2 = date(1995, 1, 1)  # shipdate upper bound (exclusive)
    var3 = "CANADA"  # nation name
    var4 = "forest"  # part-name prefix

    # Half the 1994 shipped quantity per (part, supplier) — the excess-stock
    # threshold each supplier's availability must exceed.
    q1 = lineitem[(lineitem["L_SHIPDATE"] >= var1) & (lineitem["L_SHIPDATE"] < var2)]
    q1 = q1.groupby(["L_PARTKEY", "L_SUPPKEY"], as_index=False).agg(
        SUM_QUANTITY=bpd.NamedAgg(column="L_QUANTITY", aggfunc="sum")
    )
    q1["SUM_QUANTITY"] = q1["SUM_QUANTITY"] * 0.5
    q2 = nation[nation["N_NAME"] == var3]

    # Suppliers located in the target nation.
    q3 = supplier.merge(q2, left_on="S_NATIONKEY", right_on="N_NATIONKEY")

    filtered_parts = part[part["P_NAME"].str.startswith(var4)]

    # In unordered (partial ordering) mode, impose a deterministic order
    # before deduplicating so results are stable across runs.
    if not session._strictly_ordered:
        filtered_parts = filtered_parts[["P_PARTKEY"]].sort_values(by=["P_PARTKEY"])
    filtered_parts = filtered_parts[["P_PARTKEY"]].drop_duplicates()
    joined_parts = filtered_parts.merge(
        partsupp, left_on="P_PARTKEY", right_on="PS_PARTKEY"
    )

    # Keep (part, supplier) pairs where stock exceeds the half-shipped threshold.
    final_join = joined_parts.merge(
        q1, left_on=["PS_SUPPKEY", "P_PARTKEY"], right_on=["L_SUPPKEY", "L_PARTKEY"]
    )
    final_filtered = final_join[final_join["PS_AVAILQTY"] > final_join["SUM_QUANTITY"]]

    final_filtered = final_filtered[["PS_SUPPKEY"]]
    if not session._strictly_ordered:
        final_filtered = final_filtered.sort_values(by="PS_SUPPKEY")
    final_filtered = final_filtered.drop_duplicates()

    # Restrict to suppliers in the target nation and order by name.
    final_result = final_filtered.merge(q3, left_on="PS_SUPPKEY", right_on="S_SUPPKEY")
    final_result = final_result[["S_NAME", "S_ADDRESS"]].sort_values(by="S_NAME")

    # Materialize to BigQuery, which executes the query for benchmarking.
    final_result.to_gbq()
Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
1+
# Contains code from https://github.com/pola-rs/tpch/blob/main/queries/duckdb/q21.py
2+
3+
import typing
4+
5+
import bigframes
6+
import bigframes.pandas as bpd
7+
8+
9+
def q(dataset_id: str, session: bigframes.Session):
    """TPC-H Q21 (Suppliers Who Kept Orders Waiting), BigQuery DataFrames version.

    Counts, per Saudi Arabian supplier, the finalized multi-supplier orders
    where that supplier was the only one whose receipt date exceeded its
    commit date, and writes the top 100 suppliers by wait count to BigQuery.

    Args:
        dataset_id: BigQuery dataset (inside ``bigframes-dev-perf``) holding
            the TPC-H tables.
        session: BigFrames session used to read the tables and run the query.
    """
    lineitem = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.LINEITEM",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )
    nation = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.NATION",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )
    orders = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.ORDERS",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )
    supplier = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.SUPPLIER",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )

    nation = nation[nation["N_NAME"] == "SAUDI ARABIA"]
    orders = orders[orders["O_ORDERSTATUS"] == "F"]  # finalized orders only

    # l1: line items delivered late (received after the committed date).
    l1 = lineitem[lineitem["L_RECEIPTDATE"] > lineitem["L_COMMITDATE"]][
        ["L_ORDERKEY", "L_SUPPKEY"]
    ]

    # l2: orders involving more than one distinct supplier.
    l2 = lineitem.groupby("L_ORDERKEY", as_index=False).agg(
        NUNIQUE_COL=bpd.NamedAgg(column="L_SUPPKEY", aggfunc="nunique")
    )
    l2 = l2[l2["NUNIQUE_COL"] > 1][["L_ORDERKEY"]]

    # l3: orders where exactly one supplier was late.
    l3 = l1.groupby("L_ORDERKEY", as_index=False).agg(
        NUNIQUE_COL=bpd.NamedAgg(column="L_SUPPKEY", aggfunc="nunique")
    )
    l3 = l3[l3["NUNIQUE_COL"] == 1][["L_ORDERKEY"]]

    # Late line items on multi-supplier orders whose only late supplier is
    # the one on the line item.
    l1 = l1.merge(l2, on="L_ORDERKEY", how="inner").merge(
        l3, on="L_ORDERKEY", how="inner"
    )

    # Attach supplier/nation info and keep only finalized orders.
    merged = supplier.merge(nation, left_on="S_NATIONKEY", right_on="N_NATIONKEY")
    merged = merged.merge(l1, left_on="S_SUPPKEY", right_on="L_SUPPKEY")
    merged = merged.merge(orders, left_on="L_ORDERKEY", right_on="O_ORDERKEY")

    # NUMWAIT: number of qualifying orders the supplier kept waiting.
    result = merged.groupby("S_NAME", as_index=False).agg(
        NUMWAIT=bpd.NamedAgg(column="L_SUPPKEY", aggfunc="size")
    )

    # cast() is a typing no-op to satisfy the type checker before chaining.
    result = (
        typing.cast(bpd.DataFrame, result)
        .sort_values(["NUMWAIT", "S_NAME"], ascending=[False, True])
        .head(100)
    )

    # Materialize to BigQuery, which executes the query for benchmarking.
    result.to_gbq()
Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
# Contains code from https://github.com/pola-rs/tpch/blob/main/queries/polars/q22.py
2+
3+
import bigframes
4+
import bigframes.pandas as bpd
5+
6+
7+
def q(dataset_id: str, session: bigframes.Session):
    """TPC-H Q22 (Global Sales Opportunity) implemented with BigQuery DataFrames.

    For customers in selected country codes with above-average positive
    account balances and no orders, reports per-country customer counts and
    total balances, ordered by country code, and writes them to BigQuery.

    Args:
        dataset_id: BigQuery dataset (inside ``bigframes-dev-perf``) holding
            the TPC-H tables.
        session: BigFrames session used to read the tables and run the query.
    """
    customer = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.CUSTOMER",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )
    orders = session.read_gbq(
        f"bigframes-dev-perf.{dataset_id}.ORDERS",
        index_col=bigframes.enums.DefaultIndexKind.NULL,
    )

    # Query parameter: the seven country codes of interest.
    country_codes = ["13", "31", "23", "29", "30", "18", "17"]

    # Country code is the first two characters of the phone number.
    customer["CNTRYCODE"] = customer["C_PHONE"].str.slice(0, 2)

    # Average positive balance among customers in the selected countries.
    avg_acctbal = customer[
        (customer["CNTRYCODE"].isin(country_codes)) & (customer["C_ACCTBAL"] > 0)
    ]["C_ACCTBAL"].mean()

    # In unordered (partial ordering) mode, sort before deduplicating so the
    # retained rows are deterministic across runs.
    if not session._strictly_ordered:
        orders = orders.sort_values(by="O_CUSTKEY")
    orders_unique = orders.drop_duplicates(subset=["O_CUSTKEY"])

    # Flag customers that have at least one order (anti-join via left merge).
    matched_customers = customer.merge(
        orders_unique, left_on="C_CUSTKEY", right_on="O_CUSTKEY"
    )
    matched_customers["IS_IN_ORDERS"] = True

    customer = customer.merge(
        matched_customers[["C_CUSTKEY", "IS_IN_ORDERS"]], on="C_CUSTKEY", how="left"
    )
    customer["IS_IN_ORDERS"] = customer["IS_IN_ORDERS"].fillna(False)

    # Target countries, above-average balance, and no orders on record.
    filtered_customers = customer[
        (customer["CNTRYCODE"].isin(country_codes))
        & (customer["C_ACCTBAL"] > avg_acctbal)
        & (~customer["IS_IN_ORDERS"])
    ]

    result = filtered_customers.groupby("CNTRYCODE", as_index=False).agg(
        NUMCUST=bpd.NamedAgg(column="C_CUSTKEY", aggfunc="count"),
        TOTACCTBAL=bpd.NamedAgg(column="C_ACCTBAL", aggfunc="sum"),
    )

    result = result.sort_values(by="CNTRYCODE")

    # Materialize to BigQuery, which executes the query for benchmarking.
    result.to_gbq()

0 commit comments

Comments
 (0)