diff --git a/tests/benchmark/README.md b/tests/benchmark/README.md
new file mode 100644
index 0000000000..9c5f4a99d6
--- /dev/null
+++ b/tests/benchmark/README.md
@@ -0,0 +1,13 @@
+# Benchmark Tests
+
+## Overview
+
+This directory contains scripts for performance benchmarking of various components of BigFrames.
+
+## Execution Details
+
+Scripts in this directory can be executed as part of the benchmarking session or independently from the command line. This allows for quick, standalone runs for immediate debugging and validation without the overhead of initiating full benchmark sessions.
+
+## Why Separate Processes?
+
+Each benchmark is executed in a separate process to mitigate the effects of any residual caching or settings that may persist in BigFrames, ensuring that each test is conducted in a clean state.
diff --git a/tests/benchmark/tpch/q19.py b/tests/benchmark/tpch/q19.py
new file mode 100644
index 0000000000..7b13b0d250
--- /dev/null
+++ b/tests/benchmark/tpch/q19.py
@@ -0,0 +1,25 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import pathlib
+
+import benchmark.utils as utils
+import bigframes_vendored.tpch.queries.q19 as vendored_tpch_q19
+
+if __name__ == "__main__":
+    dataset_id, session, suffix = utils.get_tpch_configuration()
+    current_path = pathlib.Path(__file__).absolute()
+
+    utils.get_execution_time(
+        vendored_tpch_q19.q, current_path, suffix, dataset_id, session
+    )
diff --git a/tests/benchmark/tpch/q20.py b/tests/benchmark/tpch/q20.py
new file mode 100644
index 0000000000..e02e9306f8
--- /dev/null
+++ b/tests/benchmark/tpch/q20.py
@@ -0,0 +1,25 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import pathlib
+
+import benchmark.utils as utils
+import bigframes_vendored.tpch.queries.q20 as vendored_tpch_q20
+
+if __name__ == "__main__":
+    dataset_id, session, suffix = utils.get_tpch_configuration()
+    current_path = pathlib.Path(__file__).absolute()
+
+    utils.get_execution_time(
+        vendored_tpch_q20.q, current_path, suffix, dataset_id, session
+    )
diff --git a/tests/benchmark/tpch/q21.py b/tests/benchmark/tpch/q21.py
new file mode 100644
index 0000000000..d123286c3e
--- /dev/null
+++ b/tests/benchmark/tpch/q21.py
@@ -0,0 +1,25 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import pathlib
+
+import benchmark.utils as utils
+import bigframes_vendored.tpch.queries.q21 as vendored_tpch_q21
+
+if __name__ == "__main__":
+    dataset_id, session, suffix = utils.get_tpch_configuration()
+    current_path = pathlib.Path(__file__).absolute()
+
+    utils.get_execution_time(
+        vendored_tpch_q21.q, current_path, suffix, dataset_id, session
+    )
diff --git a/tests/benchmark/tpch/q22.py b/tests/benchmark/tpch/q22.py
new file mode 100644
index 0000000000..841740da0e
--- /dev/null
+++ b/tests/benchmark/tpch/q22.py
@@ -0,0 +1,25 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import pathlib
+
+import benchmark.utils as utils
+import bigframes_vendored.tpch.queries.q22 as vendored_tpch_q22
+
+if __name__ == "__main__":
+    dataset_id, session, suffix = utils.get_tpch_configuration()
+    current_path = pathlib.Path(__file__).absolute()
+
+    utils.get_execution_time(
+        vendored_tpch_q22.q, current_path, suffix, dataset_id, session
+    )
diff --git a/third_party/bigframes_vendored/tpch/queries/q19.py b/third_party/bigframes_vendored/tpch/queries/q19.py
new file mode 100644
index 0000000000..526d0aa1a6
--- /dev/null
+++ b/third_party/bigframes_vendored/tpch/queries/q19.py
@@ -0,0 +1,57 @@
+# Contains code from https://github.com/pola-rs/tpch/blob/main/queries/polars/q19.py
+
+import bigframes
+import bigframes.pandas as bpd
+
+
+def q(dataset_id: str, session: bigframes.Session):
+    lineitem = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.LINEITEM",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+    part = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.PART",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+
+    merged = bpd.merge(part, lineitem, left_on="P_PARTKEY", right_on="L_PARTKEY")
+
+    filtered = merged[
+        (merged["L_SHIPMODE"].isin(["AIR", "AIR REG"]))
+        & (merged["L_SHIPINSTRUCT"] == "DELIVER IN PERSON")
+        & (
+            (
+                (merged["P_BRAND"] == "Brand#12")
+                & (
+                    merged["P_CONTAINER"].isin(
+                        ["SM CASE", "SM BOX", "SM PACK", "SM PKG"]
+                    )
+                )
+                & (merged["L_QUANTITY"].between(1, 11, inclusive="both"))
+                & (merged["P_SIZE"].between(1, 5, inclusive="both"))
+            )
+            | (
+                (merged["P_BRAND"] == "Brand#23")
+                & (
+                    merged["P_CONTAINER"].isin(
+                        ["MED BAG", "MED BOX", "MED PKG", "MED PACK"]
+                    )
+                )
+                & (merged["L_QUANTITY"].between(10, 20, inclusive="both"))
+                & (merged["P_SIZE"].between(1, 10, inclusive="both"))
+            )
+            | (
+                (merged["P_BRAND"] == "Brand#34")
+                & (
+                    merged["P_CONTAINER"].isin(
+                        ["LG CASE", "LG BOX", "LG PACK", "LG PKG"]
+                    )
+                )
+                & (merged["L_QUANTITY"].between(20, 30, inclusive="both"))
+                & (merged["P_SIZE"].between(1, 15, inclusive="both"))
+            )
+        )
+    ]
+
+    revenue = (filtered["L_EXTENDEDPRICE"] * (1 - filtered["L_DISCOUNT"])).sum()
+    _ = round(revenue, 2)
diff --git a/third_party/bigframes_vendored/tpch/queries/q20.py b/third_party/bigframes_vendored/tpch/queries/q20.py
new file mode 100644
index 0000000000..671d7e06fb
--- /dev/null
+++ b/third_party/bigframes_vendored/tpch/queries/q20.py
@@ -0,0 +1,67 @@
+# Contains code from https://github.com/pola-rs/tpch/blob/main/queries/polars/q20.py
+
+from datetime import date
+
+import bigframes
+import bigframes.pandas as bpd
+
+
+def q(dataset_id: str, session: bigframes.Session):
+    lineitem = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.LINEITEM",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+    nation = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.NATION",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+    part = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.PART",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+    partsupp = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.PARTSUPP",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+    supplier = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.SUPPLIER",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+
+    var1 = date(1994, 1, 1)
+    var2 = date(1995, 1, 1)
+    var3 = "CANADA"
+    var4 = "forest"
+
+    q1 = lineitem[(lineitem["L_SHIPDATE"] >= var1) & (lineitem["L_SHIPDATE"] < var2)]
+    q1 = q1.groupby(["L_PARTKEY", "L_SUPPKEY"], as_index=False).agg(
+        SUM_QUANTITY=bpd.NamedAgg(column="L_QUANTITY", aggfunc="sum")
+    )
+    q1["SUM_QUANTITY"] = q1["SUM_QUANTITY"] * 0.5
+    q2 = nation[nation["N_NAME"] == var3]
+
+    q3 = supplier.merge(q2, left_on="S_NATIONKEY", right_on="N_NATIONKEY")
+
+    filtered_parts = part[part["P_NAME"].str.startswith(var4)]
+
+    if not session._strictly_ordered:
+        filtered_parts = filtered_parts[["P_PARTKEY"]].sort_values(by=["P_PARTKEY"])
+    filtered_parts = filtered_parts[["P_PARTKEY"]].drop_duplicates()
+    joined_parts = filtered_parts.merge(
+        partsupp, left_on="P_PARTKEY", right_on="PS_PARTKEY"
+    )
+
+    final_join = joined_parts.merge(
+        q1, left_on=["PS_SUPPKEY", "P_PARTKEY"], right_on=["L_SUPPKEY", "L_PARTKEY"]
+    )
+    final_filtered = final_join[final_join["PS_AVAILQTY"] > final_join["SUM_QUANTITY"]]
+
+    final_filtered = final_filtered[["PS_SUPPKEY"]]
+    if not session._strictly_ordered:
+        final_filtered = final_filtered.sort_values(by="PS_SUPPKEY")
+    final_filtered = final_filtered.drop_duplicates()
+
+    final_result = final_filtered.merge(q3, left_on="PS_SUPPKEY", right_on="S_SUPPKEY")
+    final_result = final_result[["S_NAME", "S_ADDRESS"]].sort_values(by="S_NAME")
+
+    final_result.to_gbq()
diff --git a/third_party/bigframes_vendored/tpch/queries/q21.py b/third_party/bigframes_vendored/tpch/queries/q21.py
new file mode 100644
index 0000000000..3a4ea495c9
--- /dev/null
+++ b/third_party/bigframes_vendored/tpch/queries/q21.py
@@ -0,0 +1,62 @@
+# Contains code from https://github.com/pola-rs/tpch/blob/main/queries/duckdb/q21.py
+
+import typing
+
+import bigframes
+import bigframes.pandas as bpd
+
+
+def q(dataset_id: str, session: bigframes.Session):
+    lineitem = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.LINEITEM",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+    nation = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.NATION",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+    orders = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.ORDERS",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+    supplier = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.SUPPLIER",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+
+    nation = nation[nation["N_NAME"] == "SAUDI ARABIA"]
+    orders = orders[orders["O_ORDERSTATUS"] == "F"]
+
+    l1 = lineitem[lineitem["L_RECEIPTDATE"] > lineitem["L_COMMITDATE"]][
+        ["L_ORDERKEY", "L_SUPPKEY"]
+    ]
+
+    l2 = lineitem.groupby("L_ORDERKEY", as_index=False).agg(
+        NUNIQUE_COL=bpd.NamedAgg(column="L_SUPPKEY", aggfunc="nunique")
+    )
+    l2 = l2[l2["NUNIQUE_COL"] > 1][["L_ORDERKEY"]]
+
+    l3 = l1.groupby("L_ORDERKEY", as_index=False).agg(
+        NUNIQUE_COL=bpd.NamedAgg(column="L_SUPPKEY", aggfunc="nunique")
+    )
+    l3 = l3[l3["NUNIQUE_COL"] == 1][["L_ORDERKEY"]]
+
+    l1 = l1.merge(l2, on="L_ORDERKEY", how="inner").merge(
+        l3, on="L_ORDERKEY", how="inner"
+    )
+
+    merged = supplier.merge(nation, left_on="S_NATIONKEY", right_on="N_NATIONKEY")
+    merged = merged.merge(l1, left_on="S_SUPPKEY", right_on="L_SUPPKEY")
+    merged = merged.merge(orders, left_on="L_ORDERKEY", right_on="O_ORDERKEY")
+
+    result = merged.groupby("S_NAME", as_index=False).agg(
+        NUMWAIT=bpd.NamedAgg(column="L_SUPPKEY", aggfunc="size")
+    )
+
+    result = (
+        typing.cast(bpd.DataFrame, result)
+        .sort_values(["NUMWAIT", "S_NAME"], ascending=[False, True])
+        .head(100)
+    )
+
+    result.to_gbq()
diff --git a/third_party/bigframes_vendored/tpch/queries/q22.py b/third_party/bigframes_vendored/tpch/queries/q22.py
new file mode 100644
index 0000000000..97180cd11a
--- /dev/null
+++ b/third_party/bigframes_vendored/tpch/queries/q22.py
@@ -0,0 +1,52 @@
+# Contains code from https://github.com/pola-rs/tpch/blob/main/queries/polars/q22.py
+
+import bigframes
+import bigframes.pandas as bpd
+
+
+def q(dataset_id: str, session: bigframes.Session):
+    customer = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.CUSTOMER",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+    orders = session.read_gbq(
+        f"bigframes-dev-perf.{dataset_id}.ORDERS",
+        index_col=bigframes.enums.DefaultIndexKind.NULL,
+    )
+
+    country_codes = ["13", "31", "23", "29", "30", "18", "17"]
+
+    customer["CNTRYCODE"] = customer["C_PHONE"].str.slice(0, 2)
+
+    avg_acctbal = customer[
+        (customer["CNTRYCODE"].isin(country_codes)) & (customer["C_ACCTBAL"] > 0)
+    ]["C_ACCTBAL"].mean()
+
+    if not session._strictly_ordered:
+        orders = orders.sort_values(by="O_CUSTKEY")
+    orders_unique = orders.drop_duplicates(subset=["O_CUSTKEY"])
+
+    matched_customers = customer.merge(
+        orders_unique, left_on="C_CUSTKEY", right_on="O_CUSTKEY"
+    )
+    matched_customers["IS_IN_ORDERS"] = True
+
+    customer = customer.merge(
+        matched_customers[["C_CUSTKEY", "IS_IN_ORDERS"]], on="C_CUSTKEY", how="left"
+    )
+    customer["IS_IN_ORDERS"] = customer["IS_IN_ORDERS"].fillna(False)
+
+    filtered_customers = customer[
+        (customer["CNTRYCODE"].isin(country_codes))
+        & (customer["C_ACCTBAL"] > avg_acctbal)
+        & (~customer["IS_IN_ORDERS"])
+    ]
+
+    result = filtered_customers.groupby("CNTRYCODE", as_index=False).agg(
+        NUMCUST=bpd.NamedAgg(column="C_CUSTKEY", aggfunc="count"),
+        TOTACCTBAL=bpd.NamedAgg(column="C_ACCTBAL", aggfunc="sum"),
+    )
+
+    result = result.sort_values(by="CNTRYCODE")
+
+    result.to_gbq()
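A note on the README's "Why Separate Processes?" section: the session runner itself is not part of this diff, so the following is only a hypothetical sketch of how a driver might launch each query script in its own interpreter, mirroring the standalone `python tests/benchmark/tpch/q19.py` invocation the README describes. The `tests/benchmark/tpch` path and the `q*.py` glob are assumptions, not details confirmed by this change.

```python
# Hypothetical driver sketch, not part of this change: run each TPC-H
# benchmark script in a fresh Python process.
import pathlib
import subprocess
import sys

BENCHMARK_DIR = pathlib.Path("tests/benchmark/tpch")  # assumed script location

for script in sorted(BENCHMARK_DIR.glob("q*.py")):
    # A separate interpreter per query means no BigFrames caching or session
    # settings can leak from one measurement into the next.
    completed = subprocess.run([sys.executable, str(script)], check=False)
    if completed.returncode != 0:
        print(f"{script.name} exited with code {completed.returncode}", file=sys.stderr)
```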