diff --git a/ibis-server/Metrics.md b/ibis-server/Metrics.md new file mode 100644 index 000000000..211f079d7 --- /dev/null +++ b/ibis-server/Metrics.md @@ -0,0 +1,47 @@ +# Ibis Server Traced Metrics + +The ibis-server codebase uses OpenTelemetry for tracing. The following spans are traced across different components: + +## Rewriter Module +- `transpile` - Internal span for SQL transpilation operations +- `rewrite` - Internal span for SQL rewriting operations +- `extract_manifest` - Internal span for manifest extraction from SQL +- `external_rewrite` - Client span for external engine rewriting operations +- `embedded_rewrite` - Internal span for embedded engine rewriting operations + +## Substitute Module +- `substitute` - Internal span for model substitution operations + +## Connector Module +- `connector_init` - Internal span for connector initialization +- `connector_query` - Client span for executing queries +- `connector_dry_run` - Client span for dry-running queries +- `describe_sql_for_error_message` - Client span for generating SQL error messages +- `get_schema` - Client span for retrieving schema information +- `duckdb_query` - Internal span for DuckDB queries +- `duckdb_dry_run` - Internal span for DuckDB dry runs + +## API Endpoints (v2) +- `v2_query_{data_source}` - Server span for query operations +- `v2_query_{data_source}_dry_run` - Server span for dry run query operations +- `v2_validate_{data_source}` - Server span for validation operations +- `v2_metadata_tables_{data_source}` - Server span for metadata table listing +- `v2_metadata_constraints_{data_source}` - Server span for metadata constraint listing +- `dry_plan` - Server span for dry planning operations +- `v2_dry_plan_{data_source}` - Server span for data source specific dry planning +- `v2_model_substitute_{data_source}` - Server span for model substitution operations + +## API Endpoints (v3) +- `v3_query_{data_source}` - Server span for query operations +- `v3_query_{data_source}_dry_run` 
- Server span for dry run query operations +- `v3_dry_plan_{data_source}` - Server span for data source specific dry planning +- `v3_validate_{data_source}` - Server span for validation operations +- `v3_functions_{data_source}` - Server span for function listing +- `v3_model-substitute_{data_source}` - Server span for model substitution operations + +## Utility Functions +- `base64_to_dict` - Internal span for base64 to dictionary conversion +- `to_json` - Internal span for DataFrame to JSON conversion + +## Trace Context +- Each endpoint accepts request headers and properly propagates trace context using the `build_context` function. diff --git a/ibis-server/README.md b/ibis-server/README.md index 1d4c6d8b3..37c584502 100644 --- a/ibis-server/README.md +++ b/ibis-server/README.md @@ -78,5 +78,15 @@ Run the server just run ``` +### Enable Tracing +The Ibis server uses OpenTelemetry as its tracing framework. Refer to OpenTelemetry zero-code instrumentation to install the required dependencies. +Then, use the following just command to start the Ibis server, which exports tracing logs to the console: +``` +just run-trace-console +``` +OpenTelemetry zero-code instrumentation is highly configurable. You can set the necessary exporters to send traces to your tracing services. + +[Metrics we are tracing right now](./Metrics.md) + ## Contributing Please see [CONTRIBUTING.md](docs/CONTRIBUTING.md) for more information. 
diff --git a/ibis-server/app/mdl/rewriter.py b/ibis-server/app/mdl/rewriter.py index ec3161256..65df64183 100644 --- a/ibis-server/app/mdl/rewriter.py +++ b/ibis-server/app/mdl/rewriter.py @@ -4,6 +4,7 @@ import sqlglot from anyio import to_thread from loguru import logger +from opentelemetry import trace from app.config import get_config from app.mdl.core import ( @@ -21,6 +22,8 @@ # Register custom dialects importlib.import_module("app.custom_sqlglot.dialects") +tracer = trace.get_tracer(__name__) + class Rewriter: def __init__( @@ -39,11 +42,13 @@ def __init__( else: self._rewriter = ExternalEngineRewriter(java_engine_connector) + @tracer.start_as_current_span("transpile", kind=trace.SpanKind.INTERNAL) def _transpile(self, planned_sql: str) -> str: read = self._get_read_dialect(self.experiment) write = self._get_write_dialect(self.data_source) return sqlglot.transpile(planned_sql, read=read, write=write)[0] + @tracer.start_as_current_span("rewrite", kind=trace.SpanKind.INTERNAL) async def rewrite(self, sql: str) -> str: manifest_str = ( self._extract_manifest(self.manifest_str, sql) or self.manifest_str @@ -55,6 +60,7 @@ async def rewrite(self, sql: str) -> str: logger.debug("Dialect SQL: {}", dialect_sql) return dialect_sql + @tracer.start_as_current_span("extract_manifest", kind=trace.SpanKind.INTERNAL) def _extract_manifest(self, manifest_str: str, sql: str) -> str: try: extractor = get_manifest_extractor(manifest_str) @@ -86,6 +92,7 @@ class ExternalEngineRewriter: def __init__(self, java_engine_connector: JavaEngineConnector): self.java_engine_connector = java_engine_connector + @tracer.start_as_current_span("external_rewrite", kind=trace.SpanKind.CLIENT) async def rewrite(self, manifest_str: str, sql: str) -> str: try: return await self.java_engine_connector.dry_plan(manifest_str, sql) @@ -105,6 +112,7 @@ class EmbeddedEngineRewriter: def __init__(self, function_path: str): self.function_path = function_path + 
@tracer.start_as_current_span("embedded_rewrite", kind=trace.SpanKind.INTERNAL) async def rewrite(self, manifest_str: str, sql: str) -> str: try: session_context = get_session_context(manifest_str, self.function_path) diff --git a/ibis-server/app/mdl/substitute.py b/ibis-server/app/mdl/substitute.py index 4c20c6c36..11d436058 100644 --- a/ibis-server/app/mdl/substitute.py +++ b/ibis-server/app/mdl/substitute.py @@ -1,3 +1,4 @@ +from opentelemetry import trace from sqlglot import exp, parse_one from sqlglot.optimizer.scope import build_scope @@ -5,6 +6,8 @@ from app.model.data_source import DataSource from app.util import base64_to_dict +tracer = trace.get_tracer(__name__) + class ModelSubstitute: def __init__(self, data_source: DataSource, manifest_str: str): @@ -12,6 +15,7 @@ def __init__(self, data_source: DataSource, manifest_str: str): self.manifest = base64_to_dict(manifest_str) self.model_dict = self._build_model_dict(self.manifest["models"]) + @tracer.start_as_current_span("substitute", kind=trace.SpanKind.INTERNAL) def substitute(self, sql: str, write: str | None = None) -> str: ast = parse_one(sql, dialect=self.data_source.value) root = build_scope(ast) diff --git a/ibis-server/app/model/connector.py b/ibis-server/app/model/connector.py index 3ebbcfd79..46186b955 100644 --- a/ibis-server/app/model/connector.py +++ b/ibis-server/app/model/connector.py @@ -15,6 +15,7 @@ from google.oauth2 import service_account from ibis import BaseBackend from ibis.backends.sql.compilers.postgres import compiler as postgres_compiler +from opentelemetry import trace from app.model import ( ConnectionInfo, @@ -30,8 +31,11 @@ # Override datatypes of ibis importlib.import_module("app.custom_ibis.backends.sql.datatypes") +tracer = trace.get_tracer(__name__) + class Connector: + @tracer.start_as_current_span("connector_init", kind=trace.SpanKind.INTERNAL) def __init__(self, data_source: DataSource, connection_info: ConnectionInfo): if data_source == DataSource.mssql: 
self._connector = MSSqlConnector(connection_info) @@ -64,9 +68,11 @@ def __init__(self, data_source: DataSource, connection_info: ConnectionInfo): self.data_source = data_source self.connection = self.data_source.get_connection(connection_info) + @tracer.start_as_current_span("connector_query", kind=trace.SpanKind.CLIENT) def query(self, sql: str, limit: int) -> pd.DataFrame: return self.connection.sql(sql).limit(limit).to_pandas() + @tracer.start_as_current_span("connector_dry_run", kind=trace.SpanKind.CLIENT) def dry_run(self, sql: str) -> None: self.connection.sql(sql) @@ -85,6 +91,9 @@ def dry_run(self, sql: str) -> None: raise QueryDryRunError(f"The sql dry run failed. {error_message}.") raise UnknownIbisError(e) + @tracer.start_as_current_span( + "describe_sql_for_error_message", kind=trace.SpanKind.CLIENT + ) def _describe_sql_for_error_message(self, sql: str) -> str: tsql = sge.convert(sql).sql("mssql") describe_sql = f"SELECT error_message FROM sys.dm_exec_describe_first_result_set({tsql}, NULL, 0)" @@ -99,15 +108,18 @@ class CannerConnector: def __init__(self, connection_info: ConnectionInfo): self.connection = DataSource.canner.get_connection(connection_info) + @tracer.start_as_current_span("connector_query", kind=trace.SpanKind.CLIENT) def query(self, sql: str, limit: int) -> pd.DataFrame: # Canner enterprise does not support `CREATE TEMPORARY VIEW` for getting schema schema = self._get_schema(sql) return self.connection.sql(sql, schema=schema).limit(limit).to_pandas() + @tracer.start_as_current_span("connector_dry_run", kind=trace.SpanKind.CLIENT) def dry_run(self, sql: str) -> Any: # Canner enterprise does not support dry-run, so we have to query with limit zero return self.connection.raw_sql(f"SELECT * FROM ({sql}) LIMIT 0") + @tracer.start_as_current_span("get_schema", kind=trace.SpanKind.CLIENT) def _get_schema(self, sql: str) -> sch.Schema: cur = self.dry_run(sql) type_names = _get_pg_type_names(self.connection) @@ -143,25 +155,28 @@ def 
query(self, sql: str, limit: int) -> pd.DataFrame: # - https://github.com/Canner/wren-engine/issues/909 # - https://github.com/ibis-project/ibis/issues/10612 if "Must pass schema" in str(e): - credits_json = loads( - base64.b64decode( - self.connection_info.credentials.get_secret_value() - ).decode("utf-8") - ) - credentials = service_account.Credentials.from_service_account_info( - credits_json - ) - credentials = credentials.with_scopes( - [ - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/cloud-platform", - ] - ) - client = bigquery.Client(credentials=credentials) - ibis_schema_mapper = ibis.backends.bigquery.BigQuerySchema() - bq_fields = client.query(sql).result() - ibis_fields = ibis_schema_mapper.to_ibis(bq_fields.schema) - return pd.DataFrame(columns=ibis_fields.names) + with tracer.start_as_current_span( + "get_schema", kind=trace.SpanKind.CLIENT + ): + credits_json = loads( + base64.b64decode( + self.connection_info.credentials.get_secret_value() + ).decode("utf-8") + ) + credentials = service_account.Credentials.from_service_account_info( + credits_json + ) + credentials = credentials.with_scopes( + [ + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/cloud-platform", + ] + ) + client = bigquery.Client(credentials=credentials) + ibis_schema_mapper = ibis.backends.bigquery.BigQuerySchema() + bq_fields = client.query(sql).result() + ibis_fields = ibis_schema_mapper.to_ibis(bq_fields.schema) + return pd.DataFrame(columns=ibis_fields.names) else: raise e @@ -178,6 +193,7 @@ def __init__(self, connection_info: ConnectionInfo): if isinstance(connection_info, GcsFileConnectionInfo): init_duckdb_gcs(self.connection, connection_info) + @tracer.start_as_current_span("duckdb_query", kind=trace.SpanKind.INTERNAL) def query(self, sql: str, limit: int) -> pd.DataFrame: try: return self.connection.execute(sql).fetch_df().head(limit) @@ -186,6 +202,7 @@ def query(self, sql: str, limit: int) -> pd.DataFrame: 
except HTTPException as e: raise UnprocessableEntityError(f"Failed to execute query: {e!s}") + @tracer.start_as_current_span("duckdb_dry_run", kind=trace.SpanKind.INTERNAL) def dry_run(self, sql: str) -> None: try: self.connection.execute(sql) diff --git a/ibis-server/app/routers/v2/connector.py b/ibis-server/app/routers/v2/connector.py index b947cd26d..61d2ea514 100644 --- a/ibis-server/app/routers/v2/connector.py +++ b/ibis-server/app/routers/v2/connector.py @@ -1,7 +1,8 @@ from typing import Annotated -from fastapi import APIRouter, Depends, Query, Request, Response +from fastapi import APIRouter, Depends, Header, Query, Request, Response from fastapi.responses import ORJSONResponse +from opentelemetry import trace from app.dependencies import verify_query_dto from app.mdl.java_engine import JavaEngineConnector @@ -18,9 +19,10 @@ from app.model.metadata.dto import Constraint, MetadataDTO, Table from app.model.metadata.factory import MetadataFactory from app.model.validator import Validator -from app.util import to_json +from app.util import build_context, to_json router = APIRouter(prefix="/connector") +tracer = trace.get_tracer(__name__) def get_java_engine_connector(request: Request) -> JavaEngineConnector: @@ -34,17 +36,24 @@ async def query( dry_run: Annotated[bool, Query(alias="dryRun")] = False, limit: int | None = None, java_engine_connector: JavaEngineConnector = Depends(get_java_engine_connector), + headers: Annotated[str | None, Header()] = None, ) -> Response: - rewritten_sql = await Rewriter( - dto.manifest_str, - data_source=data_source, - java_engine_connector=java_engine_connector, - ).rewrite(dto.sql) - connector = Connector(data_source, dto.connection_info) - if dry_run: - connector.dry_run(rewritten_sql) - return Response(status_code=204) - return ORJSONResponse(to_json(connector.query(rewritten_sql, limit=limit))) + span_name = ( + f"v2_query_{data_source}_dry_run" if dry_run else f"v2_query_{data_source}" + ) + with 
tracer.start_as_current_span( + name=span_name, kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + rewritten_sql = await Rewriter( + dto.manifest_str, + data_source=data_source, + java_engine_connector=java_engine_connector, + ).rewrite(dto.sql) + connector = Connector(data_source, dto.connection_info) + if dry_run: + connector.dry_run(rewritten_sql) + return Response(status_code=204) + return ORJSONResponse(to_json(connector.query(rewritten_sql, limit=limit))) @router.post("/{data_source}/validate/{rule_name}") @@ -53,31 +62,52 @@ async def validate( rule_name: str, dto: ValidateDTO, java_engine_connector: JavaEngineConnector = Depends(get_java_engine_connector), + headers: Annotated[str | None, Header()] = None, ) -> Response: - validator = Validator( - Connector(data_source, dto.connection_info), - Rewriter( - dto.manifest_str, - data_source=data_source, - java_engine_connector=java_engine_connector, - ), - ) - await validator.validate(rule_name, dto.parameters, dto.manifest_str) - return Response(status_code=204) + span_name = f"v2_validate_{data_source}" + with tracer.start_as_current_span( + name=span_name, kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + validator = Validator( + Connector(data_source, dto.connection_info), + Rewriter( + dto.manifest_str, + data_source=data_source, + java_engine_connector=java_engine_connector, + ), + ) + await validator.validate(rule_name, dto.parameters, dto.manifest_str) + return Response(status_code=204) @router.post("/{data_source}/metadata/tables", response_model=list[Table]) -def get_table_list(data_source: DataSource, dto: MetadataDTO) -> list[Table]: - return MetadataFactory.get_metadata( - data_source, dto.connection_info - ).get_table_list() +def get_table_list( + data_source: DataSource, + dto: MetadataDTO, + headers: Annotated[str | None, Header()] = None, +) -> list[Table]: + span_name = f"v2_metadata_tables_{data_source}" + with tracer.start_as_current_span( + name=span_name, 
kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + return MetadataFactory.get_metadata( + data_source, dto.connection_info + ).get_table_list() @router.post("/{data_source}/metadata/constraints", response_model=list[Constraint]) -def get_constraints(data_source: DataSource, dto: MetadataDTO) -> list[Constraint]: - return MetadataFactory.get_metadata( - data_source, dto.connection_info - ).get_constraints() +def get_constraints( + data_source: DataSource, + dto: MetadataDTO, + headers: Annotated[str | None, Header()] = None, +) -> list[Constraint]: + span_name = f"v2_metadata_constraints_{data_source}" + with tracer.start_as_current_span( + name=span_name, kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + return MetadataFactory.get_metadata( + data_source, dto.connection_info + ).get_constraints() @router.post("/{data_source}/metadata/version") @@ -89,10 +119,14 @@ def get_db_version(data_source: DataSource, dto: MetadataDTO) -> str: async def dry_plan( dto: DryPlanDTO, java_engine_connector: JavaEngineConnector = Depends(get_java_engine_connector), + headers: Annotated[str | None, Header()] = None, ) -> str: - return await Rewriter( - dto.manifest_str, java_engine_connector=java_engine_connector - ).rewrite(dto.sql) + with tracer.start_as_current_span( + name="dry_plan", kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + return await Rewriter( + dto.manifest_str, java_engine_connector=java_engine_connector + ).rewrite(dto.sql) @router.post("/{data_source}/dry-plan") @@ -100,12 +134,17 @@ async def dry_plan_for_data_source( data_source: DataSource, dto: DryPlanDTO, java_engine_connector: JavaEngineConnector = Depends(get_java_engine_connector), + headers: Annotated[str | None, Header()] = None, ) -> str: - return await Rewriter( - dto.manifest_str, - data_source=data_source, - java_engine_connector=java_engine_connector, - ).rewrite(dto.sql) + span_name = f"v2_dry_plan_{data_source}" + with tracer.start_as_current_span( 
+ name=span_name, kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + return await Rewriter( + dto.manifest_str, + data_source=data_source, + java_engine_connector=java_engine_connector, + ).rewrite(dto.sql) @router.post("/{data_source}/model-substitute") @@ -113,15 +152,20 @@ async def model_substitute( data_source: DataSource, dto: TranspileDTO, java_engine_connector: JavaEngineConnector = Depends(get_java_engine_connector), + headers: Annotated[str | None, Header()] = None, ) -> str: - sql = ModelSubstitute(data_source, dto.manifest_str).substitute( - dto.sql, write="trino" - ) - Connector(data_source, dto.connection_info).dry_run( - await Rewriter( - dto.manifest_str, - data_source=data_source, - java_engine_connector=java_engine_connector, - ).rewrite(sql) - ) - return sql + span_name = f"v2_model_substitute_{data_source}" + with tracer.start_as_current_span( + name=span_name, kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + sql = ModelSubstitute(data_source, dto.manifest_str).substitute( + dto.sql, write="trino" + ) + Connector(data_source, dto.connection_info).dry_run( + await Rewriter( + dto.manifest_str, + data_source=data_source, + java_engine_connector=java_engine_connector, + ).rewrite(sql) + ) + return sql diff --git a/ibis-server/app/routers/v3/connector.py b/ibis-server/app/routers/v3/connector.py index ceecfd815..4e8dd378a 100644 --- a/ibis-server/app/routers/v3/connector.py +++ b/ibis-server/app/routers/v3/connector.py @@ -1,7 +1,8 @@ from typing import Annotated -from fastapi import APIRouter, Depends, Query, Response +from fastapi import APIRouter, Depends, Header, Query, Response from fastapi.responses import ORJSONResponse +from opentelemetry import trace from app.config import get_config from app.dependencies import verify_query_dto @@ -17,9 +18,10 @@ from app.model.connector import Connector from app.model.data_source import DataSource from app.model.validator import Validator -from app.util import to_json +from 
app.util import build_context, to_json router = APIRouter(prefix="/connector") +tracer = trace.get_tracer(__name__) @router.post("/{data_source}/query", dependencies=[Depends(verify_query_dto)]) @@ -28,60 +30,100 @@ async def query( dto: QueryDTO, dry_run: Annotated[bool, Query(alias="dryRun")] = False, limit: int | None = None, + headers: Annotated[str | None, Header()] = None, ) -> Response: - rewritten_sql = await Rewriter( - dto.manifest_str, data_source=data_source, experiment=True - ).rewrite(dto.sql) - connector = Connector(data_source, dto.connection_info) - if dry_run: - connector.dry_run(rewritten_sql) - return Response(status_code=204) - return ORJSONResponse(to_json(connector.query(rewritten_sql, limit=limit))) + span_name = ( + f"v3_query_{data_source}_dry_run" if dry_run else f"v3_query_{data_source}" + ) + with tracer.start_as_current_span( + name=span_name, kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + rewritten_sql = await Rewriter( + dto.manifest_str, data_source=data_source, experiment=True + ).rewrite(dto.sql) + connector = Connector(data_source, dto.connection_info) + if dry_run: + connector.dry_run(rewritten_sql) + return Response(status_code=204) + return ORJSONResponse(to_json(connector.query(rewritten_sql, limit=limit))) @router.post("/dry-plan") -async def dry_plan(dto: DryPlanDTO) -> str: - return await Rewriter(dto.manifest_str, experiment=True).rewrite(dto.sql) +async def dry_plan( + dto: DryPlanDTO, + headers: Annotated[str | None, Header()] = None, +) -> str: + with tracer.start_as_current_span( + name="dry_plan", kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + return await Rewriter(dto.manifest_str, experiment=True).rewrite(dto.sql) @router.post("/{data_source}/dry-plan") -async def dry_plan_for_data_source(data_source: DataSource, dto: DryPlanDTO) -> str: - return await Rewriter( - dto.manifest_str, data_source=data_source, experiment=True - ).rewrite(dto.sql) +async def 
dry_plan_for_data_source( + data_source: DataSource, + dto: DryPlanDTO, + headers: Annotated[str | None, Header()] = None, +) -> str: + span_name = f"v3_dry_plan_{data_source}" + with tracer.start_as_current_span( + name=span_name, kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + return await Rewriter( + dto.manifest_str, data_source=data_source, experiment=True + ).rewrite(dto.sql) @router.post("/{data_source}/validate/{rule_name}") async def validate( - data_source: DataSource, rule_name: str, dto: ValidateDTO + data_source: DataSource, + rule_name: str, + dto: ValidateDTO, + headers: Annotated[str | None, Header()] = None, ) -> Response: - validator = Validator( - Connector(data_source, dto.connection_info), - Rewriter(dto.manifest_str, data_source=data_source, experiment=True), - ) - await validator.validate(rule_name, dto.parameters, dto.manifest_str) - return Response(status_code=204) + span_name = f"v3_validate_{data_source}" + with tracer.start_as_current_span( + name=span_name, kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + validator = Validator( + Connector(data_source, dto.connection_info), + Rewriter(dto.manifest_str, data_source=data_source, experiment=True), + ) + await validator.validate(rule_name, dto.parameters, dto.manifest_str) + return Response(status_code=204) @router.get("/{data_source}/functions") -def functions(data_source: DataSource) -> Response: - file_path = get_config().get_remote_function_list_path(data_source) - session_context = get_session_context(None, file_path) - func_list = [f.to_dict() for f in session_context.get_available_functions()] - return ORJSONResponse(func_list) +def functions( + data_source: DataSource, + headers: Annotated[str | None, Header()] = None, +) -> Response: + span_name = f"v3_functions_{data_source}" + with tracer.start_as_current_span( + name=span_name, kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + file_path = 
get_config().get_remote_function_list_path(data_source) + session_context = get_session_context(None, file_path) + func_list = [f.to_dict() for f in session_context.get_available_functions()] + return ORJSONResponse(func_list) @router.post("/{data_source}/model-substitute") async def model_substitute( data_source: DataSource, dto: TranspileDTO, + headers: Annotated[str | None, Header()] = None, ) -> str: - sql = ModelSubstitute(data_source, dto.manifest_str).substitute(dto.sql) - Connector(data_source, dto.connection_info).dry_run( - await Rewriter( - dto.manifest_str, - data_source=data_source, - experiment=True, - ).rewrite(sql) - ) - return sql + span_name = f"v3_model-substitute_{data_source}" + with tracer.start_as_current_span( + name=span_name, kind=trace.SpanKind.SERVER, context=build_context(headers) + ): + sql = ModelSubstitute(data_source, dto.manifest_str).substitute(dto.sql) + Connector(data_source, dto.connection_info).dry_run( + await Rewriter( + dto.manifest_str, + data_source=data_source, + experiment=True, + ).rewrite(sql) + ) + return sql diff --git a/ibis-server/app/util.py b/ibis-server/app/util.py index f20df5a5f..ff03919df 100644 --- a/ibis-server/app/util.py +++ b/ibis-server/app/util.py @@ -4,13 +4,21 @@ import orjson import pandas as pd +from fastapi import Header +from opentelemetry import trace +from opentelemetry.context import Context +from opentelemetry.propagate import extract from pandas.core.dtypes.common import is_datetime64_any_dtype +tracer = trace.get_tracer(__name__) + +@tracer.start_as_current_span("base64_to_dict", kind=trace.SpanKind.INTERNAL) def base64_to_dict(base64_str: str) -> dict: return orjson.loads(base64.b64decode(base64_str).decode("utf-8")) +@tracer.start_as_current_span("to_json", kind=trace.SpanKind.INTERNAL) def to_json(df: pd.DataFrame) -> dict: for column in df.columns: if is_datetime64_any_dtype(df[column].dtype): @@ -84,3 +92,9 @@ def _date_offset_to_str(offset: pd.tseries.offsets.DateOffset) -> str: 
parts.append(f"{value} {unit if value > 1 else unit.rstrip('s')}") return " ".join(parts) + + +def build_context(headers: Header) -> Context: + if headers is None: + return None + return extract(headers) diff --git a/ibis-server/justfile b/ibis-server/justfile index 37632b286..c5690bbed 100644 --- a/ibis-server/justfile +++ b/ibis-server/justfile @@ -23,6 +23,9 @@ port := "8000" run: poetry run fastapi run --port {{ port }} +run-trace-console: + opentelemetry-instrument --traces_exporter console --metrics_exporter none fastapi run --port {{ port }} + dev: poetry run fastapi dev --port {{ port }} diff --git a/ibis-server/poetry.lock b/ibis-server/poetry.lock index 9ad7eada1..28687dab8 100644 --- a/ibis-server/poetry.lock +++ b/ibis-server/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "aiohappyeyeballs" -version = "2.4.6" +version = "2.4.8" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.9" files = [ - {file = "aiohappyeyeballs-2.4.6-py3-none-any.whl", hash = "sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1"}, - {file = "aiohappyeyeballs-2.4.6.tar.gz", hash = "sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0"}, + {file = "aiohappyeyeballs-2.4.8-py3-none-any.whl", hash = "sha256:6cac4f5dd6e34a9644e69cf9021ef679e4394f54e58a183056d12009e42ea9e3"}, + {file = "aiohappyeyeballs-2.4.8.tar.gz", hash = "sha256:19728772cb12263077982d2f55453babd8bec6a052a926cd5c0c42796da8bf62"}, ] [[package]] @@ -761,42 +761,46 @@ files = [ [[package]] name = "cryptography" -version = "44.0.1" +version = "44.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" files = [ - {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0"}, - {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf"}, - {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864"}, - {file = "cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a"}, - {file = "cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00"}, - {file = 
"cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41"}, - {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b"}, - {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7"}, - {file = "cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9"}, - {file = "cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7"}, - {file = "cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"}, + {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"}, + {file = 
"cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"}, + {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"}, + {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"}, + {file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"}, + {file = "cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"}, + {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", 
hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"}, + {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"}, ] [package.dependencies] @@ -809,7 +813,7 @@ nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] [[package]] @@ -829,6 +833,23 @@ packaging = ">=17.0" pandas = ">=0.24.2" pyarrow = ">=3.0.0" +[[package]] +name = "deprecated" +version = "1.2.18" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, + {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] + [[package]] name = "distlib" version = "0.3.9" @@ -1189,13 +1210,13 @@ tool = ["click (>=6.0.0)"] [[package]] name = "google-cloud-bigquery" -version = "3.29.0" +version = "3.30.0" description = "Google BigQuery API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google_cloud_bigquery-3.29.0-py2.py3-none-any.whl", hash = "sha256:5453a4eabe50118254eda9778f3d7dad413490de5f7046b5e66c98f5a1580308"}, - {file = "google_cloud_bigquery-3.29.0.tar.gz", hash = "sha256:fafc2b455ffce3bcc6ce0e884184ef50b6a11350a83b91e327fadda4d5566e72"}, + {file = "google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877"}, + {file = "google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6"}, ] [package.dependencies] @@ -1215,7 +1236,7 @@ geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<2.0dev)"] ipython = ["bigquery-magics (>=0.1.0)"] ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] +pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pandas-gbq (>=0.26.1)", "pyarrow (>=3.0.0)"] tqdm 
= ["tqdm (>=4.7.4,<5.0.0dev)"] [[package]] @@ -1317,13 +1338,13 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.68.0" +version = "1.69.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis_common_protos-1.68.0-py2.py3-none-any.whl", hash = "sha256:aaf179b2f81df26dfadac95def3b16a95064c76a5f45f07e4c68a21bb371c4ac"}, - {file = "googleapis_common_protos-1.68.0.tar.gz", hash = "sha256:95d38161f4f9af0d9423eed8fb7b64ffd2568c3464eb542ff02c5bfa1953ab3c"}, + {file = "googleapis_common_protos-1.69.0-py2.py3-none-any.whl", hash = "sha256:17835fdc4fa8da1d61cfe2d4d5d57becf7c61d4112f8d81c67eaa9d7ce43042d"}, + {file = "googleapis_common_protos-1.69.0.tar.gz", hash = "sha256:5a46d58af72846f59009b9c4710425b9af2139555c71837081706b213b298187"}, ] [package.dependencies] @@ -1744,6 +1765,29 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "importlib-metadata" +version = "8.5.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + [[package]] name 
= "iniconfig" version = "2.0.0" @@ -2197,6 +2241,52 @@ docs = ["pdoc"] lint = ["ruff"] test = ["pytest", "pytest-asyncio", "python-dotenv"] +[[package]] +name = "opentelemetry-api" +version = "1.30.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_api-1.30.0-py3-none-any.whl", hash = "sha256:d5f5284890d73fdf47f843dda3210edf37a38d66f44f2b5aedc1e89ed455dc09"}, + {file = "opentelemetry_api-1.30.0.tar.gz", hash = "sha256:375893400c1435bf623f7dfb3bcd44825fe6b56c34d0667c542ea8257b1a1240"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<=8.5.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.30.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_sdk-1.30.0-py3-none-any.whl", hash = "sha256:14fe7afc090caad881addb6926cec967129bd9260c4d33ae6a217359f6b61091"}, + {file = "opentelemetry_sdk-1.30.0.tar.gz", hash = "sha256:c9287a9e4a7614b9946e933a67168450b9ab35f08797eb9bc77d998fa480fa18"}, +] + +[package.dependencies] +opentelemetry-api = "1.30.0" +opentelemetry-semantic-conventions = "0.51b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.51b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_semantic_conventions-0.51b0-py3-none-any.whl", hash = "sha256:fdc777359418e8d06c86012c3dc92c88a6453ba662e941593adb062e48c2eeae"}, + {file = "opentelemetry_semantic_conventions-0.51b0.tar.gz", hash = "sha256:3fabf47f35d1fd9aebcdca7e6802d86bd5ebc3bc3408b7e3248dde6e87a18c47"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +opentelemetry-api = "1.30.0" + [[package]] name = "oracledb" version = "2.5.1" @@ -2736,7 +2826,6 @@ files = [ {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = 
"sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, - {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, @@ -3196,17 +3285,18 @@ files = [ [[package]] name = "pyopenssl" -version = "24.3.0" +version = "25.0.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" files = [ - {file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"}, - {file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"}, + {file = "pyOpenSSL-25.0.0-py3-none-any.whl", hash = "sha256:424c247065e46e76a37411b9ab1782541c23bb658bf003772c3405fbaa128e90"}, + {file = "pyopenssl-25.0.0.tar.gz", hash = "sha256:cd2cef799efa3936bb08e8ccb9433a575722b9dd986023f1cabc4ae64e9dac16"}, ] [package.dependencies] cryptography = ">=41.0.5,<45" +typing-extensions = {version = ">=4.9", markers = "python_version < \"3.13\" and python_version >= \"3.8\""} [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"] @@ -3540,37 +3630,37 @@ files = [ [[package]] name = "snowflake-connector-python" -version = "3.13.2" +version = "3.14.0" description = "Snowflake Connector for 
Python" optional = false python-versions = ">=3.8" files = [ - {file = "snowflake_connector_python-3.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c180dec770076409d422cfc25e4077561026316c4f0e17a001bc0a15ffbe9184"}, - {file = "snowflake_connector_python-3.13.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f4503e841c9bb22fe7af168a6a4e3a76394d8e0d8731a29ad797273d5e9a62b3"}, - {file = "snowflake_connector_python-3.13.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04ce1bba327868712a15f5b6d12f0ac5559d0bbf8c7c18f9847cf825e34f36f7"}, - {file = "snowflake_connector_python-3.13.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c27d59696b41cab854379d81577a0802db3b64dbe0fd18d5a562e3739ee12b7f"}, - {file = "snowflake_connector_python-3.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:61634af1dd78203b41bdf89cea0d930b3e8cec19b50a038db3cea1a531a7d36c"}, - {file = "snowflake_connector_python-3.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fd693b1db31c70a9669b5818980f718149d6f7b4624628bed087801dcd617051"}, - {file = "snowflake_connector_python-3.13.2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:67fcde6666075cc8e6e2fd4ba9dbf1291af780567ffe55a5adbb808de715b39f"}, - {file = "snowflake_connector_python-3.13.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8092ec250b1dcc7c38d8a101a29e9118be56079d8e4f410a50159421c22b3b8e"}, - {file = "snowflake_connector_python-3.13.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8912334af6851d325a5f2bc72416e6a46be889d045e0e09412084b99602c3122"}, - {file = "snowflake_connector_python-3.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:c11599b5d19b4aaab880b5e7b57525645dc1ee9768acc7dad11abf6998c75b22"}, - {file = "snowflake_connector_python-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1cf34116687653b7467d519da1dfdbff4f58d0032932d31d61f9d27717e7d61f"}, - {file = 
"snowflake_connector_python-3.13.2-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:38426293ecf6c1fc3dd87d506e55d8b82dcf763fab1c827b0d09bc74f9852c50"}, - {file = "snowflake_connector_python-3.13.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd95811b8378d8a268038b59ea8dba8d30dd0b96a1c323191805ae152224ff70"}, - {file = "snowflake_connector_python-3.13.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fe0cd6fab07fccdea020394a6175baa1ddf57b3d1f4dc288bd7eebcf29c6a0b"}, - {file = "snowflake_connector_python-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:82028c55d949889f759b78dd86b6249e9e4934cd6fcbe026cf7f41aefc9eb999"}, - {file = "snowflake_connector_python-3.13.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f29ce0451482348993eed830c81f41a53efd8908691781dc6872d505b1aca12d"}, - {file = "snowflake_connector_python-3.13.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:8ffc295307f380dba2f054bf4c9df847dce5168d4e023bdf04ee08abf732672a"}, - {file = "snowflake_connector_python-3.13.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea90169e12e1b60883a4de35129d9761920a92990f963108dc18072a0ee79fae"}, - {file = "snowflake_connector_python-3.13.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7da0d18a4de0c0bd4b2d57093b60103ed0d89fd9e8b813ceb266fb8490ad615"}, - {file = "snowflake_connector_python-3.13.2-cp38-cp38-win_amd64.whl", hash = "sha256:d966edfe7a8fd61ed73dd56ee0de3ed0c79bd405fc4243391420f2db2e6b4d77"}, - {file = "snowflake_connector_python-3.13.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b8c807e4cc52cb343546624d90f0540c396bb924cce1c64b029d7ab94e9d571"}, - {file = "snowflake_connector_python-3.13.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e7dd2acc26db04c718bb31f30a2bfdd7cc76eaf429dbfa1c31b29da70127125d"}, - {file = "snowflake_connector_python-3.13.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:92f50eb2a6f9ef94de875bfcbca27a8aa9a08eeec3099ca5fbfc53530a082c4c"}, - {file = "snowflake_connector_python-3.13.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfd5f30c6e8011f5f11fdd7c762a189d94a8a3f9259c4f13cbc6cd5d1ca31f85"}, - {file = "snowflake_connector_python-3.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:db600d32b4854716ef5327cdebeb8ba087256819829f1a0c45697caabe898716"}, - {file = "snowflake_connector_python-3.13.2.tar.gz", hash = "sha256:c9954a5e237566420e087a4fcef227775e0c62fbfc821e0ef6f81325fd44c904"}, + {file = "snowflake_connector_python-3.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:92f10c629c5b01dbe42b04b13b02a367b2d6014c01982bce9b8647b1bc4f7b27"}, + {file = "snowflake_connector_python-3.14.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:ff71e58f2d49db2b79c77e6618bf1488c47141887cab0dadb6e974d9b63469e5"}, + {file = "snowflake_connector_python-3.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd21b4377d70bb0015d3b2f9dea70f4b1b17921f0bca351c3d36d4da8f43e22a"}, + {file = "snowflake_connector_python-3.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da4f7aeeec2305d6baafb600cdbf50ce1df3a1566efbce5d06086a304fa8367f"}, + {file = "snowflake_connector_python-3.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:499aeeef478deb61aa24192821b14189d1692d2ea3877ae98d86dbb72bbf6eda"}, + {file = "snowflake_connector_python-3.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b128812ac0043568c07efa9d16a827fb921976ffab6b346599f4536a0da80788"}, + {file = "snowflake_connector_python-3.14.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:a057a69f8b6af89b0627cf23954476ee5f3161df66164c0eec2b5f1ae8bc5cc1"}, + {file = "snowflake_connector_python-3.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b533308c6063316a0b320c63bcc6b0be0e218e249c6d198091b062f021179efd"}, + {file = 
"snowflake_connector_python-3.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d3b6e24baa08d2834b34e3c41438b81144f7932c37539300291265929fcd8b"}, + {file = "snowflake_connector_python-3.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:779de94435fdfedd02ee057ef0623b7b67816e01fcaeee3bc49e15f513ad258c"}, + {file = "snowflake_connector_python-3.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1276f5eb148c3eb11c3c50b4bc040d1452ec3f86b7bda44e9992d8e1b8378a81"}, + {file = "snowflake_connector_python-3.14.0-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:9647a4247e5b05ef7605cbd848d6e441f418500af728f82a176a11bf2bbce88a"}, + {file = "snowflake_connector_python-3.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf057c86f9cdd101da0832f75c95ed762077f0e66d6b1e835f99b1850ea222d7"}, + {file = "snowflake_connector_python-3.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74eeedaf3c9275f6d56336ac3d1d19522ae69db11c88a6a4866ec66e51fd3ed1"}, + {file = "snowflake_connector_python-3.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:1224d2b33ce6f42d99bb01aaf4ad585a72cf9de53334dd849fecfaca22880560"}, + {file = "snowflake_connector_python-3.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d04c6f4927abfc4e694148ad0e2674a8e6886b3d2a5b0cb8cf6ae363591926a5"}, + {file = "snowflake_connector_python-3.14.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:494db51c1f1046cacf4b092dbe40513e3c9170e2b07890a5c23c71b3f6264f53"}, + {file = "snowflake_connector_python-3.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a9cac4aa11ef96a6a86733c451d3e5fe285ecdc0793adc23eb13fb4ca3fb26"}, + {file = "snowflake_connector_python-3.14.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cf26792155e11cb755d242aa3c95cf53e072f936be387f9f54c689f6051aa3f"}, + {file = "snowflake_connector_python-3.14.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:9ad2fdd3ef6bd0677c58fe4b9d866400c172f71390f1b6bb8811984b56ff6e61"}, + {file = "snowflake_connector_python-3.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c97a70764ac1d75e539e686c8797a429f6b3527677ee1f5312bcaa70a31bd26a"}, + {file = "snowflake_connector_python-3.14.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:4c658af575ddd27df8f3b7cda1b0933e6dea1908bb5ffcc83928e15b35fa48e0"}, + {file = "snowflake_connector_python-3.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74b4f6635989dfd17fe1ccbb3767aed220abb94ccd0062f24e492ce5c673cd95"}, + {file = "snowflake_connector_python-3.14.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14752c6612b40f3a9d5b4562e789f8b8165c4f0a41ca9bf1941fa748f00820a"}, + {file = "snowflake_connector_python-3.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:c13a94fef37405a9ad57f5b2c1664a3e3277b8dffe67e17144ae786df6afcc89"}, + {file = "snowflake_connector_python-3.14.0.tar.gz", hash = "sha256:baa10f3f8a2cdbe2be0ff973f2313df684f4d0147db6a4f76f3b311bedc299ed"}, ] [package.dependencies] @@ -3584,7 +3674,7 @@ idna = ">=2.5,<4" packaging = "*" platformdirs = ">=2.6.0,<5.0.0" pyjwt = "<3.0.0" -pyOpenSSL = ">=22.0.0,<25.0.0" +pyOpenSSL = ">=22.0.0,<26.0.0" pytz = "*" requests = "<3.0.0" sortedcontainers = ">=2.4.0" @@ -3593,7 +3683,7 @@ typing_extensions = ">=4.3,<5" [package.extras] development = ["Cython", "coverage", "more-itertools", "numpy (<1.27.0)", "pendulum (!=2.1.1)", "pexpect", "pytest (<7.5.0)", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist", "pytzdata"] -pandas = ["pandas (>=1.0.0,<3.0.0)", "pyarrow"] +pandas = ["pandas (>=1.0.0,<3.0.0)", "pyarrow (<19.0.0)"] secure-local-storage = ["keyring (>=23.1.0,<26.0.0)"] [[package]] @@ -3927,13 +4017,13 @@ tests = ["black", "boto3", "httpretty (<1.1)", "isort", "keyring", "krb5 (==0.5. [[package]] name = "typer" -version = "0.15.1" +version = "0.15.2" description = "Typer, build great CLIs. 
Easy to code. Based on Python type hints." optional = false python-versions = ">=3.7" files = [ - {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, - {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, + {file = "typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc"}, + {file = "typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5"}, ] [package.dependencies] @@ -4453,6 +4543,25 @@ idna = ">=2.0" multidict = ">=4.0" propcache = ">=0.2.0" +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [[package]] name = "zstandard" version = "0.23.0" @@ -4568,4 +4677,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "ff42082393048f026850c839e944267989fd58ba26726c68683d50a099930701" +content-hash = "2b3fbd059fa68483cc166504113c3330cea678058890a5c5e87f1dce382118db" diff --git a/ibis-server/pyproject.toml b/ibis-server/pyproject.toml index 903ace5f4..92d9b52ae 100644 
--- a/ibis-server/pyproject.toml +++ b/ibis-server/pyproject.toml @@ -36,6 +36,8 @@ duckdb = "1.2.0" opendal = ">=0.45" oracledb = "2.5.1" mysqlclient = { version = ">=2.2.4,<3", optional = true } +opentelemetry-api = ">=1.30.0" +opentelemetry-sdk = ">=1.30.0" [tool.poetry.group.dev.dependencies] pytest = "8.3.4"