[airbyte-ci] Format using a poe task #38043

Merged: 8 commits, May 8, 2024
3 changes: 2 additions & 1 deletion airbyte-cdk/python/airbyte_cdk/__init__.py
@@ -2,10 +2,11 @@
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#

from importlib import metadata

from .connector import AirbyteSpec, Connector
from .entrypoint import AirbyteEntrypoint
from .logger import AirbyteLogger
from importlib import metadata

__all__ = ["AirbyteEntrypoint", "AirbyteLogger", "AirbyteSpec", "Connector"]
__version__ = metadata.version("airbyte_cdk")
@@ -54,7 +54,11 @@ def create_source(config: Mapping[str, Any], limits: TestReadLimits) -> Manifest


def read_stream(
source: DeclarativeSource, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, state: List[AirbyteStateMessage], limits: TestReadLimits
source: DeclarativeSource,
config: Mapping[str, Any],
configured_catalog: ConfiguredAirbyteCatalog,
state: List[AirbyteStateMessage],
limits: TestReadLimits,
) -> AirbyteMessage:
try:
handler = MessageGrouper(limits.max_pages_per_slice, limits.max_slices, limits.max_records)
@@ -120,7 +120,8 @@ def get_message_groups(
raise ValueError(f"Unknown message group type: {type(message_group)}")

try:
configured_stream = configured_catalog.streams[0] # The connector builder currently only supports reading from a single stream at a time
# The connector builder currently only supports reading from a single stream at a time
configured_stream = configured_catalog.streams[0]
schema = schema_inferrer.get_stream_schema(configured_stream.stream.name)
except SchemaValidationException as exception:
for validation_error in exception.validation_errors:
@@ -183,7 +184,11 @@ def _get_message_groups(
and message.type == MessageType.LOG
and message.log.message.startswith(SliceLogger.SLICE_LOG_PREFIX)
):
yield StreamReadSlices(pages=current_slice_pages, slice_descriptor=current_slice_descriptor, state=[latest_state_message] if latest_state_message else [])
yield StreamReadSlices(
pages=current_slice_pages,
slice_descriptor=current_slice_descriptor,
state=[latest_state_message] if latest_state_message else [],
)
current_slice_descriptor = self._parse_slice_description(message.log.message)
current_slice_pages = []
at_least_one_page_in_group = False
@@ -230,7 +235,11 @@ def _get_message_groups(
else:
if current_page_request or current_page_response or current_page_records:
self._close_page(current_page_request, current_page_response, current_slice_pages, current_page_records)
yield StreamReadSlices(pages=current_slice_pages, slice_descriptor=current_slice_descriptor, state=[latest_state_message] if latest_state_message else [])
yield StreamReadSlices(
pages=current_slice_pages,
slice_descriptor=current_slice_descriptor,
state=[latest_state_message] if latest_state_message else [],
)

@staticmethod
def _need_to_close_page(at_least_one_page_in_group: bool, message: AirbyteMessage, json_message: Optional[Dict[str, Any]]) -> bool:
@@ -281,8 +290,11 @@ def _close_page(
current_page_records.clear()

def _read_stream(
self, source: DeclarativeSource, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog,
state: List[AirbyteStateMessage]
self,
source: DeclarativeSource,
config: Mapping[str, Any],
configured_catalog: ConfiguredAirbyteCatalog,
state: List[AirbyteStateMessage],
) -> Iterator[AirbyteMessage]:
# the generator can raise an exception
# iterate over the generated messages. if next raise an exception, catch it and yield it as an AirbyteLogMessage
@@ -2,10 +2,7 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#

from airbyte_cdk.sources.declarative.auth.oauth import DeclarativeOauth2Authenticator
from airbyte_cdk.sources.declarative.auth.jwt import JwtAuthenticator
from airbyte_cdk.sources.declarative.auth.oauth import DeclarativeOauth2Authenticator

__all__ = [
"DeclarativeOauth2Authenticator",
"JwtAuthenticator"
]
__all__ = ["DeclarativeOauth2Authenticator", "JwtAuthenticator"]
2 changes: 1 addition & 1 deletion airbyte-cdk/python/airbyte_cdk/sources/streams/__init__.py
@@ -3,6 +3,6 @@
#

# Initialize Streams Package
from .core import NO_CURSOR_STATE_KEY, IncrementalMixin, CheckpointMixin, Stream
from .core import NO_CURSOR_STATE_KEY, CheckpointMixin, IncrementalMixin, Stream

__all__ = ["NO_CURSOR_STATE_KEY", "IncrementalMixin", "CheckpointMixin", "Stream"]
@@ -1,6 +1,18 @@
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.


from .checkpoint_reader import CheckpointMode, CheckpointReader, FullRefreshCheckpointReader, IncrementalCheckpointReader, ResumableFullRefreshCheckpointReader
from .checkpoint_reader import (
CheckpointMode,
CheckpointReader,
FullRefreshCheckpointReader,
IncrementalCheckpointReader,
ResumableFullRefreshCheckpointReader,
)

__all__ = ["CheckpointMode", "CheckpointReader", "FullRefreshCheckpointReader", "IncrementalCheckpointReader", "ResumableFullRefreshCheckpointReader"]
__all__ = [
"CheckpointMode",
"CheckpointReader",
"FullRefreshCheckpointReader",
"IncrementalCheckpointReader",
"ResumableFullRefreshCheckpointReader",
]
6 changes: 5 additions & 1 deletion airbyte-cdk/python/airbyte_cdk/test/catalog_builder.py
@@ -54,7 +54,11 @@ def with_stream(self, name: Union[str, ConfiguredAirbyteStreamBuilder], sync_mod

# to avoid a breaking change, `name` needs to stay in the API but this can be either a name or a builder
name_or_builder = name
builder = name_or_builder if isinstance(name_or_builder, ConfiguredAirbyteStreamBuilder) else ConfiguredAirbyteStreamBuilder().with_name(name_or_builder).with_sync_mode(sync_mode)
builder = (
name_or_builder
if isinstance(name_or_builder, ConfiguredAirbyteStreamBuilder)
else ConfiguredAirbyteStreamBuilder().with_name(name_or_builder).with_sync_mode(sync_mode)
)
self._streams.append(builder)
return self

2 changes: 1 addition & 1 deletion airbyte-cdk/python/airbyte_cdk/test/mock_http/__init__.py
@@ -1,6 +1,6 @@
from airbyte_cdk.test.mock_http.matcher import HttpRequestMatcher
from airbyte_cdk.test.mock_http.mocker import HttpMocker
from airbyte_cdk.test.mock_http.request import HttpRequest
from airbyte_cdk.test.mock_http.response import HttpResponse
from airbyte_cdk.test.mock_http.mocker import HttpMocker

__all__ = ["HttpMocker", "HttpRequest", "HttpRequestMatcher", "HttpResponse"]
@@ -138,10 +138,7 @@ def build(self) -> Dict[str, Any]:

class HttpResponseBuilder:
def __init__(
self,
template: Dict[str, Any],
records_path: Union[FieldPath, NestedPath],
pagination_strategy: Optional[PaginationStrategy]
self, template: Dict[str, Any], records_path: Union[FieldPath, NestedPath], pagination_strategy: Optional[PaginationStrategy]
):
self._response = template
self._records: List[RecordBuilder] = []
@@ -198,16 +195,16 @@ def create_record_builder(
try:
record_template = records_path.extract(response_template)[0]
if not record_template:
raise ValueError(f"Could not extract any record from template at path `{records_path}`. "
f"Please fix the template to provide a record sample or fix `records_path`.")
raise ValueError(
f"Could not extract any record from template at path `{records_path}`. "
f"Please fix the template to provide a record sample or fix `records_path`."
)
return RecordBuilder(record_template, record_id_path, record_cursor_path)
except (IndexError, KeyError):
raise ValueError(f"Error while extracting records at path `{records_path}` from response template `{response_template}`")


def create_response_builder(
response_template: Dict[str, Any],
records_path: Union[FieldPath, NestedPath],
pagination_strategy: Optional[PaginationStrategy] = None
response_template: Dict[str, Any], records_path: Union[FieldPath, NestedPath], pagination_strategy: Optional[PaginationStrategy] = None
) -> HttpResponseBuilder:
return HttpResponseBuilder(response_template, records_path, pagination_strategy)
12 changes: 3 additions & 9 deletions airbyte-cdk/python/airbyte_cdk/test/state_builder.py
@@ -10,15 +10,9 @@ def __init__(self) -> None:
self._state: List[AirbyteStateMessage] = []

def with_stream_state(self, stream_name: str, state: Any) -> "StateBuilder":
self._state.append(AirbyteStateMessage.parse_obj({
"type": "STREAM",
"stream": {
"stream_state": state,
"stream_descriptor": {
"name": stream_name
}
}
}))
self._state.append(
AirbyteStateMessage.parse_obj({"type": "STREAM", "stream": {"stream_state": state, "stream_descriptor": {"name": stream_name}}})
)
return self

def build(self) -> List[AirbyteStateMessage]:
1 change: 1 addition & 0 deletions airbyte-cdk/python/unit_tests/__init__.py
@@ -2,5 +2,6 @@

# Import the thing that needs to be imported to stop the tests from falling over
from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource

# "Use" the thing so that the linter doesn't complain
placeholder = ManifestDeclarativeSource
@@ -5,10 +5,4 @@
from .packaging import ENABLED_CHECKS as PACKAGING_CHECKS
from .security import ENABLED_CHECKS as SECURITY_CHECKS

ENABLED_CHECKS = (
DOCUMENTATION_CHECKS
+ METADATA_CORRECTNESS_CHECKS
+ PACKAGING_CHECKS
+ ASSETS_CHECKS
+ SECURITY_CHECKS
)
ENABLED_CHECKS = DOCUMENTATION_CHECKS + METADATA_CORRECTNESS_CHECKS + PACKAGING_CHECKS + ASSETS_CHECKS + SECURITY_CHECKS
@@ -1,2 +1 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.

@@ -6,7 +6,5 @@

import dagger

DAGGER_EXEC_TIMEOUT = dagger.Timeout(
int(os.environ.get("DAGGER_EXEC_TIMEOUT", "3600"))
) # One hour by default
DAGGER_EXEC_TIMEOUT = dagger.Timeout(int(os.environ.get("DAGGER_EXEC_TIMEOUT", "3600"))) # One hour by default
DAGGER_CONFIG = dagger.Config(timeout=DAGGER_EXEC_TIMEOUT, log_output=sys.stderr)
5 changes: 4 additions & 1 deletion airbyte-ci/connectors/pipelines/pipelines/__init__.py
@@ -6,6 +6,7 @@
import logging
import os
from typing import Union

from rich.logging import RichHandler

from .helpers import sentry_utils
@@ -17,7 +18,9 @@
logging.getLogger("httpx").setLevel(logging.WARNING)

# RichHandler does not work great in the CI environment, so we use a StreamHandler instead
logging_handler: Union[RichHandler, logging.StreamHandler] = RichHandler(rich_tracebacks=True) if "CI" not in os.environ else logging.StreamHandler()
logging_handler: Union[RichHandler, logging.StreamHandler] = (
RichHandler(rich_tracebacks=True) if "CI" not in os.environ else logging.StreamHandler()
)


logging.basicConfig(
@@ -51,6 +51,6 @@ class FormatConfiguration:
Formatter.PYTHON,
["**/*.py"],
format_python_container,
["poetry run isort --settings-file pyproject.toml .", "poetry run black --config pyproject.toml ."],
["poetry run poe format"],
),
]
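
The change above is the heart of this PR: instead of listing the isort and black invocations directly, the pipeline's Python formatter now runs a single poe task. The diff does not show the task itself, which would be declared in the repository's pyproject.toml; the following is only a hypothetical sketch of how such a sequence task could chain the two previous commands with Poe the Poet:

# Hypothetical pyproject.toml excerpt; the actual task definition in the repository may differ.
[tool.poe.tasks]
format = [
    { cmd = "isort --settings-file pyproject.toml ." },
    { cmd = "black --config pyproject.toml ." },
]

With a definition along these lines, `poetry run poe format` reproduces the two commands it replaces, and the exact tool options live next to the code they format rather than inside the CI pipeline configuration.
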
@@ -35,7 +35,6 @@
"**/pnpm-lock.yaml", # This file is generated and should not be formatted
"**/normalization_test_output",
"**/source-amplitude/unit_tests/api_data/zipped.json", # Zipped file presents as non-UTF-8 making spotless sad
"**/tools/git_hooks/tests/test_spec_linter.py",
"airbyte-cdk/python/airbyte_cdk/sources/declarative/models/**", # These files are generated and should not be formatted
"airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/**", # These files are generated and should not be formatted
"**/airbyte-ci/connectors/metadata_service/lib/tests/fixtures/**/invalid", # This is a test directory with invalid and sometimes unformatted code
@@ -2,8 +2,17 @@
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#

from .test_core import TestBasicRead, TestConnection, TestConnectorAttributes, TestDiscovery, TestSpec, TestConnectorDocumentation
from .test_core import TestBasicRead, TestConnection, TestConnectorAttributes, TestConnectorDocumentation, TestDiscovery, TestSpec
from .test_full_refresh import TestFullRefresh
from .test_incremental import TestIncremental

__all__ = ["TestSpec", "TestBasicRead", "TestConnection", "TestConnectorAttributes", "TestDiscovery", "TestFullRefresh", "TestIncremental", "TestConnectorDocumentation"]
__all__ = [
"TestSpec",
"TestBasicRead",
"TestConnection",
"TestConnectorAttributes",
"TestDiscovery",
"TestFullRefresh",
"TestIncremental",
"TestConnectorDocumentation",
]
@@ -2,18 +2,9 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from .attribution_report import AttributionReportModel
from .common import (
CatalogModel,
Keywords,
MetricsReport,
NegativeKeywords,
Portfolio
)
from .common import CatalogModel, Keywords, MetricsReport, NegativeKeywords, Portfolio
from .profile import Profile
from .sponsored_brands import (
BrandsAdGroup,
BrandsCampaign,
)
from .sponsored_brands import BrandsAdGroup, BrandsCampaign
from .sponsored_display import DisplayAdGroup, DisplayBudgetRules, DisplayCampaign, DisplayCreatives, DisplayProductAds, DisplayTargeting
from .sponsored_products import (
ProductAd,
@@ -24,7 +15,7 @@
ProductTargeting,
SponsoredProductCampaignNegativeKeywordsModel,
SponsoredProductKeywordsModel,
SponsoredProductNegativeKeywordsModel
SponsoredProductNegativeKeywordsModel,
)

__all__ = [
@@ -52,5 +43,5 @@
"AttributionReportModel",
"SponsoredProductCampaignNegativeKeywordsModel",
"SponsoredProductKeywordsModel",
"SponsoredProductNegativeKeywordsModel"
"SponsoredProductNegativeKeywordsModel",
]
@@ -16,11 +16,7 @@
SponsoredDisplayReportStream,
SponsoredProductsReportStream,
)
from .sponsored_brands import (
SponsoredBrandsAdGroups,
SponsoredBrandsCampaigns,
SponsoredBrandsKeywords
)
from .sponsored_brands import SponsoredBrandsAdGroups, SponsoredBrandsCampaigns, SponsoredBrandsKeywords
from .sponsored_display import (
SponsoredDisplayAdGroups,
SponsoredDisplayBudgetRules,
@@ -1,11 +1,11 @@
from .attribution_report_request_builder import AttributionReportRequestBuilder
from .oauth_request_builder import OAuthRequestBuilder
from .profiles_request_builder import ProfilesRequestBuilder
from .sponsored_brands_request_builder import SponsoredBrandsRequestBuilder
from .attribution_report_request_builder import AttributionReportRequestBuilder
from .sponsored_display_report_request_builder import SponsoredDisplayReportRequestBuilder
from .report_check_status_request_builer import ReportCheckStatusRequestBuilder
from .report_download_request_builder import ReportDownloadRequestBuilder
from .sponsored_products_report_request_builder import SponsoredProductsReportRequestBuilder
from .sponsored_brands_video_report_request_builder import SponsoredBrandsVideoReportRequestBuilder
from .sponsored_brands_report_request_builder import SponsoredBrandsReportRequestBuilder
from .sponsored_brands_report_v3_request_builder import SponsoredBrandsV3ReportRequestBuilder
from .sponsored_brands_request_builder import SponsoredBrandsRequestBuilder
from .sponsored_brands_video_report_request_builder import SponsoredBrandsVideoReportRequestBuilder
from .sponsored_display_report_request_builder import SponsoredDisplayReportRequestBuilder
from .sponsored_products_report_request_builder import SponsoredProductsReportRequestBuilder
@@ -1,8 +1,8 @@
from .sponsored_brands_response_builder import SponsoredBrandsResponseBuilder
from .profiles_response_builder import ProfilesResponseBuilder
from .oauth_response_builder import OAuthResponseBuilder
from .error_response_builder import ErrorResponseBuilder
from .attribution_report_response_builder import AttributionReportResponseBuilder
from .report_init_response_builder import ReportInitResponseBuilder
from .error_response_builder import ErrorResponseBuilder
from .oauth_response_builder import OAuthResponseBuilder
from .profiles_response_builder import ProfilesResponseBuilder
from .report_check_status_response_builder import ReportCheckStatusResponseBuilder
from .report_download_response_builder import ReportDownloadResponseBuilder
from .report_init_response_builder import ReportInitResponseBuilder
from .sponsored_brands_response_builder import SponsoredBrandsResponseBuilder
@@ -1,7 +1,7 @@
from .profiles_record_builder import ProfilesRecordBuilder
from .sponsored_brands_record_builder import SponsoredBrandsRecordBuilder
from .error_record_builder import ErrorRecordBuilder
from .attribution_report_record_builder import AttributionReportRecordBuilder
from .report_init_response_record_builder import ReportInitResponseRecordBuilder
from .error_record_builder import ErrorRecordBuilder
from .profiles_record_builder import ProfilesRecordBuilder
from .report_check_status_record_builder import ReportCheckStatusRecordBuilder
from .report_file_recod_builder import ReportFileRecordBuilder
from .report_init_response_record_builder import ReportInitResponseRecordBuilder
from .sponsored_brands_record_builder import SponsoredBrandsRecordBuilder
@@ -1,2 +1,2 @@
from .list_template_path import ListTemplatePath
from .dict_template_path import DictTemplatePath
from .list_template_path import ListTemplatePath
@@ -16,8 +16,8 @@
from .config_builder import ConfigBuilder
from .request_builders.api import CustomObjectsRequestBuilder, OAuthRequestBuilder, PropertiesRequestBuilder, ScopesRequestBuilder
from .request_builders.streams import CRMStreamRequestBuilder, IncrementalCRMStreamRequestBuilder, WebAnalyticsRequestBuilder
from .response_builder.helpers import RootHttpResponseBuilder
from .response_builder.api import ScopesResponseBuilder
from .response_builder.helpers import RootHttpResponseBuilder
from .response_builder.streams import GenericResponseBuilder, HubspotStreamResponseBuilder


@@ -9,7 +9,6 @@
from dagger import Container



async def pre_connector_install(base_image_container: Container) -> Container:
"""This function will run before the connector installation.
We set these environment variable to match what was originally in the Dockerfile.