diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md index 854591041fb68..2305dbfb6154e 100644 --- a/airbyte-ci/connectors/pipelines/README.md +++ b/airbyte-ci/connectors/pipelines/README.md @@ -789,7 +789,8 @@ airbyte-ci connectors --language=low-code migrate-to-manifest-only ## Changelog | Version | PR | Description | -|---------| ---------------------------------------------------------- |------------------------------------------------------------------------------------------------------------------------------| +| ------- | ---------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | +| 4.33.0 | [#44377](https://github.com/airbytehq/airbyte/pull/44377) | Upload connector SBOM to metadata service bucket on publish. | | 4.32.5 | [#44173](https://github.com/airbytehq/airbyte/pull/44173) | Bug fix for live tests' --should-read-with-state handling. | | 4.32.4 | [#44025](https://github.com/airbytehq/airbyte/pull/44025) | Ignore third party connectors on `publish`. | | 4.32.3 | [#44118](https://github.com/airbytehq/airbyte/pull/44118) | Improve error handling in live tests. 
| diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py index a606e7e68fb84..88cd617f3d412 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py @@ -3,6 +3,7 @@ # import json +import os import uuid from datetime import datetime from typing import Dict, List, Tuple @@ -290,6 +291,52 @@ async def _run(self, built_connector: Container) -> StepResult: return StepResult(step=self, status=StepStatus.SUCCESS, stdout="Uploaded connector spec to spec cache bucket.") +class UploadSbom(Step): + context: PublishConnectorContext + title = "Upload SBOM to metadata service bucket" + SBOM_KEY_PREFIX = "sbom" + SYFT_DOCKER_IMAGE = "anchore/syft:v1.6.0" + SBOM_FORMAT = "spdx-json" + IN_CONTAINER_SBOM_PATH = "sbom.json" + SBOM_EXTENSION = "spdx.json" + + def get_syft_container(self) -> Container: + home_dir = os.path.expanduser("~") + config_path = os.path.join(home_dir, ".docker", "config.json") + config_file = self.dagger_client.host().file(config_path) + return ( + self.dagger_client.container() + .from_(self.SYFT_DOCKER_IMAGE) + .with_mounted_file("/config/config.json", config_file) + .with_env_variable("DOCKER_CONFIG", "/config") + # Syft requires access to the docker daemon. We share the host's docker socket with the Syft container. 
+ .with_unix_socket("/var/run/docker.sock", self.dagger_client.host().unix_socket("/var/run/docker.sock")) + ) + + async def _run(self) -> StepResult: + try: + syft_container = self.get_syft_container() + sbom_file = await syft_container.with_exec( + [self.context.docker_image, "-o", f"{self.SBOM_FORMAT}={self.IN_CONTAINER_SBOM_PATH}"] + ).file(self.IN_CONTAINER_SBOM_PATH) + except ExecError as e: + return StepResult(step=self, status=StepStatus.FAILURE, stderr=str(e), exc_info=e) + + # This will lead to a key like: sbom/airbyte/source-faker/0.1.0.spdx.json + key = f"{self.SBOM_KEY_PREFIX}/{self.context.docker_image.replace(':', '/')}.{self.SBOM_EXTENSION}" + exit_code, stdout, stderr = await upload_to_gcs( + self.context.dagger_client, + sbom_file, + key, + self.context.metadata_bucket_name, + self.context.metadata_service_gcs_credentials, + flags=['--cache-control="no-cache"', "--content-type=application/json"], + ) + if exit_code != 0: + return StepResult(step=self, status=StepStatus.FAILURE, stdout=stdout, stderr=stderr) + return StepResult(step=self, status=StepStatus.SUCCESS, stdout="Uploaded SBOM to metadata service bucket.") + + # Pipeline @@ -317,6 +364,10 @@ async def run_connector_publish_pipeline(context: PublishConnectorContext, semap pre_release_tag=context.docker_image_tag, ) + upload_spec_to_cache_step = UploadSpecToCache(context) + + upload_sbom_step = UploadSbom(context) + def create_connector_report(results: List[StepResult]) -> ConnectorReport: report = ConnectorReport(context, results, name="PUBLISH RESULTS") context.report = report @@ -324,7 +375,6 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport: async with semaphore: async with context: - # TODO add a strucutre to hold the results of each step. and perform skips and failures results = [] @@ -349,10 +399,16 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport: "The connector version is already published. 
Let's upload metadata.yaml and spec to GCS even if no version bump happened." ) already_published_connector = context.dagger_client.container().from_(context.docker_image) - upload_to_spec_cache_results = await UploadSpecToCache(context).run(already_published_connector) + upload_to_spec_cache_results = await upload_spec_to_cache_step.run(already_published_connector) results.append(upload_to_spec_cache_results) if upload_to_spec_cache_results.status is not StepStatus.SUCCESS: return create_connector_report(results) + + upload_sbom_results = await upload_sbom_step.run() + results.append(upload_sbom_results) + if upload_sbom_results.status is not StepStatus.SUCCESS: + return create_connector_report(results) + metadata_upload_results = await metadata_upload_step.run() results.append(metadata_upload_results) @@ -388,11 +444,16 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport: if pull_connector_image_results.status is not StepStatus.SUCCESS: return create_connector_report(results) - upload_to_spec_cache_results = await UploadSpecToCache(context).run(built_connector_platform_variants[0]) + upload_to_spec_cache_results = await upload_spec_to_cache_step.run(built_connector_platform_variants[0]) results.append(upload_to_spec_cache_results) if upload_to_spec_cache_results.status is not StepStatus.SUCCESS: return create_connector_report(results) + upload_sbom_results = await upload_sbom_step.run() + results.append(upload_sbom_results) + if upload_sbom_results.status is not StepStatus.SUCCESS: + return create_connector_report(results) + metadata_upload_results = await metadata_upload_step.run() results.append(metadata_upload_results) connector_report = create_connector_report(results) diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index 59cf72cb99925..8ecfc79c5a2a2 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,7 +4,7 
@@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "4.32.5" +version = "4.33.0" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "] diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py index 67d4d16ebc0e1..2e5c9ea58fd50 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_publish.py +++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py @@ -155,6 +155,7 @@ def test_parse_spec_output_no_spec(self, publish_context): (publish_pipeline, "PullConnectorImageFromRegistry"), (publish_pipeline.steps, "run_connector_build"), (publish_pipeline, "CheckPythonRegistryPackageDoesNotExist"), + (publish_pipeline, "UploadSbom"), ] @@ -203,9 +204,11 @@ async def test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker run_metadata_validation = publish_pipeline.MetadataValidation.return_value.run run_metadata_validation.return_value = mocker.Mock(status=StepStatus.SUCCESS) - # ensure spec always succeeds + # ensure spec and sbom uploads always succeed run_upload_spec_to_cache = publish_pipeline.UploadSpecToCache.return_value.run run_upload_spec_to_cache.return_value = mocker.Mock(status=StepStatus.SUCCESS) + run_upload_sbom = publish_pipeline.UploadSbom.return_value.run + run_upload_sbom.return_value = mocker.Mock(status=StepStatus.SUCCESS) run_check_connector_image_does_not_exist = publish_pipeline.CheckConnectorImageDoesNotExist.return_value.run run_check_connector_image_does_not_exist.return_value = mocker.Mock(status=check_image_exists_status) @@ -219,7 +222,7 @@ async def test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker # Check that nothing else is called for module, to_mock in STEPS_TO_PATCH: - if to_mock not in ["MetadataValidation", "MetadataUpload", "CheckConnectorImageDoesNotExist", "UploadSpecToCache"]: + if to_mock not in ["MetadataValidation", 
"MetadataUpload", "CheckConnectorImageDoesNotExist", "UploadSpecToCache", "UploadSbom"]: getattr(module, to_mock).return_value.run.assert_not_called() if check_image_exists_status is StepStatus.SKIPPED: @@ -231,6 +234,7 @@ async def test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker run_metadata_validation.return_value, run_check_connector_image_does_not_exist.return_value, run_upload_spec_to_cache.return_value, + run_upload_sbom.return_value, run_metadata_upload.return_value, ] )