Skip to content

Commit 8665eaf

Browse files
authored
airbyte-ci: reduce required env var when running in CI (#37765)
1 parent f16632f commit 8665eaf

File tree

8 files changed

+28
-24
lines changed

8 files changed

+28
-24
lines changed

.github/workflows/community_ci.yml

+2
Original file line numberDiff line numberDiff line change
@@ -103,6 +103,8 @@ jobs:
103103
git_branch: ${{ github.head_ref }}
104104
git_revision: ${{ github.event.pull_request.head.sha }}
105105
github_token: ${{ github.token }}
106+
docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
107+
docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
106108
- name: Upload pipeline reports
107109
id: upload-artifact
108110
uses: actions/upload-artifact@v4

airbyte-ci/connectors/pipelines/README.md

+1
Original file line numberDiff line numberDiff line change
@@ -676,6 +676,7 @@ E.G.: running Poe tasks on the modified internal packages of the current branch:
676676

677677
| Version | PR | Description |
678678
|---------| ---------------------------------------------------------- |----------------------------------------------------------------------------------------------------------------------------|
679+
| 4.12.1 | [#37765](https://github.com/airbytehq/airbyte/pull/37765) | Relax the required env vars to run in CI and handle their absence gracefully. |
679680
| 4.12.0 | [#37690](https://github.com/airbytehq/airbyte/pull/37690) | Pass custom CI status name in `connectors test` |
680681
| 4.11.0 | [#37641](https://github.com/airbytehq/airbyte/pull/37641) | Updates to run regression tests in GitHub Actions. |
681682
| 4.10.5 | [#37641](https://github.com/airbytehq/airbyte/pull/37641) | Reintroduce changes from 4.10.0 with a fix. |

airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py

-2
Original file line numberDiff line numberDiff line change
@@ -99,8 +99,6 @@ def validate_environment(is_local: bool) -> None:
9999
raise click.UsageError("You need to run this command from the repository root.")
100100
else:
101101
required_env_vars_for_ci = [
102-
"GCP_GSM_CREDENTIALS",
103-
"CI_REPORT_BUCKET_NAME",
104102
"CI_GITHUB_ACCESS_TOKEN",
105103
"DOCKER_HUB_USERNAME",
106104
"DOCKER_HUB_PASSWORD",

airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/reports.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -147,12 +147,12 @@ async def save_html_report(self) -> None:
147147
await html_report_artifact.save_to_local_path(html_report_path)
148148
absolute_path = html_report_path.absolute()
149149
self.pipeline_context.logger.info(f"Report saved locally at {absolute_path}")
150-
if self.remote_storage_enabled and self.pipeline_context.ci_gcs_credentials_secret and self.pipeline_context.ci_report_bucket:
150+
if self.pipeline_context.remote_storage_enabled:
151151
gcs_url = await html_report_artifact.upload_to_gcs(
152152
dagger_client=self.pipeline_context.dagger_client,
153-
bucket=self.pipeline_context.ci_report_bucket,
153+
bucket=self.pipeline_context.ci_report_bucket, # type: ignore
154154
key=self.html_report_remote_storage_key,
155-
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
155+
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret, # type: ignore
156156
)
157157
self.pipeline_context.logger.info(f"HTML report uploaded to {gcs_url}")
158158

airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/common.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -380,7 +380,7 @@ def regression_tests_command(self) -> List[str]:
380380
exit $pytest_exit
381381
"""
382382
)
383-
return [f"bash", "-c", f"'{run_pytest_with_proxy}'"]
383+
return ["bash", "-c", f"'{run_pytest_with_proxy}'"]
384384

385385
def __init__(self, context: ConnectorContext) -> None:
386386
"""Create a step to run regression tests for a connector.

airbyte-ci/connectors/pipelines/pipelines/models/contexts/pipeline_context.py

+8-3
Original file line numberDiff line numberDiff line change
@@ -160,9 +160,10 @@ def report(self, report: Report | ConnectorReport) -> None:
160160
self._report = report
161161

162162
@property
163-
def ci_gcs_credentials_secret(self) -> Secret:
164-
assert self.ci_gcs_credentials is not None, "The ci_gcs_credentials was not set on this PipelineContext."
165-
return self.dagger_client.set_secret("ci_gcs_credentials", self.ci_gcs_credentials)
163+
def ci_gcs_credentials_secret(self) -> Secret | None:
164+
if self.ci_gcs_credentials is not None:
165+
return self.dagger_client.set_secret("ci_gcs_credentials", self.ci_gcs_credentials)
166+
return None
166167

167168
@property
168169
def ci_github_access_token_secret(self) -> Secret:
@@ -210,6 +211,10 @@ def dagger_cloud_url(self) -> Optional[str]:
210211

211212
return f"https://alpha.dagger.cloud/changeByPipelines?filter=dagger.io/git.ref:{self.git_revision}"
212213

214+
@property
215+
def remote_storage_enabled(self) -> bool:
216+
return self.is_ci is True and self.ci_report_bucket is not None and self.ci_gcs_credentials_secret is not None
217+
213218
def get_repo_file(self, file_path: str) -> File:
214219
"""Get a file from the current repository.
215220

airbyte-ci/connectors/pipelines/pipelines/models/reports.py

+12-14
Original file line numberDiff line numberDiff line change
@@ -84,10 +84,6 @@ def lead_duration(self) -> timedelta:
8484
assert self.pipeline_context.stopped_at is not None, "The pipeline stopped_at timestamp must be set to save reports."
8585
return self.pipeline_context.stopped_at - self.pipeline_context.created_at
8686

87-
@property
88-
def remote_storage_enabled(self) -> bool:
89-
return self.pipeline_context.is_ci
90-
9187
async def save(self) -> None:
9288
self.report_dir_path.mkdir(parents=True, exist_ok=True)
9389
await self.save_json_report()
@@ -103,14 +99,16 @@ async def save_json_report(self) -> None:
10399
await json_report_artifact.save_to_local_path(json_report_path)
104100
absolute_path = json_report_path.absolute()
105101
self.pipeline_context.logger.info(f"Report saved locally at {absolute_path}")
106-
if self.remote_storage_enabled and self.pipeline_context.ci_report_bucket and self.pipeline_context.ci_gcs_credentials_secret:
102+
if self.pipeline_context.remote_storage_enabled:
107103
gcs_url = await json_report_artifact.upload_to_gcs(
108104
dagger_client=self.pipeline_context.dagger_client,
109-
bucket=self.pipeline_context.ci_report_bucket,
105+
bucket=self.pipeline_context.ci_report_bucket, # type: ignore
110106
key=self.json_report_remote_storage_key,
111-
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
107+
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret, # type: ignore
112108
)
113109
self.pipeline_context.logger.info(f"JSON Report uploaded to {gcs_url}")
110+
else:
111+
self.pipeline_context.logger.info("JSON Report not uploaded to GCS because remote storage is disabled.")
114112

115113
async def save_step_result_artifacts(self) -> None:
116114
local_artifacts_dir = self.report_dir_path / "artifacts"
@@ -121,19 +119,19 @@ async def save_step_result_artifacts(self) -> None:
121119
step_artifacts_dir = local_artifacts_dir / slugify(step_result.step.title)
122120
step_artifacts_dir.mkdir(parents=True, exist_ok=True)
123121
await artifact.save_to_local_path(step_artifacts_dir / artifact.name)
124-
if (
125-
self.remote_storage_enabled
126-
and self.pipeline_context.ci_report_bucket
127-
and self.pipeline_context.ci_gcs_credentials_secret
128-
):
122+
if self.pipeline_context.remote_storage_enabled:
129123
upload_time = int(time.time())
130124
gcs_url = await artifact.upload_to_gcs(
131125
dagger_client=self.pipeline_context.dagger_client,
132-
bucket=self.pipeline_context.ci_report_bucket,
126+
bucket=self.pipeline_context.ci_report_bucket, # type: ignore
133127
key=f"{self.report_output_prefix}/artifacts/{slugify(step_result.step.title)}/{upload_time}_{artifact.name}",
134-
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
128+
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret, # type: ignore
135129
)
136130
self.pipeline_context.logger.info(f"Artifact {artifact.name} for {step_result.step.title} uploaded to {gcs_url}")
131+
else:
132+
self.pipeline_context.logger.info(
133+
f"Artifact {artifact.name} for {step_result.step.title} not uploaded to GCS because remote storage is disabled."
134+
)
137135

138136
def to_json(self) -> str:
139137
"""Create a JSON representation of the report.

airbyte-ci/connectors/pipelines/pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
44

55
[tool.poetry]
66
name = "pipelines"
7-
version = "4.12.0"
7+
version = "4.12.1"
88
description = "Package maintained by the connector operations team to perform CI for connectors' pipelines"
99
authors = ["Airbyte <[email protected]>"]
1010

0 commit comments

Comments
 (0)