Commit d0b01b5: Move ValidatorOpts higher in call

1 parent e48e80f

File tree: 4 files changed (+27, -30 lines)

airbyte-ci/connectors/metadata_service/lib/metadata_service/commands.py
3 additions, 3 deletions
@@ -5,7 +5,7 @@
 
 import click
 from metadata_service.gcs_upload import upload_metadata_to_gcs, MetadataUploadInfo
-from metadata_service.validators.metadata_validator import PRE_UPLOAD_VALIDATORS, validate_and_load
+from metadata_service.validators.metadata_validator import PRE_UPLOAD_VALIDATORS, validate_and_load, ValidatorOptions
 from metadata_service.constants import METADATA_FILE_NAME
 from pydantic import ValidationError
 
@@ -54,9 +54,9 @@ def validate(file_path: pathlib.Path):
 @click.option("--prerelease", type=click.STRING, required=False, default=None, help="The prerelease tag of the connector.")
 def upload(metadata_file_path: pathlib.Path, bucket_name: str, prerelease: str):
     metadata_file_path = metadata_file_path if not metadata_file_path.is_dir() else metadata_file_path / METADATA_FILE_NAME
-
+    validator_opts = ValidatorOptions(prerelease_tag=prerelease)
     try:
-        upload_info = upload_metadata_to_gcs(bucket_name, metadata_file_path, prerelease)
+        upload_info = upload_metadata_to_gcs(bucket_name, metadata_file_path, validator_opts)
         log_metadata_upload_info(upload_info)
     except (ValidationError, FileNotFoundError) as e:
         click.secho(f"The metadata file could not be uploaded: {str(e)}", color="red")
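In effect, the upload command now wraps the --prerelease flag in a ValidatorOptions value before calling into the upload layer. A minimal sketch of the resulting call, assuming the imports shown in the diff; the metadata path, bucket name, and tag below are hypothetical:

from pathlib import Path

from metadata_service.gcs_upload import upload_metadata_to_gcs
from metadata_service.validators.metadata_validator import ValidatorOptions

# Hypothetical inputs; in the CLI these come from the command arguments and options.
metadata_file_path = Path("connectors/source-example/metadata.yaml")
prerelease = "1.2.3-dev.abc123"

# The prerelease tag now travels inside ValidatorOptions rather than as a bare string.
validator_opts = ValidatorOptions(prerelease_tag=prerelease)
upload_info = upload_metadata_to_gcs("my-test-bucket", metadata_file_path, validator_opts)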

airbyte-ci/connectors/metadata_service/lib/metadata_service/gcs_upload.py
12 additions, 12 deletions
@@ -15,7 +15,7 @@
 from google.oauth2 import service_account
 
 from metadata_service.constants import METADATA_FILE_NAME, METADATA_FOLDER, ICON_FILE_NAME
-from metadata_service.validators.metadata_validator import POST_UPLOAD_VALIDATORS, validate_and_load, ValidatorContext
+from metadata_service.validators.metadata_validator import POST_UPLOAD_VALIDATORS, validate_and_load, ValidatorOptions
 from metadata_service.utils import to_json_sanitized_dict
 from metadata_service.models.generated.ConnectorMetadataDefinitionV0 import ConnectorMetadataDefinitionV0
 
@@ -122,30 +122,30 @@ def _icon_upload(metadata: ConnectorMetadataDefinitionV0, bucket: storage.bucket
     return upload_file_if_changed(local_icon_path, bucket, latest_icon_path)
 
 
-def create_prerelease_metadata_file(metadata_file_path: Path, prerelease: str) -> Path:
-    metadata, error = validate_and_load(metadata_file_path, [], prerelease=prerelease)
+def create_prerelease_metadata_file(metadata_file_path: Path, validator_opts: ValidatorOptions) -> Path:
+    metadata, error = validate_and_load(metadata_file_path, [], validator_opts)
     if metadata is None:
         raise ValueError(f"Metadata file {metadata_file_path} is invalid for uploading: {error}")
 
     # replace any dockerImageTag references with the actual tag
     # this includes metadata.data.dockerImageTag, metadata.data.registries[].dockerImageTag
     # where registries is a dictionary of registry name to registry object
     metadata_dict = to_json_sanitized_dict(metadata, exclude_none=True)
-    metadata_dict["data"]["dockerImageTag"] = prerelease
+    metadata_dict["data"]["dockerImageTag"] = validator_opts.prerelease_tag
     for registry in get(metadata_dict, "data.registries", {}).values():
         if "dockerImageTag" in registry:
-            registry["dockerImageTag"] = prerelease
+            registry["dockerImageTag"] = validator_opts.prerelease_tag
 
     # write metadata to yaml file in system tmp folder
-    tmp_metadata_file_path = Path("/tmp") / metadata.data.dockerRepository / prerelease / METADATA_FILE_NAME
+    tmp_metadata_file_path = Path("/tmp") / metadata.data.dockerRepository / validator_opts.prerelease_tag / METADATA_FILE_NAME
     tmp_metadata_file_path.parent.mkdir(parents=True, exist_ok=True)
     with open(tmp_metadata_file_path, "w") as f:
         yaml.dump(metadata_dict, f)
 
     return tmp_metadata_file_path
 
 
-def upload_metadata_to_gcs(bucket_name: str, metadata_file_path: Path, prerelease: Optional[str] = None) -> MetadataUploadInfo:
+def upload_metadata_to_gcs(bucket_name: str, metadata_file_path: Path, validator_opts: ValidatorOptions = ValidatorOptions()) -> MetadataUploadInfo:
     """Upload a metadata file to a GCS bucket.
 
     If the per 'version' key already exists it won't be overwritten.
@@ -155,14 +155,14 @@ def upload_metadata_to_gcs(bucket_name: str, metadata_file_path: Path, prereleas
         bucket_name (str): Name of the GCS bucket to which the metadata file will be uploade.
         metadata_file_path (Path): Path to the metadata file.
         service_account_file_path (Path): Path to the JSON file with the service account allowed to read and write on the bucket.
-        prerelease (Optional[str]): Whether the connector is a prerelease or not.
+        prerelease_tag (Optional[str]): Whether the connector is a prerelease_tag or not.
     Returns:
         Tuple[bool, str]: Whether the metadata file was uploaded and its blob id.
     """
-    if prerelease:
-        metadata_file_path = create_prerelease_metadata_file(metadata_file_path, prerelease)
+    if validator_opts.prerelease_tag:
+        metadata_file_path = create_prerelease_metadata_file(metadata_file_path, validator_opts)
 
-    metadata, error = validate_and_load(metadata_file_path, POST_UPLOAD_VALIDATORS, prerelease=prerelease)
+    metadata, error = validate_and_load(metadata_file_path, POST_UPLOAD_VALIDATORS, validator_opts)
 
     if metadata is None:
         raise ValueError(f"Metadata file {metadata_file_path} is invalid for uploading: {error}")
@@ -175,7 +175,7 @@ def upload_metadata_to_gcs(bucket_name: str, metadata_file_path: Path, prereleas
     icon_uploaded, icon_blob_id = _icon_upload(metadata, bucket, metadata_file_path)
 
     version_uploaded, version_blob_id = _version_upload(metadata, bucket, metadata_file_path)
-    if not prerelease:
+    if not validator_opts.prerelease_tag:
         latest_uploaded, latest_blob_id = _latest_upload(metadata, bucket, metadata_file_path)
     else:
         latest_uploaded, latest_blob_id = False, None
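Because validator_opts now defaults to an empty ValidatorOptions, callers that do not care about prereleases keep passing only a bucket and a path, while prerelease uploads opt in explicitly. A short sketch of both call paths under those assumptions (bucket name, path, and tag are hypothetical):

from pathlib import Path

from metadata_service.gcs_upload import upload_metadata_to_gcs
from metadata_service.validators.metadata_validator import ValidatorOptions

metadata_path = Path("metadata.yaml")  # hypothetical path

# Regular upload: the default ValidatorOptions() applies and the "latest" blob is written.
upload_metadata_to_gcs("my-test-bucket", metadata_path)

# Prerelease upload: the tag is read from validator_opts.prerelease_tag, a temporary
# prerelease metadata file is generated, and the "latest" upload is skipped.
upload_metadata_to_gcs(
    "my-test-bucket",
    metadata_path,
    ValidatorOptions(prerelease_tag="1.2.3-dev.abc123"),
)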

airbyte-ci/connectors/metadata_service/lib/metadata_service/validators/metadata_validator.py
10 additions, 14 deletions
@@ -11,12 +11,12 @@
 
 
 @dataclass(frozen=True)
-class ValidatorContext:
-    prerelease: Optional[str] = None
+class ValidatorOptions:
+    prerelease_tag: Optional[str] = None
 
 
 ValidationResult = Tuple[bool, Optional[Union[ValidationError, str]]]
-Validator = Callable[[ConnectorMetadataDefinitionV0, ValidatorContext], ValidationResult]
+Validator = Callable[[ConnectorMetadataDefinitionV0, ValidatorOptions], ValidationResult]
 
 # TODO: Remove these when each of these connectors ship any new version
 ALREADY_ON_MAJOR_VERSION_EXCEPTIONS = [
@@ -34,7 +34,7 @@ class ValidatorContext:
 
 
 def validate_metadata_images_in_dockerhub(
-    metadata_definition: ConnectorMetadataDefinitionV0, validator_context: ValidatorContext
+    metadata_definition: ConnectorMetadataDefinitionV0, validator_opts: ValidatorOptions
 ) -> ValidationResult:
     metadata_definition_dict = metadata_definition.dict()
     base_docker_image = get(metadata_definition_dict, "data.dockerRepository")
@@ -58,7 +58,7 @@ def validate_metadata_images_in_dockerhub(
         (normalization_docker_image, normalization_docker_version),
     ]
 
-    if not validator_context.prerelease:
+    if not validator_opts.prerelease_tag:
         possible_docker_images.extend([(base_docker_image, version) for version in breaking_change_versions])
 
     # Filter out tuples with None and remove duplicates
@@ -73,7 +73,7 @@ def validate_metadata_images_in_dockerhub(
 
 
 def validate_at_least_one_language_tag(
-    metadata_definition: ConnectorMetadataDefinitionV0, _validator_context: ValidatorContext
+    metadata_definition: ConnectorMetadataDefinitionV0, _validator_opts: ValidatorOptions
 ) -> ValidationResult:
     """Ensure that there is at least one tag in the data.tags field that matches language:<LANG>."""
     tags = get(metadata_definition, "data.tags", [])
@@ -84,7 +84,7 @@ def validate_at_least_one_language_tag(
 
 
 def validate_all_tags_are_keyvalue_pairs(
-    metadata_definition: ConnectorMetadataDefinitionV0, _validator_context: ValidatorContext
+    metadata_definition: ConnectorMetadataDefinitionV0, _validator_opts: ValidatorOptions
 ) -> ValidationResult:
     """Ensure that all tags are of the form <KEY>:<VALUE>."""
     tags = get(metadata_definition, "data.tags", [])
@@ -102,7 +102,7 @@ def is_major_version(version: str) -> bool:
 
 
 def validate_major_version_bump_has_breaking_change_entry(
-    metadata_definition: ConnectorMetadataDefinitionV0, _validator_context: ValidatorContext
+    metadata_definition: ConnectorMetadataDefinitionV0, _validator_opts: ValidatorOptions
 ) -> ValidationResult:
     """Ensure that if the major version is incremented, there is a breaking change entry for that version."""
     metadata_definition_dict = metadata_definition.dict()
@@ -147,18 +147,14 @@ def validate_major_version_bump_has_breaking_change_entry(
 def validate_and_load(
     file_path: pathlib.Path,
     validators_to_run: List[Validator],
-    **kwargs,
+    validator_opts: ValidatorOptions = ValidatorOptions(),
 ) -> Tuple[Optional[ConnectorMetadataDefinitionV0], Optional[ValidationError]]:
     """Load a metadata file from a path (runs jsonschema validation) and run optional extra validators.
 
     Returns a tuple of (metadata_model, error_message).
     If the metadata file is valid, metadata_model will be populated.
     Otherwise, error_message will be populated with a string describing the error.
     """
-
-    # Initialize the validator context with the kwargs only if it is not empty
-    validator_context = ValidatorContext(**kwargs) if kwargs else ValidatorContext()
-
     try:
         # Load the metadata file - this implicitly runs jsonschema validation
         metadata = yaml.safe_load(file_path.read_text())
@@ -167,7 +163,7 @@ def validate_and_load(
         return None, f"Validation error: {e}"
 
     for validator in validators_to_run:
-        is_valid, error = validator(metadata_model, validator_context)
+        is_valid, error = validator(metadata_model, validator_opts)
         if not is_valid:
             return None, f"Validation error: {error}"
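With the ValidatorContext/**kwargs plumbing gone, every validator receives a ValidatorOptions argument directly and validate_and_load forwards a single options object to all of them. A sketch of what a caller-supplied validator could look like under the new signature; the validator itself and the file path are hypothetical, while the types and function signatures follow the diff:

import pathlib

from pydash.objects import get

from metadata_service.models.generated.ConnectorMetadataDefinitionV0 import ConnectorMetadataDefinitionV0
from metadata_service.validators.metadata_validator import (
    ValidationResult,
    ValidatorOptions,
    validate_and_load,
)


def validate_has_docker_repository(  # hypothetical extra validator
    metadata_definition: ConnectorMetadataDefinitionV0, _validator_opts: ValidatorOptions
) -> ValidationResult:
    """Ensure the metadata declares a dockerRepository."""
    if not get(metadata_definition.dict(), "data.dockerRepository"):
        return False, "data.dockerRepository is missing"
    return True, None


# Options are passed once to validate_and_load and handed to every validator in the list.
metadata, error = validate_and_load(
    pathlib.Path("metadata.yaml"),  # hypothetical path
    [validate_has_docker_repository],
    ValidatorOptions(prerelease_tag="1.2.3-dev.abc123"),
)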

airbyte-ci/connectors/metadata_service/lib/tests/test_gcs_upload.py
2 additions, 1 deletion
@@ -9,6 +9,7 @@
 from pydash.objects import get
 
 from metadata_service import gcs_upload
+from metadata_service.validators.metadata_validator import ValidatorOptions
 from metadata_service.models.generated.ConnectorMetadataDefinitionV0 import ConnectorMetadataDefinitionV0
 from metadata_service.constants import METADATA_FILE_NAME
 from metadata_service.utils import to_json_sanitized_dict
@@ -185,7 +186,7 @@ def test_upload_metadata_to_gcs_with_prerelease(mocker, valid_metadata_upload_fi
     gcs_upload.upload_metadata_to_gcs(
         "my_bucket",
         metadata_file_path,
-        prerelease_image_tag,
+        ValidatorOptions(prerelease_tag=prerelease_image_tag),
     )
 
     gcs_upload._latest_upload.assert_not_called()
