Skip to content

Commit 98adff8

Browse files
authored
bump cdktf base 6.0 and refactor (#72)
* fix: add missing deletion_protection to validate plan
* feat: use er-base-cdktf 0.6.0
* refactor: move aws api to utils
* refactor: Dockerfile run test from builder
* chore: Bump base image to cdktf-0.20.11-tf-1.6.6-py-3.12-v0.6.0-2
* fix(konflux): remove taskRunSpecs overrides

---------

Signed-off-by: Di Wang <[email protected]>
1 parent 6fa0a94 commit 98adff8

14 files changed

+166
-209
lines changed

.tekton/main-pull-request.yaml

-21
Original file line numberDiff line numberDiff line change
@@ -30,27 +30,6 @@ spec:
3030
value: .
3131
- name: target-stage
3232
value: test
33-
- name: build-platforms
34-
value:
35-
- linux/x86_64
36-
# - linux-m2xlarge/arm64
37-
taskRunSpecs:
38-
- pipelineTaskName: build-container
39-
stepSpecs:
40-
- name: build
41-
computeResources:
42-
requests:
43-
memory: 20Gi
44-
limits:
45-
memory: 20Gi
46-
- pipelineTaskName: ecosystem-cert-preflight-checks
47-
stepSpecs:
48-
- name: check-container
49-
computeResources:
50-
requests:
51-
memory: 4Gi
52-
limits:
53-
memory: 4Gi
5433
pipelineRef:
5534
resolver: git
5635
params:

.tekton/main-push.yaml

-21
Original file line numberDiff line numberDiff line change
@@ -27,27 +27,6 @@ spec:
2727
value: .
2828
- name: target-stage
2929
value: prod
30-
- name: build-platforms
31-
value:
32-
- linux/x86_64
33-
# - linux-m2xlarge/arm64
34-
taskRunSpecs:
35-
- pipelineTaskName: build-container
36-
stepSpecs:
37-
- name: build
38-
computeResources:
39-
requests:
40-
memory: 20Gi
41-
limits:
42-
memory: 20Gi
43-
- pipelineTaskName: ecosystem-cert-preflight-checks
44-
stepSpecs:
45-
- name: check-container
46-
computeResources:
47-
requests:
48-
memory: 4Gi
49-
limits:
50-
memory: 4Gi
5130
pipelineRef:
5231
resolver: git
5332
params:

Dockerfile

+20-28
Original file line numberDiff line numberDiff line change
@@ -1,28 +1,27 @@
1-
FROM quay.io/redhat-services-prod/app-sre-tenant/er-base-cdktf-main/er-base-cdktf-main:cdktf-0.20.11-tf-1.6.6-py-3.11-v0.5.0-1@sha256:0c6f11a1a4057ceb6ea9fb9f5ec7eb0a5a2bdf7730416cbddda87b0f509e59a5 AS base
1+
FROM quay.io/redhat-services-prod/app-sre-tenant/er-base-cdktf-main/er-base-cdktf-main:cdktf-0.20.11-tf-1.6.6-py-3.12-v0.6.0-2 AS base
22
# keep in sync with pyproject.toml
3-
LABEL konflux.additional-tags="0.3.0"
3+
LABEL konflux.additional-tags="0.3.1"
44

55
FROM base AS builder
66
COPY --from=ghcr.io/astral-sh/uv:0.5.25@sha256:a73176b27709bff700a1e3af498981f31a83f27552116f21ae8371445f0be710 /uv /bin/uv
77

8-
COPY cdktf.json ./
9-
# Download all necessary CDKTF providers and build the python cdktf modules.
10-
# The python modules must be stored in the .gen directory because cdktf needs them there.
11-
RUN cdktf-provider-sync .gen
12-
138
# Python and UV related variables
149
ENV \
1510
# compile bytecode for faster startup
1611
UV_COMPILE_BYTECODE="true" \
1712
# disable uv cache. it doesn't make sense in a container
1813
UV_NO_CACHE=true \
19-
UV_NO_PROGRESS=true
14+
UV_NO_PROGRESS=true \
15+
VIRTUAL_ENV="${APP}/.venv" \
16+
PATH="${APP}/.venv/bin:${PATH}"
2017

21-
COPY pyproject.toml uv.lock ./
18+
COPY cdktf.json pyproject.toml uv.lock ./
2219
# Test lock file is up to date
2320
RUN uv lock --locked
2421
# Install dependencies
2522
RUN uv sync --frozen --no-group dev --no-install-project --python /usr/bin/python3
23+
# Download all necessary terraform providers
24+
RUN cdktf-provider-sync
2625

2726
# the source code
2827
COPY README.md ./
@@ -31,29 +30,22 @@ COPY er_aws_rds ./er_aws_rds
3130
# Sync the project
3231
RUN uv sync --frozen --no-group dev
3332

34-
FROM base AS prod
35-
# get cdktf providers
36-
COPY --from=builder ${TF_PLUGIN_CACHE_DIR} ${TF_PLUGIN_CACHE_DIR}
37-
# get our app with the dependencies
38-
COPY --from=builder ${APP} ${APP}
39-
40-
ENV \
41-
# Use the virtual environment
42-
PATH="${APP}/.venv/bin:${PATH}" \
43-
# cdktf python modules path
44-
PYTHONPATH="$APP/.gen"
45-
46-
FROM prod AS test
47-
COPY --from=ghcr.io/astral-sh/uv:0.5.25@sha256:a73176b27709bff700a1e3af498981f31a83f27552116f21ae8371445f0be710 /uv /bin/uv
48-
33+
FROM builder as test
4934
# install test dependencies
5035
RUN uv sync --frozen
5136

5237
COPY Makefile ./
5338
COPY tests ./tests
39+
# Empty $JSII_RUNTIME_PACKAGE_CACHE_ROOT (/tmp/jsii-runtime-cache) again because the test stage created files there,
40+
# and we want to run this test image in the dev environment, requires files owned by a random uid
41+
RUN make test && rm -rf "$JSII_RUNTIME_PACKAGE_CACHE_ROOT"
5442

55-
RUN make test
43+
FROM base AS prod
44+
# get cdktf providers
45+
COPY --from=builder ${TF_PLUGIN_CACHE_DIR} ${TF_PLUGIN_CACHE_DIR}
46+
# get our app with the dependencies
47+
COPY --from=builder ${APP} ${APP}
5648

57-
# Empty /tmp again because the test stage might have created files there, e.g. JSII_RUNTIME_PACKAGE_CACHE_ROOT
58-
# and we want to run this test image in the dev environment
59-
RUN rm -rf /tmp/*
49+
ENV \
50+
VIRTUAL_ENV="${APP}/.venv" \
51+
PATH="${APP}/.venv/bin:${PATH}"

Makefile

+8-15
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
SITE_PACKAGES_DIR ?= $(shell .venv/bin/python3 -c 'import sysconfig; print(sysconfig.get_paths()["purelib"])')
21
CONTAINER_ENGINE ?= $(shell which podman >/dev/null 2>&1 && echo podman || echo docker)
32

43
.PHONY: format
@@ -8,8 +7,8 @@ format:
87

98
.PHONY: image_tests
109
image_tests:
11-
# test /tmp must be empty
12-
[ -z "$(shell ls -A /tmp)" ]
10+
# test /tmp/jsii-runtime-cache not created
11+
[ ! -d "/tmp/jsii-runtime-cache" ]
1312
# validate_plan.py must exist
1413
[ -f "hooks/validate_plan.py" ]
1514

@@ -20,23 +19,17 @@ code_tests:
2019
uv run mypy
2120
uv run pytest -vv --cov=er_aws_rds --cov-report=term-missing --cov-report xml
2221

23-
.PHONY: dependency_tests
24-
dependency_tests:
25-
python -c "import cdktf_cdktf_provider_random"
26-
python -c "import cdktf_cdktf_provider_aws"
27-
2822
.PHONY: test
29-
test: image_tests code_tests dependency_tests
23+
test: image_tests code_tests
24+
25+
.PHONY: build_test
26+
build_test:
27+
$(CONTAINER_ENGINE) build --progress plain --target test -t er-aws-rds:test .
3028

3129
.PHONY: build
3230
build:
33-
$(CONTAINER_ENGINE) build --progress plain -t er-aws-rds:test .
31+
$(CONTAINER_ENGINE) build --progress plain --target prod -t er-aws-rds:prod .
3432

3533
.PHONY: dev
3634
dev:
37-
# Prepare local development environment
3835
uv sync
39-
# The CDKTF python module generation needs at least 12GB of memory!
40-
mkdir -p .gen
41-
$(CONTAINER_ENGINE) run --rm -it -v $(PWD)/:/home/app/src -v $(PWD)/.gen:/cdktf-providers:z --entrypoint cdktf-provider-sync quay.io/redhat-services-prod/app-sre-tenant/er-base-cdktf-main/er-base-cdktf-main:latest /cdktf-providers
42-
cp sitecustomize.py $(SITE_PACKAGES_DIR)

cdktf.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,6 @@
22
"language": "python",
33
"app": "er-aws-rds",
44
"sendCrashReports": "false",
5-
"terraformProviders": ["random@ = 3.6.3", "aws@ = 5.50.0"],
5+
"terraformProviders": [],
66
"projectId": "49778ce7-523f-482c-a0e7-bad21132b58c"
77
}

hooks/__init__.py

Whitespace-only changes.

hooks/utils/__init__.py

Whitespace-only changes.

hooks/utils/aws_api.py

+42
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
from collections.abc import Mapping
2+
from typing import TYPE_CHECKING, Any
3+
4+
from boto3 import Session
5+
from botocore.config import Config
6+
from mypy_boto3_rds import RDSClient
7+
8+
if TYPE_CHECKING:
9+
from mypy_boto3_rds.type_defs import FilterTypeDef
10+
11+
12+
class AWSApi:
13+
"""AWS Api Class"""
14+
15+
def __init__(self, config_options: Mapping[str, Any]) -> None:
16+
self.session = Session()
17+
self.config = Config(**config_options)
18+
19+
def get_rds_client(self) -> RDSClient:
20+
"""Gets a boto RDS client"""
21+
return self.session.client("rds", config=self.config)
22+
23+
def get_rds_valid_update_versions(self, engine: str, version: str) -> set[str]:
24+
"""Gets the valid update versions"""
25+
data = self.get_rds_client().describe_db_engine_versions(
26+
Engine=engine, EngineVersion=version, IncludeAll=True
27+
)
28+
29+
if data["DBEngineVersions"] and len(data["DBEngineVersions"]) == 1:
30+
return {
31+
item.get("EngineVersion", "-1")
32+
for item in data["DBEngineVersions"][0].get("ValidUpgradeTarget", [])
33+
}
34+
return set[str]()
35+
36+
def get_rds_parameter_groups(self, engine: str) -> set[str]:
37+
"""Gets the existing parameter groups by engine"""
38+
filters: list[FilterTypeDef] = [
39+
{"Name": "db-parameter-group-family", "Values": [engine]},
40+
]
41+
resp = self.get_rds_client().describe_db_parameter_groups(Filters=filters)
42+
return {group["DBParameterGroupName"] for group in resp["DBParameterGroups"]}

hooks/validate_plan.py

+2-42
Original file line numberDiff line numberDiff line change
@@ -2,65 +2,24 @@
22

33
import logging
44
import sys
5-
from collections.abc import Mapping
6-
from typing import TYPE_CHECKING, Any
75

86
import semver
9-
from boto3 import Session
10-
from botocore.config import Config
117
from external_resources_io.input import parse_model, read_input_from_file
128
from external_resources_io.terraform import (
139
Action,
1410
Plan,
1511
ResourceChange,
1612
TerraformJsonPlanParser,
1713
)
18-
from mypy_boto3_rds import RDSClient
19-
20-
if TYPE_CHECKING:
21-
from mypy_boto3_rds.type_defs import FilterTypeDef
22-
2314

2415
from er_aws_rds.input import AppInterfaceInput
16+
from hooks.utils.aws_api import AWSApi
2517

2618
logging.basicConfig(level=logging.INFO)
2719
logger = logging.getLogger("botocore")
2820
logger.setLevel(logging.ERROR)
2921

3022

31-
class AWSApi:
32-
"""AWS Api Class"""
33-
34-
def __init__(self, config_options: Mapping[str, Any]) -> None:
35-
self.session = Session()
36-
self.config = Config(**config_options)
37-
38-
def get_rds_client(self) -> RDSClient:
39-
"""Gets a boto RDS client"""
40-
return self.session.client("rds", config=self.config)
41-
42-
def get_rds_valid_update_versions(self, engine: str, version: str) -> set[str]:
43-
"""Gets the valid update versions"""
44-
data = self.get_rds_client().describe_db_engine_versions(
45-
Engine=engine, EngineVersion=version, IncludeAll=True
46-
)
47-
48-
if data["DBEngineVersions"] and len(data["DBEngineVersions"]) == 1:
49-
return {
50-
item.get("EngineVersion", "-1")
51-
for item in data["DBEngineVersions"][0].get("ValidUpgradeTarget", [])
52-
}
53-
return set[str]()
54-
55-
def get_rds_parameter_groups(self, engine: str) -> set[str]:
56-
"""Gets the existing parameter groups by engine"""
57-
filters: list[FilterTypeDef] = [
58-
{"Name": "db-parameter-group-family", "Values": [engine]},
59-
]
60-
resp = self.get_rds_client().describe_db_parameter_groups(Filters=filters)
61-
return {group["DBParameterGroupName"] for group in resp["DBParameterGroups"]}
62-
63-
6423
class RDSPlanValidator:
6524
"""The plan validator class"""
6625

@@ -139,6 +98,7 @@ def _validate_deletion_protection_not_enabled_on_destroy(self) -> None:
13998
def validate(self) -> bool:
14099
"""Validate method"""
141100
self._validate_major_version_upgrade()
101+
self._validate_deletion_protection_not_enabled_on_destroy()
142102
return not self.errors
143103

144104

pyproject.toml

+5-3
Original file line numberDiff line numberDiff line change
@@ -5,14 +5,16 @@ description = "ERv2 module for managing AWS rds instances"
55
authors = [{ name = "AppSRE", email = "[email protected]" }]
66
license = { text = "Apache 2.0" }
77
readme = "README.md"
8-
requires-python = "~= 3.11.0"
8+
requires-python = "~= 3.12.0"
99
dependencies = [
1010
"boto3 ~=1.35.47",
1111
"cdktf ==0.20.10",
12+
"cdktf-cdktf-provider-aws==19.19.0",
13+
"cdktf-cdktf-provider-random==11.0.3",
1214
"external-resources-io ~=0.4.0",
13-
"pydantic ~=2.10.0",
14-
# the cdktf provider are generated by cdktf-provider-sync. See Makefile and Dockerfile
1515
"mypy_boto3_rds ~=1.35.72",
16+
"pip>=25.0",
17+
"pydantic ~=2.10.0",
1618
"semver>=3.0.2",
1719
]
1820

sitecustomize.py

-4
This file was deleted.

tests/test_module.py

+8-6
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import json
2+
import tempfile
23
from typing import Any
34

45
import pytest
@@ -16,12 +17,13 @@
1617

1718
def build_synth(additional_data: dict[str, Any] | None = None) -> str:
1819
"""Create Synth"""
19-
stack = Stack(
20-
Testing.app(),
21-
"CDKTF",
22-
input_object(additional_data=additional_data),
23-
)
24-
return Testing.synth(stack)
20+
with tempfile.TemporaryDirectory() as outdir:
21+
stack = Stack(
22+
Testing.app(outdir=outdir),
23+
"CDKTF",
24+
input_object(additional_data=additional_data),
25+
)
26+
return Testing.synth(stack)
2527

2628

2729
def test_should_contain_rds_instance() -> None:

tests/test_plan_validation.py

+1-5
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,3 @@
1-
import sys
2-
from pathlib import Path
3-
4-
sys.path.append(str(Path(__file__).parent.parent))
51
from external_resources_io.terraform import Action, Plan
62
from hooks.validate_plan import RDSPlanValidator
73

@@ -29,7 +25,7 @@ def test_validate_deletion_protection_not_enabled_on_destroy() -> None:
2925
})
3026

3127
validator = RDSPlanValidator(plan, input_object())
32-
validator._validate_deletion_protection_not_enabled_on_destroy() # noqa: SLF001
28+
validator.validate()
3329
assert validator.errors == [
3430
"Deletion protection cannot be enabled on destroy. Disable deletion_protection first to remove the instance"
3531
]

0 commit comments

Comments (0)