
Commit 8e40c0a

committed
RHOAIENG-18400: chore(.tekton/): implement computation of "pipelinesascode.tekton.dev/on-cel-expression" values
1 parent fe43b80 commit 8e40c0a

4 files changed, +78 -12 lines changed

.tekton/jupyter-minimal-ubi9-python-3-11-pull-request.yaml

+5 -1
@@ -11,7 +11,11 @@ metadata:
     pipelinesascode.tekton.dev/cancel-in-progress: "true"
     pipelinesascode.tekton.dev/max-keep-runs: '3'
     pipelinesascode.tekton.dev/on-cel-expression: event == "pull_request" && target_branch
-      == "main" && has(body.repository) && body.repository.full_name == "opendatahub-io/notebooks"
+      == "main" && ( "jupyter/minimal/ubi9-python-3.11/Pipfile.lock".pathChanged()
+      || "jupyter/minimal/ubi9-python-3.11/start-notebook.sh".pathChanged() || "jupyter/utils/***".pathChanged()
+      || ".tekton/jupyter-minimal-ubi9-python-3-11-pull-request.yaml".pathChanged()
+      || "jupyter/minimal/ubi9-python-3.11/Dockerfile.cpu".pathChanged() ) && has(body.repository)
+      && body.repository.full_name == "opendatahub-io/notebooks"
   creationTimestamp: null
   labels:
     appstudio.openshift.io/application: notebooks
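The net effect of this hunk is that the pull-request pipeline now starts only when one of the listed paths changes, rather than on every pull request against main. As a rough illustration (not part of the commit), the gating can be approximated in plain Python over a list of changed paths, assuming Pipelines-as-Code's trailing "***" means "anything under this directory"; the helper names and the sample changed paths below are hypothetical.

from fnmatch import fnmatch

# Patterns copied from the generated on-cel-expression above.
WATCHED = [
    "jupyter/minimal/ubi9-python-3.11/Pipfile.lock",
    "jupyter/minimal/ubi9-python-3.11/start-notebook.sh",
    "jupyter/utils/***",
    ".tekton/jupyter-minimal-ubi9-python-3-11-pull-request.yaml",
    "jupyter/minimal/ubi9-python-3.11/Dockerfile.cpu",
]

def path_changed(pattern: str, changed_files: list[str]) -> bool:
    # Hypothetical stand-in for `"<pattern>".pathChanged()`: treat a trailing
    # "***" as a recursive directory match, otherwise match the pattern directly.
    if pattern.endswith("***"):
        prefix = pattern[:-len("***")]
        return any(f.startswith(prefix) for f in changed_files)
    return any(fnmatch(f, pattern) for f in changed_files)

def pipeline_should_run(changed_files: list[str]) -> bool:
    return any(path_changed(p, changed_files) for p in WATCHED)

print(pipeline_should_run(["jupyter/utils/some-script.sh"]))  # True (hypothetical file under jupyter/utils/)
print(pipeline_should_run(["some/unrelated/file.txt"]))       # False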

ci/cached-builds/konflux_generate_component_build_pipelines.py

+62 -5
@@ -2,10 +2,15 @@
 
 import re
 import pathlib
+
 import yaml
 
 import gen_gha_matrix_jobs
 import gha_pr_changed_files
+import scripts.sandbox
+
+# test dependencies
+import pyfakefs.fake_filesystem
 
 ROOT_DIR = pathlib.Path(__file__).parent.parent.parent
 
@@ -21,7 +26,7 @@
 
 Usage:
 
-    $ poetry run ci/cached-builds/konflux_generate_component_build_pipelines.py
+    $ PYTHONPATH=. poetry run ci/cached-builds/konflux_generate_component_build_pipelines.py
 """
 
 
@@ -108,6 +113,13 @@ def component_build_pipeline(component_name, dockerfile_path,
     This is general enough to create PR pipeline as well as push pipeline.
     """
     name = component_name + ("-on-pull-request" if is_pr else "-on-push")
+    files_changed_cel_expression = ""
+    if is_pr:
+        files_changed_cel_expression = ' || '.join((
+            compute_cel_expression(dockerfile_path),
+            f'".tekton/{component_name}-pull-request.yaml".pathChanged()',
+            f'"{dockerfile_path}".pathChanged()'
+        ))
     return {
         "apiVersion": "tekton.dev/v1",
         "kind": "PipelineRun",
@@ -120,7 +132,8 @@ def component_build_pipeline(component_name, dockerfile_path,
             "pipelinesascode.tekton.dev/cancel-in-progress": "true" if is_pr else "false",
             "pipelinesascode.tekton.dev/max-keep-runs": "3",
             "pipelinesascode.tekton.dev/on-cel-expression": (
-                f'event == "{"pull_request" if is_pr else "push"}" && target_branch == "main"'
+                f'event == "{"pull_request" if is_pr else "push"}" && target_branch == "main"'
+                + (' && ( ' + files_changed_cel_expression + ' )' if files_changed_cel_expression else "")
                 + ' && has(body.repository) && body.repository.full_name == "opendatahub-io/notebooks"'
            ),
         },
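Taken together, the two hunks above assemble the PR annotation from three parts: whatever compute_cel_expression() discovers for the Dockerfile, the generated .tekton pipeline file itself, and the Dockerfile. A minimal sketch of the resulting string, using a hypothetical component name and a hard-coded stand-in for compute_cel_expression():

# Hypothetical inputs for illustration only.
component_name = "example-component"
dockerfile_path = "example/Dockerfile.cpu"
is_pr = True

files_changed_cel_expression = " || ".join((
    '"example/Pipfile.lock".pathChanged()',  # stand-in for compute_cel_expression(dockerfile_path)
    f'".tekton/{component_name}-pull-request.yaml".pathChanged()',
    f'"{dockerfile_path}".pathChanged()',
))

on_cel_expression = (
    f'event == "{"pull_request" if is_pr else "push"}" && target_branch == "main"'
    + (' && ( ' + files_changed_cel_expression + ' )' if files_changed_cel_expression else "")
    + ' && has(body.repository) && body.repository.full_name == "opendatahub-io/notebooks"'
)
print(on_cel_expression)
# event == "pull_request" && target_branch == "main" && ( "example/Pipfile.lock".pathChanged()
# || ".tekton/example-component-pull-request.yaml".pathChanged() || "example/Dockerfile.cpu".pathChanged() )
# && has(body.repository) && body.repository.full_name == "opendatahub-io/notebooks"

For a push pipeline, files_changed_cel_expression stays empty and the parenthesized clause is omitted, which matches the unchanged push-pipeline YAML in this commit.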
@@ -799,10 +812,11 @@ def main():
         print("# yamllint disable-file", file=yaml_file)
         print("# This file is autogenerated by ci/cached-builds/konflux_generate_component_build_pipelines.py",
               file=yaml_file)
-        print(yaml.dump(component_build_pipeline(component_name=task_name, dockerfile_path=dockerfile,
+        print(
+            yaml.dump(component_build_pipeline(component_name=task_name, dockerfile_path=dockerfile,
                                                  build_container_tasks=[build_container()], is_pr=False)),
-              end="",
-              file=yaml_file)
+            end="",
+            file=yaml_file)
     with open(ROOT_DIR / ".tekton" / (task_name + "-pull-request.yaml"), "w") as yaml_file:
         print("# yamllint disable-file", file=yaml_file)
         print("# This file is autogenerated by ci/cached-builds/konflux_generate_component_build_pipelines.py",
@@ -813,5 +827,48 @@ def main():
               file=yaml_file)
 
 
+def compute_cel_expression(dockerfile: pathlib.Path) -> str:
+    return cel_expression(root_dir=ROOT_DIR, files=scripts.sandbox.buildinputs(dockerfile))
+
+
+def cel_expression(root_dir: pathlib.Path, files: list[pathlib.Path]) -> str:
+    """
+    Generate a CEL expression for file change detection.
+
+    Args:
+        root_dir (pathlib.Path): Docker build context.
+        files (list[pathlib.Path]): List of file paths to check for changes.
+
+    Returns:
+        str: A CEL expression that checks if any of the given files have changed.
+    """
+    expressions = []
+    for file in files:
+        relative_path = file.relative_to(root_dir) if file.is_absolute() else file
+        if file.is_dir():
+            expressions.append(f'"{relative_path}/***".pathChanged()')
+        else:
+            expressions.append(f'"{relative_path}".pathChanged()')
+
+    return " || ".join(expressions)
+
+
 if __name__ == "__main__":
     main()
+
+
+class Tests:
+
+    def test_compute_cel_expression(self, fs: pyfakefs.fake_filesystem.FakeFilesystem):
+        fs.cwd = ROOT_DIR
+        ROOT_DIR.mkdir(parents=True)
+        pathlib.Path("a/").mkdir()
+        pathlib.Path("b/").mkdir()
+        pathlib.Path("b/c.txt").write_text("")
+
+        assert cel_expression(
+            ROOT_DIR,
+            files=[
+                pathlib.Path("a"),
+                pathlib.Path("b") / "c.txt"
+            ]) == '"a/***".pathChanged() || "b/c.txt".pathChanged()'

scripts/__init__.py

Whitespace-only changes.

scripts/sandbox.py

+11 -6
@@ -38,12 +38,7 @@ def main() -> int:
         print("must give a `{};` parameter that will be replaced with new build context")
         return 1
 
-    if not (ROOT_DIR / "bin/buildinputs").exists():
-        subprocess.check_call([MAKE, "bin/buildinputs"], cwd=ROOT_DIR)
-    stdout = subprocess.check_output([ROOT_DIR / "bin/buildinputs", str(args.dockerfile)],
-                                     text=True, cwd=ROOT_DIR)
-    prereqs = [pathlib.Path(file) for file in json.loads(stdout)] if stdout != "\n" else []
-    print(f"{prereqs=}")
+    prereqs = buildinputs(dockerfile=args.dockerfile)
 
     with tempfile.TemporaryDirectory(delete=True) as tmpdir:
         setup_sandbox(prereqs, pathlib.Path(tmpdir))
@@ -57,6 +52,16 @@ def main() -> int:
     return 0
 
 
+def buildinputs(dockerfile: pathlib.Path | str) -> list[pathlib.Path]:
+    if not (ROOT_DIR / "bin/buildinputs").exists():
+        subprocess.check_call([MAKE, "bin/buildinputs"], cwd=ROOT_DIR)
+    stdout = subprocess.check_output([ROOT_DIR / "bin/buildinputs", str(dockerfile)],
+                                     text=True, cwd=ROOT_DIR)
+    prereqs = [pathlib.Path(file) for file in json.loads(stdout)] if stdout != "\n" else []
+    print(f"{prereqs=}")
+    return prereqs
+
+
 def setup_sandbox(prereqs: list[pathlib.Path], tmpdir: pathlib.Path):
     # always adding .gitignore
     gitignore = ROOT_DIR / ".gitignore"
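With the discovery logic extracted into buildinputs(), other callers (notably compute_cel_expression() in the generator above) can reuse it. A minimal usage sketch, assuming it is invoked from the repository root and that make can build the bin/buildinputs helper; the Dockerfile path is taken from the pipeline change above and is only an example:

import pathlib

import scripts.sandbox

# Example Dockerfile; any Dockerfile in the repository works the same way.
dockerfile = pathlib.Path("jupyter/minimal/ubi9-python-3.11/Dockerfile.cpu")

# Builds bin/buildinputs via make on first use, runs it against the Dockerfile,
# and parses its JSON output into a list of pathlib.Path build inputs.
prereqs = scripts.sandbox.buildinputs(dockerfile)
for path in prereqs:
    print(path)

Importing scripts.sandbox this way assumes the repository root is on PYTHONPATH, which is why the generator's docstring now shows the PYTHONPATH=. invocation.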

0 commit comments
