
Commit 46c08a2

chore: re-enable HW processing for hermetic build scripts (#2505)
Re-enables the changes from #2342 as part of [milestone 3.2](https://docs.google.com/document/d/1v-sJBmdNEVBRryL8n90XK8OIiqVphaJemXVhANw14Jk/edit?pli=1&resourcekey=0-QGUb2do-JBlDWKvptWBp_g&tab=t.0#bookmark=id.kl5fzqdav6u7) of Hermetic Build.

### Changes:
- add support for HW libraries in `integration_tests.py`
- adapt the configuration of docker volumes
- adapt the usage of `fix-poms.py` to support monorepo and split_repo modes
- fix `temp_destination_path` in `generate_composed_library.py` in order to avoid collisions
  - For example, `google/bigtable/v2` and `google/bigtable/admin/v2` would both be sent to a temporary `java-bigtable-v2` folder, overwriting the files being transferred via `copy-code`.
  - The fix is to use a `proto_path`-based folder name to achieve uniqueness (a short sketch follows below).

### After this PR:
- [ ] Reconfigure the nightly CI in google-cloud-java to use the updated volume mapping
1 parent 363a354 commit 46c08a2
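
For illustration, here is a minimal sketch of why the old service-version-based naming collided and why the `proto_path`-based naming is unique. The helper names below are hypothetical; only the two f-string formats come from the diff.

```python
# Sketch of the naming change in generate_composed_library.py.
# old scheme: java-<library name>-<last proto_path segment>
# new scheme: java-<proto_path with '/' replaced by '-'>
def old_temp_destination(library_name: str, proto_path: str) -> str:
    service_version = proto_path.split("/")[-1]
    return f"java-{library_name}-{service_version}"

def new_temp_destination(proto_path: str) -> str:
    return f"java-{proto_path.replace('/', '-')}"

proto_paths = ["google/bigtable/v2", "google/bigtable/admin/v2"]
print([old_temp_destination("bigtable", p) for p in proto_paths])
# ['java-bigtable-v2', 'java-bigtable-v2']  -> both GAPICs land in the same temp folder
print([new_temp_destination(p) for p in proto_paths])
# ['java-google-bigtable-v2', 'java-google-bigtable-admin-v2']  -> unique per proto_path
```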

22 files changed, +223 −121 lines

.gitignore (+2 −1)

@@ -25,6 +25,7 @@ target/
 **/output/
 **/googleapis
 library_generation/test/**/golden*/
-library_generation/test/resources/test_repo_level_postprocess/
+library_generation/test/resources/test_monorepo_postprocessing/
 **/*egg-info/
 **/build/
+**/dist/

library_generation/generate_composed_library.py (+2 −6)

@@ -62,7 +62,6 @@ def generate_composed_library(
         config=config, library=library, output_folder=output_folder
     )

-    is_monorepo = util.check_monorepo(config=config)
     base_arguments = __construct_tooling_arg(config=config)
     owlbot_cli_source_folder = util.sh_util("mktemp -d")
     os.makedirs(f"{library_path}", exist_ok=True)
@@ -80,10 +79,7 @@ def generate_composed_library(
         transport=gapic_inputs.transport,
         library_path=library_path,
     )
-    service_version = gapic.proto_path.split("/")[-1]
-    temp_destination_path = (
-        f"java-{util.get_library_name(library)}-{service_version}"
-    )
+    temp_destination_path = f"java-{gapic.proto_path.replace('/','-')}"
     effective_arguments = __construct_effective_arg(
         base_arguments=base_arguments,
         gapic=gapic,
@@ -115,7 +111,7 @@ def generate_composed_library(
             owlbot_cli_source_folder,
             config.owlbot_cli_image,
             config.synthtool_commitish,
-            str(is_monorepo).lower(),
+            str(config.is_monorepo).lower(),
             config.path_to_yaml,
         ],
         "Library postprocessing",

library_generation/generate_repo.py (+11 −1)

@@ -15,6 +15,7 @@

 import click
 import library_generation.utilities as util
+import os
 from library_generation.generate_composed_library import generate_composed_library
 from library_generation.model.generation_config import from_yaml

@@ -78,6 +79,11 @@ def generate_from_yaml(
     Parses a config yaml and generates libraries via
     generate_composed_library.py
     """
+    # convert paths to absolute paths so they can be correctly referenced in
+    # downstream scripts
+    generation_config_yaml = os.path.abspath(generation_config_yaml)
+    repository_path = os.path.abspath(repository_path)
+
     config = from_yaml(generation_config_yaml)
     target_libraries = config.libraries
     if target_library_api_shortname is not None:
@@ -102,7 +108,11 @@ def generate_from_yaml(
             versions_file=repo_config.versions_file,
         )

-    util.repo_level_post_process(
+    # we skip monorepo_postprocessing if not in a monorepo
+    if not config.is_monorepo:
+        return
+
+    util.monorepo_postprocessing(
         repository_path=repository_path, versions_file=repo_config.versions_file
     )


library_generation/model/generation_config.py (+5 −0)

@@ -46,6 +46,8 @@ def __init__(
         self.libraries = libraries
         self.grpc_version = grpc_version
         self.protobuf_version = protobuf_version
+        # monorepos have more than one library defined in the config yaml
+        self.is_monorepo = len(libraries) > 1


 def from_yaml(path_to_yaml: str) -> GenerationConfig:
@@ -92,6 +94,9 @@ def from_yaml(path_to_yaml: str) -> GenerationConfig:
             rpc_documentation=__optional(library, "rpc_documentation", None),
             cloud_api=__optional(library, "cloud_api", True),
             requires_billing=__optional(library, "requires_billing", True),
+            extra_versioned_modules=__optional(
+                library, "extra_versioned_modules", None
+            ),
         )
         parsed_libraries.append(new_library)

library_generation/model/library_config.py (+2 −0)

@@ -46,6 +46,7 @@ def __init__(
         rpc_documentation: Optional[str] = None,
         cloud_api: Optional[bool] = True,
         requires_billing: Optional[bool] = True,
+        extra_versioned_modules: Optional[str] = None,
     ):
         self.api_shortname = api_shortname
         self.api_description = api_description
@@ -69,3 +70,4 @@ def __init__(
         self.rpc_documentation = rpc_documentation
         self.cloud_api = cloud_api
         self.requires_billing = requires_billing
+        self.extra_versioned_modules = extra_versioned_modules

library_generation/owlbot/bin/entrypoint.sh (+2 −10)

@@ -28,6 +28,7 @@ scripts_root=$1
 versions_file=$2
 configuration_yaml=$3

+
 # This script can be used to process HW libraries and monorepo
 # (google-cloud-java) libraries, which require a slightly different treatment
 # monorepo folders have an .OwlBot.yaml file in the module folder (e.g.
@@ -65,7 +66,7 @@ echo "...done"

 # write or restore pom.xml files
 echo "Generating missing pom.xml..."
-python3 "${scripts_root}/owlbot/src/fix-poms.py" "${versions_file}" "true"
+python3 "${scripts_root}/owlbot/src/fix-poms.py" "${versions_file}" "${monorepo}"
 echo "...done"

 # write or restore clirr-ignored-differences.xml
@@ -78,16 +79,7 @@ echo "Fixing missing license headers..."
 python3 "${scripts_root}/owlbot/src/fix-license-headers.py"
 echo "...done"

-# TODO: re-enable this once we resolve thrashing
-# restore license headers years
-# echo "Restoring copyright years..."
-# /owlbot/bin/restore_license_headers.sh
-# echo "...done"
-
 # ensure formatting on all .java files in the repository
 echo "Reformatting source..."
 mvn fmt:format -V --batch-mode --no-transfer-progress
 echo "...done"
-
-
-

library_generation/owlbot/src/fix-poms.py (+1 −3)

@@ -559,7 +559,5 @@ def main(versions_file, monorepo):

 if __name__ == "__main__":
     versions_file = sys.argv[1]
-    monorepo = sys.argv[2]
-    if monorepo == "true":
-        monorepo = True
+    monorepo = True if sys.argv[2].lower() == "true" else False
     main(versions_file, monorepo)

library_generation/postprocess_library.sh (+10 −5)

@@ -68,8 +68,8 @@ else
 fi

 # Default values for running copy-code directly from host
-repo_binding="${postprocessing_target}"
-repo_workspace="/repo"
+repo_bindings="-v ${postprocessing_target}:/workspace"
+repo_workspace="/workspace"
 preprocessed_libraries_binding="${owlbot_cli_source_folder}"

 # When running docker inside docker, we run into the issue of volume bindings
@@ -86,16 +86,20 @@ preprocessed_libraries_binding="${owlbot_cli_source_folder}"

 if [[ -n "${RUNNING_IN_DOCKER}" ]]; then
   set -u # temporarily fail on unset variables
-  repo_binding="${REPO_BINDING_VOLUME}"
+  repo_bindings="${REPO_BINDING_VOLUMES}"
   set +u
+  library_name=$(echo "${postprocessing_target}" | rev | cut -d'/' -f1 | rev)
+  repo_workspace="/workspace/"
   if [[ "${is_monorepo}" == "true" ]]; then
-    repo_workspace="/repo/$(echo "${postprocessing_target}" | rev | cut -d'/' -f1 | rev)"
+    monorepo_name=$(echo "${postprocessing_target}" | rev | cut -d'/' -f2 | rev)
+    repo_workspace+="${monorepo_name}/"
   fi
+  repo_workspace+="${library_name}"
 fi

 docker run --rm \
   --user "$(id -u)":"$(id -g)" \
-  -v "${repo_binding}:/repo" \
+  ${repo_bindings} \
   -v "/tmp:/tmp" \
   -w "${repo_workspace}" \
   --env HOME=/tmp \
@@ -115,6 +119,7 @@ fi
 git config --global --add safe.directory /tmp/synthtool/synthtool
 pushd "synthtool"

+git fetch --all
 git reset --hard "${synthtool_commitish}"

 python3 -m pip install -e .
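
The docker-in-docker branch now assembles the child container's working directory from the last one or two path segments of `${postprocessing_target}`. A Python sketch of that path logic, with illustrative target paths that are not taken from the scripts:

```python
# Sketch of the repo_workspace construction done above with `rev | cut`:
# /workspace/<monorepo>/<library> for monorepo targets, /workspace/<library> otherwise.
from pathlib import PurePosixPath

def derive_repo_workspace(postprocessing_target: str, is_monorepo: bool) -> str:
    parts = PurePosixPath(postprocessing_target).parts
    library_name = parts[-1]          # `cut -d'/' -f1` on the reversed path
    if is_monorepo:
        monorepo_name = parts[-2]     # `cut -d'/' -f2` on the reversed path
        return f"/workspace/{monorepo_name}/{library_name}"
    return f"/workspace/{library_name}"

print(derive_repo_workspace("/tmp/google-cloud-java/java-asset", True))   # /workspace/google-cloud-java/java-asset
print(derive_repo_workspace("/tmp/java-bigtable", False))                 # /workspace/java-bigtable
```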

library_generation/test/container_integration_tests.sh (+30 −20)

@@ -8,32 +8,42 @@ if [[ -z "${TEST_IMAGE_ID}" ]]; then
   exit 1
 fi

-if [[ ! -d google-cloud-java ]]; then
-  git clone https://github.com/googleapis/google-cloud-java
-fi
-pushd google-cloud-java
-git reset --hard main
-popd
+repo_volumes=""
+for repo in google-cloud-java java-bigtable; do
+  if [[ ! -d "${repo}" ]]; then
+    git clone "https://github.com/googleapis/${repo}"
+  fi
+  pushd "${repo}"
+  git reset --hard main
+  popd

-# We use a volume to hold the google-cloud-java repository used in the
-# integration tests. This is because the test container creates a child
-# container using the host machine's docker socket, meaning that we can only
-# reference volumes created from within the host machine (i.e. the machine
-# running this script)
-#
-# To summarize, we create a special volume that can be referenced both in the
-# main container and in any child containers created by this one.
-if [[ $(docker volume inspect repo) != '[]' ]]; then
-  docker volume rm repo
-fi
-docker volume create --name "repo" --opt "type=none" --opt "device=$(pwd)/google-cloud-java" --opt "o=bind"
+  # We use a volume to hold the repositories used in the
+  # integration tests. This is because the test container creates a child
+  # container using the host machine's docker socket, meaning that we can only
+  # reference volumes created from within the host machine (i.e. the machine
+  # running this script)
+  #
+  # To summarize, we create a special volume that can be referenced both in the
+  # main container and in any child containers created by this one.
+  volume_name="repo-${repo}"
+  if [[ $(docker volume inspect "${volume_name}") != '[]' ]]; then
+    docker volume rm "${volume_name}"
+  fi
+  docker volume create \
+    --name "${volume_name}" \
+    --opt "type=none" \
+    --opt "device=$(pwd)/${repo}" \
+    --opt "o=bind"
+
+  repo_volumes="${repo_volumes} -v ${volume_name}:/workspace/${repo}"
+done

 docker run --rm \
-  -v repo:/workspace \
+  ${repo_volumes} \
   -v /tmp:/tmp \
   -v /var/run/docker.sock:/var/run/docker.sock \
   -e "RUNNING_IN_DOCKER=true" \
-  -e "REPO_BINDING_VOLUME=repo" \
+  -e "REPO_BINDING_VOLUMES=${repo_volumes}" \
   -w "/src" \
   "${TEST_IMAGE_ID}" \
   python -m unittest /src/test/integration_tests.py
