
Commit 01aa3fc

Build legacy swarm with any netplugin branch
During make build, the user can specify:

* NETPLUGIN_BRANCH - the branch to compile instead of the latest release
* NETPLUGIN_OWNER (if not contiv) - the username for the netplugin fork

When a branch is specified, only the full installer is built.

Much of the environment / config for the build has moved into the Makefile and
environment variables so it is easy to find and set; the variables are largely
prefixed with CONTIV_.

Some decomposition of the build has happened (the ansible and netplugin steps
are split out into separate scripts and targets), getting us part way toward
build.sh being only an assembly of assets gathered by other scripts and dropped
into the artifact_staging directory.

The release tarball is prepared in a random temp directory that is cleaned up
automatically.

build.sh no longer takes a bunch of CLI arguments; it relies on environment
variables instead, since the script is intended to be run by the Makefile.

The legacy swarm mode prefers the full installer over the over-the-internet
installer.

Created a reusable installer unpacker that can later be used by other install
scripts.

Drive-by:
- update the netplugin versions to 1.1.5
1 parent ac54a9f commit 01aa3fc
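
For illustration, the new workflow is driven entirely through make (the fork and
branch names below are placeholders, not part of the commit):

    # default: package the latest released netplugin (1.1.5)
    make build

    # compile and package a specific netplugin branch; only the full installer
    # is produced in this case
    make build NETPLUGIN_OWNER=my-fork NETPLUGIN_BRANCH=my-feature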

File tree

6 files changed: +114 -103 lines changed

.gitignore

+1
@@ -5,4 +5,5 @@ cluster/.vagrant
 cluster/export
 cluster/*.log
 release
+artifact_staging/

Makefile

+34 -3

@@ -1,13 +1,44 @@
+# backwards compatibility name for CONTIV_INSTALLER_VERSION
+export BUILD_VERSION ?= devbuild
+# sets the version for the installer output artifacts
+export CONTIV_INSTALLER_VERSION ?= $(BUILD_VERSION)
+# downloaded and built assets intended to go in installer by build.sh
+export CONTIV_ARTIFACT_STAGING := $(PWD)/artifact_staging
+# some assets are retrieved from GitHub, this is the default version to fetch
+export DEFAULT_DOWNLOAD_CONTIV_VERSION := 1.1.5
+export NETPLUGIN_OWNER ?= contiv
+# setting NETPLUGIN_BRANCH compiles that commit on demand,
+# setting CONTIV_NETPLUGIN_VERSION will download that released version
+ifeq ($(NETPLUGIN_BRANCH),)
+export CONTIV_NETPLUGIN_VERSION ?= $(DEFAULT_DOWNLOAD_CONTIV_VERSION)
+else
+export CONTIV_NETPLUGIN_VERSION := $(NETPLUGIN_OWNER)-$(NETPLUGIN_BRANCH)
+endif
+export CONTIV_NETPLUGIN_TARBALL_NAME := netplugin-$(CONTIV_NETPLUGIN_VERSION).tar.bz2
+export CONTIV_ANSIBLE_COMMIT ?= 4e67f54a8042debfc3d8b504046d0a1d4ea38c37
+export CONTIV_ANSIBLE_OWNER ?= contiv
+
 # this is the classic first makefile target, and it's also the default target
 # run when `make` is invoked with no specific target.
 all: build
 rel_ver = $(shell ./scripts/get_latest_release.sh)
 
+# accepts CONTIV_ANSIBLE_COMMIT and CONTIV_ANSIBLE_OWNER environment vars
+download-ansible-repo:
+	@scripts/download_ansible_repo.sh
+
+# set NETPLUGIN_OWNER (default contiv) and NETPLUGIN_BRANCH make variables
+# to compile locally
+# e.g. make NETPLUGIN_OWNER=contiv NETPLUGIN_BRANCH=master
+prepare-netplugin-tarball:
+	@scripts/prepare_netplugin_tarball.sh
+
+assemble-build:
+	@bash ./scripts/build.sh
+
 # build creates a release package for contiv.
 # It uses a pre-built image specified by BUILD_VERSION.
-build:
-	rm -rf release/
-	@bash ./scripts/build.sh
+build: download-ansible-repo prepare-netplugin-tarball assemble-build
 
 # ansible-image creates the docker image for ansible container
 # It uses the version specified by BUILD_VERSION or creates an image with the latest tag.
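
For reference, with the defaults above the netplugin version and tarball name
resolve as follows (derived from the ifeq block; the branch value is only an
example):

    make build                          # CONTIV_NETPLUGIN_VERSION=1.1.5
                                        # CONTIV_NETPLUGIN_TARBALL_NAME=netplugin-1.1.5.tar.bz2
    make build NETPLUGIN_BRANCH=master  # CONTIV_NETPLUGIN_VERSION=contiv-master
                                        # CONTIV_NETPLUGIN_TARBALL_NAME=netplugin-contiv-master.tar.bz2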

scripts/build.sh

+47 -84

@@ -1,5 +1,9 @@
 #!/bin/bash
 
+# Required environment variables:
+# * CONTIV_INSTALLER_VERSION - sets the tarball artifact filenames
+# * CONTIV_NETPLUGIN_VERSION - updates config files to locate contiv tarball
+
 set -xeuo pipefail
 
 # ensure this script wasn't called from the directory where this script
@@ -10,126 +14,73 @@ if [ "$script_dir" == "." ]; then
     exit 1
 fi
 
-DEV_IMAGE_NAME="devbuild"
-VERSION=${BUILD_VERSION-$DEV_IMAGE_NAME}
-
-contiv_version=${CONTIV_VERSION:-"1.0.3"}
 pull_images=${CONTIV_CI_HOST:-"false"}
 aci_gw_version=${CONTIV_ACI_GW_VERSION:-"latest"}
-ansible_image_version=${CONTIV_ANSIBLE_IMAGE_VERSION:-$contiv_version}
-auth_proxy_version=${CONTIV_API_PROXY_VERSION:-$contiv_version}
+ansible_image_version=${CONTIV_ANSIBLE_IMAGE_VERSION:-$DEFAULT_DOWNLOAD_CONTIV_VERSION}
+auth_proxy_version=${CONTIV_API_PROXY_VERSION:-$DEFAULT_DOWNLOAD_CONTIV_VERSION}
 docker_version=${CONTIV_DOCKER_VERSION:-1.12.6}
 etcd_version=${CONTIV_ETCD_VERSION:-v2.3.8}
-contiv_ansible_commit=${CONTIV_ANSIBLE_COMMIT:-4e67f54a8042debfc3d8b504046d0a1d4ea38c37}
-contiv_ansible_owner=${CONTIV_ANSIBLE_OWNER:-contiv}
 
 # the installer currently pulls the v2plugin image directly from Docker Hub, but
 # this will change to being downloaded from the Docker Store in the future.
 # because of this, the default value for this variable will become the latest
 # version that is available in the Docker Store and should be considered
 # independent of $contiv_version above.
-v2plugin_version=${CONTIV_V2PLUGIN_VERSION:-"1.0.3"}
+v2plugin_version=${CONTIV_V2PLUGIN_VERSION:-"1.1.5"}
 
-function usage() {
-    echo "Usage:"
-    echo "./release.sh -a <ACI gateway image> -c <contiv version> -e <etcd version> -p <API proxy image version> "
-    exit 1
-}
+# where everything is assembled, always start with a clean dir and clean it up
+output_tmp_dir="$(mktemp -d)"
+output_dir="${output_tmp_dir}/contiv-${CONTIV_INSTALLER_VERSION}"
+mkdir -p ${output_dir}
+trap 'rm -rf ${output_tmp_dir}' EXIT
 
-function error_ret() {
-    echo ""
-    echo $1
-    exit 1
-}
-
-while getopts ":a:p:c:e:v:" opt; do
-    case $opt in
-        a)
-            aci_gw_version=$OPTARG
-            ;;
-        c)
-            contiv_version=$OPTARG
-            ;;
-        e)
-            etcd_version=$OPTARG
-            ;;
-        p)
-            auth_proxy_version=$OPTARG
-            ;;
-        v)
-            v2plugin_version=$OPTARG
-            ;;
-        :)
-            echo "An argument required for $OPTARG was not passed"
-            usage
-            ;;
-        ?)
-            usage
-            ;;
-    esac
-done
-
-release_dir="release"
-output_dir="$release_dir/contiv-$VERSION/"
-output_file="$release_dir/contiv-$VERSION.tgz"
-tmp_output_file="contiv-$VERSION.tgz"
-full_output_file="$release_dir/contiv-full-$VERSION.tgz"
-tmp_full_output_file="contiv-full-$VERSION.tgz"
-
-# Clean older dist folders and release binaries
-rm -rf $output_dir
-rm -rf $output_file
+release_dir=release
+mkdir -p $release_dir
+output_file="${release_dir}/contiv-${CONTIV_INSTALLER_VERSION}.tgz"
+full_output_file="$release_dir/contiv-full-${CONTIV_INSTALLER_VERSION}.tgz"
 
 # Release files
 # k8s - install.sh to take the args and construct contiv.yaml as required and to launch kubectl
 # swarm - install.sh launches the container to do the actual installation
 # Top level install.sh which will either take k8s/swarm install params and do the required.
-mkdir -p $output_dir
-cp -rf install $output_dir
-cp README.md $output_dir
+cp -rf install README.md $output_dir
 cp -rf scripts/generate-certificate.sh $output_dir/install
 
 # Get the ansible support files
 chmod +x $output_dir/install/genInventoryFile.py
 chmod +x $output_dir/install/generate-certificate.sh
 
-# This is maybe optional - but assume we need it for
-curl -sSL https://github.com/contiv/netplugin/releases/download/$contiv_version/netplugin-$contiv_version.tar.bz2 -o $output_dir/netplugin-$contiv_version.tar.bz2
-pushd $output_dir
-tar oxf netplugin-$contiv_version.tar.bz2 netctl
-rm -f netplugin-$contiv_version.tar.bz2
-popd
-# add ansible repo contents where final tarball will include
-mkdir $output_dir/ansible
-curl -sL https://api.github.com/repos/${contiv_ansible_owner}/ansible/tarball/$contiv_ansible_commit |
-    tar --strip-components 1 -C $output_dir/ansible -z -x
+cp -a ${CONTIV_ARTIFACT_STAGING}/ansible ${output_dir}/
 
 # Replace versions
 files=$(find $output_dir -type f -name "*.yaml" -or -name "*.sh" -or -name "*.json")
 sed -i.bak 's/__ACI_GW_VERSION__/'"$aci_gw_version"'/g' $files
 sed -i.bak 's/__API_PROXY_VERSION__/'"$auth_proxy_version"'/g' $files
 sed -i.bak 's/__CONTIV_INSTALL_VERSION__/'"$ansible_image_version"'/g' $files
-sed -i.bak 's/__CONTIV_VERSION__/'"$contiv_version"'/g' $files
+sed -i.bak 's/__CONTIV_VERSION__/'"$CONTIV_NETPLUGIN_VERSION"'/g' $files
 sed -i.bak 's/__DOCKER_VERSION__/'"$docker_version"'/g' $files
 sed -i.bak 's/__ETCD_VERSION__/'"$etcd_version"'/g' $files
 sed -i.bak 's/__CONTIV_V2PLUGIN_VERSION__/'"$v2plugin_version"'/g' $files
 
 # Make all shell script files executable
 chmod +x $(find $output_dir -type f -name "*.sh")
 
-# Cleanup the backup files
-rm -rf $output_dir/scripts
-rm -rf $(find $output_dir -type f -name "*.bak")
-
 # Clean up the Dockerfile, it is not part of the release bits.
 rm -f $output_dir/install/ansible/Dockerfile
 
 # Create the binary cache folder
 binary_cache=$output_dir/contiv_cache
 mkdir -p $binary_cache
 
-# Create the minimal tar bundle
-tar czf $tmp_output_file -C $release_dir contiv-$VERSION
+# only build installer that pulls artifacts over internet if not building
+# a specific commit of netplugin
+if [ -z "${NETPLUGIN_BRANCH:-}" ]; then
+    # Create the minimal tar bundle
+    tar czf $output_file -C $output_tmp_dir contiv-${CONTIV_INSTALLER_VERSION}
+    echo -n "Contiv Installer version '$CONTIV_INSTALLER_VERSION' with "
+    echo "netplugin version '$CONTIV_NETPLUGIN_VERSION' is available "
+    echo "at '$output_file'"
+fi
 
 # Save the auth proxy & aci-gw images for packaging the full docker images with contiv install binaries
 if [[ "$(docker images -q contiv/auth_proxy:$auth_proxy_version 2>/dev/null)" == "" || "$pull_images" == "true" ]]; then
@@ -147,17 +98,29 @@ curl --fail -sL -o $binary_cache/openvswitch-2.5.0-2.el7.x86_64.rpm http://cbs.c
 curl --fail -sL -o $binary_cache/ovs-common.deb http://mirrors.kernel.org/ubuntu/pool/main/o/openvswitch/openvswitch-common_2.5.2-0ubuntu0.16.04.3_amd64.deb
 curl --fail -sL -o $binary_cache/ovs-switch.deb http://mirrors.kernel.org/ubuntu/pool/main/o/openvswitch/openvswitch-switch_2.5.2-0ubuntu0.16.04.3_amd64.deb
 
+# Copy the netplugin release into the binary cache for "full" installer
+# Netplugin releases built locally based on a branch are named by their SHA,
+# but there is a symlink to point to the SHA named tarball by it's branch name
+plugin_tball=${CONTIV_ARTIFACT_STAGING}/$CONTIV_NETPLUGIN_TARBALL_NAME
+if [[ -L "${plugin_tball}" ]]; then
+    # copy the link (so other processes can find the tarball) and the tarball
+    target_plugin_tball=$(readlink ${plugin_tball})
+    cp -a ${plugin_tball} ${binary_cache}/
+    plugin_tball=${CONTIV_ARTIFACT_STAGING}/${target_plugin_tball}
+fi
+cp ${plugin_tball} ${binary_cache}/
+
 env_file=$output_dir/install/ansible/env.json
 sed -i.bak 's#__AUTH_PROXY_LOCAL_INSTALL__#true#g' "$env_file"
 sed -i.bak 's#__CONTIV_NETWORK_LOCAL_INSTALL__#true#g' "$env_file"
 
 echo "Ansible extra vars from env.json:"
 cat $env_file
 # Create the full tar bundle
-tar czf $tmp_full_output_file -C $release_dir contiv-$VERSION
-
-mv $tmp_output_file $output_file
-mv $tmp_full_output_file $full_output_file
-rm -rf $output_dir
-
-echo "Success: Contiv Installer version $VERSION is available at $output_file"
+tar czf $full_output_file -C $output_tmp_dir contiv-${CONTIV_INSTALLER_VERSION}
+echo -n "Contiv Installer version '$CONTIV_INSTALLER_VERSION' with "
+echo "netplugin version '$CONTIV_NETPLUGIN_VERSION' is available "
+echo "at '$full_output_file', it includes all contiv assets "
+echo "required for installation"
+echo
+echo -e "\nSuccess"
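
To make the symlink handling above concrete, after a branch build the staging
area is expected to look roughly like this (the SHA is a placeholder, and
prepare_netplugin_tarball.sh itself is not shown in this diff):

    artifact_staging/
        ansible/                              # populated by download_ansible_repo.sh
        netplugin-<sha>.tar.bz2               # tarball named by the built commit SHA
        netplugin-contiv-master.tar.bz2       # branch-named symlink -> netplugin-<sha>.tar.bz2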

scripts/download_ansible_repo.sh

+13
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+set -euo pipefail
+
+ANSIBLE_REPO_DIR=${CONTIV_ARTIFACT_STAGING}/ansible
+
+rm -rf $ANSIBLE_REPO_DIR
+
+mkdir -p $ANSIBLE_REPO_DIR $CONTIV_ARTIFACT_STAGING
+
+echo downloading ${CONTIV_ANSIBLE_OWNER}/ansible commit: $CONTIV_ANSIBLE_COMMIT
+curl --fail -sL https://api.github.com/repos/${CONTIV_ANSIBLE_OWNER}/ansible/tarball/$CONTIV_ANSIBLE_COMMIT \
+    | tar --strip-components 1 -C $ANSIBLE_REPO_DIR -z -x
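
The script takes all of its configuration from the environment (normally
exported by the Makefile); run standalone it would be invoked roughly like this,
with values mirroring the Makefile defaults:

    CONTIV_ARTIFACT_STAGING=$PWD/artifact_staging \
    CONTIV_ANSIBLE_OWNER=contiv \
    CONTIV_ANSIBLE_COMMIT=4e67f54a8042debfc3d8b504046d0a1d4ea38c37 \
        ./scripts/download_ansible_repo.sh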

scripts/legacy_swarm_test.sh

+5 -9

@@ -24,16 +24,12 @@ if [ "$ssh_key" == "" ]; then
     ssh_key=$(vagrant ssh-config legacy-swarm-master | grep IdentityFile | awk '{print $2}' | xargs)
 fi
 popd
+
+./scripts/unpack-installer.sh
+
 # Extract and launch the installer
-mkdir -p release
-cd release
-if [ ! -f "${install_version}.tgz" ]; then
-    # For release builds, get the build from github releases
-    curl -L -O https://github.com/contiv/install/releases/download/${BUILD_VERSION}/${install_version}.tgz
-fi
 
-tar oxf $install_version.tgz
-cd $install_version
+cd release/$install_version
 ./install/ansible/install_swarm.sh -f ../../cluster/.cfg_legacy-swarm.yaml -e $ssh_key -u $user -i
 
 # Wait for CONTIV to start for up to 10 minutes
@@ -46,7 +42,7 @@ for i in {0..20}; do
 cat <<EOF
 NOTE: Because the Contiv Admin Console is using a self-signed certificate for this demo,
 you will see a security warning when the page loads. You can safely dismiss it.
-
+
 You can access the Contiv master node with:
     cd cluster && vagrant ssh legacy-swarm-master
 EOF
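
scripts/unpack-installer.sh is called above but is not among the files shown in
this diff. Based on the commit message and the inline logic it replaces here
(and the similar logic kept in swarm_mode_test.sh below), a minimal sketch of
such an unpacker might look like the following; everything in it is an
assumption, not the committed script:

    #!/bin/bash
    # Hypothetical sketch only: prefer a locally built full installer, otherwise
    # fall back to the minimal tarball, downloading it for release builds.
    set -euo pipefail

    install_version="contiv-${BUILD_VERSION:-devbuild}"
    full_tarball="contiv-full-${BUILD_VERSION:-devbuild}.tgz"

    mkdir -p release
    cd release
    if [ -f "$full_tarball" ]; then
        tar oxf "$full_tarball"
    else
        if [ ! -f "${install_version}.tgz" ]; then
            # release builds only: BUILD_VERSION names a published GitHub release
            curl -L -O "https://github.com/contiv/install/releases/download/${BUILD_VERSION}/${install_version}.tgz"
        fi
        tar oxf "${install_version}.tgz"
    fi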

scripts/swarm_mode_test.sh

+14 -7

@@ -16,24 +16,31 @@ else
 fi
 user=${CONTIV_SSH_USER:-"$def_user"}
 
-# If BUILD_VERSION is not defined, we use a local dev build, that must have been created with make release
-install_version="contiv-${BUILD_VERSION:-devbuild}"
 pushd cluster
 ssh_key=${CONTIV_SSH_KEY:-"$def_key"}
 if [ "$ssh_key" == "" ]; then
     ssh_key=$(vagrant ssh-config swarm-mode-master | grep IdentityFile | awk '{print $2}' | xargs)
 fi
 popd
+
 # Extract and launch the installer
 mkdir -p release
 cd release
-if [ ! -f "${install_version}.tgz" ]; then
-    # For release builds, get the build from github releases
-    curl -L -O https://github.com/contiv/install/releases/download/${BUILD_VERSION}/${install_version}.tgz
+# If BUILD_VERSION is not defined, we use a local dev build, that must have been created with make release
+release_name="contiv-${BUILD_VERSION:-devbuild}"
+release_tarball="${release_name}.tgz"
+release_local_tarball="contiv-full-${BUILD_VERSION}.tgz"
+if [ -f "${release_local_tarball}" ]; then
+    tar oxf "${release_local_tarball}"
+else
+    if [ ! -f "${release_tarball}" ]; then
+        # For release builds, get the build from github releases
+        curl -L -O https://github.com/contiv/install/releases/download/${BUILD_VERSION}/${release_name}.tgz
+    fi
+    tar oxf "${release_name}.tgz"
 fi
 
-tar oxf $install_version.tgz
-cd $install_version
+cd $release_name
 ./install/ansible/install_swarm.sh -f ../../cluster/.cfg_swarm-mode.yaml -e $ssh_key -u $user -p
 
 # Wait for CONTIV to start for up to 10 minutes
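
Under the Makefile defaults (BUILD_VERSION=devbuild), a typical local run of
this test might look like the following; the workflow is an assumption and it
presumes the swarm-mode vagrant cluster is already up:

    make build NETPLUGIN_BRANCH=master     # leaves release/contiv-full-devbuild.tgz
    BUILD_VERSION=devbuild ./scripts/swarm_mode_test.sh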
