
Commit 8310221

Enable aws-nuke to delete specified AWS resources (#283)
1 parent 5ceb10d commit 8310221

File tree: 4 files changed (+90, -2)

.circleci/integration-tests/Dockerfile

Lines changed: 17 additions & 2 deletions
@@ -1,6 +1,7 @@
 FROM quay.io/astronomer/ap-airflow:2.2.5 as staging

 ENV AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION=False
+ENV AWS_NUKE_VERSION=v2.17.0

 USER root

@@ -31,7 +32,13 @@ ENV HADOOP_LIBRARY_VERSION=hadoop-2.10.1
 # install AWS CLI
 RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
     && unzip awscliv2.zip \
-    && ./aws/install
+    && ./aws/install;
+
+# install AWS nuke
+RUN wget --quiet \
+    "https://github.com/rebuy-de/aws-nuke/releases/download/${AWS_NUKE_VERSION}/aws-nuke-${AWS_NUKE_VERSION}-linux-amd64.tar.gz" \
+    && tar -xzvf aws-nuke-${AWS_NUKE_VERSION}-linux-amd64.tar.gz -C /usr/local/bin \
+    && mv /usr/local/bin/aws-nuke-${AWS_NUKE_VERSION}-linux-amd64 /usr/local/bin/aws-nuke

 # install eksctl
 RUN curl --silent --location "https://github.com/weaveworks/eksctl/releases/latest/download/eksctl_$(uname -s)_amd64.tar.gz" | tar xz -C /tmp \
@@ -64,6 +71,7 @@ RUN mkdir -p ${AIRFLOW_HOME}/dags
 COPY . .
 RUN cp -r example_* ${AIRFLOW_HOME}/dags
 RUN cp master_dag.py ${AIRFLOW_HOME}/dags/
+RUN cp nuke-config.yml ${AIRFLOW_HOME}/dags/

 USER astro

@@ -101,7 +109,13 @@ ENV HADOOP_LIBRARY_VERSION=hadoop-2.10.1
 # install AWS CLI
 RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
     && unzip awscliv2.zip \
-    && ./aws/install
+    && ./aws/install;
+
+# install AWS nuke
+RUN wget --quiet \
+    "https://github.com/rebuy-de/aws-nuke/releases/download/${AWS_NUKE_VERSION}/aws-nuke-${AWS_NUKE_VERSION}-linux-amd64.tar.gz" \
+    && tar -xzvf aws-nuke-${AWS_NUKE_VERSION}-linux-amd64.tar.gz -C /usr/local/bin \
+    && mv /usr/local/bin/aws-nuke-${AWS_NUKE_VERSION}-linux-amd64 /usr/local/bin/aws-nuke

 # install eksctl
 RUN curl --silent --location "https://github.com/weaveworks/eksctl/releases/latest/download/eksctl_$(uname -s)_amd64.tar.gz" | tar xz -C /tmp \
@@ -134,5 +148,6 @@ RUN mkdir -p ${AIRFLOW_HOME}/dags
 COPY . .
 RUN cp -r example_* ${AIRFLOW_HOME}/dags
 RUN cp master_dag.py ${AIRFLOW_HOME}/dags/
+RUN cp nuke-config.yml ${AIRFLOW_HOME}/dags/

 USER astro
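
Note: since the binary installed above is later run with --no-dry-run, it may be worth smoke-testing an image built from this Dockerfile before wiring it into a DAG. A minimal sketch, assuming a hypothetical local tag integration-tests:latest; it only checks that the pinned aws-nuke binary landed where the Dockerfile put it:

import subprocess

# "integration-tests:latest" is an assumed local tag for an image built
# from the Dockerfile above.
IMAGE = "integration-tests:latest"

# Ask the container where aws-nuke resolved on PATH.
result = subprocess.run(
    ["docker", "run", "--rm", "--entrypoint", "/bin/sh", IMAGE,
     "-c", "command -v aws-nuke"],
    capture_output=True,
    text=True,
    check=True,
)
assert result.stdout.strip() == "/usr/local/bin/aws-nuke"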

.circleci/integration-tests/master_dag.py

Lines changed: 1 addition & 0 deletions
@@ -94,6 +94,7 @@ def prepare_dag_dependency(task_info, execution_time):
         {"s3_sensor_dag": "example_s3_sensor"},
         {"redshift_sql_dag": "example_async_redshift_sql"},
         {"redshift_cluster_mgmt_dag": "example_async_redshift_cluster_management"},
+        {"aws_nuke_dag": "example_aws_nuke"},
     ]
     amazon_trigger_tasks, ids = prepare_dag_dependency(amazon_task_info, "{{ ds }}")
     dag_run_ids.extend(ids)
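
Note: the hunk above only registers the new mapping; prepare_dag_dependency itself is outside the diff. As a rough reconstruction (the helper body below is an assumption, not the commit's code, though TriggerDagRunOperator and its parameters are Airflow's real API), each {task_id: dag_id} pair plausibly becomes one trigger task plus a run ID for downstream monitoring:

from airflow.operators.trigger_dagrun import TriggerDagRunOperator


def prepare_dag_dependency(task_info, execution_time):
    # Hypothetical sketch: one TriggerDagRunOperator per {task_id: dag_id} pair.
    trigger_tasks, run_ids = [], []
    for pair in task_info:
        for task_id, dag_id in pair.items():
            run_id = f"{dag_id}_{execution_time}"  # execution_time is "{{ ds }}", rendered by Airflow
            trigger_tasks.append(
                TriggerDagRunOperator(
                    task_id=task_id,
                    trigger_dag_id=dag_id,
                    trigger_run_id=run_id,
                    wait_for_completion=True,  # master DAG waits on each example DAG
                )
            )
            run_ids.append(run_id)
    return trigger_tasks, run_ids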
.circleci/integration-tests/nuke-config.yml

Lines changed: 30 additions & 0 deletions
@@ -0,0 +1,30 @@
+---
+regions:
+  - us-east-2
+  - us-east-1
+  - us-west-1
+  - us-west-2
+  - ap-south-1
+  - ap-northeast-2
+  - ap-northeast-3
+  - ap-southeast-1
+  - ap-southeast-2
+  - ap-northeast-1
+  - ca-central-1
+  - eu-central-1
+  - eu-west-1
+  - eu-west-2
+  - eu-west-3
+  - eu-north-1
+  - sa-east-1
+account-blocklist:
+  - '999999999999'
+resource-types:
+  targets:
+    - EMRCluster
+    - EC2Instance
+    - EKSCluster
+    - S3MultipartUpload
+    - RedshiftCluster
+accounts:
+  '475538383708': # aws-oss-main
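
Note: aws-nuke by design refuses to touch any account in account-blocklist, and it will not run at all unless the blocklist is non-empty, which the '999999999999' placeholder satisfies. An illustrative pre-flight check (not part of the commit) that the target account is configured and not blocklisted, using PyYAML:

import yaml  # PyYAML

TARGET_ACCOUNT = "475538383708"  # aws-oss-main, taken from the config above

with open("nuke-config.yml") as f:
    config = yaml.safe_load(f)

# Fail fast before anyone reaches for --no-dry-run.
assert TARGET_ACCOUNT in config["accounts"], "target account missing from accounts"
assert TARGET_ACCOUNT not in config["account-blocklist"], (
    "refusing to run against a blocklisted account"
)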
.circleci/integration-tests/example_aws_nuke.py

Lines changed: 42 additions & 0 deletions
@@ -0,0 +1,42 @@
+import os
+from datetime import datetime, timedelta
+
+from airflow import DAG
+from airflow.operators.bash import BashOperator
+from airflow.operators.dummy import DummyOperator
+
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "**********")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "***********")
+AWS_DEFAULT_REGION = os.environ.get("AWS_DEFAULT_REGION", "us-east-2")
+AWS_CONN_ID = os.environ.get("ASTRO_AWS_CONN_ID", "aws_default")
+EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
+
+default_args = {
+    "execution_timeout": timedelta(hours=EXECUTION_TIMEOUT),
+}
+
+with DAG(
+    dag_id="example_aws_nuke",
+    start_date=datetime(2022, 1, 1),
+    schedule_interval="30 20 * * *",
+    catchup=False,
+    default_args=default_args,
+    tags=["example", "aws-nuke"],
+) as dag:
+    start = DummyOperator(task_id="start")
+
+    set_aws_config = BashOperator(
+        task_id="aws_config",
+        bash_command=f"aws configure set aws_access_key_id {AWS_ACCESS_KEY_ID}; "
+        f"aws configure set aws_secret_access_key {AWS_SECRET_ACCESS_KEY}; "
+        f"aws configure set default.region {AWS_DEFAULT_REGION}; ",
+    )
+
+    execute_aws_nuke = BashOperator(
+        task_id="execute_aws_nuke",
+        bash_command="aws-nuke -c /usr/local/airflow/dags/nuke-config.yml --profile default --force --no-dry-run; ",
+    )
+
+    end = DummyOperator(task_id="end")
+
+    start >> set_aws_config >> execute_aws_nuke >> end
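
Note: with --force and --no-dry-run, execute_aws_nuke skips aws-nuke's confirmation prompts and deletes for real on every scheduled run (30 20 * * * is 20:30 UTC nightly). A hypothetical companion task, not in the commit, relying on aws-nuke treating the absence of --no-dry-run as a dry run, so the task log shows what would be removed:

from airflow.operators.bash import BashOperator

# Illustrative preview task; intended to sit inside the `with DAG(...)` block
# above, e.g. start >> set_aws_config >> preview_aws_nuke >> execute_aws_nuke >> end.
preview_aws_nuke = BashOperator(
    task_id="preview_aws_nuke",
    bash_command=(
        "aws-nuke -c /usr/local/airflow/dags/nuke-config.yml "
        "--profile default --force; "
    ),
)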
