Commit ffd5a19

Add github actions for tf lint check (#296)
1 parent 13be33c · commit ffd5a19

File tree

12 files changed: +54 -19 lines

.github/workflows/ci.yaml

+35
@@ -0,0 +1,35 @@
+name: Terraform CI
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+    branches:
+      - main
+jobs:
+  Terraform-Lint-Check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: hashicorp/setup-terraform@v3
+        with:
+          terraform_version: "1.5.7"
+
+      - name: Terraform fmt
+        id: fmt
+        run: terraform fmt -check -recursive
+
+      - name: Terraform Init
+        id: init
+        run: |
+          terraform -chdir=applications/rag init
+          terraform -chdir=applications/ray init
+          terraform -chdir=applications/jupyter init
+
+      - name: Terraform Validate
+        id: validate
+        run: |
+          terraform -chdir=applications/rag validate -no-color
+          terraform -chdir=applications/ray validate -no-color
+          terraform -chdir=applications/jupyter validate -no-color
+
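The workflow above gates pushes and pull requests to main on terraform fmt -check plus terraform init and terraform validate for the rag, ray, and jupyter application roots. A rough local equivalent, shown here only as a sketch (it assumes Terraform 1.5.x is installed and the commands run from the repository root), would be:

# Formatting check: exits non-zero if any .tf/.tfvars file is not canonically formatted
terraform fmt -check -recursive

# Initialize and validate each application root, mirroring the workflow steps
for app in rag ray jupyter; do
  terraform -chdir="applications/$app" init
  terraform -chdir="applications/$app" validate -no-color
done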

applications/jupyter/workloads-without-iap.example.tfvars

+4 -4

@@ -26,10 +26,10 @@ cluster_membership_id = "" # required only for private clusters, default: cluste
 #######################################################

 ## JupyterHub variables
-namespace = "jupyter"
-gcs_bucket = "<gcs-bucket>"
-create_gcs_bucket = true
-workload_identity_service_account = "jupyter-service-account"
+namespace                         = "jupyter"
+gcs_bucket                        = "<gcs-bucket>"
+create_gcs_bucket                 = true
+workload_identity_service_account = "jupyter-service-account"

 # Jupyterhub without IAP
 add_auth = false
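This file and the remaining files in the commit appear to be whitespace-only changes of the kind terraform fmt enforces (aligning = signs and normalizing indentation). With the new CI check in place, such fixes can presumably be regenerated locally before pushing with:

# Rewrite all Terraform files under the current directory to canonical formatting
terraform fmt -recursive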

applications/rag/variables.tf

+2 -2

@@ -265,9 +265,9 @@ variable "autopilot_cluster" {
 }

 variable "cloudsql_instance" {
-  type = string
+  type        = string
   description = "Name of the CloudSQL instance for RAG VectorDB"
-  default = "pgvector-instance"
+  default     = "pgvector-instance"
 }

 variable "cpu_pools" {

applications/rag/workloads.tfvars

+1 -1

@@ -38,7 +38,7 @@ rag_service_account = "rag-system-account"

 # Creates a google service account & k8s service account & configures workload identity with appropriate permissions.
 # Set to false & update the variable `jupyter_service_account` to use an existing IAM service account.
-jupyter_service_account       = "jupyter-system-account"
+jupyter_service_account = "jupyter-system-account"

 ## Embeddings table name - change this to the TABLE_NAME used in the notebook.
 dataset_embeddings_table_name = "googlemaps_reviews_db"

applications/ray/raytrain-examples/raytrain-with-gcsfusecsi/kuberaytf/user/modules/service_accounts/versions.tf

+1 -1

@@ -15,7 +15,7 @@
 terraform {
   required_providers {
     google = {
-      source  = "hashicorp/google"
+      source = "hashicorp/google"
     }
     kubernetes = {
       source = "hashicorp/kubernetes"

applications/ray/versions.tf

+2 -2

@@ -15,10 +15,10 @@
 terraform {
   required_providers {
     google = {
-      source  = "hashicorp/google"
+      source = "hashicorp/google"
     }
     google-beta = {
-      source  = "hashicorp/google-beta"
+      source = "hashicorp/google-beta"
     }
     helm = {
       source = "hashicorp/helm"

benchmarks/benchmark/tools/locust-load-inference/sample-terraform.tfvars

+2 -2

@@ -21,5 +21,5 @@ tokenizer = "tiiuae/falcon-7b"
 # Benchmark configuration for triggering single test via Locust Runner
 test_duration = 60
 # Increase test_users to allow more parallelism (especially when testing HPA)
-test_users = 1
-test_rate = 5
+test_users    = 1
+test_rate     = 5

infrastructure/tfvars_tests/standard-gke-public.platform.tfvars

+2 -2

@@ -66,8 +66,8 @@ gpu_pools = [{
   accelerator_count  = 2
   accelerator_type   = "nvidia-tesla-t4"
   gpu_driver_version = "LATEST"
-},
-{
+  },
+  {
     name           = "gpu-pool-l4"
     machine_type   = "g2-standard-24"
     node_locations = "us-central1-a"

modules/iap/iap.tf

+2 -2

@@ -36,7 +36,7 @@ resource "helm_release" "iap_jupyter" {
   name             = "iap-jupyter"
   chart            = "${path.module}/charts/iap/"
   namespace        = var.namespace
-  create_namespace  = true
+  create_namespace = true
   # timeout increased to support autopilot scaling resources, and give enough time to complete the deployment
   timeout          = 1200
   set {
@@ -108,7 +108,7 @@ resource "helm_release" "iap_frontend" {
   name             = "iap-frontend"
   chart            = "${path.module}/charts/iap/"
   namespace        = var.namespace
-  create_namespace  = true
+  create_namespace = true
   # timeout increased to support autopilot scaling resources, and give enough time to complete the deployment
   timeout          = 1200
   set {

modules/iap/variables.tf

+1 -1

@@ -138,7 +138,7 @@ variable "jupyter_k8s_backend_service_name" {
 variable "jupyter_k8s_backend_service_port" {
   type        = number
   description = "Name of the Backend Service Port"
-  default = 80
+  default     = 80
 }

 variable "jupyter_url_domain_addr" {

modules/kuberay-monitoring/main.tf

+1 -1

@@ -47,7 +47,7 @@ resource "helm_release" "grafana" {
 }

 data "kubernetes_service" "example" {
-  count    = var.enable_grafana_on_ray_dashboard ? 1 : 0
+  count = var.enable_grafana_on_ray_dashboard ? 1 : 0
   metadata {
     name      = "grafana"
     namespace = var.namespace

tutorials/hf-tgi/outputs.tf

+1 -1

@@ -24,5 +24,5 @@ output "inference_service_namespace" {

 output "inference_service_endpoint" {
   description = "Endpoint of model inference service"
-  value = kubernetes_service.inference_service.status != null ? (kubernetes_service.inference_service.status[0].load_balancer != null ? "${kubernetes_service.inference_service.status[0].load_balancer[0].ingress[0].ip}" : "") : ""
+  value       = kubernetes_service.inference_service.status != null ? (kubernetes_service.inference_service.status[0].load_balancer != null ? "${kubernetes_service.inference_service.status[0].load_balancer[0].ingress[0].ip}" : "") : ""
 }
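A side note, not part of this commit: the nested conditional in this output could likely be written more compactly with Terraform's try() function, which returns a fallback value when an attribute or index lookup fails. A hedged sketch, assuming the same resource and attribute path:

output "inference_service_endpoint" {
  description = "Endpoint of model inference service"
  # try() falls back to "" when status, load_balancer, or ingress is absent
  value       = try(kubernetes_service.inference_service.status[0].load_balancer[0].ingress[0].ip, "")
}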
