Skip to content

Commit 40f93e8

Browse files
feat: add a workflow to poll other workflows for branch protection (#289)
## Summary - branch protection requires explicitly adding github actions that need to succeed before a user can merge a PR. Some github workflows do not qualify to run for a PR based on path filtering etc. Skipped workflows will still be required to "succeed" for a branch protection rule. This PR adds a blanket workflow that will poll for explicitly triggered workflows to succeed instead. ## Checklist - [ ] Added Unit Tests - [ ] Covered by existing CI - [ ] Integration tested - [ ] Documentation update <!-- This is an auto-generated comment: release notes by coderabbit.ai --> ## Summary by CodeRabbit - **Chores** - Added a new GitHub Actions workflow to enforce status checks on pull requests. - Reformatted test file for improved code readability without changing functionality. <!-- end of auto-generated comment: release notes by coderabbit.ai --> <!-- av pr metadata This information is embedded by the av CLI when creating PRs to track the status of stacks when using Aviator. Please do not delete or edit this section of the PR. ``` {"parent":"main","parentHead":"","trunk":"main"} ``` --> --------- Co-authored-by: Thomas Chow <[email protected]>
1 parent 3b70f7d commit 40f93e8

File tree

2 files changed

+38
-17
lines changed

2 files changed

+38
-17
lines changed
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
name: branch_protection
2+
on:
3+
pull_request:
4+
jobs:
5+
enforce_triggered_workflows:
6+
runs-on: ubuntu-latest
7+
permissions:
8+
checks: read
9+
steps:
10+
- name: GitHub Checks
11+
uses: poseidon/[email protected]
12+
with:
13+
token: ${{ secrets.GITHUB_TOKEN }}

cloud_gcp/src/test/scala/ai/chronon/integrations/cloud_gcp/DataprocSubmitterTest.scala

Lines changed: 25 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -39,11 +39,11 @@ class DataprocSubmitterTest extends AnyFlatSpec with MockitoSugar {
3939

4040
// Test starts here.
4141

42-
val submitter = new DataprocSubmitter(
43-
mockJobControllerClient,
44-
SubmitterConf("test-project", "test-region", "test-cluster"))
42+
val submitter =
43+
new DataprocSubmitter(mockJobControllerClient, SubmitterConf("test-project", "test-region", "test-cluster"))
4544

46-
val submittedJobId = submitter.submit(spark.SparkJob, Map(MainClass -> "test-main-class", JarURI -> "test-jar-uri"), List.empty)
45+
val submittedJobId =
46+
submitter.submit(spark.SparkJob, Map(MainClass -> "test-main-class", JarURI -> "test-jar-uri"), List.empty)
4747
assertEquals(submittedJobId, jobId)
4848
}
4949

@@ -54,29 +54,36 @@ class DataprocSubmitterTest extends AnyFlatSpec with MockitoSugar {
5454
it should "test flink job locally" ignore {
5555

5656
val submitter = DataprocSubmitter()
57-
submitter.submit(spark.FlinkJob,
58-
Map(MainClass -> "ai.chronon.flink.FlinkJob",
57+
submitter.submit(
58+
spark.FlinkJob,
59+
Map(
60+
MainClass -> "ai.chronon.flink.FlinkJob",
5961
FlinkMainJarURI -> "gs://zipline-jars/flink-assembly-0.1.0-SNAPSHOT.jar",
60-
JarURI -> "gs://zipline-jars/cloud_gcp_bigtable.jar"),
62+
JarURI -> "gs://zipline-jars/cloud_gcp_bigtable.jar"
63+
),
6164
List.empty,
6265
"--online-class=ai.chronon.integrations.cloud_gcp.GcpApiImpl",
6366
"--groupby-name=e2e-count",
6467
"-ZGCP_PROJECT_ID=bigtable-project-id",
65-
"-ZGCP_INSTANCE_ID=bigtable-instance-id")
68+
"-ZGCP_INSTANCE_ID=bigtable-instance-id"
69+
)
6670
}
6771

6872
it should "test flink kafka ingest job locally" ignore {
6973

7074
val submitter = DataprocSubmitter()
7175
val submittedJobId =
72-
submitter.submit(spark.FlinkJob,
73-
Map(MainClass -> "ai.chronon.flink.FlinkKafkaBeaconEventDriver",
76+
submitter.submit(
77+
spark.FlinkJob,
78+
Map(
79+
MainClass -> "ai.chronon.flink.FlinkKafkaBeaconEventDriver",
7480
FlinkMainJarURI -> "gs://zipline-jars/flink_kafka_ingest-assembly-0.1.0-SNAPSHOT.jar",
75-
JarURI -> "gs://zipline-jars/cloud_gcp_bigtable.jar"),
81+
JarURI -> "gs://zipline-jars/cloud_gcp_bigtable.jar"
82+
),
7683
List.empty,
7784
"--kafka-bootstrap=bootstrap.zipline-kafka-cluster.us-central1.managedkafka.canary-443022.cloud.goog:9092",
7885
"--kafka-topic=test-beacon-main",
79-
"--data-file-name=gs://zl-warehouse/beacon_events/beacon-output.avro",
86+
"--data-file-name=gs://zl-warehouse/beacon_events/beacon-output.avro"
8087
)
8188
println(submittedJobId)
8289
}
@@ -88,10 +95,10 @@ class DataprocSubmitterTest extends AnyFlatSpec with MockitoSugar {
8895
submitter.submit(
8996
spark.SparkJob,
9097
Map(MainClass -> "ai.chronon.spark.Driver",
91-
JarURI -> "gs://zipline-jars/cloud_gcp-assembly-0.1.0-SNAPSHOT.jar"),
98+
JarURI -> "gs://zipline-jars/cloud_gcp-assembly-0.1.0-SNAPSHOT.jar"),
9299
List("gs://zipline-jars/training_set.v1",
93-
"gs://zipline-jars/dataproc-submitter-conf.yaml",
94-
"gs://zipline-jars/additional-confs.yaml"),
100+
"gs://zipline-jars/dataproc-submitter-conf.yaml",
101+
"gs://zipline-jars/additional-confs.yaml"),
95102
"join",
96103
"--end-date=2024-12-10",
97104
"--additional-conf-path=additional-confs.yaml",
@@ -107,7 +114,7 @@ class DataprocSubmitterTest extends AnyFlatSpec with MockitoSugar {
107114
submitter.submit(
108115
spark.SparkJob,
109116
Map(MainClass -> "ai.chronon.spark.Driver",
110-
JarURI -> "gs://zipline-jars/cloud_gcp-assembly-0.1.0-SNAPSHOT.jar"),
117+
JarURI -> "gs://zipline-jars/cloud_gcp-assembly-0.1.0-SNAPSHOT.jar"),
111118
List.empty,
112119
"groupby-upload-bulk-load",
113120
"-ZGCP_PROJECT_ID=bigtable-project-id",
@@ -116,7 +123,8 @@ class DataprocSubmitterTest extends AnyFlatSpec with MockitoSugar {
116123
"--online-class=ai.chronon.integrations.cloud_gcp.GcpApiImpl",
117124
"--src-offline-table=data.test_gbu",
118125
"--groupby-name=quickstart.purchases.v1",
119-
"--partition-string=2024-01-01")
126+
"--partition-string=2024-01-01"
127+
)
120128
println(submittedJobId)
121129
assertEquals(submittedJobId, "mock-job-id")
122130
}

0 commit comments

Comments
 (0)