Skip to content

Commit 3ec1286

Browse files
feat: basic hudi support (#437)
## Summary ## Checklist - [ ] Added Unit Tests - [ ] Covered by existing CI - [ ] Integration tested - [ ] Documentation update <!-- This is an auto-generated comment: release notes by coderabbit.ai --> ## Summary by CodeRabbit - **Chores** - Consolidated and streamlined build dependencies for improved integration with AWS services and data processing libraries. - Expanded the set of supported third-party libraries, including new artifacts for enhanced performance and compatibility. - Added new dependencies for Hudi, Jackson, and Zookeeper to enhance functionality. - Introduced additional Hudi artifacts for Scala 2.12 and 2.13 to broaden available functionalities. - **Tests** - Added a new test class to verify reliable write/read operations on Hudi tables using a Spark session. - **Refactor** - Enhanced serialization registration to support a broader range of data types, improving overall processing stability. - Introduced a new variable for shared library dependencies to simplify dependency management. <!-- end of auto-generated comment: release notes by coderabbit.ai --> <!-- av pr metadata This information is embedded by the av CLI when creating PRs to track the status of stacks when using Aviator. Please do not delete or edit this section of the PR. ``` {"parent":"main","parentHead":"","trunk":"main"} ``` --> --------- Co-authored-by: Thomas Chow <[email protected]>
1 parent 9eff8d8 commit 3ec1286

File tree

7 files changed

+3120
-376
lines changed

7 files changed

+3120
-376
lines changed

cloud_aws/BUILD.bazel

Lines changed: 27 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,24 @@
1+
# Dependencies shared by the main library and the test suite.
# Consolidated here so `cloud_aws_lib` and `tests` stay in sync
# (this commit adds Hudi, JUnit, and Jackson afterburner artifacts).
shared_libs = [
    maven_artifact("software.amazon.awssdk:dynamodb"),
    maven_artifact("software.amazon.awssdk:regions"),
    maven_artifact("software.amazon.awssdk:aws-core"),
    maven_artifact("software.amazon.awssdk:sdk-core"),
    maven_artifact("software.amazon.awssdk:utils"),
    maven_artifact("com.google.guava:guava"),
    maven_artifact("org.slf4j:slf4j-api"),
    maven_artifact("org.apache.hudi:hudi-aws-bundle"),
    maven_artifact("junit:junit"),
    maven_artifact("com.novocode:junit-interface"),
    maven_artifact("com.fasterxml.jackson.module:jackson-module-afterburner"),
    scala_artifact_with_suffix("org.apache.hudi:hudi-spark3.5-bundle"),
    scala_artifact_with_suffix("org.scala-lang.modules:scala-collection-compat"),
    "//api:lib",
    "//api:thrift_java",
    "//online:lib",
    "//spark:lib",
    "//tools/build_rules/spark:spark-exec",
]

scala_library(
    name = "cloud_aws_lib",
    srcs = glob(["src/main/**/*.scala"]),
    # NOTE(review): the `format = select({` header falls between the two diff
    # hunks shown for this file and is reconstructed here — confirm against the
    # actual repository file.
    format = select({
        "//tools/config:scala_2_13": False,  # Disable for 2.13
        "//conditions:default": True,  # Enable for other versions
    }),
    visibility = ["//visibility:public"],
    deps = shared_libs,
)

# Test-only dependencies, layered on top of shared_libs by the suite below.
test_deps = [
    maven_artifact("com.amazonaws:DynamoDBLocal"),
    maven_artifact("org.mockito:mockito-core"),
    maven_artifact("software.amazon.awssdk:auth"),
    maven_artifact("software.amazon.awssdk:identity-spi"),
    scala_artifact_with_suffix("com.chuusai:shapeless"),
    scala_artifact_with_suffix("org.typelevel:cats-core"),
    scala_artifact_with_suffix("org.mockito:mockito-scala"),
] + _CIRCE_DEPS + _SCALA_TEST_DEPS

# The intermediate `test_lib` target was removed in this commit; the suite now
# compiles the test sources directly against the library target.
scala_test_suite(
    name = "tests",
    srcs = glob(["src/test/**/*.scala"]),
    # defined in prelude_bazel file
    jvm_flags = _JVM_FLAGS_FOR_ACCESSING_BASE_JAVA_CLASSES,
    visibility = ["//visibility:public"],
    deps = shared_libs + test_deps + [":cloud_aws_lib"],
)
Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
package ai.chronon.integrations.aws

import ai.chronon.spark.{ChrononHudiKryoRegistrator, SparkSessionBuilder, TableUtils}
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.junit.Assert.assertEquals
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatestplus.mockito.MockitoSugar

/** Smoke test for basic Hudi support: builds a local Spark session wired with
  * the Hoodie catalog, session extension, and the Chronon Hudi Kryo
  * registrator, then verifies a simple write/read round trip on a Hudi table.
  */
class GlueCatalogTest extends AnyFlatSpec with MockitoSugar {

  // Lazy so the (expensive) session is only built when a test actually runs.
  lazy val spark: SparkSession = SparkSessionBuilder.build(
    classOf[GlueCatalogTest].getSimpleName,
    local = true,
    additionalConfig = Some(
      Map(
        "spark.sql.catalog.spark_catalog" -> "org.apache.spark.sql.hudi.catalog.HoodieCatalog",
        "spark.sql.extensions" -> "org.apache.spark.sql.hudi.HoodieSparkSessionExtension",
        "spark.kryo.registrator" -> classOf[ChrononHudiKryoRegistrator].getName
      ))
  )
  lazy val tableUtils: TableUtils = TableUtils(spark)

  "basic round trip hudi table" should "work with local metastore" in {
    import spark.implicits._

    // Write a tiny DataFrame out as a Hudi table...
    val expected = Set(1, 2, 3, 4)
    val sourceDF = spark.sparkContext.parallelize(expected.toSeq).toDF("id")

    sourceDF.write
      .format("hudi")
      .mode(SaveMode.Overwrite)
      .saveAsTable("test_hudi_table")

    // ...and read it back, expecting the same set of ids.
    val roundTripped = spark.table("test_hudi_table").select("id").as[Int].collect()
    assertEquals(expected, roundTripped.toSet)
  }
}

0 commit comments

Comments
 (0)