
Commit 59e6b5b

szehon-ho authored and dongjoon-hyun committed
[SPARK-52475][TESTS] Remove deprecated table.schema method from tests
### What changes were proposed in this pull request?

Fix many compile warnings like

```
[warn] /Users/szehon.ho/repos/apache-spark/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/CatalogSuite.scala:282:18: method schema in trait Table is deprecated (since 3.4.0)
[warn] Applicable -Wconf / nowarn filters for this warning: msg=<part of the message>, cat=deprecation, site=org.apache.spark.sql.connector.catalog.CatalogSuite, origin=org.apache.spark.sql.connector.catalog.Table.schema, version=3.4.0
```

### Why are the changes needed?

Reduce compiler warnings.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Test only.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #51173 from szehon-ho/fix_warnings_other_tests.

Authored-by: Szehon Ho <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent a108d29 · commit 59e6b5b

File tree

14 files changed: +342 -294 lines changed


sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/CatalogSuite.scala

Lines changed: 22 additions & 20 deletions
```diff
@@ -279,7 +279,7 @@ class CatalogSuite extends SparkFunSuite {
     val loaded = catalog.loadTable(testIdent)
 
     assert(table.name == loaded.name)
-    assert(table.schema == loaded.schema)
+    assert(table.columns == loaded.columns)
     assert(table.properties == loaded.properties)
   }
 
@@ -307,7 +307,7 @@ class CatalogSuite extends SparkFunSuite {
     val loaded = catalog.loadTable(testIdent)
 
     assert(table.name == loaded.name)
-    assert(table.schema == loaded.schema)
+    assert(table.columns == loaded.columns)
     assert(table.properties == loaded.properties)
   }
 
@@ -544,8 +544,8 @@ class CatalogSuite extends SparkFunSuite {
 
     val updated = catalog.alterTable(testIdent, TableChange.updateColumnType(Array("id"), LongType))
 
-    val expectedSchema = new StructType().add("id", LongType).add("data", StringType)
-    assert(updated.schema == expectedSchema)
+    val expectedColumns = Array(Column.create("id", LongType), Column.create("data", StringType))
+    assert(updated.columns sameElements expectedColumns)
   }
 
   test("alterTable: update column nullability") {
@@ -566,8 +566,9 @@ class CatalogSuite extends SparkFunSuite {
     val updated = catalog.alterTable(testIdent,
       TableChange.updateColumnNullability(Array("id"), true))
 
-    val expectedSchema = new StructType().add("id", IntegerType).add("data", StringType)
-    assert(updated.schema == expectedSchema)
+    val expectedColumns = Array(
+      Column.create("id", IntegerType, true), Column.create("data", StringType))
+    assert(updated.columns sameElements expectedColumns)
   }
 
   test("alterTable: update missing column fails") {
@@ -606,10 +607,11 @@ class CatalogSuite extends SparkFunSuite {
     val updated = catalog.alterTable(testIdent,
       TableChange.updateColumnComment(Array("id"), "comment text"))
 
-    val expectedSchema = new StructType()
-      .add("id", IntegerType, nullable = true, "comment text")
-      .add("data", StringType)
-    assert(updated.schema == expectedSchema)
+    val expectedColumns = Array(
+      Column.create("id", IntegerType, true, "comment text", null),
+      Column.create("data", StringType)
+    )
+    assert(updated.columns sameElements expectedColumns)
   }
 
   test("alterTable: replace comment") {
@@ -626,14 +628,14 @@ class CatalogSuite extends SparkFunSuite {
 
     catalog.alterTable(testIdent, TableChange.updateColumnComment(Array("id"), "comment text"))
 
-    val expectedSchema = new StructType()
-      .add("id", IntegerType, nullable = true, "replacement comment")
-      .add("data", StringType)
-
+    val expectedColumns = Array(
+      Column.create("id", IntegerType, true, "replacement comment", null),
+      Column.create("data", StringType)
+    )
     val updated = catalog.alterTable(testIdent,
       TableChange.updateColumnComment(Array("id"), "replacement comment"))
 
-    assert(updated.schema == expectedSchema)
+    assert(updated.columns sameElements expectedColumns)
   }
 
   test("alterTable: add comment to missing column fails") {
@@ -671,9 +673,9 @@ class CatalogSuite extends SparkFunSuite {
 
     val updated = catalog.alterTable(testIdent, TableChange.renameColumn(Array("id"), "some_id"))
 
-    val expectedSchema = new StructType().add("some_id", IntegerType).add("data", StringType)
-
-    assert(updated.schema == expectedSchema)
+    val expectedColumns = Array(
+      Column.create("some_id", IntegerType), Column.create("data", StringType))
+    assert(updated.columns sameElements expectedColumns)
   }
 
   test("alterTable: rename nested column") {
@@ -785,8 +787,8 @@ class CatalogSuite extends SparkFunSuite {
     val updated = catalog.alterTable(testIdent,
       TableChange.deleteColumn(Array("id"), false))
 
-    val expectedSchema = new StructType().add("data", StringType)
-    assert(updated.schema == expectedSchema)
+    val expectedColumns = Array(Column.create("data", StringType))
+    assert(updated.columns sameElements expectedColumns)
   }
 
   test("alterTable: delete nested column") {
```

sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryAtomicPartitionTable.scala

Lines changed: 9 additions & 2 deletions
```diff
@@ -30,12 +30,19 @@ import org.apache.spark.util.ArrayImplicits._
  */
 class InMemoryAtomicPartitionTable (
     name: String,
-    schema: StructType,
+    columns: Array[Column],
     partitioning: Array[Transform],
     properties: util.Map[String, String])
-  extends InMemoryPartitionTable(name, schema, partitioning, properties)
+  extends InMemoryPartitionTable(name, columns, partitioning, properties)
   with SupportsAtomicPartitionManagement {
 
+  def this(
+      name: String,
+      schema: StructType,
+      partitioning: Array[Transform],
+      properties: util.Map[String, String]) =
+    this(name, CatalogV2Util.structTypeToV2Columns(schema), partitioning, properties)
+
   override def createPartition(
       ident: InternalRow,
       properties: util.Map[String, String]): Unit = {
```
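The `def this(...)` overload above is the standard Scala pattern for moving a primary constructor to a new representation while keeping legacy call sites compiling. A hedged, self-contained sketch with hypothetical names (a comma-separated string stands in for `StructType`):

```scala
// Hypothetical illustration of the auxiliary-constructor migration pattern:
// the primary constructor takes the new column-array form; the auxiliary
// constructor accepts the legacy schema form and converts before delegating.
final class Table(val name: String, val columns: Array[String]) {
  // Legacy overload: convert the old representation, then delegate.
  def this(name: String, schema: String) =
    this(name, schema.split(",").map(_.trim))
}

object ConstructorPatternDemo {
  def main(args: Array[String]): Unit = {
    val viaColumns = new Table("t", Array("id", "data"))
    val viaSchema  = new Table("t", "id, data") // old call shape still works
    assert(viaColumns.columns sameElements viaSchema.columns)
  }
}
```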

sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryPartitionTable.scala

Lines changed: 9 additions & 2 deletions
```diff
@@ -33,12 +33,19 @@ import org.apache.spark.sql.types.StructType
  */
 class InMemoryPartitionTable(
     name: String,
-    schema: StructType,
+    columns: Array[Column],
     partitioning: Array[Transform],
     properties: util.Map[String, String])
-  extends InMemoryTable(name, schema, partitioning, properties) with SupportsPartitionManagement {
+  extends InMemoryTable(name, columns, partitioning, properties) with SupportsPartitionManagement {
   import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
 
+  def this(
+      name: String,
+      schema: StructType,
+      partitioning: Array[Transform],
+      properties: util.Map[String, String]
+  ) = this(name, CatalogV2Util.structTypeToV2Columns(schema), partitioning, properties)
+
   protected val memoryTablePartitions: util.Map[InternalRow, util.Map[String, String]] =
     new ConcurrentHashMap[InternalRow, util.Map[String, String]]()
```
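Both overloads delegate to `CatalogV2Util.structTypeToV2Columns`, the helper this patch uses for the schema-to-columns direction. A sketch of that conversion, assuming spark-catalyst on the classpath:

```scala
import org.apache.spark.sql.connector.catalog.CatalogV2Util
import org.apache.spark.sql.types.{IntegerType, StringType, StructType}

object SchemaToColumnsSketch {
  def main(args: Array[String]): Unit = {
    val schema = new StructType().add("id", IntegerType).add("data", StringType)
    // Same call the auxiliary constructors above delegate to.
    val columns = CatalogV2Util.structTypeToV2Columns(schema)
    assert(columns.map(_.name()).toSeq == Seq("id", "data"))
  }
}
```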

sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/SupportsAtomicPartitionManagementSuite.scala

Lines changed: 6 additions & 6 deletions
```diff
@@ -53,7 +53,7 @@ class SupportsAtomicPartitionManagementSuite extends SparkFunSuite {
   test("createPartitions") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryAtomicPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     assert(!hasPartitions(partTable))
 
     val partIdents = Array(InternalRow.apply("3"), InternalRow.apply("4"))
@@ -72,7 +72,7 @@ class SupportsAtomicPartitionManagementSuite extends SparkFunSuite {
   test("createPartitions failed if partition already exists") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryAtomicPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     assert(!hasPartitions(partTable))
 
     val partIdent = InternalRow.apply("4")
@@ -94,7 +94,7 @@ class SupportsAtomicPartitionManagementSuite extends SparkFunSuite {
   test("dropPartitions") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryAtomicPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     assert(!hasPartitions(partTable))
 
     val partIdents = Array(InternalRow.apply("3"), InternalRow.apply("4"))
@@ -112,7 +112,7 @@ class SupportsAtomicPartitionManagementSuite extends SparkFunSuite {
   test("purgePartitions") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryAtomicPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     val partIdents = Array(InternalRow.apply("3"), InternalRow.apply("4"))
     partTable.createPartitions(
       partIdents,
@@ -129,7 +129,7 @@ class SupportsAtomicPartitionManagementSuite extends SparkFunSuite {
   test("dropPartitions failed if partition not exists") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryAtomicPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     assert(!hasPartitions(partTable))
 
     val partIdent = InternalRow.apply("4")
@@ -147,7 +147,7 @@ class SupportsAtomicPartitionManagementSuite extends SparkFunSuite {
   test("truncatePartitions") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryAtomicPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     assert(!hasPartitions(partTable))
 
     partTable.createPartitions(
```

sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/SupportsPartitionManagementSuite.scala

Lines changed: 7 additions & 7 deletions
```diff
@@ -55,7 +55,7 @@ class SupportsPartitionManagementSuite extends SparkFunSuite {
   test("createPartition") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     assert(!hasPartitions(partTable))
 
     val partIdent = InternalRow.apply("3")
@@ -70,7 +70,7 @@ class SupportsPartitionManagementSuite extends SparkFunSuite {
   test("dropPartition") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     assert(!hasPartitions(partTable))
 
     val partIdent = InternalRow.apply("3")
@@ -88,7 +88,7 @@ class SupportsPartitionManagementSuite extends SparkFunSuite {
   test("purgePartition") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     checkError(
       exception = intercept[SparkUnsupportedOperationException] {
         partTable.purgePartition(InternalRow.apply("3"))
@@ -101,7 +101,7 @@ class SupportsPartitionManagementSuite extends SparkFunSuite {
   test("replacePartitionMetadata") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     assert(!hasPartitions(partTable))
 
     val partIdent = InternalRow.apply("3")
@@ -123,7 +123,7 @@ class SupportsPartitionManagementSuite extends SparkFunSuite {
   test("loadPartitionMetadata") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     assert(!hasPartitions(partTable))
 
     val partIdent = InternalRow.apply("3")
@@ -140,7 +140,7 @@ class SupportsPartitionManagementSuite extends SparkFunSuite {
   test("listPartitionIdentifiers") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     assert(!hasPartitions(partTable))
 
     val partIdent = InternalRow.apply("3")
@@ -248,7 +248,7 @@ class SupportsPartitionManagementSuite extends SparkFunSuite {
   test("truncatePartition") {
     val table = catalog.loadTable(ident)
     val partTable = new InMemoryPartitionTable(
-      table.name(), table.schema(), table.partitioning(), table.properties())
+      table.name(), table.columns(), table.partitioning(), table.properties())
     assert(!hasPartitions(partTable))
 
     val partIdent = InternalRow.apply("3")
```

sql/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala

Lines changed: 4 additions & 2 deletions
```diff
@@ -40,7 +40,7 @@ import org.apache.spark.sql.connect.dsl.MockRemoteSession
 import org.apache.spark.sql.connect.dsl.commands._
 import org.apache.spark.sql.connect.dsl.expressions._
 import org.apache.spark.sql.connect.dsl.plans._
-import org.apache.spark.sql.connector.catalog.{Identifier, InMemoryTableCatalog, TableCatalog}
+import org.apache.spark.sql.connector.catalog.{Column => ColumnV2, Identifier, InMemoryTableCatalog, TableCatalog}
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.CatalogHelper
 import org.apache.spark.sql.execution.arrow.ArrowConverters
 import org.apache.spark.sql.functions._
@@ -822,7 +822,9 @@ class SparkConnectProtoSuite extends PlanTest with SparkConnectPlanTest {
       .asTableCatalog
       .loadTable(Identifier.of(Array(), "table_name"))
     assert(table.name === "testcat.table_name")
-    assert(table.schema === new StructType().add("id", LongType).add("data", StringType))
+    assert(
+      table.columns sameElements
+        Array(ColumnV2.create("id", LongType), ColumnV2.create("data", StringType)))
     assert(table.partitioning.isEmpty)
     assert(table.properties === (Map("provider" -> "foo") ++ defaultOwnership).asJava)
   }
```
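The import rename `{Column => ColumnV2}` presumably avoids a clash with Spark's DataFrame-side `Column` type used elsewhere in this suite. The same rename-on-import mechanism shown with a standard-library name (a sketch, not the suite's code):

```scala
// Rename-on-import aliases both the type and its companion object.
import scala.collection.immutable.{List => IList}

object ImportRenameDemo {
  def main(args: Array[String]): Unit = {
    val xs: IList[Int] = IList(1, 2, 3) // IList is scala.collection.immutable.List
    println(xs.sum)                     // prints 6
  }
}
```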

sql/core/src/test/scala/org/apache/spark/sql/CharVarcharTestSuite.scala

Lines changed: 2 additions & 2 deletions
```diff
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
 import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, Filter, Project}
 import org.apache.spark.sql.catalyst.util.CharVarcharUtils
 import org.apache.spark.sql.connector.SchemaRequiredDataSource
-import org.apache.spark.sql.connector.catalog.InMemoryPartitionTableCatalog
+import org.apache.spark.sql.connector.catalog.{CatalogV2Util, InMemoryPartitionTableCatalog}
 import org.apache.spark.sql.execution.datasources.LogicalRelation
 import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
 import org.apache.spark.sql.internal.SQLConf
@@ -923,7 +923,7 @@ class BasicCharVarcharTestSuite extends QueryTest with SharedSparkSession {
     def checkSchema(df: DataFrame): Unit = {
       val schemas = df.queryExecution.analyzed.collect {
         case l: LogicalRelation => l.relation.schema
-        case d: DataSourceV2Relation => d.table.schema()
+        case d: DataSourceV2Relation => CatalogV2Util.v2ColumnsToStructType(d.table.columns())
       }
       assert(schemas.length == 1)
       assert(schemas.head.map(_.dataType) == Seq(StringType))
```
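`checkSchema` now rebuilds a `StructType` from the v2 columns via `CatalogV2Util.v2ColumnsToStructType`, the inverse of the conversion shown earlier. A sketch under the same classpath assumption:

```scala
import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Column}
import org.apache.spark.sql.types.{StringType, StructType}

object ColumnsToSchemaSketch {
  def main(args: Array[String]): Unit = {
    val columns = Array(Column.create("v", StringType))
    // Rebuild the legacy StructType view from the v2 column array.
    val schema: StructType = CatalogV2Util.v2ColumnsToStructType(columns)
    assert(schema.fieldNames.toSeq == Seq("v"))
  }
}
```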

sql/core/src/test/scala/org/apache/spark/sql/collation/CollationSuite.scala

Lines changed: 13 additions & 12 deletions
```diff
@@ -27,7 +27,7 @@ import org.apache.spark.sql.catalyst.ExtendedAnalysisException
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.util.CollationFactory
 import org.apache.spark.sql.connector.{DatasourceV2SQLBase, FakeV2ProviderWithCustomSchema}
-import org.apache.spark.sql.connector.catalog.{Identifier, InMemoryTable}
+import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Identifier, InMemoryTable}
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.CatalogHelper
 import org.apache.spark.sql.connector.catalog.CatalogV2Util.withDefaultOwnership
 import org.apache.spark.sql.errors.DataTypeErrors.toSQLType
@@ -610,16 +610,16 @@ class CollationSuite extends DatasourceV2SQLBase with AdaptiveSparkPlanHelper {
     sql(s"ALTER TABLE $tableName ALTER COLUMN c3.value TYPE STRING COLLATE UTF8_BINARY")
     sql(s"ALTER TABLE $tableName ALTER COLUMN c4.t TYPE STRING COLLATE UNICODE")
     val testCatalog = catalog("testcat").asTableCatalog
-    val tableSchema = testCatalog.loadTable(Identifier.of(Array(), "alter_column_tbl")).schema()
-    val c1Metadata = tableSchema.find(_.name == "c1").get.metadata
-    assert(c1Metadata === createMetadata("c1"))
-    val c2Metadata = tableSchema.find(_.name == "c2").get.metadata
-    assert(c2Metadata === createMetadata("c2"))
-    val c3Metadata = tableSchema.find(_.name == "c3").get.metadata
-    assert(c3Metadata === createMetadata("c3"))
-    val c4Metadata = tableSchema.find(_.name == "c4").get.metadata
-    assert(c4Metadata === createMetadata("c4"))
-    val c4tMetadata = tableSchema.find(_.name == "c4").get.dataType
+    val columns = testCatalog.loadTable(Identifier.of(Array(), "alter_column_tbl")).columns()
+    val c1Metadata = columns.find(_.name() == "c1").get.metadataInJSON()
+    assert(c1Metadata === createMetadata("c1").json)
+    val c2Metadata = columns.find(_.name() == "c2").get.metadataInJSON()
+    assert(c2Metadata === createMetadata("c2").json)
+    val c3Metadata = columns.find(_.name() == "c3").get.metadataInJSON()
+    assert(c3Metadata === createMetadata("c3").json)
+    val c4Metadata = columns.find(_.name() == "c4").get.metadataInJSON()
+    assert(c4Metadata === createMetadata("c4").json)
+    val c4tMetadata = columns.find(_.name() == "c4").get.dataType()
       .asInstanceOf[StructType].find(_.name == "t").get.metadata
     assert(c4tMetadata === createMetadata("c4t"))
   }
@@ -864,7 +864,8 @@ class CollationSuite extends DatasourceV2SQLBase with AdaptiveSparkPlanHelper {
     assert(table.columns().head.dataType() == StringType(collationId))
 
     val rdd = spark.sparkContext.parallelize(table.asInstanceOf[InMemoryTable].rows)
-    checkAnswer(spark.internalCreateDataFrame(rdd, table.schema), Seq.empty)
+    checkAnswer(spark.internalCreateDataFrame(rdd,
+      CatalogV2Util.v2ColumnsToStructType(table.columns)), Seq.empty)
 
     sql(s"INSERT INTO $tableName VALUES ('a'), ('A')")
```
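Because the v2 `Column` API exposes field metadata only as a JSON string (`metadataInJSON()`), the suite now compares against `createMetadata(...).json` rather than `Metadata` values directly. A hedged sketch of that comparison style, assuming spark-catalyst on the classpath:

```scala
import org.apache.spark.sql.types.MetadataBuilder

object MetadataJsonSketch {
  def main(args: Array[String]): Unit = {
    val expected = new MetadataBuilder().putString("key", "value").build()
    val actual   = new MetadataBuilder().putString("key", "value").build()
    // Compare the JSON renderings, mirroring metadataInJSON() vs .json above.
    assert(actual.json == expected.json)
  }
}
```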
