File tree — 2 files changed: +1 −21 lines changed
main/scala/org/apache/spark/sql/execution
test/scala/org/apache/spark/sql/execution
2 files changed: +1 −21 lines changed
Original file line number | Diff line number | Diff line change
@@ -64,8 +64,7 @@ trait ColumnarToRowTransition extends UnaryExecNode
64
64
* [[MapPartitionsInRWithArrowExec ]]. Eventually this should replace those implementations.
65
65
*/
66
66
case class ColumnarToRowExec (child : SparkPlan ) extends ColumnarToRowTransition with CodegenSupport {
67
- // supportsColumnar requires to be only called on driver side, see also SPARK-37779.
68
- assert(TaskContext .get != null || child.supportsColumnar)
67
+ assert(child.supportsColumnar)
69
68
70
69
override def output : Seq [Attribute ] = child.output
71
70
Original file line number | Diff line number | Diff line change
@@ -88,23 +88,4 @@ class SparkPlanSuite extends QueryTest with SharedSparkSession {
88
88
test(" SPARK-30780 empty LocalTableScan should use RDD without partitions" ) {
89
89
assert(LocalTableScanExec (Nil , Nil ).execute().getNumPartitions == 0 )
90
90
}
91
-
92
- test(" SPARK-37779: ColumnarToRowExec should be canonicalizable after being (de)serialized" ) {
93
- withSQLConf(SQLConf .USE_V1_SOURCE_LIST .key -> " parquet" ) {
94
- withTempPath { path =>
95
- spark.range(1 ).write.parquet(path.getAbsolutePath)
96
- val df = spark.read.parquet(path.getAbsolutePath)
97
- val columnarToRowExec =
98
- df.queryExecution.executedPlan.collectFirst { case p : ColumnarToRowExec => p }.get
99
- try {
100
- spark.range(1 ).foreach { _ =>
101
- columnarToRowExec.canonicalized
102
- ()
103
- }
104
- } catch {
105
- case e : Throwable => fail(" ColumnarToRowExec was not canonicalizable" , e)
106
- }
107
- }
108
- }
109
- }
110
91
}
You can’t perform that action at this time.
0 commit comments