Skip to content

Allow distributing Scala 2.12 and update to Spark 2.4.3 #1308

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 11 commits into from
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -20,3 +20,4 @@ metastore_db
/spark/sql-13/with_meta_*
out/
localRepo/
.DS_Store
4 changes: 3 additions & 1 deletion gradle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,14 @@ jodaVersion = 1.6
jacksonVersion = 1.8.8
# Spark
spark13Version = 1.6.2
spark20Version = 2.3.0
spark20Version = 2.4.3
# same as Spark's
scala210Version = 2.10.7
scala210MajorVersion = 2.10
scala211Version = 2.11.12
scala211MajorVersion = 2.11
scala212Version = 2.12.8
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

2.12.10 is the most recent

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ok, I will change it.

scala212MajorVersion = 2.12

stormVersion = 1.0.6

Expand Down
2 changes: 1 addition & 1 deletion qa/kerberos/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ dependencies {
compile project(":elasticsearch-hadoop-mr")
compile project(":elasticsearch-storm")

compile 'org.scala-lang:scala-library:2.11.8'
compile 'org.scala-lang:scala-library:2.12.8'
compile project(":elasticsearch-spark-20")

if (!localRepo) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,7 @@
*/
package org.elasticsearch.spark.rdd;

import scala.collection.JavaConversions.collectionAsScalaIterable
import scala.collection.JavaConversions.mapAsJavaMap
import scala.collection.JavaConverters._
import scala.reflect.ClassTag
import org.apache.commons.logging.LogFactory
import org.apache.spark.Partition
Expand All @@ -45,7 +44,7 @@ private[spark] abstract class AbstractEsRDD[T: ClassTag](
@transient protected lazy val logger = LogFactory.getLog(this.getClass())

override def getPartitions: Array[Partition] = {
esPartitions.zipWithIndex.map { case(esPartition, idx) =>
esPartitions.asScala.zipWithIndex.map { case(esPartition, idx) =>
new EsPartition(id, idx, esPartition)
}.toArray
}
Expand All @@ -70,7 +69,7 @@ private[spark] abstract class AbstractEsRDD[T: ClassTag](

@transient private[spark] lazy val esCfg = {
val cfg = new SparkSettingsManager().load(sc.getConf).copy();
cfg.merge(params)
cfg.merge(params.asJava)
InitializationUtils.setUserProviderIfNotSet(cfg, classOf[HadoopUserProvider], logger)
cfg
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,10 @@ private[spark] class EsRDDWriter[T: ClassTag](val serializedSettings: String,
def write(taskContext: TaskContext, data: Iterator[T]): Unit = {
val writer = RestService.createWriter(settings, taskContext.partitionId.toLong, -1, log)

taskContext.addTaskCompletionListener((TaskContext) => writer.close())
taskContext.addTaskCompletionListener(TaskContext => {
writer.close()
Unit
})

if (runtimeMetadata) {
writer.repository.addRuntimeFieldExtractor(metaExtractor)
Expand Down
4 changes: 2 additions & 2 deletions spark/sql-20/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ apply plugin: 'es.hadoop.build.integration'
apply plugin: 'scala.variants'

variants {
defaultVersion '2.11.12'
targetVersions '2.10.7', '2.11.12'
defaultVersion '2.12.8'
targetVersions '2.10.7', '2.11.12', '2.12.8'
}

println "Compiled using Scala ${project.ext.scalaMajorVersion} [${project.ext.scalaVersion}]"
Expand Down