|
// Module description surfaced in published POM metadata.
description = "Elasticsearch Spark (for Spark 3.X)"

// This build reads configuration from the mr project (embedded jar contents,
// shared sources), so its evaluation must happen first.
evaluationDependsOn(':elasticsearch-hadoop-mr')
| 5 | + |
// Java library + Scala compilation, the project's shared integration-test
// conventions, and the cross-building support for multiple Scala versions.
apply plugin: 'java-library'
apply plugin: 'scala'
apply plugin: 'es.hadoop.build.integration'
apply plugin: 'scala.variants'
| 10 | + |
// Scala cross-build configuration (from the 'scala.variants' plugin):
// Spark 3.x only supports Scala 2.12 here, so a single version is declared.
variants {
    defaultVersion '2.12.10'
    targetVersions '2.12.10'
}
| 15 | + |
configurations {
    // Dependencies whose classes are unpacked directly into this module's jar
    // (see the jar task below). Non-transitive so only the listed artifact is
    // embedded; resolvable so the jar task can iterate its files.
    embedded {
        transitive = false
        canBeResolved = true
    }
    // Embedded classes must also be on the compile/runtime classpath.
    implementation {
        extendsFrom project.configurations.embedded
    }
    // Holds the genjavadoc scalac plugin used to emit Java stubs for javadoc.
    // genjavadoc has no Scala 2.10 build, hence the guard.
    if (project.ext.scalaMajorVersion != '2.10') {
        scalaCompilerPlugin {
            defaultDependencies { dependencies ->
                dependencies.add(project.dependencies.create( "com.typesafe.genjavadoc:genjavadoc-plugin_${scalaVersion}:0.16"))
            }
        }
    }
}
| 32 | + |
println "Compiled using Scala ${project.ext.scalaMajorVersion} [${project.ext.scalaVersion}]"
String sparkVersion = spark30Version

// Revert to spark 2.2.0 for scala 2.10 as 2.3+ does not support scala 2.10.
// NOTE(review): with the variants block above pinning Scala to 2.12.10 this
// branch looks unreachable for a Spark 3.x module — confirm whether it is
// leftover from the spark-20 build it was copied from.
if (project.ext.scalaMajorVersion == '2.10') {
    sparkVersion = '2.2.0'
}
| 40 | + |
// Apply the project's minimum-runtime bytecode target to every Scala
// compilation, and fork the compiler on the configured runtime JDK.
tasks.withType(ScalaCompile) { ScalaCompile task ->
    task.sourceCompatibility = project.ext.minimumRuntimeVersion
    task.targetCompatibility = project.ext.minimumRuntimeVersion
    task.options.forkOptions.executable = new File(project.ext.runtimeJavaHome, 'bin/java').absolutePath
}
| 46 | + |
compileScala {
    // Compiler fork sizing.
    // NOTE(review): -XX:MaxPermSize was removed in JDK 8+ and is ignored with
    // a warning on modern JVMs — confirm whether it can be dropped.
    configure(scalaCompileOptions.forkOptions) {
        memoryMaximumSize = '1g'
        jvmArgs = ['-XX:MaxPermSize=512m']
    }
    // Strict scalac settings: all warnings enabled and fatal.
    scalaCompileOptions.additionalParameters = [
        "-feature",
        "-unchecked",
        "-deprecation",
        "-Xfuture",
        "-Yno-adapted-args",
        "-Ywarn-dead-code",
        "-Ywarn-numeric-widen",
        "-Xfatal-warnings"
    ]
}
| 63 | + |
// The Spark variants share most of their sources; pull them in from the
// sibling 'core' directory. Backslashes are normalized for Windows paths.
String coreSrc = file("$projectDir/../core").absolutePath.replace('\\','/')

sourceSets {
    main.scala.srcDirs += "$coreSrc/main/scala"
    test.scala.srcDirs += "$coreSrc/test/scala"
    itest.java.srcDirs += "$coreSrc/itest/java"
    itest.scala.srcDirs += "$coreSrc/itest/scala"
    itest.resources.srcDirs += "$coreSrc/itest/resources"
}
| 73 | + |
// Include-spec that keeps only .java files (directories must be kept so the
// tree walk can descend into them).
def javaFilesOnly = { FileTreeElement spec ->
    spec.isDirectory() || spec.file.name.endsWith('.java')
}
| 77 | + |
artifacts {
    // Publish the shared core scala sources alongside this variant's own.
    sourceElements(project.file("$coreSrc/main/scala"))
    // Only the .java files feed javadoc; Scala sources are handled by the
    // genjavadoc plugin further below.
    project.fileTree("$coreSrc/main/scala").include(javaFilesOnly).each {
        javadocElements(it)
    }
    project.fileTree("src/main/scala").include(javaFilesOnly).each {
        javadocElements(it)
    }
}
| 88 | + |
// currently the outside project folders are transformed into linked resources however
// Gradle only supports one so the project will be invalid as not all sources will be in there
// as such, they are setup here manually for Eclipse. IntelliJ probably needs a similar approach
eclipse {
    project.file.whenMerged { pj ->
        // Drop the linked resources Gradle generated and re-create one link
        // per shared core source directory (type "2" = folder link).
        linkedResources.clear()
        linkedResources.add(new org.gradle.plugins.ide.eclipse.model.Link("core/main/scala", "2", "$coreSrc/main/scala", null))
        linkedResources.add(new org.gradle.plugins.ide.eclipse.model.Link("core/test/scala", "2", "$coreSrc/test/scala", null))
        linkedResources.add(new org.gradle.plugins.ide.eclipse.model.Link("core/itest/java", "2", "$coreSrc/itest/java", null))
        linkedResources.add(new org.gradle.plugins.ide.eclipse.model.Link("core/itest/scala", "2", "$coreSrc/itest/scala", null))
        linkedResources.add(new org.gradle.plugins.ide.eclipse.model.Link("core/itest/resources","2", "$coreSrc/itest/resources", null))
    }
    classpath.file {
        whenMerged { cp ->
            // Remove the generated source entries that point at the (single)
            // linked resource, then add classpath entries matching the links
            // created above.
            entries.removeAll { entry ->
                entry.kind == 'src' && (entry.path in ["scala", "java", "resources"] || entry.path.startsWith("itest-") || entry.path.endsWith("-scala"))
            }

            entries.add(new org.gradle.plugins.ide.eclipse.model.SourceFolder("core/main/scala", null))
            entries.add(new org.gradle.plugins.ide.eclipse.model.SourceFolder("core/test/scala", null))
            entries.add(new org.gradle.plugins.ide.eclipse.model.SourceFolder("core/itest/java", null))
            entries.add(new org.gradle.plugins.ide.eclipse.model.SourceFolder("core/itest/scala", null))
            entries.add(new org.gradle.plugins.ide.eclipse.model.SourceFolder("core/itest/resources", null))
        }
    }
}
| 118 | + |
dependencies {
    // Unpacked into this module's jar (see the jar task).
    embedded(project(":elasticsearch-hadoop-mr"))

    // Exposed on the compile classpath of consumers: the Scala runtime and
    // spark-core types appear in this module's public API.
    api("org.scala-lang:scala-library:$scalaVersion")
    api("org.scala-lang:scala-reflect:$scalaVersion")
    api("org.apache.spark:spark-core_${project.ext.scalaMajorVersion}:$sparkVersion") {
        exclude group: 'javax.servlet'
        exclude group: 'org.apache.hadoop'
    }

    // Implementation-only Spark modules; Hadoop comes from the project's own
    // pinned version, so it is excluded everywhere.
    implementation("org.apache.spark:spark-sql_${project.ext.scalaMajorVersion}:$sparkVersion") {
        exclude group: 'org.apache.hadoop'
    }
    implementation("org.apache.spark:spark-streaming_${project.ext.scalaMajorVersion}:$sparkVersion") {
        exclude group: 'org.apache.hadoop'
    }
    implementation("org.slf4j:slf4j-api:1.7.6") {
        because 'spark exposes slf4j components in traits that we need to extend'
    }
    implementation("commons-logging:commons-logging:1.1.1")
    implementation("javax.xml.bind:jaxb-api:2.3.1")
    implementation("org.apache.spark:spark-catalyst_${project.ext.scalaMajorVersion}:$sparkVersion")
    implementation("org.apache.spark:spark-yarn_${project.ext.scalaMajorVersion}:$sparkVersion") {
        exclude group: 'org.apache.hadoop'
    }

    // Scala compiler needs these for arcane reasons, but they are not used in the api nor the runtime
    compileOnly("com.fasterxml.jackson.core:jackson-annotations:2.6.7")
    compileOnly("org.json4s:json4s-jackson_${project.ext.scalaMajorVersion}:3.2.11")
    compileOnly("org.json4s:json4s-ast_${project.ext.scalaMajorVersion}:3.2.11")
    compileOnly("org.apache.spark:spark-tags_${project.ext.scalaMajorVersion}:$sparkVersion")

    // NOTE(review): likely unreachable for this Spark 3.x module (variants pin
    // Scala 2.12) — confirm whether this branch can be removed.
    if ('2.10' == scalaMajorVersion) {
        implementation("org.apache.spark:spark-unsafe_${project.ext.scalaMajorVersion}:$sparkVersion")
        implementation("org.apache.avro:avro:1.7.7")
        implementation("log4j:log4j:1.2.17")
        implementation("com.google.code.findbugs:jsr305:2.0.1")
        implementation("org.json4s:json4s-ast_2.10:3.2.10")
        implementation("com.esotericsoftware.kryo:kryo:2.21")
        compileOnly("org.apache.hadoop:hadoop-annotations:${project.ext.hadoopVersion}")
        compileOnly("org.codehaus.jackson:jackson-core-asl:${project.ext.jacksonVersion}")
        compileOnly("org.codehaus.jackson:jackson-mapper-asl:${project.ext.jacksonVersion}")
    }

    // Unit tests.
    testImplementation(project(":test:shared"))
    testImplementation(project.ext.hadoopClient)
    testImplementation("org.elasticsearch:securemock:1.2")
    testImplementation("org.apache.spark:spark-core_${project.ext.scalaMajorVersion}:$sparkVersion") {
        exclude group: 'javax.servlet'
        exclude group: 'org.apache.hadoop'
    }
    testImplementation("org.apache.spark:spark-sql_${project.ext.scalaMajorVersion}:$sparkVersion") {
        exclude group: 'org.apache.hadoop'
    }

    // Integration tests.
    itestImplementation(project(":test:shared"))
    itestImplementation("org.apache.spark:spark-yarn_${project.ext.scalaMajorVersion}:$sparkVersion") {
        exclude group: 'org.apache.hadoop'
    }
    itestImplementation("org.apache.spark:spark-streaming_${project.ext.scalaMajorVersion}:$sparkVersion") {
        exclude group: 'org.apache.hadoop'
    }

    // Ship the mr project's sources/javadoc with this module's artifacts.
    additionalSources(project(":elasticsearch-hadoop-mr"))
    javadocSources(project(":elasticsearch-hadoop-mr"))
}
| 185 | + |
// Export generated Java code from the genjavadoc compiler plugin
artifacts {
    javadocElements(project.file("$buildDir/generated/java")) {
        builtBy compileScala
    }
}
| 192 | + |
jar {
    dependsOn(project.configurations.embedded)
    // Unpack the embedded mr artifact into this jar, keeping only the hadoop
    // classes, the build-info properties and service-loader descriptors.
    from(project.configurations.embedded.collect { it.isDirectory() ? it : zipTree(it)}) {
        include "org/elasticsearch/hadoop/**"
        include "esh-build.properties"
        include "META-INF/services/*"
    }
}
| 201 | + |
// Javadoc also covers the Java stubs genjavadoc emits during compileScala.
javadoc {
    dependsOn compileScala
    source += "$buildDir/generated/java"
}
| 206 | + |
scaladoc {
    title = "${rootProject.description} ${version} API"
}
| 210 | + |
// Wire the genjavadoc scalac plugin into every Scala compilation so Java
// stubs are generated under build/generated/java (consumed by the javadoc
// task and the javadocElements artifact above). Not available on Scala 2.10.
if (project.ext.scalaMajorVersion != '2.10') {
    tasks.withType(ScalaCompile) {
        scalaCompileOptions.with {
            // Append instead of assigning: a plain assignment here runs after
            // the compileScala block above and would silently discard the
            // "-feature"/"-Xfatal-warnings" lint flags it configured.
            // (Null-safe for ScalaCompile tasks that never set the list.)
            additionalParameters = (additionalParameters ?: []) + [
                "-Xplugin:" + configurations.scalaCompilerPlugin.asPath,
                "-P:genjavadoc:out=$buildDir/generated/java".toString()
            ]
        }
    }
}
0 commit comments