5 files changed (+11, -10 lines)
(file 1 of 5)

@@ -1,6 +1,6 @@
 #!/bin/bash
 #
-# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2023-2024, NVIDIA CORPORATION. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -20,7 +20,7 @@
 
 set -e
 
-spark_version=${1:-"3.1.1"}
+spark_version=${1:-"3.2.0"}
 scala_version=${2:-"2.12"}
 # Split spark version into base version (e.g. 3.3.0) and suffix (e.g. SNAPSHOT)
 PRE_IFS=$IFS
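The comment in the last hunk points at split logic that follows just after the shown context. A minimal sketch of how such an IFS-based base/suffix split can work — only spark_version and PRE_IFS appear in the diff; the read into base_version/suffix is an illustrative assumption, not the file's actual code:

#!/bin/bash
# Illustrative sketch: split a version such as "3.3.0-SNAPSHOT" into a base
# version and a suffix on "-", restoring the original IFS afterwards.
spark_version=${1:-"3.2.0"}   # default matches the new value in the diff
PRE_IFS=$IFS
IFS="-"
read -r base_version suffix <<< "$spark_version"
IFS=$PRE_IFS                  # restore the original field separator
echo "base=${base_version} suffix=${suffix:-none}"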
(file 2 of 5)

@@ -33,8 +33,9 @@ export M2DIR=${M2DIR:-"$WORKSPACE/.m2"}
 MVN="mvn -Dmaven.wagon.http.retryHandler.count=3 -DretryFailedDeploymentCount=3 ${MVN_OPT} -Psource-javadoc"
 
 DIST_PL="dist"
+## Get the default SPARK_VER from jenkins/version-def.sh
 function mvnEval {
-    $MVN help:evaluate -q -pl $DIST_PL $MVN_URM_MIRROR -Prelease311 -Dmaven.repo.local=$M2DIR -DforceStdout -Dexpression=$1
+    $MVN help:evaluate -q -pl $DIST_PL $MVN_URM_MIRROR -Prelease${SPARK_VER//./} -Dmaven.repo.local=$M2DIR -DforceStdout -Dexpression=$1
 }
 
 ART_ID=$(mvnEval project.artifactId)
@@ -176,7 +177,7 @@ distWithReducedPom "install"
 if [[ $SKIP_DEPLOY != 'true' ]]; then
     distWithReducedPom "deploy"
 
-    # this deploys selected submodules that are unconditionally built with Spark 3.1.1
+    # this deploys selected submodules that are unconditionally built with $SPARK_VER
     $MVN -B deploy -pl $DEPLOY_SUBMODULES \
         -Dbuildver=$SPARK_BASE_SHIM_VERSION \
         -DskipTests \
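The mvnEval hunk above swaps the hardcoded -Prelease311 profile for one derived from SPARK_VER. A small sketch of the parameter expansion it relies on (the echo is illustrative only):

#!/bin/bash
# ${SPARK_VER//./} deletes every "." in SPARK_VER, turning a Spark version
# into the profile/shim id the build expects, e.g. 3.2.0 -> 320.
SPARK_VER=${SPARK_VER:-"3.2.0"}   # default now comes from jenkins/version-def.sh
profile="release${SPARK_VER//./}"
echo "$profile"                   # prints: release320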
(file 3 of 5)

@@ -83,12 +83,12 @@ mvn_verify() {
     # The jacoco coverage should have been collected, but because of how the
     # shade plugin and jacoco work, we need to clean some things up so jacoco
     # will only report on the things we care about
-    SPK_VER=${JACOCO_SPARK_VER:-"311"}
+    SPK_VER=${JACOCO_SPARK_VER:-"320"}
     mkdir -p target/jacoco_classes/
     FILE=$(ls dist/target/rapids-4-spark_2.12-*.jar | grep -v test | xargs readlink -f)
     UDF_JAR=$(ls ./udf-compiler/target/spark${SPK_VER}/rapids-4-spark-udf_2.12-*-spark${SPK_VER}.jar | grep -v test | xargs readlink -f)
     pushd target/jacoco_classes/
-    jar xf $FILE com org rapids spark-shared "spark${JACOCO_SPARK_VER:-311}/"
+    jar xf $FILE com org rapids spark3xx-common "spark${JACOCO_SPARK_VER:-320}/"
     # extract the .class files in the udf jar and replace the existing ones in spark3xx-common and spark$SPK_VER,
     # because the class files in the udf jar will be modified in the aggregator's shade phase
     jar xf "$UDF_JAR" com/nvidia/spark/udf
@@ -222,7 +222,7 @@ ci_scala213() {
 }
 
 prepare_spark() {
-    spark_ver=${1:-'3.1.1'}
+    spark_ver=${1:-'3.2.0'}
     scala_ver=${2:-'2.12'}
 
     ARTF_ROOT="$(pwd)/.download"
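The mvn_verify hunk resolves the dist and UDF jars with an ls | grep -v test | xargs readlink -f chain. A hedged sketch of that idiom in isolation — the glob and directory are copied from the diff, while the echo is illustrative:

#!/bin/bash
# Pick the non-test artifact among the built jars and resolve it to an
# absolute path: "grep -v test" drops the test-classifier jar, and
# "readlink -f" canonicalizes whatever single path survives.
FILE=$(ls dist/target/rapids-4-spark_2.12-*.jar | grep -v test | xargs readlink -f)
echo "reporting coverage against: $FILE"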
(file 4 of 5)

@@ -59,7 +59,7 @@ $MVN_GET_CMD -DremoteRepositories=$PROJECT_TEST_REPO \
     -DgroupId=com.nvidia -DartifactId=rapids-4-spark-integration-tests_$SCALA_BINARY_VER -Dversion=$PROJECT_TEST_VER -Dclassifier=pytest -Dpackaging=tar.gz
 
 RAPIDS_INT_TESTS_HOME="$ARTF_ROOT/integration_tests/"
-# The version of pytest.tar.gz that is uploaded is the one built against spark311, but it's being pushed without a classifier for now
+# The version of pytest.tar.gz that is uploaded is the one built against spark320, but it's being pushed without a classifier for now
 RAPIDS_INT_TESTS_TGZ="$ARTF_ROOT/rapids-4-spark-integration-tests_${SCALA_BINARY_VER}-$PROJECT_TEST_VER-pytest.tar.gz"
 
 tmp_info=${TMP_INFO_FILE:-'/tmp/artifacts-build.info'}
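MVN_GET_CMD itself is defined outside this diff; given the flags above, it plausibly wraps mvn dependency:get. A hedged sketch of the equivalent standalone call — the wrapper's expansion and the variable defaults are assumptions:

#!/bin/bash
# Assumed expansion of the $MVN_GET_CMD invocation above: fetch the pytest
# tarball. Per the comment in the diff, the published tarball is the spark320
# build, pushed without a Spark-version classifier for now.
SCALA_BINARY_VER=${SCALA_BINARY_VER:-"2.12"}
PROJECT_TEST_VER=${PROJECT_TEST_VER:-"24.08.0-SNAPSHOT"}
mvn dependency:get \
    -DgroupId=com.nvidia \
    -DartifactId=rapids-4-spark-integration-tests_$SCALA_BINARY_VER \
    -Dversion=$PROJECT_TEST_VER \
    -Dclassifier=pytest \
    -Dpackaging=tar.gz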
(file 5 of 5)

@@ -32,7 +32,7 @@ CUDA_CLASSIFIER=${CUDA_CLASSIFIER:-"cuda11"}
 CLASSIFIER=${CLASSIFIER:-"$CUDA_CLASSIFIER"} # default to CUDA_CLASSIFIER for compatibility
 PROJECT_VER=${PROJECT_VER:-"24.08.0-SNAPSHOT"}
 PROJECT_TEST_VER=${PROJECT_TEST_VER:-"24.08.0-SNAPSHOT"}
-SPARK_VER=${SPARK_VER:-"3.1.1"}
+SPARK_VER=${SPARK_VER:-"3.2.0"}
 SPARK_VER_213=${SPARK_VER_213:-"3.3.0"}
 # Make a best attempt to set the default value for the shuffle shim.
 # Note that SPARK_VER for non-Apache Spark flavors (i.e. databricks,
@@ -85,7 +85,7 @@
 # PHASE_TYPE: CICD phase at which the script is called, to specify Spark shim versions.
 #   regular: noSnapshots + snapshots
 #   pre-release: noSnapshots only
-#   *: shim versions to build, e.g., PHASE_TYPE="311 321"
+#   *: shim versions to build, e.g., PHASE_TYPE="320 321"
 PHASE_TYPE=${PHASE_TYPE:-"regular"}
 case $PHASE_TYPE in
     # SPARK_SHIM_VERSIONS will be used for nightly artifact build
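The PHASE_TYPE comment block above describes a three-way dispatch. A hedged sketch of how such a case statement can resolve the shim list — the concrete version lists are illustrative stand-ins, not values from the script:

#!/bin/bash
# Illustrative dispatch: "regular" and "pre-release" map to predefined
# lists, while anything else is treated as an explicit shim list.
PHASE_TYPE=${PHASE_TYPE:-"regular"}
case $PHASE_TYPE in
    regular)
        SPARK_SHIM_VERSIONS="320 321 330-SNAPSHOT"  # noSnapshots + snapshots (stand-in values)
        ;;
    pre-release)
        SPARK_SHIM_VERSIONS="320 321"               # noSnapshots only (stand-in values)
        ;;
    *)
        SPARK_SHIM_VERSIONS=$PHASE_TYPE             # explicit list, e.g. PHASE_TYPE="320 321"
        ;;
esac
echo "SPARK_SHIM_VERSIONS=$SPARK_SHIM_VERSIONS"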