Commit 830409c

Drop spark-3.1.x support for spark-rapids

CI part for issue NVIDIA#10955. We drop spark-3.1.x support from branch-24.08 and change the default Spark version string to 3.2.0. This change needs to land together with the source-code update that drops spark-3.1.x support; we will keep monitoring CI job status after all the related changes are merged.

Signed-off-by: Tim Liu <[email protected]>

1 parent e92cbd2 commit 830409c

File tree

5 files changed: +11 -10 lines


jenkins/hadoop-def.sh (+2 -2)

```diff
@@ -1,6 +1,6 @@
 #!/bin/bash
 #
-# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2023-2024, NVIDIA CORPORATION. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -20,7 +20,7 @@
 
 set -e
 
-spark_version=${1:-"3.1.1"}
+spark_version=${1:-"3.2.0"}
 scala_version=${2:-"2.12"}
 # Split spark version into base version (e.g. 3.3.0) and suffix (e.g. SNAPSHOT)
 PRE_IFS=$IFS
```
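For readers less familiar with the expansion being changed: `${1:-"3.2.0"}` takes the first positional argument if one is given and falls back to the literal 3.2.0 otherwise, and the `PRE_IFS=$IFS` context line hints at the IFS save/restore the script uses to split the version string. A minimal standalone sketch of both patterns (variable names are illustrative, not the script's exact code):

```bash
#!/bin/bash
# Fall back to 3.2.0 when no argument is supplied; "$1" overrides it.
spark_version=${1:-"3.2.0"}

# Split e.g. "3.4.0-SNAPSHOT" into base "3.4.0" and suffix "SNAPSHOT"
# by swapping the field separator, then restoring the original.
PRE_IFS=$IFS
IFS="-"
read -r base_version suffix <<< "$spark_version"
IFS=$PRE_IFS

echo "base=${base_version} suffix=${suffix:-none}"
```

With the new default, running the sketch with no arguments prints `base=3.2.0 suffix=none`.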

jenkins/spark-nightly-build.sh (+3 -2)

```diff
@@ -33,8 +33,9 @@ export M2DIR=${M2DIR:-"$WORKSPACE/.m2"}
 MVN="mvn -Dmaven.wagon.http.retryHandler.count=3 -DretryFailedDeploymentCount=3 ${MVN_OPT} -Psource-javadoc"
 
 DIST_PL="dist"
+## Get the default SPARK_VER from jenkins/version-def.sh
 function mvnEval {
-    $MVN help:evaluate -q -pl $DIST_PL $MVN_URM_MIRROR -Prelease311 -Dmaven.repo.local=$M2DIR -DforceStdout -Dexpression=$1
+    $MVN help:evaluate -q -pl $DIST_PL $MVN_URM_MIRROR -Prelease${SPARK_VER//./} -Dmaven.repo.local=$M2DIR -DforceStdout -Dexpression=$1
 }
 
 ART_ID=$(mvnEval project.artifactId)
@@ -176,7 +177,7 @@ distWithReducedPom "install"
 if [[ $SKIP_DEPLOY != 'true' ]]; then
     distWithReducedPom "deploy"
 
-    # this deploys selected submodules that is unconditionally built with Spark 3.1.1
+    # this deploys selected submodules that is unconditionally built with $SPARK_VER
     $MVN -B deploy -pl $DEPLOY_SUBMODULES \
         -Dbuildver=$SPARK_BASE_SHIM_VERSION \
         -DskipTests \
```
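The interesting change here is that mvnEval now derives the Maven release profile from SPARK_VER instead of hard-coding -Prelease311. `${SPARK_VER//./}` is Bash global pattern substitution: with an empty replacement, every `.` is deleted, so the new default 3.2.0 maps to -Prelease320. A quick illustration of the mapping:

```bash
#!/bin/bash
# ${var//pattern/replacement} replaces all matches; with an empty
# replacement it deletes every "." from the version string.
SPARK_VER="3.2.0"
echo "-Prelease${SPARK_VER//./}"   # prints: -Prelease320

SPARK_VER="3.5.1"
echo "-Prelease${SPARK_VER//./}"   # prints: -Prelease351
```

This keeps the nightly script in sync with whatever default jenkins/version-def.sh exports, which is what the new `## Get the default SPARK_VER` comment points at.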

jenkins/spark-premerge-build.sh (+3 -3)

```diff
@@ -83,12 +83,12 @@ mvn_verify() {
     # The jacoco coverage should have been collected, but because of how the shade plugin
     # works and jacoco we need to clean some things up so jacoco will only report for the
     # things we care about
-    SPK_VER=${JACOCO_SPARK_VER:-"311"}
+    SPK_VER=${JACOCO_SPARK_VER:-"320"}
     mkdir -p target/jacoco_classes/
     FILE=$(ls dist/target/rapids-4-spark_2.12-*.jar | grep -v test | xargs readlink -f)
     UDF_JAR=$(ls ./udf-compiler/target/spark${SPK_VER}/rapids-4-spark-udf_2.12-*-spark${SPK_VER}.jar | grep -v test | xargs readlink -f)
     pushd target/jacoco_classes/
-    jar xf $FILE com org rapids spark-shared "spark${JACOCO_SPARK_VER:-311}/"
+    jar xf $FILE com org rapids spark3xx-common "spark${JACOCO_SPARK_VER:-320}/"
     # extract the .class files in udf jar and replace the existing ones in spark3xx-ommon and spark$SPK_VER
     # because the class files in udf jar will be modified in aggregator's shade phase
     jar xf "$UDF_JAR" com/nvidia/spark/udf
@@ -222,7 +222,7 @@ ci_scala213() {
 }
 
 prepare_spark() {
-    spark_ver=${1:-'3.1.1'}
+    spark_ver=${1:-'3.2.0'}
     scala_ver=${2:-'2.12'}
 
     ARTF_ROOT="$(pwd)/.download"
```
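Two shell idioms carry the jacoco hunk: `${JACOCO_SPARK_VER:-"320"}` expands to 320 only when JACOCO_SPARK_VER is unset or empty, and `jar xf <archive> <paths...>` extracts only the named entries, which is how the script pulls just one shim directory out of the dist jar. A small sketch under those assumptions (the jar name below is hypothetical):

```bash
#!/bin/bash
# ":-" supplies a default only when the variable is unset or empty.
SPK_VER=${JACOCO_SPARK_VER:-"320"}

# Passing entry names after the archive restricts extraction to them,
# so only the spark${SPK_VER} shim classes land in the working dir.
jar xf rapids-4-spark_2.12-hypothetical.jar "spark${SPK_VER}/"
```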

jenkins/spark-tests.sh (+1 -1)

```diff
@@ -59,7 +59,7 @@ $MVN_GET_CMD -DremoteRepositories=$PROJECT_TEST_REPO \
     -DgroupId=com.nvidia -DartifactId=rapids-4-spark-integration-tests_$SCALA_BINARY_VER -Dversion=$PROJECT_TEST_VER -Dclassifier=pytest -Dpackaging=tar.gz
 
 RAPIDS_INT_TESTS_HOME="$ARTF_ROOT/integration_tests/"
-# The version of pytest.tar.gz that is uploaded is the one built against spark311 but its being pushed without classifier for now
+# The version of pytest.tar.gz that is uploaded is the one built against spark320 but its being pushed without classifier for now
 RAPIDS_INT_TESTS_TGZ="$ARTF_ROOT/rapids-4-spark-integration-tests_${SCALA_BINARY_VER}-$PROJECT_TEST_VER-pytest.tar.gz"
 
 tmp_info=${TMP_INFO_FILE:-'/tmp/artifacts-build.info'}
```

jenkins/version-def.sh (+2 -2)

```diff
@@ -32,7 +32,7 @@ CUDA_CLASSIFIER=${CUDA_CLASSIFIER:-"cuda11"}
 CLASSIFIER=${CLASSIFIER:-"$CUDA_CLASSIFIER"} # default as CUDA_CLASSIFIER for compatibility
 PROJECT_VER=${PROJECT_VER:-"24.08.0-SNAPSHOT"}
 PROJECT_TEST_VER=${PROJECT_TEST_VER:-"24.08.0-SNAPSHOT"}
-SPARK_VER=${SPARK_VER:-"3.1.1"}
+SPARK_VER=${SPARK_VER:-"3.2.0"}
 SPARK_VER_213=${SPARK_VER_213:-"3.3.0"}
 # Make a best attempt to set the default value for the shuffle shim.
 # Note that SPARK_VER for non-Apache Spark flavors (i.e. databricks,
@@ -85,7 +85,7 @@ fi
 # PHASE_TYPE: CICD phase at which the script is called, to specify Spark shim versions.
 # regular: noSnapshots + snapshots
 # pre-release: noSnapshots only
-# *: shim versions to build, e.g., PHASE_TYPE="311 321"
+# *: shim versions to build, e.g., PHASE_TYPE="320 321"
 PHASE_TYPE=${PHASE_TYPE:-"regular"}
 case $PHASE_TYPE in
     # SPARK_SHIM_VERSIONS will be used for nightly artifact build
```
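The trailing context line shows PHASE_TYPE feeding a case dispatch that selects which shim versions to build. A hedged sketch of that shape, with placeholder branch bodies rather than version-def.sh's real logic:

```bash
#!/bin/bash
PHASE_TYPE=${PHASE_TYPE:-"regular"}

case $PHASE_TYPE in
    regular)
        # placeholder: nightly builds take snapshot and non-snapshot shims
        SPARK_SHIM_VERSIONS="noSnapshots snapshots" ;;
    pre-release)
        # placeholder: release candidates take non-snapshot shims only
        SPARK_SHIM_VERSIONS="noSnapshots" ;;
    *)
        # any other value is read as an explicit shim list, e.g. "320 321"
        SPARK_SHIM_VERSIONS=$PHASE_TYPE ;;
esac

echo "building shims: $SPARK_SHIM_VERSIONS"
```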
