Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 0 additions & 4 deletions jenkins/Dockerfile.integration.centos7
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,3 @@ RUN python -m pip install pytest sre_yield

# Set ENV for mvn
ENV JAVA_HOME "/usr/lib/jvm/java-1.8.0-openjdk"

RUN groupadd --gid 30 dip
RUN adduser --uid 26576 --gid 30 --shell /bin/bash svcngcc
USER svcngcc
3 changes: 0 additions & 3 deletions jenkins/Dockerfile.ubuntu16
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,3 @@ RUN add-apt-repository ppa:deadsnakes/ppa && \

RUN ln -s /usr/bin/python3.6 /usr/bin/python
RUN python -m pip install pytest sre_yield requests

RUN adduser --uid 26576 --gid 30 --shell /bin/bash svcngcc
USER svcngcc
152 changes: 152 additions & 0 deletions jenkins/Jenkinsfile.blossom
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
#!/usr/bin/env groovy
/*
* Copyright (c) 2019-2020, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/**
*
* Jenkinsfile for building rapids-plugin
*
*/

@Library('shared-libs') _

def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"
def CUDA_DOCKER="cuda10-1"
def CUDA_NAME=CUDA_DOCKER.replace("-", ".")
def IMAGE_NAME="${ArtifactoryConstants.ARTIFACTORY_NAME}/sw-spark-docker/plugin:dev-ubuntu16-$CUDA_NAME"

def podConf="""
apiVersion: v1
kind: Pod
spec:
containers:
- name: "$CUDA_DOCKER"
image: xgbci/rapids-plugin-build:ubuntu16-cuda10.1
resources:
limits:
nvidia.com/gpu: 1
restartPolicy: Never
backoffLimit: 4
tty: true

- name: docker
image: docker:19.03.1
command:
- sleep
args:
- 99d
env:
- name: DOCKER_HOST
value: tcp://localhost:2375
- name: docker-daemon
image: docker:19.03.1-dind
securityContext:
privileged: true
env:
- name: DOCKER_TLS_CERTDIR
value: ""

nodeSelector:
kubernetes.io/os: linux
"""
println podConf

pipeline {
agent {
kubernetes {
label 'plugin-dev'
cloud 'sc-ipp-blossom-prod'
yaml "${podConf}"
}
}

options {
ansiColor('xterm')
timeout(time: 120, unit: 'MINUTES')
buildDiscarder(logRotator(numToKeepStr: '10'))
}

parameters {
string(name: 'REF', defaultValue: '\${sha1}', description: 'Commit to build')
}

environment {
JENKINS_ROOT = 'jenkins'
BUILD_SCRIPT = '$JENKINS_ROOT/spark-premerge-build.sh'
MVN_URM_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
LIBCUDF_KERNEL_CACHE_PATH='/tmp/.cudf'
URM_URL = "${urmUrl}"
}

stages {
stage('Build docker image') {
steps {
script {
//Check if a PR has been committed using git signoff
if (!isSignedOff()) {
error "Signed-off-by check FAILED"
}

container('docker') {
def CUDA_VER="$CUDA_NAME" - "cuda"
sh "docker pull $IMAGE_NAME || true"
// Speed up Docker building via '--cache-from $IMAGE_NAME'
def buildImage=docker.build(IMAGE_NAME,
"-f jenkins/Dockerfile.ubuntu16 --build-arg CUDA_VER=$CUDA_VER --cache-from $IMAGE_NAME -t $IMAGE_NAME .")
}
}
}
}
stage('Build on Ubuntu16 CUDA10.1') {
steps {
script {
container('cuda10-1') {
sh "bash $BUILD_SCRIPT"
step([$class: 'JacocoPublisher',
execPattern: '**/target/jacoco.exec',
classPattern: 'target/jacoco_classes/',
sourcePattern: 'sql-plugin/src/main/java/,sql-plugin/src/main/scala/,shuffle-plugin/src/main/scala/',
sourceInclusionPattern: '**/*.java,**/*.scala'
])
}
}
}
}
} // end of stages
} // end of pipeline

/**
 * Returns whether every relevant PR commit set contains a 'Signed-off-by' line.
 *
 * NOTE(review): the real implementation below is commented out, so this
 * function currently ALWAYS returns true — the sign-off gate invoked from the
 * 'Build docker image' stage is effectively disabled. Re-enable the body (it
 * depends on the ghprb-provided env vars ghprbTargetBranch / ghprbActualCommit)
 * before relying on this check.
 */
boolean isSignedOff() {
/*
def target_rev = sh(returnStdout: true,
script: "git rev-parse refs/remotes/origin/${ghprbTargetBranch}").trim()
def revs_arr = sh(returnStdout: true,
script: "git log ${target_rev}..${ghprbActualCommit} --pretty=format:%h").split()

def signed_off = false
for( String commit : revs_arr ) {
def signed_log = sh(returnStdout: true,
script: "git show ${commit} --shortstat | grep 'Signed-off-by' || true")
echo "commit: ${commit}, signed_log: ${signed_log}"
// Find `Signed-off-by` comment in one of the commits
if (signed_log?.trim()) {
signed_off = true
break;
}
}

return signed_off
*/ return true
}
172 changes: 172 additions & 0 deletions jenkins/Jenkinsfile.it
Original file line number Diff line number Diff line change
@@ -0,0 +1,172 @@
#!/usr/bin/env groovy
/*
* Copyright (c) 2019-2020, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/**
*
* Jenkins file for running spark3.0 integration tests
*
*/

@Library(['shared-libs', 'spark-jenkins-shared-lib']) _

def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"
def CUDA_DOCKER="cuda10-1"
def CUDA_NAME=CUDA_DOCKER.replace("-", ".")
def IMAGE_NAME="${ArtifactoryConstants.ARTIFACTORY_NAME}/sw-spark-docker/plugin:it-centos7-$CUDA_NAME"

def podConf="""
apiVersion: v1
kind: Pod
spec:
containers:
- name: "$CUDA_DOCKER"
image: urm.nvidia.com/sw-spark-docker/plugin:it-ubuntu16-cuda10.1
resources:
limits:
nvidia.com/gpu: 1
restartPolicy: Never
backoffLimit: 4
tty: true

- name: docker
image: docker:19.03.1
command:
- sleep
args:
- 99d
env:
- name: DOCKER_HOST
value: tcp://localhost:2375
- name: docker-daemon
image: docker:19.03.1-dind
securityContext:
privileged: true
env:
- name: DOCKER_TLS_CERTDIR
value: ""

nodeSelector:
kubernetes.io/os: linux
"""
println podConf

pipeline {
agent {
kubernetes {
label 'plugin-it'
cloud 'sc-ipp-blossom-prod'
yaml "${podConf}"
}
}

options {
ansiColor('xterm')
timestamps()
timeout(time: 240, unit: 'MINUTES')
buildDiscarder(logRotator(numToKeepStr: '10'))
}

parameters {
string(name: 'OVERWRITE_PARAMS', defaultValue: '',
description: 'parameters format XXX_VER=xxx;YYY_VER=yyy;')
string(name: 'REF', defaultValue: 'branch-0.2', description: 'Commit to build')
}

environment {
JENKINS_ROOT = 'jenkins'
TEST_SCRIPT = '$JENKINS_ROOT/spark-tests.sh'
LIBCUDF_KERNEL_CACHE_PATH='/tmp/.cudf'
URM_CREDS = credentials("svcngcc_artifactory")
ARTIFACTORY_NAME = "${ArtifactoryConstants.ARTIFACTORY_NAME}"
URM_URL = "${urmUrl}"
MVN_URM_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
}

stages {
stage('Update docker image') {
steps {
script {
container('docker') {
def CUDA_VER="$CUDA_NAME" - "cuda"
sh "docker pull $IMAGE_NAME || true"
def urmImageID=sh(returnStdout: true, script: "docker inspect -f {{'.Id'}} $IMAGE_NAME")
// Speed up Docker building via '--cache-from $IMAGE_NAME'
def buildImage=docker.build(IMAGE_NAME,
"-f jenkins/Dockerfile.integration.centos7 --build-arg CUDA_VER=$CUDA_VER \
--build-arg URM_URL=$URM_URL --cache-from $IMAGE_NAME -t $IMAGE_NAME .")
def buildImageID=sh(returnStdout: true, script: "docker inspect -f {{'.Id'}} $IMAGE_NAME")
if (! buildImageID.equals(urmImageID)) {
echo "Dockerfile updated, upload docker image to URM"
uploadDocker(IMAGE_NAME)
}
}
}
}
}
stage('IT on centos7 CUDA10.1') {
steps {
script {
container('cuda10-1') {
echo "Running integration tests on centos7 $CUDA_NAME"
sh "bash $TEST_SCRIPT"
}
}
}
}
} // end of stages
post {
always {
script {
def status = "failed"
if (currentBuild.currentResult == "SUCCESS") {
status = "success"
slack("#rapidsai-spark-cicd", "Success", color: "#33CC33")
}
else {
slack("#rapidsai-spark-cicd", "Failed", color: "#FF0000")
}
}
echo 'Pipeline finished!'
}
}
} // end of pipeline

/**
 * Pushes the given image to the Artifactory Docker registry.
 *
 * NOTE(review): the body is commented out, so this is currently a NO-OP —
 * the 'Update docker image' stage detects a changed image and calls this,
 * but nothing is actually uploaded. Uncomment to restore the push (uses the
 * URM_CREDS credentials and ARTIFACTORY_NAME from the pipeline environment).
 *
 * @param IMAGE_NAME fully-qualified image tag to push
 */
void uploadDocker(String IMAGE_NAME) {
/*
def DOCKER_CMD="docker --config $WORKSPACE/.docker"
sh """
echo $URM_CREDS_PSW | $DOCKER_CMD login $ARTIFACTORY_NAME -u $URM_CREDS_USR --password-stdin
$DOCKER_CMD push $IMAGE_NAME
$DOCKER_CMD logout $ARTIFACTORY_NAME
"""
*/
}

/**
 * Sends a Slack notification for the build result.
 *
 * NOTE(review): the body is commented out, so this is currently a NO-OP —
 * the post { always { ... } } section calls it for both success and failure,
 * but no message is sent. Uncomment to restore slackSend (needs the
 * 'slack_token' credential and SparkConstants.SLACK_API_ENDPOINT from the
 * shared library).
 *
 * @param params  optional overrides (e.g. color:) merged over the defaults
 * @param channel Slack channel to post to (e.g. "#rapidsai-spark-cicd")
 * @param message text to send; BUILD_URL is prepended by the implementation
 */
void slack(Map params = [:], String channel, String message) {
/*
Map defaultParams = [
color: "#000000",
baseUrl: "${SparkConstants.SLACK_API_ENDPOINT}",
tokenCredentialId: "slack_token"
]

params["channel"] = channel
params["message"] = "${BUILD_URL}\n" + message

slackSend(defaultParams << params)
*/
}
2 changes: 2 additions & 0 deletions jenkins/Jenkinsfile.premerge
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
/**
*
* Jenkinsfile for building rapids-plugin
* DO NOT REVIEW: TEST PR ONLY
* 1
*
*/
@Library('shared-libs') _
Expand Down
4 changes: 2 additions & 2 deletions jenkins/spark-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ tar zxf $SPARK_HOME.tgz -C $ARTF_ROOT && \
PARQUET_PERF="$WORKSPACE/integration_tests/src/test/resources/parquet_perf"
PARQUET_ACQ="$WORKSPACE/integration_tests/src/test/resources/parquet_acq"
OUTPUT="$WORKSPACE/output"
BASE_SPARK_SUBMIT_ARGS="--master spark://$HOSTNAME:7077 --executor-memory 32G \
BASE_SPARK_SUBMIT_ARGS="--master spark://$HOSTNAME:7077 --executor-memory 20G \
--conf spark.sql.shuffle.partitions=12 \
--conf spark.driver.extraClassPath=${CUDF_JAR}:${RAPIDS_PLUGIN_JAR} \
--conf spark.executor.extraClassPath=${CUDF_JAR}:${RAPIDS_PLUGIN_JAR} \
Expand All @@ -84,4 +84,4 @@ jps
echo "----------------------------START TEST------------------------------------"
rm -rf $OUTPUT
spark-submit $BASE_SPARK_SUBMIT_ARGS $MORTGAGE_SPARK_SUBMIT_ARGS $TEST_PARAMS
cd $RAPIDS_INT_TESTS_HOME && spark-submit $BASE_SPARK_SUBMIT_ARGS --jars $RAPIDS_TEST_JAR ./runtests.py -v -rfExXs --std_input_path="$WORKSPACE/integration_tests/src/test/resources/"
cd $RAPIDS_INT_TESTS_HOME && spark-submit $BASE_SPARK_SUBMIT_ARGS --jars $RAPIDS_TEST_JAR ./runtests.py -v src/main/python/window_function_test.py -rfExXs --std_input_path="$WORKSPACE/integration_tests/src/test/resources/"
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ abstract class RapidsShuffleInternalManagerBase(conf: SparkConf, isDriver: Boole
if (!GpuShuffleEnv.isRapidsShuffleEnabled) {
fallThroughReasons += "external shuffle is enabled"
}
if (conf.get(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key).toBoolean) {
if (SQLConf.get.adaptiveExecutionEnabled) {
fallThroughReasons += "adaptive query execution is enabled"
}
if (fallThroughReasons.nonEmpty) {
Expand Down