-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathSparkPythonWeekly_config.xml
More file actions
112 lines (98 loc) · 3.88 KB
/
SparkPythonWeekly_config.xml
File metadata and controls
112 lines (98 loc) · 3.88 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
<?xml version='1.0' encoding='UTF-8'?>
<!--
  Jenkins freestyle job: SparkPythonWeekly.
  Triggered after the upstream weekly Spark build succeeds; SSHes into the
  baremetal host, sets up the JDK environment, runs Spark's Python test
  suite from the upstream build's workspace, then archives the console log.
  NOTE: every literal '<' inside the <command> text below is XML-escaped
  (&lt;) so the document stays well-formed; Jenkins unescapes it before
  passing the script to /bin/bash.
-->
<project>
  <actions/>
  <description></description>
  <keepDependencies>false</keepDependencies>
  <properties/>
  <!-- No SCM checkout: the job reuses the workspace prepared by the upstream build. -->
  <scm class="hudson.scm.NullSCM"/>
  <canRoam>true</canRoam>
  <disabled>false</disabled>
  <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
  <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
  <triggers>
    <!-- Fire only when the named upstream job finishes with SUCCESS (BLUE). -->
    <jenkins.triggers.ReverseBuildTrigger>
      <spec></spec>
      <upstreamProjects>10.77.67.159-SparkBranch-2.1-Hadoop-2.7-without-Hive-OPENJDK-WeeklyBuild</upstreamProjects>
      <threshold>
        <name>SUCCESS</name>
        <ordinal>0</ordinal>
        <color>BLUE</color>
        <completeBuild>true</completeBuild>
      </threshold>
    </jenkins.triggers.ReverseBuildTrigger>
  </triggers>
  <concurrentBuild>false</concurrentBuild>
  <builders>
    <hudson.tasks.Shell>
      <command>#!/bin/bash
echo "Started Spark-PythonUnitTest"
workDir=/root/WeeklyValidation
IPbaremetal=10.77.67.159
# Read SSH credentials for the target host from its per-machine config file.
userName=$(grep -Po '(?&lt;=userName=).*' ${workDir}/baremetalMachines/${IPbaremetal})
passWord=$(grep -Po '(?&lt;=passWord=).*' ${workDir}/baremetalMachines/${IPbaremetal})
# Run the test session on the remote host. The quoted heredoc delimiter
# ('EOF') prevents local expansion: everything expands on the remote side.
ssh ${userName}@${IPbaremetal} /bin/bash &lt;&lt;'EOF'
echo "These commands will be run on: $( uname -a )"
echo "They are executed by: $( whoami )"
cd WeeklyValidation
workDirR=$(pwd)
# Native-library paths needed by the Spark build/tests (snappy, leveldb).
export SNAPPY_HOME=/usr/lib
export LEVELDB_HOME=${workDirR}/leveldb
export LEVELDBJNI_HOME=${workDirR}/leveldbjni
export LIBRARY_PATH=${SNAPPY_HOME}
export C_INCLUDE_PATH=${LIBRARY_PATH}
export CPLUS_INCLUDE_PATH=${LIBRARY_PATH}
IPbaremetalR=10.77.67.159
cd baremetalMachines/
# Per-host build/test settings, parsed from the machine file via PCRE lookbehind.
FunctionalTests=$(grep -Po '(?&lt;=FunctionalTests=).*' ${IPbaremetalR})
PythonTests=$(grep -Po '(?&lt;=PythonTests=).*' ${IPbaremetalR})
RTests=$(grep -Po '(?&lt;=RTests=).*' ${IPbaremetalR})
jdk_val=$(grep -Po '(?&lt;=JDK_VAL=).*' ${IPbaremetalR})
branchClone=$(grep -Po '(?&lt;=branchClone=).*' ${IPbaremetalR})
hiveBuild=$(grep -Po '(?&lt;=buildWithHive=).*' ${IPbaremetalR})
hadoopVer=$(grep -Po '(?&lt;=hadoopVer=).*' ${IPbaremetalR})
cd ${workDirR}
# Map the TRUE/FALSE Hive flag onto the "with"/"without" naming used in
# workspace directory names.
if [ $hiveBuild == TRUE ]
then
hiveFlag=with
elif [ $hiveBuild == FALSE ]
then
hiveFlag=without
fi
echo -en '#Creating workspace directories for jobs\n'
mkdir -p ${workDirR}/workspace/${IPbaremetalR}-SparkBranch-${branchClone}-Hadoop-${hadoopVer}-${hiveFlag}-Hive-${jdk_val}-WeeklyPythonTests
# NOTE(review): tests run from the upstream WeeklyBuild workspace (where Spark
# was built), not the WeeklyPythonTests directory created just above -- confirm
# whether that mkdir is still needed. If this cd fails the remaining commands
# run from ${workDirR} (no set -e in this script).
cd ${workDirR}/workspace/${IPbaremetalR}-SparkBranch-${branchClone}-Hadoop-${hadoopVer}-${hiveFlag}-Hive-${jdk_val}-WeeklyBuild/spark
# Select JAVA_HOME for the configured JDK flavour; the OpenJDK install path
# differs between Ubuntu and other (RHEL-style) distributions.
if [ ${jdk_val} = "OPENJDK" ]
then
if [ "$(. /etc/os-release; echo $NAME)" = "Ubuntu" ]; then
echo -en "Setting OpenJDK path and JAVA_HOME\n"
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-ppc64el
export PATH=$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$PATH
else
echo -en "Setting OpenJDK path and JAVA_HOME\n"
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk
export PATH=$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$PATH
fi
elif [ ${jdk_val} = "IBMJDK" ]
then
# IBM JDK location is recorded in the machine file's USER_INSTALL_DIR entry.
export JAVA_HOME=$(grep -Po '(?&lt;=USER_INSTALL_DIR=).*' ${workDirR}/baremetalMachines/${IPbaremetalR})
export PATH=$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$PATH
fi
java -version
# Build Spark
# This enables yarn and hadoop profiles.
# We do not specify a yarn.version and assume it is same as hadoop.version
#
# A hadoop.version must be compatible with the hadoop profile. For this
# reason, we only build against version 2.6.0.
#
python/run-tests
#R/run-tests.sh
EOF
# Archive this build's console log under a timestamped name for later review.
now=$(date +"%d-%m-%Y_%H:%M:%S")
cp /var/lib/jenkins/jobs/${JOB_NAME}/builds/${BUILD_NUMBER}/log ${workDir}/logs/${JOB_NAME}/${JOB_NAME}_${now}.log
</command>
    </hudson.tasks.Shell>
  </builders>
  <publishers/>
  <buildWrappers/>
</project>