Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
88 changes: 0 additions & 88 deletions azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,9 @@
# Starts with a vanilla Opal docker composition, installs dsBase
# and dsBaseClient (as well as dependencies - including a fully functional
# Opal server).
# Does checks and tests then saves results to testStatus repo.
#
# Inside the root directory $(Pipeline.Workspace) will be a file tree like:
# /dsBaseClient <- Checked out version of datashield/dsBaseClient
# /testStatus <- Checked out version of datashield/testStatus
# /logs <- Where results of tests and lots are collated
#
# As of May 2020 this takes ~ 70 mins to run.
Expand Down Expand Up @@ -36,18 +34,6 @@ variables:
_r_check_system_clock_: 0


#########################################################################################
# Need to define all the GH repos and their access tokens, see:
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml
resources:
repositories:
- repository: testStatusRepo
type: github
endpoint: datashield-testing
name: datashield/testStatus
ref: master


#########################################################################################
# When and under what condition to run the pipeline.
schedules:
Expand Down Expand Up @@ -82,11 +68,6 @@ jobs:
- checkout: self
path: 'dsBaseClient'

- checkout: testStatusRepo
path: 'testStatus'
persistCredentials: true
condition: and(eq(variables['Build.Repository.Name'], 'datashield/dsBaseClient'), ne(variables['Build.Reason'], 'PullRequest'))


#####################################################################################
# The MySQL install that comes with the VM doesn't seem compatible with our setup
Expand Down Expand Up @@ -524,75 +505,6 @@ jobs:
displayName: 'Down Opal Docker Composition'
condition: succeeded()

#####################################################################################
# Windup phase
#####################################################################################

#####################################################################################
# Output some important version numbers to file. This gets added to the testStatus
# commit so it can be parsed and used on the status table.
- bash: |

echo 'branch:'$(branchName) >> $(datetime).txt
echo 'os:'$(lsb_release -ds) >> $(datetime).txt
echo 'R:'$(R --version | head -n 1) >> $(datetime).txt
echo 'opal:'$(opal system --opal localhost:8443 --user administrator --password "datashield_test&" --version) >> $(datetime).txt

workingDirectory: $(Pipeline.Workspace)/logs
displayName: 'Write versions to file'
condition: succeededOrFailed()


#####################################################################################
# Checkout the testStatus repo, add the results from here, push back to GH.
# TODO: Automatically pull in better email/name info from somewhere.
# TODO: More debug info in commit message
- bash: |

# Git needs some config set to be able to push to a repo.
git config --global user.email "you@example.com"
git config --global user.name "Azure pipeline"

# This repo is checked out in detached HEAD state, so reconnect it here.
git checkout master

# It is possible that other commits have been made to the testStatus repo since it
# was checked out. i.e. other pipeline runs might have finished.
git pull

# Make the directories if they don't already exist
mkdir --parents logs/$(projectName)/$(branchName)
mkdir --parents docs/$(projectName)/$(branchName)/latest

cp $(Pipeline.Workspace)/logs/coveragelist.csv logs/$(projectName)/$(branchName)/
cp $(Pipeline.Workspace)/logs/coveragelist.csv logs/$(projectName)/$(branchName)/$(datetime).csv

cp $(Pipeline.Workspace)/logs/test_results.xml logs/$(projectName)/$(branchName)/
cp $(Pipeline.Workspace)/logs/test_results.xml logs/$(projectName)/$(branchName)/$(datetime).xml

cp $(Pipeline.Workspace)/logs/$(datetime).txt logs/$(projectName)/$(branchName)/

# Run the script to parse the results and build the html pages.
# status.py JUnit_file.xml coverage_file.csv output_file.html local_repo_path remote_repo_name branch
source/status.py logs/$(projectName)/$(branchName)/$(datetime).xml logs/$(projectName)/$(branchName)/$(datetime).csv logs/$(projectName)/$(branchName)/$(datetime).txt status.html $(Pipeline.Workspace)/$(projectName) $(projectName) $(branchName)

cp status.html docs/$(projectName)/$(branchName)/latest/index.html
git add logs/$(projectName)/$(branchName)/coveragelist.csv
git add logs/$(projectName)/$(branchName)/test_results.xml
git add logs/$(projectName)/$(branchName)/$(datetime).xml
git add logs/$(projectName)/$(branchName)/$(datetime).csv
git add logs/$(projectName)/$(branchName)/$(datetime).txt
git add docs/$(projectName)/$(branchName)/latest/index.html

git commit -m "Azure auto test for $(projectName)/$(branchName) @ $(datetime)" -m "Debug info:\nProjectName:$(projectName)\nBranchName:$(branchName)\nDataTime:$(datetime)"
git push
exit 0

workingDirectory: $(Pipeline.Workspace)/testStatus
displayName: 'Parse test results'
condition: and(eq(variables['Build.Repository.Name'], 'datashield/dsBaseClient'), ne(variables['Build.Reason'], 'PullRequest'))


#####################################################################################
# Output the environment information to the console. This is useful for debugging.
# Always do this, even if some of the above has failed or the job has been cancelled.
Expand Down
2 changes: 1 addition & 1 deletion docker-compose_armadillo.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ services:
hostname: armadillo
ports:
- 8080:8080
image: datashield/armadillo_citest:latest
image: datashield/armadillo_citest:5.9.4
environment:
LOGGING_CONFIG: 'classpath:logback-file.xml'
AUDIT_LOG_PATH: '/app/logs/audit.log'
Expand Down
2 changes: 1 addition & 1 deletion docker-compose_opal.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,6 @@ services:
- MONGO_INITDB_ROOT_USERNAME=root
- MONGO_INITDB_ROOT_PASSWORD=foobar
rock:
image: datashield/rock-omicron-karma-permissive:devel
image: datashield/rock-lemon-donkey-permissive:draft
environment:
DEBUG: "FALSE"
26 changes: 13 additions & 13 deletions tests/testthat/perf_files/armadillo_azure-pipeline.csv
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
"refer_name","rate","lower_tolerance","upper_tolerance"
"conndisconn::perf::simple0","0.1275","0.5","2"
"ds.abs::perf::0","4.824","0.5","2"
"ds.asInteger::perf:0","4.366","0.5","2"
"ds.asList::perf:0","9.787","0.5","2"
"ds.asNumeric::perf:0","4.316","0.5","2"
"ds.assign::perf::0","8.055","0.5","2"
"ds.class::perf::combine:0","9.847","0.5","2"
"ds.colnames::perf:0","7.574","0.5","2"
"ds.exists::perf::combine:0","19.84","0.5","2"
"ds.length::perf::combine:0","19.58","0.5","2"
"ds.mean::perf::combine:0","19.66","0.5","2"
"ds.mean::perf::split:0","19.21","0.5","2"
"void::perf::void::0","41810.0","0.5","2"
"conndisconn::perf::simple0","0.1225","0.5","2"
"ds.abs::perf::0","5.820","0.5","2"
"ds.asInteger::perf:0","5.328","0.5","2"
"ds.asList::perf:0","11.71","0.5","2"
"ds.asNumeric::perf:0","5.268","0.5","2"
"ds.assign::perf::0","9.229","0.5","2"
"ds.class::perf::combine:0","10.84","0.5","2"
"ds.colnames::perf:0","7.993","0.5","2"
"ds.exists::perf::combine:0","21.68","0.5","2"
"ds.length::perf::combine:0","21.84","0.5","2"
"ds.mean::perf::combine:0","22.01","0.5","2"
"ds.mean::perf::split:0","22.22","0.5","2"
"void::perf::void::0","48520.0","0.5","2"
26 changes: 13 additions & 13 deletions tests/testthat/perf_files/armadillo_hp-laptop_quay.csv
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
"refer_name","rate","lower_tolerance","upper_tolerance"
"conndisconn::perf::simple0","0.06225260028207","0.5","2"
"ds.abs::perf::0","2.602332538044","0.5","2"
"ds.asInteger::perf:0","2.16574645464856","0.5","2"
"ds.asList::perf:0","5.125787987349","0.5","2"
"ds.asNumeric::perf:0","2.02140793909654","0.5","2"
"ds.assign::perf::0","4.25379741119624","0.5","2"
"ds.class::perf::combine:0","5.03264832363257","0.5","2"
"ds.colnames::perf:0","3.61308626946146","0.5","2"
"ds.exists::perf::combine:0","8.47538336211864","0.5","2"
"ds.length::perf::combine:0","9.49818979827918","0.5","2"
"ds.mean::perf::combine:0","9.66558925664494","0.5","2"
"ds.mean::perf::split:0","8.6304479562724","0.5","2"
"void::perf::void::0","19351.7615914652","0.5","2"
"conndisconn::perf::simple0","0.04918","0.5","2"
"ds.abs::perf::0","1.184","0.5","2"
"ds.asInteger::perf:0","1.297","0.5","2"
"ds.asList::perf:0","2.884","0.5","2"
"ds.asNumeric::perf:0","1.354","0.5","2"
"ds.assign::perf::0","2.745","0.5","2"
"ds.class::perf::combine:0","3.261","0.5","2"
"ds.colnames::perf:0","2.404","0.5","2"
"ds.exists::perf::combine:0","6.342","0.5","2"
"ds.length::perf::combine:0","7.835","0.5","2"
"ds.mean::perf::combine:0","8.127","0.5","2"
"ds.mean::perf::split:0","8.109","0.5","2"
"void::perf::void::0","20280.0","0.5","2"
26 changes: 13 additions & 13 deletions tests/testthat/perf_files/default_perf_profile.csv
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
"refer_name","rate","lower_tolerance","upper_tolerance"
"conndisconn::perf::simple0","0.2118","0.5","2"
"ds.abs::perf::0","1.718","0.5","2"
"ds.asInteger::perf:0","1.484","0.5","2"
"ds.asList::perf:0","3.050","0.5","2"
"ds.asNumeric::perf:0","1.508","0.5","2"
"ds.assign::perf::0","3.547","0.5","2"
"ds.class::perf::combine:0","3.061","0.5","2"
"ds.colnames::perf:0","2.456","0.5","2"
"ds.exists::perf::combine:0","6.904","0.5","2"
"ds.length::perf::combine:0","6.058","0.5","2"
"ds.mean::perf::combine:0","5.892","0.5","2"
"ds.mean::perf::split:0","6.881","0.5","2"
"void::perf::void::0","27070.0","0.5","2"
"conndisconn::perf::simple0","0.2725","0.5","2"
"ds.abs::perf::0","2.677","0.5","2"
"ds.asInteger::perf:0","2.294","0.5","2"
"ds.asList::perf:0","4.587","0.5","2"
"ds.asNumeric::perf:0","2.185","0.5","2"
"ds.assign::perf::0","5.490","0.5","2"
"ds.class::perf::combine:0","4.760","0.5","2"
"ds.colnames::perf:0","4.159","0.5","2"
"ds.exists::perf::combine:0","11.09","0.5","2"
"ds.length::perf::combine:0","9.479","0.5","2"
"ds.mean::perf::combine:0","9.650","0.5","2"
"ds.mean::perf::split:0","11.26","0.5","2"
"void::perf::void::0","46250.0","0.5","2"
26 changes: 13 additions & 13 deletions tests/testthat/perf_files/opal_azure-pipeline.csv
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
"refer_name","rate","lower_tolerance","upper_tolerance"
"conndisconn::perf::simple0","0.2118","0.5","2"
"ds.abs::perf::0","1.718","0.5","2"
"ds.asInteger::perf:0","1.484","0.5","2"
"ds.asList::perf:0","3.050","0.5","2"
"ds.asNumeric::perf:0","1.508","0.5","2"
"ds.assign::perf::0","3.547","0.5","2"
"ds.class::perf::combine:0","3.061","0.5","2"
"ds.colnames::perf:0","2.456","0.5","2"
"ds.exists::perf::combine:0","6.904","0.5","2"
"ds.length::perf::combine:0","6.058","0.5","2"
"ds.mean::perf::combine:0","5.892","0.5","2"
"ds.mean::perf::split:0","6.881","0.5","2"
"void::perf::void::0","27070.0","0.5","2"
"conndisconn::perf::simple0","0.2725","0.5","2"
"ds.abs::perf::0","2.677","0.5","2"
"ds.asInteger::perf:0","2.294","0.5","2"
"ds.asList::perf:0","4.587","0.5","2"
"ds.asNumeric::perf:0","2.185","0.5","2"
"ds.assign::perf::0","5.490","0.5","2"
"ds.class::perf::combine:0","4.760","0.5","2"
"ds.colnames::perf:0","4.159","0.5","2"
"ds.exists::perf::combine:0","11.09","0.5","2"
"ds.length::perf::combine:0","9.479","0.5","2"
"ds.mean::perf::combine:0","9.650","0.5","2"
"ds.mean::perf::split:0","11.26","0.5","2"
"void::perf::void::0","46250.0","0.5","2"
7 changes: 6 additions & 1 deletion tests/testthat/test-smk-ds.mice.R
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,12 @@ test_that("mice, second imputation", {
expect_length(newImp$sim2, 3)
expect_length(newImp$sim3, 3)
expect_true("character" %in% class(newImp$sim1$method))
expect_equal(as.character(newImp$sim1$method), c("pmm","norm","pmm","pmm","pmm","","","","","","polyreg"))
if (ds.test_env$driver == "OpalDriver")
expect_equal(as.character(newImp$sim1$method), c("pmm","norm","pmm","pmm","pmm","","","","","","polyreg"))
else if (ds.test_env$driver == "ArmadilloDriver")
expect_equal(as.character(newImp$sim1$method), c("pmm","norm","pmm","pmm","pmm","","","","","",""))
else
expect_equal(as.character(newImp$sim1$method), c("pmm","norm","pmm","pmm","pmm","","","","","","polyreg"))
expect_true("matrix" %in% class(newImp$sim1$predictorMatrix))
expect_true("array" %in% class(newImp$sim1$predictorMatrix))
expect_equal(as.numeric(newImp$sim1$predictorMatrix[,1]), c(0,1,1,1,1,1,1,1,1,1,1))
Expand Down