diff --git a/.Rbuildignore b/.Rbuildignore index 4a02081ec..e05151613 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -3,10 +3,13 @@ ^\.Rproj\.user$ ^LICENSE.md$ ^azure-pipelines\.yml$ +^opal_azure-pipelines\.yml$ +^armadillo_azure-pipelines\.yml$ ^azure-pipelines_site\.pp$ ^azure-pipelines_site-dsdanger\.pp$ ^azure-pipelines_check\.Rout$ ^azure-pipelines_test\.Rout$ +^tests/docker/armadillo/standard/log/.gitkeep$ ^checkDocumentationUpdated\.sh$ ^docker-compose_armadillo\.yml$ ^docker-compose_opal\.yml$ @@ -14,6 +17,9 @@ ^R/secure.global.ranking.md$ ^_pkgdown\.yml$ ^docs$ -^dsBase_6.3.0.tar.gz$ -^dsBase_6.3.0-permissive.tar.gz$ -^dsDanger_6.3.0.tar.gz$ +^dsBase_6.3.4.tar.gz$ +^dsBase_6.3.4-permissive.tar.gz$ +^dsDanger_6.3.4.tar.gz$ +^\.circleci$ +^\.circleci/config\.yml$ +^\.github$ diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 000000000..8018f4eb1 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,67 @@ +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. 
+ +version: 2.1 + +jobs: + dsbaseclient: + docker: +# - image: cimg/base:current + - image: cimg/base:2024.11 + resource_class: small + steps: + - checkout + - setup_remote_docker: + docker_layer_caching: true + - run: | + echo "Building" + echo " Repo Name: " $CIRCLE_PROJECT_REPONAME + echo " Branch: " $CIRCLE_BRANCH + echo " Tag: " $CIRCLE_TAG + - run: + command: | + sudo apt-get install --no-install-recommends software-properties-common dirmngr -y + wget -qO- https://cloud.r-project.org/bin/linux/ubuntu/marutter_pubkey.asc | sudo tee -a /etc/apt/trusted.gpg.d/cran_ubuntu_key.asc + sudo add-apt-repository -y "deb https://cloud.r-project.org/bin/linux/ubuntu $(lsb_release -cs)-cran40/" + + sudo apt-get update -y + sudo apt-get install -y r-base-core cmake + - run: + command: | + sudo apt-get install -y libxml2-dev + sudo apt-get install -y libfontconfig1-dev + sudo apt-get install -y libudunits2-dev + sudo apt-get install -y libharfbuzz-dev + sudo apt-get install -y libfribidi-dev + sudo apt-get install -y libfreetype6-dev + sudo apt-get install -y libpng-dev + sudo apt-get install -y libtiff5-dev + sudo apt-get install -y libjpeg-dev + - run: + command: | + sudo Rscript -e "install.packages('devtools', dependencies=TRUE)" + sudo Rscript -e "install.packages('covr', dependencies=TRUE)" + + sudo Rscript -e "install.packages('fields', dependencies=TRUE)" + sudo Rscript -e "install.packages('metafor', dependencies=TRUE)" + sudo Rscript -e "install.packages('meta', dependencies=TRUE)" + sudo Rscript -e "install.packages('ggplot2', dependencies=TRUE)" + sudo Rscript -e "install.packages('gridExtra', dependencies=TRUE)" + sudo Rscript -e "install.packages('data.table', dependencies=TRUE)" + sudo Rscript -e "install.packages('methods', dependencies=TRUE)" + sudo Rscript -e "install.packages('dplyr', dependencies=TRUE)" + + sudo Rscript -e "install.packages('DSI', dependencies=TRUE)" + sudo Rscript -e "install.packages('DSOpal', dependencies=TRUE)" + sudo Rscript -e 
"install.packages('DSLite', dependencies=TRUE)" + sudo Rscript -e "install.packages('MolgenisAuth', dependencies=TRUE)" + sudo Rscript -e "install.packages('MolgenisArmadillo', dependencies=TRUE)" + sudo Rscript -e "install.packages('DSMolgenisArmadillo', dependencies=TRUE)" + sudo Rscript -e "install.packages('DescTools', dependencies=TRUE)" + sudo Rscript -e "install.packages('e1071', dependencies=TRUE)" + - run: + command: | + sudo Rscript -e 'library(covr); covr::codecov(token = "'$CODECOV_TOKEN'", quiet=FALSE)' +workflows: + build: + jobs: + - dsbaseclient diff --git a/.github/.gitignore b/.github/.gitignore new file mode 100644 index 000000000..2d19fc766 --- /dev/null +++ b/.github/.gitignore @@ -0,0 +1 @@ +*.html diff --git a/.github/workflows/dsBaseClient_test_suite.yaml b/.github/workflows/dsBaseClient_test_suite.yaml new file mode 100644 index 000000000..fb4ed9893 --- /dev/null +++ b/.github/workflows/dsBaseClient_test_suite.yaml @@ -0,0 +1,245 @@ +################################################################################ +# DataSHIELD GHA test suite - dsBaseClient +# Adapted from `armadillo_azure-pipelines.yml` by Roberto Villegas-Diaz +# +# Inside the root directory $(Pipeline.Workspace) will be a file tree like: +# /dsBaseClient <- Checked out version of datashield/dsBaseClient +# /dsBaseClient/logs <- Where results of tests and logs are collated +# /testStatus <- Checked out version of datashield/testStatus +# +# As of Sept. 2025 this takes ~ 95 mins to run. +################################################################################ +name: dsBaseClient tests' suite + +on: + push: + schedule: + - cron: '0 0 * * 0' # Weekly + - cron: '0 1 * * *' # Nightly + +jobs: + dsBaseClient_test_suite: + runs-on: ubuntu-latest + timeout-minutes: 120 + permissions: + contents: read + + # These should all be constant, except TEST_FILTER. This can be used to test + # subsets of test files in the testthat directory. 
Options are like: + # '*' <- Run all tests. + # 'asNumericDS*' <- Run all asNumericDS tests, i.e. all the arg, etc. tests. + # '*_smk_*' <- Run all the smoke tests for all functions. + env: + TEST_FILTER: '_-|datachk-|smk-|arg-|disc-|perf-|smk_expt-|expt-|math-' + _R_CHECK_SYSTEM_CLOCK_: 0 + WORKFLOW_ID: ${{ github.run_id }}-${{ github.run_attempt }} + PROJECT_NAME: dsBaseClient + BRANCH_NAME: ${{ github.head_ref || github.ref_name }} + REPO_OWNER: ${{ github.repository_owner }} + R_KEEP_PKG_SOURCE: yes + GITHUB_TOKEN: ${{ github.token || 'placeholder-token' }} + + steps: + - name: Checkout dsBaseClient + uses: actions/checkout@v4 + with: + path: dsBaseClient + + - name: Checkout testStatus + if: ${{ github.actor != 'nektos/act' }} # for local deployment only + uses: actions/checkout@v4 + with: + repository: ${{ env.REPO_OWNER }}/testStatus + ref: master + path: testStatus + persist-credentials: false + token: ${{ env.GITHUB_TOKEN }} + + - name: Uninstall default MySQL + run: | + curl https://bazel.build/bazel-release.pub.gpg | sudo apt-key add - + sudo service mysql stop || true + sudo apt-get update + sudo apt-get remove --purge mysql-client mysql-server mysql-common -y + sudo apt-get autoremove -y + sudo apt-get autoclean -y + sudo rm -rf /var/lib/mysql/ + + - uses: r-lib/actions/setup-pandoc@v2 + + - uses: r-lib/actions/setup-r@v2 + with: + r-version: release + http-user-agent: release + use-public-rspm: true + + - name: Install R and dependencies + run: | + sudo apt-get install --no-install-recommends software-properties-common dirmngr -y + wget -qO- https://cloud.r-project.org/bin/linux/ubuntu/marutter_pubkey.asc | sudo tee -a /etc/apt/trusted.gpg.d/cran_ubuntu_key.asc + sudo add-apt-repository "deb https://cloud.r-project.org/bin/linux/ubuntu $(lsb_release -cs)-cran40/" + sudo apt-get update -qq + sudo apt-get upgrade -y + sudo apt-get install -qq libxml2-dev libcurl4-openssl-dev libssl-dev libgsl-dev libgit2-dev r-base -y + sudo apt-get install -qq 
libharfbuzz-dev libfribidi-dev libmagick++-dev xml-twig-tools -y + sudo R -q -e "install.packages(c('devtools','covr','fields','meta','metafor','ggplot2','gridExtra','data.table','DSI','DSOpal','DSLite','MolgenisAuth','MolgenisArmadillo','DSMolgenisArmadillo','DescTools','e1071'), repos='https://cloud.r-project.org')" + sudo R -q -e "devtools::install_github(repo='datashield/dsDangerClient', ref=Sys.getenv('BRANCH_NAME'))" + + - uses: r-lib/actions/setup-r-dependencies@v2 + with: + dependencies: 'c("Imports")' + extra-packages: | + any::rcmdcheck + cran::devtools + cran::git2r + cran::RCurl + cran::readr + cran::magrittr + cran::xml2 + cran::purrr + cran::dplyr + cran::stringr + cran::tidyr + cran::quarto + cran::knitr + cran::kableExtra + cran::rmarkdown + cran::downlit + needs: check + + - name: Check manual updated + run: | + orig_sum=$(find man -type f | sort -u | xargs cat | md5sum) + R -q -e "devtools::document()" + new_sum=$(find man -type f | sort -u | xargs cat | md5sum) + if [ "$orig_sum" != "$new_sum" ]; then + echo "Your committed man/*.Rd files are out of sync with the R headers." 
+ exit 1 + fi + working-directory: dsBaseClient + continue-on-error: true + + - name: Devtools checks + run: | + R -q -e "devtools::check(args = c('--no-examples', '--no-tests'))" | tee azure-pipelines_check.Rout + grep --quiet "^0 errors" azure-pipelines_check.Rout && grep --quiet " 0 warnings" azure-pipelines_check.Rout && grep --quiet " 0 notes" azure-pipelines_check.Rout + working-directory: dsBaseClient + continue-on-error: true + + - name: Start Armadillo docker-compose + run: docker compose -f docker-compose_armadillo.yml up -d --build + working-directory: dsBaseClient + + - name: Install test datasets + run: | + sleep 60 + R -q -f "molgenis_armadillo-upload_testing_datasets.R" + working-directory: dsBaseClient/tests/testthat/data_files + + - name: Install dsBase to Armadillo + run: | + curl -u admin:admin -X GET http://localhost:8080/packages + curl -u admin:admin -H 'Content-Type: multipart/form-data' -F "file=@dsBase_6.3.4-permissive.tar.gz" -X POST http://localhost:8080/install-package + sleep 60 + docker restart dsbaseclient-armadillo-1 + sleep 30 + curl -u admin:admin -X POST http://localhost:8080/whitelist/dsBase + working-directory: dsBaseClient + + - name: Run tests with coverage & JUnit report + run: | + mkdir -p logs + R -q -e "devtools::reload();" + R -q -e ' + write.csv( + covr::coverage_to_list( + covr::package_coverage( + type = c("none"), + code = c('"'"' + output_file <- file("test_console_output.txt"); + sink(output_file); + sink(output_file, type = "message"); + junit_rep <- testthat::JunitReporter$new(file = file.path(getwd(), "test_results.xml")); + progress_rep <- testthat::ProgressReporter$new(max_failures = 999999); + multi_rep <- testthat::MultiReporter$new(reporters = list(progress_rep, junit_rep)); + options("datashield.return_errors" = FALSE, "default_driver" = "ArmadilloDriver"); + testthat::test_package("${{ env.PROJECT_NAME }}", filter = "${{ env.TEST_FILTER }}", reporter = multi_rep, stop_on_failure = FALSE)'"'"' + ) + ) + ), 
+ "coveragelist.csv" + )' + + mv coveragelist.csv logs/ + mv test_* logs/ + working-directory: dsBaseClient + + - name: Check for JUnit errors + run: | + issue_count=$(sed 's/failures="0" errors="0"//' test_results.xml | grep -c errors= || true) + echo "Number of testsuites with issues: $issue_count" + sed 's/failures="0" errors="0"//' test_results.xml | grep errors= > issues.log || true + cat issues.log || true + # continue with workflow even when some tests fail + exit 0 + working-directory: dsBaseClient/logs + + - name: Write versions to file + run: | + echo "branch:${{ env.BRANCH_NAME }}" > ${{ env.WORKFLOW_ID }}.txt + echo "os:$(lsb_release -ds)" >> ${{ env.WORKFLOW_ID }}.txt + echo "R:$(R --version | head -n1)" >> ${{ env.WORKFLOW_ID }}.txt + working-directory: dsBaseClient/logs + + - name: Parse results from testthat and covr + run: | + Rscript --verbose --vanilla ../testStatus/source/parse_test_report.R logs/ + working-directory: dsBaseClient + + - name: Render report + run: | + cd testStatus + + mkdir -p new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/ + mkdir -p new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/ + mkdir -p new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/latest/ + + # Copy logs to new logs directory location + cp -rv ../dsBaseClient/logs/* new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/ + cp -rv ../dsBaseClient/logs/${{ env.WORKFLOW_ID }}.txt new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/ + + R -e 'input_dir <- file.path("../new/logs", Sys.getenv("PROJECT_NAME"), Sys.getenv("BRANCH_NAME"), Sys.getenv("WORKFLOW_ID")); quarto::quarto_render("source/test_report.qmd", execute_params = list(input_dir = input_dir))' + mv source/test_report.html new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/index.html + cp -r new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID 
}}/* new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/latest + + env: + PROJECT_NAME: ${{ env.PROJECT_NAME }} + BRANCH_NAME: ${{ env.BRANCH_NAME }} + WORKFLOW_ID: ${{ env.WORKFLOW_ID }} + + - name: Upload test logs + uses: actions/upload-artifact@v4 + with: + name: dsbaseclient-logs + path: testStatus/new + + - name: Dump environment info + run: | + echo -e "\n#############################" + echo -e "ls /: ######################" + ls -al . + echo -e "\n#############################" + echo -e "lscpu: ######################" + lscpu + echo -e "\n#############################" + echo -e "memory: #####################" + free -m + echo -e "\n#############################" + echo -e "env: ########################" + env + echo -e "\n#############################" + echo -e "R sessionInfo(): ############" + R -e 'sessionInfo()' + sudo apt install tree -y + tree . + \ No newline at end of file diff --git a/.github/workflows/pkgdown.yaml b/.github/workflows/pkgdown.yaml new file mode 100644 index 000000000..bfc9f4db3 --- /dev/null +++ b/.github/workflows/pkgdown.yaml @@ -0,0 +1,49 @@ +# Workflow derived from https://github.com/r-lib/actions/tree/v2/examples +# Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help +on: + push: + branches: [main, master] + pull_request: + release: + types: [published] + workflow_dispatch: + +name: pkgdown.yaml + +permissions: read-all + +jobs: + pkgdown: + runs-on: ubuntu-latest + # Only restrict concurrency for non-PR jobs + concurrency: + group: pkgdown-${{ github.event_name != 'pull_request' || github.run_id }} + env: + GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} + permissions: + contents: write + steps: + - uses: actions/checkout@v4 + + - uses: r-lib/actions/setup-pandoc@v2 + + - uses: r-lib/actions/setup-r@v2 + with: + use-public-rspm: true + + - uses: r-lib/actions/setup-r-dependencies@v2 + with: + extra-packages: any::pkgdown, local::. 
+ needs: website + + - name: Build site + run: pkgdown::build_site_github_pages(new_process = FALSE, install = FALSE) + shell: Rscript {0} + + - name: Deploy to GitHub pages 🚀 + if: github.event_name != 'pull_request' + uses: JamesIves/github-pages-deploy-action@v4.5.0 + with: + clean: false + branch: gh-pages + folder: docs diff --git a/.gitignore b/.gitignore index 646e99a1d..60d56797e 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,5 @@ inst/tests/test-template.R azure-pipelines.Rout tests/testthat/connection_to_datasets/local_settings.csv +tests/docker/armadillo/standard/logs/ +tests/docker/armadillo/standard/data/ diff --git a/DESCRIPTION b/DESCRIPTION index bf6208f30..cc110b7f7 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,13 +1,66 @@ Package: dsBaseClient -Title: DataSHIELD Client Functions -Version: 6.3.0 -Author: DataSHIELD Developers -Maintainer: DataSHIELD Developers -Description: DataSHIELD client functions for the client side. +Title: 'DataSHIELD' Client Side Base Functions +Version: 6.3.4 +Description: Base 'DataSHIELD' functions for the client side. 'DataSHIELD' is a software package which allows + you to do non-disclosive federated analysis on sensitive data. 'DataSHIELD' analytic functions have + been designed to only share non disclosive summary statistics, with built in automated output + checking based on statistical disclosure control. With data sites setting the threshold values for + the automated output checks. For more details, see 'citation("dsBaseClient")'. 
+Authors@R: c(person(given = "Paul", + family = "Burton", + role = c("aut"), + comment = c(ORCID = "0000-0001-5799-9634")), + person(given = "Rebecca", + family = "Wilson", + role = c("aut"), + comment = c(ORCID = "0000-0003-2294-593X")), + person(given = "Olly", + family = "Butters", + role = c("aut"), + comment = c(ORCID = "0000-0003-0354-8461")), + person(given = "Patricia", + family = "Ryser-Welch", + role = c("aut"), + comment = c(ORCID = "0000-0002-0070-0264")), + person(given = "Alex", + family = "Westerberg", + role = c("aut")), + person(given = "Leire", + family = "Abarrategui", + role = c("aut")), + person(given = "Roberto", + family = "Villegas-Diaz", + role = c("aut"), + comment = c(ORCID = "0000-0001-5036-8661")), + person(given = "Demetris", + family = "Avraam", + role = c("aut"), + comment = c(ORCID = "0000-0001-8908-2441")), + person(given = "Yannick", + family = "Marcon", + role = c("aut"), + email = "yannick.marcon@obiba.org", + comment = c(ORCID = "0000-0003-0138-2023")), + person(given = "Tom", + family = "Bishop", + role = c("aut")), + person(given = "Amadou", + family = "Gaye", + role = c("aut"), + comment = c(ORCID = "0000-0002-1180-2792")), + person(given = "Xavier", + family = "Escribà-Montagut", + role = c("aut"), + comment = c(ORCID = "0000-0003-2888-8948")), + person(given = "Stuart", + family = "Wheater", + role = c("aut", "cre"), + email = "stuart.wheater@arjuna.com", + comment = c(ORCID = "0009-0003-2419-1964"))) License: GPL-3 Depends: - R (>= 3.5.0), - DSI (>= 1.3.0) + R (>= 4.0.0), + DSI (>= 1.7.1) Imports: fields, metafor, @@ -15,8 +68,19 @@ Imports: ggplot2, gridExtra, data.table, - panelaggregation, methods, dplyr -RoxygenNote: 7.2.3 +Suggests: + lme4, + httr, + spelling, + tibble, + testthat, + e1071, + DescTools, + DSOpal, + DSMolgenisArmadillo, + DSLite +RoxygenNote: 7.3.3 Encoding: UTF-8 +Language: en-GB diff 
--git a/NAMESPACE b/NAMESPACE index a112b9c9d..ec905eb6e 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -1,5 +1,6 @@ # Generated by roxygen2: do not edit by hand +export(computeWeightedMeans) export(ds.Boole) export(ds.abs) export(ds.asCharacter) @@ -119,3 +120,7 @@ export(ds.var) export(ds.vectorCalc) import(DSI) import(data.table) +importFrom(stats,as.formula) +importFrom(stats,na.omit) +importFrom(stats,ts) +importFrom(stats,weighted.mean) diff --git a/R/checkClass.R b/R/checkClass.R index 5cf86eac0..779eca1e0 100644 --- a/R/checkClass.R +++ b/R/checkClass.R @@ -3,8 +3,8 @@ #' @description This is an internal function. #' @details In DataSHIELD an object included in analysis must be of the same type in all #' the collaborating studies. If that is not the case the process is stopped -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @param obj a string character, the name of the object to check for. #' @keywords internal #' @return a message or the class of the object if the object has the same class in all studies. diff --git a/R/computeWeightedMeans.R b/R/computeWeightedMeans.R new file mode 100644 index 000000000..0f04fc915 --- /dev/null +++ b/R/computeWeightedMeans.R @@ -0,0 +1,25 @@ +#' Compute Weighted Mean by Group +#' +#' This function is originally from the panelaggregation package. +#' It has been ported here in order to bypass the package being +#' kicked off CRAN. +#' +#' @author Matthias Bannert, Gabriel Bucur +#' @param data_table a data.table +#' @param variables character name of the variable(s) to focus on. 
The variables must be in the data.table +#' @param weight character name of the data.table column that contains a weight. +#' @param by character vector of the columns to group by +#' @import data.table +#' @importFrom stats as.formula na.omit ts weighted.mean +#' @export +computeWeightedMeans <- function(data_table, variables, weight, by) { + + if (is.null(weight)) { + res_dt <- data_table[, lapply(.SD, mean, na.rm = TRUE), .SDcols = variables, by = by] + } else { + res_dt <- data_table[, lapply(.SD, weighted.mean, w = eval(as.name(weight)), na.rm = TRUE), + .SDcols = variables, by = by] + } + + res_dt +} diff --git a/R/ds.Boole.R b/R/ds.Boole.R index 1c5cf4f4b..252346bfd 100644 --- a/R/ds.Boole.R +++ b/R/ds.Boole.R @@ -34,9 +34,9 @@ #' Default \code{'NA'}. For more information see details. #' @param newobj a character string that provides the name for the output #' object that is stored on the data servers. Default \code{boole.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.Boole} returns the object specified by the \code{newobj} argument #' which is written to the server-side. Also, two validity messages are returned #' to the client-side indicating the name of the \code{newobj} which diff --git a/R/ds.abs.R b/R/ds.abs.R index 5b57b0ed1..41c204551 100644 --- a/R/ds.abs.R +++ b/R/ds.abs.R @@ -10,9 +10,9 @@ #' @param x a character string providing the name of a numeric or an integer vector. #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Default name is set to \code{abs.newobj}. 
-#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified the default set of connections will be -#' used: see \code{\link{datashield.connections_default}}. +#' used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.abs} assigns a vector for each study that includes the absolute values of #' the input numeric or integer vector specified in the argument \code{x}. The created vectors #' are stored in the servers. diff --git a/R/ds.asCharacter.R b/R/ds.asCharacter.R index 72acbd451..c0bd4ce0a 100644 --- a/R/ds.asCharacter.R +++ b/R/ds.asCharacter.R @@ -9,9 +9,9 @@ #' character. #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{ascharacter.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.asCharacter} returns the object converted into a class character #' that is written to the server-side. Also, two validity messages are returned to the client-side #' indicating the name of the \code{newobj} which has been created in each data source and if diff --git a/R/ds.asDataMatrix.R b/R/ds.asDataMatrix.R index 83b1037fb..7b4833bbd 100644 --- a/R/ds.asDataMatrix.R +++ b/R/ds.asDataMatrix.R @@ -8,9 +8,9 @@ #' a matrix. #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{asdatamatrix.newobj}. 
-#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.asDataMatrix} returns the object converted into a matrix #' that is written to the server-side. Also, two validity messages are returned #' to the client-side diff --git a/R/ds.asFactor.R b/R/ds.asFactor.R index 7d6246d59..476f00f85 100644 --- a/R/ds.asFactor.R +++ b/R/ds.asFactor.R @@ -130,9 +130,9 @@ #' to be used in the creation of the matrix with dummy variables. #' If the \code{fixed.dummy.vars} is set to FALSE then any value of the baseline level is not taken #' into account. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.asFactor} returns the unique levels of the converted #' variable in ascending order and a validity #' message with the name of the created object on the client-side and diff --git a/R/ds.asFactorSimple.R b/R/ds.asFactorSimple.R index dce56a8a9..313f7b408 100644 --- a/R/ds.asFactorSimple.R +++ b/R/ds.asFactorSimple.R @@ -14,9 +14,9 @@ #' the name of the variable to be converted to a factor. #' @param newobj.name a character string that provides the name for the output variable #' that is stored on the data servers. Default \code{asfactor.newobj}. 
-#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return an output vector of class factor to the serverside. In addition, returns a validity #' message with the name of the created object on the client-side and if creation fails an #' error message which can be viewed using datashield.errors(). @@ -109,4 +109,4 @@ return(list(return.info=return.info,return.message=return.message)) # } -#ds.asFactorSimple \ No newline at end of file +#ds.asFactorSimple diff --git a/R/ds.asInteger.R b/R/ds.asInteger.R index 139ceb6c5..9b3b1a397 100644 --- a/R/ds.asInteger.R +++ b/R/ds.asInteger.R @@ -22,9 +22,9 @@ #' an integer. #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{asinteger.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.asInteger} returns the R object converted into an integer #' that is written to the server-side. Also, two validity messages are returned to the #' client-side indicating the name of the \code{newobj} which diff --git a/R/ds.asList.R b/R/ds.asList.R index 5383c506d..d73668785 100644 --- a/R/ds.asList.R +++ b/R/ds.asList.R @@ -9,9 +9,9 @@ #' a list. 
#' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{aslist.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.asList} returns the R object converted into a list #' which is written to the server-side. Also, two validity messages are returned to the #' client-side indicating the name of the \code{newobj} which has been created in each data diff --git a/R/ds.asLogical.R b/R/ds.asLogical.R index de2920bf3..2ddc33cfe 100644 --- a/R/ds.asLogical.R +++ b/R/ds.asLogical.R @@ -8,9 +8,9 @@ #' input object to be coerced to a logical. #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{aslogical.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.asLogical} returns the R object converted into a logical #' that is written to the server-side. Also, two validity messages are returned #' to the client-side indicating the name of the \code{newobj} which diff --git a/R/ds.asMatrix.R b/R/ds.asMatrix.R index 041861095..1c5b0ced7 100644 --- a/R/ds.asMatrix.R +++ b/R/ds.asMatrix.R @@ -11,9 +11,9 @@ #' a matrix. 
#' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{asmatrix.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.asMatrix} returns the object converted into a matrix #' that is written to the server-side. Also, two validity messages are returned #' to the client-side indicating the name of the \code{newobj} which diff --git a/R/ds.asNumeric.R b/R/ds.asNumeric.R index 0c4e9a034..3e2b445fa 100644 --- a/R/ds.asNumeric.R +++ b/R/ds.asNumeric.R @@ -5,7 +5,7 @@ #' @details This function is based on the native R function \code{as.numeric}. #' However, it behaves differently with some specific classes of variables. For example, if the input #' object is of class factor, it first converts its values into characters and then convert those to -#' numerics. This behavior is important for the case where the input object is of class factor having +#' numerics. This behaviour is important for the case where the input object is of class factor having #' numbers as levels. In that case, the native R #' \code{as.numeric} function returns the underlying level codes and not the values as numbers. #' For example \code{as.numeric} in R converts the factor vector: \cr @@ -22,9 +22,9 @@ #' a numeric. #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{asnumeric.newobj}. 
-#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.asNumeric} returns the R object converted into a numeric class #' that is written to the server-side. Also, two validity messages are returned #' to the client-side indicating the name of the \code{newobj} which diff --git a/R/ds.assign.R b/R/ds.assign.R index 7c7017263..25b71c74e 100644 --- a/R/ds.assign.R +++ b/R/ds.assign.R @@ -10,9 +10,9 @@ #' @param toAssign a character string providing the object to assign. #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{assign.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.assign} returns the R object assigned to a name #' that is written to the server-side. #' @author DataSHIELD Development Team diff --git a/R/ds.auc.R b/R/ds.auc.R index b971651ac..ebfc5fc31 100644 --- a/R/ds.auc.R +++ b/R/ds.auc.R @@ -6,9 +6,9 @@ #' @param pred the name of the vector of the predicted values #' @param y the name of the outcome variable. Note that this variable should include #' the complete cases that are used in the regression model. 
-#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return returns the AUC and its standard error #' @author Demetris Avraam for DataSHIELD Development Team #' @export @@ -44,4 +44,4 @@ ds.auc <- function(pred=NULL, y=NULL, datasources=NULL){ return(output) -} \ No newline at end of file +} diff --git a/R/ds.boxPlot.R b/R/ds.boxPlot.R index ad2de1d87..d89c54709 100644 --- a/R/ds.boxPlot.R +++ b/R/ds.boxPlot.R @@ -12,7 +12,7 @@ #' @param ylabel \code{caracter} (default \code{"y axis"}) Label to put on the y axis of the plot #' @param type \code{character} Return a pooled plot (\code{"pooled"}) or a split plot (one for each study server #' \code{"split"}) -#' @param datasources a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login #' #' @return \code{ggplot} object #' @export @@ -54,7 +54,7 @@ #' ds.boxPlot("D", "LAB_HDL", type= "split", datasources = connections) #' #' ## Create a boxplot of two variables variable -#' ds.boxPlot("D", c("LAB_HDL", "LAB_TRIG", type="combine", +#' ds.boxPlot("D", c("LAB_HDL", "LAB_TRIG", type="pooled", #' datasources = connections) #' # only one plot is created (of the aggregated results of all servers) #' diff --git a/R/ds.boxPlotGG.R b/R/ds.boxPlotGG.R index d35caba38..e09fa8d6d 100644 --- a/R/ds.boxPlotGG.R +++ b/R/ds.boxPlotGG.R @@ -17,7 +17,7 @@ #' @param ylabel \code{caracter} (default \code{"y axis"}) Label to put on the y axis of the plot #' @param type \code{character} Return a pooled plot 
(\code{"pooled"}) or a split plot (one for each study server #' \code{"split"}) -#' @param datasources a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login #' #' @return \code{ggplot} object @@ -46,19 +46,19 @@ ds.boxPlotGG <- function(x, group = NULL, group2 = NULL, xlabel = "x axis", ylab } pt_merged <- data.table::data.table(pt_merged) if(!is.null(group) & is.null(group2)){ - pt_merged <- panelaggregation::computeWeightedMeans(pt_merged, + pt_merged <- computeWeightedMeans(pt_merged, variables = c("ymin", "lower", "middle", "upper", "ymax"), weight = "n", by = c("group", "x")) } else if(!is.null(group) & !is.null(group2)){ - pt_merged <- panelaggregation::computeWeightedMeans(pt_merged, + pt_merged <- computeWeightedMeans(pt_merged, variables = c("ymin", "lower", "middle", "upper", "ymax"), weight = "n", by = c("group", "group2", "x")) } else{ - pt_merged <- panelaggregation::computeWeightedMeans(pt_merged, + pt_merged <- computeWeightedMeans(pt_merged, variables = c("ymin", "lower", "middle", "upper", "ymax"), weight = "n", by = c("x")) @@ -146,4 +146,4 @@ ds.boxPlotGG <- function(x, group = NULL, group2 = NULL, xlabel = "x axis", ylab return(plt) -} \ No newline at end of file +} diff --git a/R/ds.boxPlotGG_data_Treatment.R b/R/ds.boxPlotGG_data_Treatment.R index f27a9110d..40f89730a 100644 --- a/R/ds.boxPlotGG_data_Treatment.R +++ b/R/ds.boxPlotGG_data_Treatment.R @@ -6,7 +6,7 @@ #' @param variables \code{character vector} Name of the column(s) of the data frame to include on the boxplot #' @param group \code{character} (default \code{NULL}) Name of the first grouping variable. #' @param group2 \code{character} (default \code{NULL}) Name of the second grouping variable. 
-#' @param datasources a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login #' #' @return Does not return nothing, it creates the table \code{"boxPlotRawData"} on the server arranged to be passed to the #' ggplot boxplot function. Structure of the created table: \cr @@ -35,4 +35,4 @@ ds.boxPlotGG_data_Treatment <- function(table, variables, group = NULL, group2 = DSI::datashield.assign.expr(datasources, "boxPlotRawData", as.symbol(cally)) -} \ No newline at end of file +} diff --git a/R/ds.boxPlotGG_data_Treatment_numeric.R b/R/ds.boxPlotGG_data_Treatment_numeric.R index 04b33e045..0cf4b383d 100644 --- a/R/ds.boxPlotGG_data_Treatment_numeric.R +++ b/R/ds.boxPlotGG_data_Treatment_numeric.R @@ -3,7 +3,7 @@ #' @description Internal function #' #' @param vector \code{character} Name of the table on the server side that holds the information to be plotted later -#' @param datasources a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login #' #' @return Does not return nothing, it creates the table \code{"boxPlotRawDataNumeric"} on the server arranged to be passed to the #' ggplot boxplot function. 
Structure of the created table: \cr diff --git a/R/ds.boxPlotGG_numeric.R b/R/ds.boxPlotGG_numeric.R index 7300c1281..c1996628a 100644 --- a/R/ds.boxPlotGG_numeric.R +++ b/R/ds.boxPlotGG_numeric.R @@ -5,7 +5,7 @@ #' @param ylabel \code{caracter} (default \code{"y axis"}) Label to put on the y axis of the plot #' @param type \code{character} Return a pooled plot (\code{"pooled"}) or a split plot (one for each study server #' \code{"split"}) -#' @param datasources a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login #' #' @return \code{ggplot} object diff --git a/R/ds.boxPlotGG_table.R b/R/ds.boxPlotGG_table.R index fe6c158ec..ebe9eb4ce 100644 --- a/R/ds.boxPlotGG_table.R +++ b/R/ds.boxPlotGG_table.R @@ -10,7 +10,7 @@ #' @param ylabel \code{caracter} (default \code{"y axis"}) Label to put on the y axis of the plot #' @param type \code{character} Return a pooled plot (\code{"pooled"}) or a split plot (one for each study server #' \code{"split"}) -#' @param datasources a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login #' #' @return \code{ggplot} object diff --git a/R/ds.bp_standards.R b/R/ds.bp_standards.R index 493744977..ae76c54ca 100644 --- a/R/ds.bp_standards.R +++ b/R/ds.bp_standards.R @@ -16,9 +16,9 @@ #' blood pressure. #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default name is set to \code{bp.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
#' If the \code{datasources} argument is not specified the default set of connections will be -#' used: see \code{\link{datashield.connections_default}}. +#' used: see \code{\link[DSI]{datashield.connections_default}}. #' @return assigns a new object on the server-side. The assigned object is a list #' with two elements: the 'Zbp' which is the zscores of the blood pressure and 'perc' #' which is the percentiles of the BP zscores. @@ -76,4 +76,4 @@ ds.bp_standards <- function(sex=NULL, age=NULL, height=NULL, bp=NULL, systolic=T cally <- call('bp_standardsDS', sex, age, height, bp, systolic) DSI::datashield.assign(datasources, newobj, cally) -} \ No newline at end of file +} diff --git a/R/ds.c.R b/R/ds.c.R index 01257aec8..2093ac013 100644 --- a/R/ds.c.R +++ b/R/ds.c.R @@ -10,9 +10,9 @@ #' @param x a vector of character string providing the names of the objects to be combined. #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{c.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.c} returns the vector of concatenating R #' objects which are written to the server-side. #' @examples diff --git a/R/ds.cbind.R b/R/ds.cbind.R index fdd1b34b0..d943e0175 100644 --- a/R/ds.cbind.R +++ b/R/ds.cbind.R @@ -32,9 +32,9 @@ #' For more information see \strong{Details}. #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Defaults \code{cbind.newobj}. 
-#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @param notify.of.progress specifies if console output should be produced to indicate #' progress. Default FALSE. #' @return \code{ds.cbind} returns a data frame combining the columns of the R diff --git a/R/ds.changeRefGroup.R b/R/ds.changeRefGroup.R index 1b8ad5565..4bd5080ae 100644 --- a/R/ds.changeRefGroup.R +++ b/R/ds.changeRefGroup.R @@ -22,9 +22,9 @@ #' @param reorderByRef logical, if TRUE the new vector #' should be ordered by the reference group (i.e. putting the reference group first). #' The default is to not re-order (see the reasons in the details). -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.changeRefGroup} returns a new vector with the specified level as a reference #' which is written to the server-side. #' @author DataSHIELD Development Team diff --git a/R/ds.class.R b/R/ds.class.R index 285b40453..036848ad8 100644 --- a/R/ds.class.R +++ b/R/ds.class.R @@ -6,9 +6,9 @@ #' #' Server function called: \code{classDS} #' @param x a character string providing the name of the input R object. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. 
+#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.class} returns the type of the R object. #' @author DataSHIELD Development Team #' @seealso \code{\link{ds.exists}} to verify if an object is defined (exists) on the server-side. diff --git a/R/ds.colnames.R b/R/ds.colnames.R index 5950d99bc..a4b98b1ad 100644 --- a/R/ds.colnames.R +++ b/R/ds.colnames.R @@ -6,9 +6,9 @@ #' #' Server function called: \code{colnamesDS} #' @param x a character string providing the name of the input data frame or matrix. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.colnames} returns the column names of #' the specified server-side data frame or matrix. #' @author DataSHIELD Development Team diff --git a/R/ds.completeCases.R b/R/ds.completeCases.R index fed432083..ed95bf6d3 100644 --- a/R/ds.completeCases.R +++ b/R/ds.completeCases.R @@ -14,9 +14,9 @@ #' that is stored on the data servers. If the user does not specify a name, then the function #' generates a name for the generated object that is the name of the input object with the #' suffix "_complete.cases" -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. 
+#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified, the default set of connections will be -#' used: see \code{\link{datashield.connections_default}}. +#' used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.completeCases} generates a modified data frame, matrix or vector from which #' all rows containing at least one NA have been deleted. The output object is stored on the #' server-side. Only two validity messages are returned to the client-side indicating the name diff --git a/R/ds.contourPlot.R b/R/ds.contourPlot.R index 7973c1d15..4e195e48b 100644 --- a/R/ds.contourPlot.R +++ b/R/ds.contourPlot.R @@ -50,9 +50,9 @@ #' see details. #' @param noise the percentage of the initial variance that is used as the variance of the embedded #' noise if the argument \code{method} is set to \code{'probabilistic'}. For more information see details. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.contourPlot} returns a contour plot to the client-side. #' @author DataSHIELD Development Team #' @examples diff --git a/R/ds.cor.R b/R/ds.cor.R index 1a5c86d64..53fb22db4 100644 --- a/R/ds.cor.R +++ b/R/ds.cor.R @@ -27,9 +27,9 @@ #' Default NULL. #' @param type a character string that represents the type of analysis to carry out. #' This must be set to \code{'split'} or \code{'combine'}. Default \code{'split'}. For more information see details. 
-#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.cor} returns a list containing the number of missing values in each variable, #' the number of missing variables casewise, the correlation matrix, #' the number of used complete cases. The function applies two disclosure controls. The first disclosure diff --git a/R/ds.corTest.R b/R/ds.corTest.R index 7be1f992e..3c9e42a81 100644 --- a/R/ds.corTest.R +++ b/R/ds.corTest.R @@ -10,7 +10,7 @@ #' used for the test. One of "pearson", "kendall", or "spearman", can be abbreviated. #' Default is set to "pearson". #' @param exact a logical indicating whether an exact p-value should be computed. Used for -#' Kendall's tau and Spearman's rho. See ‘Details’ of R stats function \code{cor.test} for +#' Kendall's tau and Spearman's rho. See \emph{Details} of R stats function \code{cor.test} for #' the meaning of NULL (the default). #' @param conf.level confidence level for the returned confidence interval. Currently #' only used for the Pearson product moment correlation coefficient if there are at least @@ -19,9 +19,9 @@ #' This must be set to \code{'split'} or \code{'combine'}. Default is set to \code{'split'}. If #' \code{type} is set to "combine" then an approximated pooled correlation is estimated based on #' Fisher's z transformation. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. 
If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.corTest} returns to the client-side the results of the correlation test. #' @author DataSHIELD Development Team #' @export diff --git a/R/ds.cov.R b/R/ds.cov.R index 58aedc945..c67d2e134 100644 --- a/R/ds.cov.R +++ b/R/ds.cov.R @@ -35,9 +35,9 @@ #' \code{'pairwise.complete'}. Default \code{'pairwise.complete'}. For more information see details. #' @param type a character string that represents the type of analysis to carry out. #' This must be set to \code{'split'} or \code{'combine'}. Default \code{'split'}. For more information see details. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.cov} returns a list containing the number of missing values in each variable, the number of missing values #' casewise or pairwise depending on the argument \code{naAction}, the covariance matrix, the number of used complete cases #' and an error message which indicates whether or not the input variables pass the disclosure controls. The first disclosure diff --git a/R/ds.dataFrame.R b/R/ds.dataFrame.R index e71c1dafe..5837747c6 100644 --- a/R/ds.dataFrame.R +++ b/R/ds.dataFrame.R @@ -32,9 +32,9 @@ #' are the same #' @param newobj a character string that provides the name for the output data frame #' that is stored on the data servers. Default \code{dataframe.newobj}. 
-#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @param notify.of.progress specifies if console output should be produced to indicate #' progress. Default is FALSE. #' @return \code{ds.dataFrame} returns the object specified by the \code{newobj} argument diff --git a/R/ds.dataFrameFill.R b/R/ds.dataFrameFill.R index eef95117f..3de389b7d 100644 --- a/R/ds.dataFrameFill.R +++ b/R/ds.dataFrameFill.R @@ -13,9 +13,9 @@ #' filled with extra columns of missing values. #' @param newobj a character string that provides the name for the output data frame #' that is stored on the data servers. Default value is "dataframefill.newobj". -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.dataFrameFill} returns the object specified by the \code{newobj} argument which #' is written to the server-side. 
Also, two validity messages are returned to the #' client-side indicating the name of the \code{newobj} that has been created in each data source diff --git a/R/ds.dataFrameSort.R b/R/ds.dataFrameSort.R index d170dd87b..de59d61e8 100644 --- a/R/ds.dataFrameSort.R +++ b/R/ds.dataFrameSort.R @@ -33,9 +33,9 @@ #' @param newobj a character string that provides the name for the output data frame #' that is stored on the data servers. Default \code{dataframesort.newobj}. #' where \code{df.name} is the first argument of \code{ds.dataFrameSort()}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.dataFrameSort} returns the sorted data frame is written to the server-side. #' Also, two validity messages are returned to the client-side #' indicating the name of the \code{newobj} which @@ -203,4 +203,4 @@ ds.dataFrameSort<-function(df.name=NULL, sort.key.name=NULL, sort.descending=FAL #END OF CHECK OBJECT CREATED CORRECTLY MODULE # ########################################################################################################### } -#ds.dataFrameSort \ No newline at end of file +#ds.dataFrameSort diff --git a/R/ds.dataFrameSubset.R b/R/ds.dataFrameSubset.R index 3b6dff97b..1c5ff6a00 100644 --- a/R/ds.dataFrameSubset.R +++ b/R/ds.dataFrameSubset.R @@ -12,7 +12,7 @@ #' #' Server functions called: \code{dataFrameSubsetDS1} and \code{dataFrameSubsetDS2} #' -#' @param df.name a character string providing the name of the data frame to be subseted. +#' @param df.name a character string providing the name of the data frame to be subset. 
#' @param V1.name A character string specifying the name of the vector #' to which the Boolean operator is to be applied to define the subset. #' For more information see details. @@ -28,9 +28,9 @@ #' If FALSE or NULL all rows with at least one missing values are removed from the subset. #' @param newobj a character string that provides the name for the output #' object that is stored on the data servers. Default \code{dataframesubset.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @param notify.of.progress specifies if console output should be produced to indicate #' progress. Default FALSE. #' @return \code{ds.dataFrameSubset} returns diff --git a/R/ds.densityGrid.R b/R/ds.densityGrid.R index 33e1bb6dc..b0766418a 100644 --- a/R/ds.densityGrid.R +++ b/R/ds.densityGrid.R @@ -23,9 +23,9 @@ #' \code{'combine'}, a pooled grid density matrix is generated, #' instead if \code{type} is set to \code{'split'} #' one grid density matrix is generated. Default \code{'combine'}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.densityGrid} returns a grid density matrix. 
#' @author DataSHIELD Development Team #' @export diff --git a/R/ds.dim.R b/R/ds.dim.R index 613bd984b..4a6cd3a76 100644 --- a/R/ds.dim.R +++ b/R/ds.dim.R @@ -22,9 +22,9 @@ #' Default \code{'both'}. #' @param checks logical. If TRUE undertakes all DataSHIELD checks (time-consuming). #' Default FALSE. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.dim} retrieves to the client-side the dimension of the object #' in the form of a vector where the first #' element indicates the number of rows and the second element indicates the number of columns. diff --git a/R/ds.dmtC2S.R b/R/ds.dmtC2S.R index 1f91efbfe..ee2e4be67 100644 --- a/R/ds.dmtC2S.R +++ b/R/ds.dmtC2S.R @@ -29,7 +29,7 @@ #' wish to change the connections you wish to use by default the call #' datashield.connections_default('opals.a') will set 'default.connections' #' to be 'opals.a' and so in the absence of specific instructions to the contrary -#' (e.g. by specifiying a particular dataset to be used via the +#' (e.g. by specifying a particular dataset to be used via the #' argument) all subsequent function calls will be to the datasets held in opals.a. #' If the argument is specified, it should be set without #' inverted commas: e.g. datasources=opals.a or datasources=default.connections. 
diff --git a/R/ds.elspline.R b/R/ds.elspline.R index 1fc4cffe3..01ddca05b 100644 --- a/R/ds.elspline.R +++ b/R/ds.elspline.R @@ -13,13 +13,13 @@ #' @param x the name of the input numeric variable #' @param n integer greater than 2, knots are computed such that they cut n equally-spaced #' intervals along the range of x -#' @param marginal logical, how to parametrize the spline, see Details +#' @param marginal logical, how to parametrise the spline, see Details #' @param names character, vector of names for constructed variables #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. Default \code{elspline.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return an object of class "lspline" and "matrix", which its name is specified by the #' \code{newobj} argument (or its default name "elspline.newobj"), is assigned on the serverside. #' @author Demetris Avraam for DataSHIELD Development Team @@ -61,4 +61,4 @@ ds.elspline <- function(x, n, marginal = FALSE, names = NULL, newobj = NULL, dat calltext <- call("elsplineDS", x, n, marginal, names) DSI::datashield.assign(datasources, newobj, calltext) -} \ No newline at end of file +} diff --git a/R/ds.exists.R b/R/ds.exists.R index b6ef9e66b..6dff8b4c1 100644 --- a/R/ds.exists.R +++ b/R/ds.exists.R @@ -10,9 +10,9 @@ #' #' Server function called: \code{exists} #' @param x a character string providing the name of the object to look for. 
-#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.exists} returns a logical object. #' TRUE if the object is on the server-side and FALSE otherwise. #' @author DataSHIELD Development Team diff --git a/R/ds.exp.R b/R/ds.exp.R index 2fd9da3b8..5bf325bd8 100644 --- a/R/ds.exp.R +++ b/R/ds.exp.R @@ -9,9 +9,9 @@ #' @param x a character string providing the name of a numerical vector. #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Default \code{exp.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.exp} returns a vector for each study of the exponential values for the numeric vector #' specified in the argument \code{x}. The created vectors are stored in the server-side. 
#' @author DataSHIELD Development Team diff --git a/R/ds.forestplot.R b/R/ds.forestplot.R index 3b4873875..60227913c 100644 --- a/R/ds.forestplot.R +++ b/R/ds.forestplot.R @@ -1,10 +1,10 @@ #' @title Forestplot for SLMA models -#' @description Draws a foresplot of the coefficients for Study-Level Meta-Analysis performed with +#' @description Draws a forestplot of the coefficients for Study-Level Meta-Analysis performed with #' DataSHIELD #' -#' @param mod \code{list} List outputed by any of the SLMA models of DataSHIELD (\code{ds.glmerSLMA}, +#' @param mod \code{list} List outputted by any of the SLMA models of DataSHIELD (\code{ds.glmerSLMA}, #' \code{ds.glmSLMA}, \code{ds.lmerSLMA}) -#' @param variable \code{character} (default \code{NULL}) Variable to meta-analyze and visualize, by setting this +#' @param variable \code{character} (default \code{NULL}) Variable to meta-analyse and visualise, by setting this #' argument to \code{NULL} (default) the first independent variable will be used. #' @param method \code{character} (Default \code{"ML"}) Method to estimate the between study variance. #' See details from \code{?meta::metagen} for the different options. diff --git a/R/ds.gamlss.R b/R/ds.gamlss.R index bd9d82f3f..6a7622c76 100644 --- a/R/ds.gamlss.R +++ b/R/ds.gamlss.R @@ -64,9 +64,9 @@ #' only. #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{gamlss_res}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return a gamlss object with all components as in the native R gamlss function. 
#' Individual-level information like the components y (the response response) and #' residuals (the normalised quantile residuals of the model) are not disclosed to diff --git a/R/ds.getWGSR.R b/R/ds.getWGSR.R index f883549af..ff4c60f51 100644 --- a/R/ds.getWGSR.R +++ b/R/ds.getWGSR.R @@ -52,9 +52,9 @@ #' by the formula $age_days=age_months*(365.25/12)$. #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Defaults \code{getWGSR.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified the default set of connections will be -#' used: see \code{\link{datashield.connections_default}}. +#' used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.getWGSR} assigns a vector for each study that includes the z-scores for the #' specified index. The created vectors are stored in the servers. #' @author Demetris Avraam for DataSHIELD Development Team diff --git a/R/ds.glm.R b/R/ds.glm.R index 3e18120c7..13cba2d96 100644 --- a/R/ds.glm.R +++ b/R/ds.glm.R @@ -17,7 +17,7 @@ #' #' Many GLMs can be fitted very simply using a formula such as: #' -#' \deqn{y~a+b+c+d} +#' \eqn{y~a+b+c+d} #' #' which simply means fit a GLM with \code{y} as the outcome variable and #' \code{a}, \code{b}, \code{c} and \code{d} as covariates. 
@@ -26,7 +26,7 @@ #' Instead, if you need to fit a more complex #' model, for example: #' -#' \deqn{EVENT~1+TID+SEXF*AGE.60} +#' \eqn{EVENT~1+TID+SEXF*AGE.60} #' #' In the above model the outcome variable is \code{EVENT} #' and the covariates @@ -46,7 +46,7 @@ #' #' In the \code{family} argument can be specified three types of models to fit: #' -#' \itemize{ +#' \describe{ #' \item{\code{"gaussian"}}{: conventional linear model with normally distributed errors} #' \item{\code{"binomial"}}{: conventional unconditional logistic regression model} #' \item{\code{"poisson"}}{: Poisson regression model which is the most used in survival analysis. @@ -63,7 +63,7 @@ #' The \code{data} argument avoids you having to specify the name of the #' data frame in front of each covariate in the formula. #' For example, if the data frame is called \code{DataFrame} you -#' avoid having to write: \eqn{DataFrame$y~DataFrame$a+DataFrame$b+DataFrame$c+DataFrame$d} +#' avoid having to write: \eqn{DataFrame\$y ~ DataFrame\$a + DataFrame\$b + DataFrame\$c + DataFrame\$d} #' #' The \code{checks} argument verifies that the variables in the model are all defined (exist) #' on the server-side at every study @@ -164,9 +164,9 @@ #' of parameter estimates is returned. Default FALSE. #' @param viewCor logical. If TRUE the correlation matrix of #' parameter estimates is returned. Default FALSE. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return Many of the elements of the output list returned by \code{ds.glm} are #' equivalent to those returned by the \code{glm()} function in native R. 
However, #' potentially disclosive elements @@ -190,7 +190,7 @@ #' @return \code{family}: error family and link function. #' @return \code{formula}: model formula, see description of formula as an input parameter (above). #' @return \code{coefficients}: a matrix with 5 columns: -#' \itemize{ +#' \describe{ #' \item{First}{: the names of all of the regression parameters (coefficients) in the model} #' \item{second}{: the estimated values} #' \item{third}{: corresponding standard errors of the estimated values} diff --git a/R/ds.glmPredict.R b/R/ds.glmPredict.R index ff0d7647e..96dfc792c 100644 --- a/R/ds.glmPredict.R +++ b/R/ds.glmPredict.R @@ -91,7 +91,7 @@ #' wish to change the connections you wish to use by default the call #' datashield.connections_default('opals.a') will set 'default.connections' #' to be 'opals.a' and so in the absence of specific instructions to the contrary -#' (e.g. by specifiying a particular dataset to be used via the +#' (e.g. by specifying a particular dataset to be used via the #' argument) all subsequent function calls will be to the datasets held in opals.a. #' If the argument is specified, it should be set without #' inverted commas: e.g. datasources=opals.a or datasources=default.connections. @@ -116,7 +116,7 @@ #' the name - if one was specified - of the dataframe being used as the basis for predictions, #' the output.type specified ('link', 'response' or 'terms'), the value of the #' dispersion parameter if one had been specified and the residual scale parameter (which is -#' multipled by sqrt(dispersion parameter) if one has been set). If output.type = 'terms', +#' multiplied by sqrt(dispersion parameter) if one has been set). If output.type = 'terms', #' the summary statistics for the fit and se.fit vectors are replaced by equivalent #' summary statistics for each column in fit and se.fit matrices which each have k columns #' if k terms are being summarised. 
diff --git a/R/ds.glmSLMA.R b/R/ds.glmSLMA.R index 5b782994e..9b46a6a40 100644 --- a/R/ds.glmSLMA.R +++ b/R/ds.glmSLMA.R @@ -61,7 +61,7 @@ #' #' Many glms can be fitted very simply using a formula such as: #' -#' \deqn{y~a+b+c+d} +#' \eqn{y~a+b+c+d} #' #' which simply means fit a glm with \code{y} as the outcome variable and #' \code{a}, \code{b}, \code{c} and \code{d} as covariates. @@ -70,7 +70,7 @@ #' Instead, if you need to fit a more complex #' model, for example: #' -#' \deqn{EVENT~1+TID+SEXF*AGE.60} +#' \eqn{EVENT~1+TID+SEXF*AGE.60} #' #' In the above model the outcome variable is \code{EVENT} #' and the covariates @@ -91,7 +91,7 @@ #' relatively widely. #' #' The standard models include: -#' \itemize{ +#' \describe{ #' \item{\code{"gaussian"}}{: conventional linear model with normally distributed errors} #' \item{\code{"binomial"}}{: conventional unconditional logistic regression model} #' \item{\code{"poisson"}}{: Poisson regression model which is often used in epidemiological @@ -134,7 +134,7 @@ #' The \code{dataName} argument avoids you having to specify the name of the #' data frame in front of each covariate in the formula. #' For example, if the data frame is called \code{DataFrame} you -#' avoid having to write: \eqn{DataFrame$y~DataFrame$a+DataFrame$b+DataFrame$c+DataFrame$d} +#' avoid having to write: \eqn{DataFrame\$y ~ DataFrame\$a + DataFrame\$b + DataFrame\$c + DataFrame\$d} #' #' The \code{checks} argument verifies that the variables in the model are all defined (exist) #' on the server-site at every study @@ -181,9 +181,9 @@ #' For more information see \strong{Details}. #' @param notify.of.progress specifies if console output should be produced to indicate #' progress. Default FALSE. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
#' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return The serverside aggregate functions \code{glmSLMADS1} and \code{glmSLMADS2} return #' output to the clientside, while the assign function \code{glmSLMADS.assign} simply writes #' the glm object to the serverside @@ -199,7 +199,7 @@ #' a series of other list objects that represent inferences aggregated across studies. #' @return the study specific items include: #' @return \code{coefficients}: a matrix with 5 columns: -#' \itemize{ +#' \describe{ #' \item{First}{: the names of all of the regression parameters (coefficients) in the model} #' \item{second}{: the estimated values} #' \item{third}{: corresponding standard errors of the estimated values} @@ -263,7 +263,7 @@ #' them into their meta-analysis package of choice. #' @return \code{is.object.created} and \code{validity.check} are standard #' items returned by an assign function when the designated newobj appears to have -#' been successfuly created on the serverside at each study. This output is +#' been successfully created on the serverside at each study. This output is #' produced specifically by the assign function \code{glmSLMADS.assign} that writes #' out the glm object on the serverside #' @author Paul Burton, for DataSHIELD Development Team 07/07/20 diff --git a/R/ds.glmSummary.R b/R/ds.glmSummary.R index 38df81637..5629d937f 100644 --- a/R/ds.glmSummary.R +++ b/R/ds.glmSummary.R @@ -48,7 +48,7 @@ #' wish to change the connections you wish to use by default the call #' datashield.connections_default('opals.a') will set 'default.connections' #' to be 'opals.a' and so in the absence of specific instructions to the contrary -#' (e.g. by specifiying a particular dataset to be used via the +#' (e.g. 
by specifying a particular dataset to be used via the #' argument) all subsequent function calls will be to the datasets held in opals.a. #' If the argument is specified, it should be set without #' inverted commas: e.g. datasources=opals.a or datasources=default.connections. diff --git a/R/ds.glmerSLMA.R b/R/ds.glmerSLMA.R index 0f8b280fe..8bb8aa36f 100644 --- a/R/ds.glmerSLMA.R +++ b/R/ds.glmerSLMA.R @@ -18,23 +18,23 @@ #' In \code{formula} most shortcut notation allowed by \code{glmer()} function is #' also allowed by \code{ds.glmerSLMA}. #' Many GLMEs can be fitted very simply using a formula like: -#' \deqn{y~a+b+(1|c)} +#' \eqn{y~a+b+(1|c)} #' which simply means fit an GLME with \code{y} as the outcome variable (e.g. #' a binary case-control using a logistic regression model or a count or a survival #' time using a Poisson regression model), \code{a} and \code{b} #' as fixed effects, and \code{c} as a random effect or grouping factor. #' #' It is also possible to fit models with random slopes by specifying a model such as -#' \deqn{y~a+b+(1+b|c)} +#' \eqn{y~a+b+(1+b|c)} #' where the effect of \code{b} can vary randomly between groups defined by \code{c}. #' Implicit nesting can be specified with formulas such as: \eqn{y~a+b+(1|c/d)} -#' or \eqn{y~a+b+(1|c)+(1|c:d)}. +#' or \eqn{y~a+b+(1|c)+(1|c:d)}. #' #' #' The \code{dataName} argument avoids you having to specify the name of the #' data frame in front of each covariate in the formula. #' For example, if the data frame is called \code{DataFrame} you avoid having to write: -#' \eqn{DataFrame$y~DataFrame$a+DataFrame$b+(1|DataFrame$c)}. +#' \eqn{DataFrame\$y ~ DataFrame\$a + DataFrame\$b + (1 | DataFrame\$c)}. 
#' #' The \code{checks} argument verifies that the variables in the model are all defined (exist) #' on the server-site at every study @@ -44,7 +44,7 @@ #' #' #' In the \code{family} argument can be specified two types of models to fit: -#' \itemize{ +#' \describe{ #' \item{\code{"binomial"}}{: logistic regression models} #' \item{\code{"poisson"}}{: poisson regression models} #' } @@ -90,9 +90,9 @@ #' that contains all of the variables in the GLME formula. For more information see \strong{Details}. #' @param checks logical. If TRUE \code{ds.glmerSLMA} checks the structural integrity #' of the model. Default FALSE. For more information see \strong{Details}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @param family a character string specifying the distribution of the observed #' value of the outcome variable around the predictions generated by the linear predictor. #' This can be set as \code{"binomial"} or \code{"poisson"}. @@ -133,7 +133,7 @@ #' The list of elements returned by \code{ds.glmerSLMA} is mentioned below: #' #' @return \code{coefficients}: a matrix with 5 columns: -#' \itemize{ +#' \describe{ #' \item{First}{: the names of all of the regression parameters (coefficients) in the model} #' \item{second}{: the estimated values} #' \item{third}{: corresponding standard errors of the estimated values} diff --git a/R/ds.heatmapPlot.R b/R/ds.heatmapPlot.R index b7fafe6bb..262b1d700 100644 --- a/R/ds.heatmapPlot.R +++ b/R/ds.heatmapPlot.R @@ -11,19 +11,19 @@ #' minimum and maximum value. This was done to reduce the risk of potential disclosure. 
#' #' In the argument \code{type} can be specified two types of graphics to display: -#' \itemize{ +#' \describe{ #' \item{\code{'combine'}}{: a combined heat map plot is displayed} #' \item{\code{'split'}}{: each heat map is plotted separately} #' } #' #' In the argument \code{show} can be specified two options: -#' \itemize{ +#' \describe{ #' \item{\code{'all'}}{: the ranges of the variables are used as plot limits} #' \item{\code{'zoomed'}}{: the plot is zoomed to the region where the actual data are} #' } #' #' In the argument \code{method} can be specified 3 different heat map to be created: -#' \itemize{ +#' \describe{ #' \item{\code{'smallCellsRule'}}{: the heat map of the actual variables is #' created but grids with low counts are replaced with grids with zero counts} #' \item{\code{'deterministic'}}{: the heat map of the scaled centroids of each @@ -82,9 +82,9 @@ #' noise if the argument \code{method} is set to \code{'probabilistic'}. #' Default \code{noise} value is \code{0.25}. #' For more information see \strong{Details}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.heatmapPlot} returns to the client-side a heat map plot and a message specifying #' the number of invalid cells in each study. #' @author DataSHIELD Development Team diff --git a/R/ds.hetcor.R b/R/ds.hetcor.R index 1b0e77f63..2b29be240 100644 --- a/R/ds.hetcor.R +++ b/R/ds.hetcor.R @@ -17,9 +17,9 @@ #' nearPD directly. 
#' @param use if "complete.obs", remove observations with any missing data; if "pairwise.complete.obs", #' compute each correlation using all observations with valid data for that pair of variables. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified the default set of connections will be -#' used: see \code{\link{datashield.connections_default}}. +#' used: see \code{\link[DSI]{datashield.connections_default}}. #' @return Returns an object of class "hetcor" from each study, with the following components: the #' correlation matrix; the type of each correlation: "Pearson", "Polychoric", or "Polyserial"; the #' standard errors of the correlations, if requested; the number (or numbers) of observations on which @@ -53,4 +53,4 @@ ds.hetcor <- function(data=NULL, ML=TRUE, std.err=TRUE, bins=4, pd=TRUE, use="co return(output) -} \ No newline at end of file +} diff --git a/R/ds.histogram.R b/R/ds.histogram.R index 5997c5c52..0f5357b77 100644 --- a/R/ds.histogram.R +++ b/R/ds.histogram.R @@ -6,13 +6,13 @@ #' the single plots. #' #' In the argument \code{type} can be specified two types of graphics to display: -#' \itemize{ +#' \describe{ #' \item{\code{'combine'}}{: a histogram that merges the single plot is displayed.} #' \item{\code{'split'}}{: each histogram is plotted separately.} #' } #' #' In the argument \code{method} can be specified 3 different histograms to be created: -#' \itemize{ +#' \describe{ #' \item{\code{'smallCellsRule'}}{: the histogram of the actual variable is #' created but bins with low counts are removed.} #' \item{\code{'deterministic'}}{: the histogram of the scaled centroids of each @@ -48,7 +48,7 @@ #' By default the value of noise is set to be equal to 0.25. 
#' #' In the argument \code{vertical.axis} can be specified two types of histograms: -#' \itemize{ +#' \describe{ #' \item{\code{'Frequency'}}{: the histogram of the frequencies #' is returned.} #' \item{\code{'Density'}}{: the histogram of the densities @@ -79,9 +79,9 @@ #' plot. The \code{vertical.axis} argument can be set as \code{'Frequency'} or \code{'Density'}. #' Default \code{'Frequency'}. #' For more information see \strong{Details}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return one or more histogram objects and plots depending on the argument \code{type} #' @author DataSHIELD Development Team #' @export diff --git a/R/ds.igb_standards.R b/R/ds.igb_standards.R index c3d74f405..89498a0ca 100644 --- a/R/ds.igb_standards.R +++ b/R/ds.igb_standards.R @@ -15,16 +15,25 @@ #' "igb_zscore2value", "igb_value2zscore" (default), "igb_value2centile". #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Default name is set to \code{igb.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified the default set of connections will be -#' used: see \code{\link{datashield.connections_default}}. +#' used: see \code{\link[DSI]{datashield.connections_default}}. #' @note For gestational ages between 24 and 33 weeks, the INTERGROWTH very early preterm #' standard is used. 
-#' @references International standards for newborn weight, length, and head circumference by -#' gestational age and sex: the Newborn Cross-Sectional Study of the INTERGROWTH-21st Project -#' Villar, José et al. The Lancet, Volume 384, Issue 9946, 857-868 -#' INTERGROWTH-21st very preterm size at birth reference charts. Lancet 2016 -#' doi.org/10.1016/S0140-6736(16) 00384-6. Villar, José et al. +#' @references +#' \itemize{ +#' \item Villar, J., Ismail, L.C., Victora, C.G., Ohuma, E.O., Bertino, E., +#' Altman, D.G., Lambert, A., Papageorghiou, A.T., Carvalho, M., Jaffer, Y.A., +#' Gravett, M.G., Purwar, M., Frederick, I.O., Noble, A.J., Pang, R., Barros, +#' F.C., Chumlea, C., Bhutta, Z.A., Kennedy, S.H., 2014. International +#' standards for newborn weight, length, and head circumference by gestational +#' age and sex: the Newborn Cross-Sectional Study of the INTERGROWTH-21st +#' Project. The Lancet 384, 857--868. https://doi.org/10.1016/S0140-6736(14)60932-6 +#' \item Villar, J., Giuliani, F., Fenton, T.R., Ohuma, E.O., Ismail, L.C., +#' Kennedy, S.H., 2016. INTERGROWTH-21st very preterm size at birth reference +#' charts. The Lancet 387, 844--845. https://doi.org/10.1016/S0140-6736(16)00384-6 +#' +#' } #' @return assigns the converted measurement as a new object on the server-side #' @author Demetris Avraam for DataSHIELD Development Team #' @export @@ -82,4 +91,4 @@ ds.igb_standards <- function(gagebrth=NULL, z=0, p=50, val=NULL, var=NULL, sex=N cally <- call('igb_standardsDS', gagebrth, z, p, val, var, sex, fun) DSI::datashield.assign(datasources, newobj, cally) -} \ No newline at end of file +} diff --git a/R/ds.isNA.R b/R/ds.isNA.R index 28ff808eb..1d84577f7 100644 --- a/R/ds.isNA.R +++ b/R/ds.isNA.R @@ -8,9 +8,9 @@ #' #' Server function called: \code{isNaDS} #' @param x a character string specifying the name of the vector to check. 
-#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.isNA} returns a boolean. If it is TRUE the vector is empty #' (all values are NA), FALSE otherwise. #' @author DataSHIELD Development Team diff --git a/R/ds.isValid.R b/R/ds.isValid.R index 34a942230..e43b61b1f 100644 --- a/R/ds.isValid.R +++ b/R/ds.isValid.R @@ -10,9 +10,9 @@ #' #' Server function called: \code{isValidDS} #' @param x a character string specifying the name of a vector, dataframe or matrix. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.isValid} returns a boolean. If it is TRUE input object is valid, FALSE otherwise. #' @author DataSHIELD Development Team #' @export diff --git a/R/ds.kurtosis.R b/R/ds.kurtosis.R index c262201b9..974682bba 100644 --- a/R/ds.kurtosis.R +++ b/R/ds.kurtosis.R @@ -17,9 +17,9 @@ #' if \code{type} is set to 'split', 'splits' or 's', the kurtosis is returned separately for each study. #' if \code{type} is set to 'both' or 'b', both sets of outputs are produced. #' The default value is set to 'both'. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
#' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return a matrix showing the kurtosis of the input numeric variable, the number of valid observations and #' the validity message. #' @author Demetris Avraam, for DataSHIELD Development Team diff --git a/R/ds.length.R b/R/ds.length.R index a288de72f..83cb5cae6 100644 --- a/R/ds.length.R +++ b/R/ds.length.R @@ -17,9 +17,9 @@ #' @param checks logical. If TRUE the model components are checked. #' Default FALSE to save time. It is suggested that checks #' should only be undertaken once the function call has failed. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.length} returns to the client-side the pooled length of a vector or a list, #' or the length of a vector or a list for each study separately. #' @author DataSHIELD Development Team diff --git a/R/ds.levels.R b/R/ds.levels.R index 1943ea0cf..b32a5d1c6 100644 --- a/R/ds.levels.R +++ b/R/ds.levels.R @@ -6,9 +6,9 @@ #' @details #' Server function called: \code{levelsDS} #' @param x a character string specifying the name of a factor variable. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. 
+#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.levels} returns to the client-side the levels of a factor #' class variable stored in the server-side. #' @author DataSHIELD Development Team diff --git a/R/ds.lexis.R b/R/ds.lexis.R index d692a65d7..665a29ed4 100644 --- a/R/ds.lexis.R +++ b/R/ds.lexis.R @@ -127,9 +127,9 @@ #' variables to include in the final expanded table. For more information see \strong{Details}. #' @param expandDF a character string denoting the name of the new data frame containing the #' expanded data set. Default \code{lexis.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.lexis} returns to the server-side a data frame for each study with #' the expanded version of the input table. #' diff --git a/R/ds.list.R b/R/ds.list.R index 239399db4..b08477031 100644 --- a/R/ds.list.R +++ b/R/ds.list.R @@ -8,9 +8,9 @@ #' @param x a character string specifying the names of the objects to coerce into a list. #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Default \code{list.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. 
+#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.list} returns a list of objects for each study that is stored on the server-side. #' @author DataSHIELD Development Team #' @export diff --git a/R/ds.listDisclosureSettings.R b/R/ds.listDisclosureSettings.R index ec4546bb2..33c73875c 100644 --- a/R/ds.listDisclosureSettings.R +++ b/R/ds.listDisclosureSettings.R @@ -51,9 +51,9 @@ #' disclosure. #' #' Server function called: \code{listDisclosureSettingsDS} -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.listDisclosureSettings} returns a list containing the current settings of the #' \code{nfilters} in each study specified. #' @author DataSHIELD Development Team diff --git a/R/ds.listServersideFunctions.R b/R/ds.listServersideFunctions.R index fb5128a29..97188b288 100644 --- a/R/ds.listServersideFunctions.R +++ b/R/ds.listServersideFunctions.R @@ -1,6 +1,6 @@ #' @title Lists server-side functions #' @description Lists all current server-side functions -#' @details Uses \code{\link{datashield.methods}} function from \code{DSI} package to list all +#' @details Uses \code{\link[DSI]{datashield.methods}} function from \code{DSI} package to list all #' assign and aggregate functions on the available data repository servers. #' The only choice of arguments is in \code{datasources}; i.e. which studies to interrogate. #' Once the studies have @@ -8,9 +8,9 @@ #' of these studies and then all aggregate functions for all of them. #' #' This function does not call any server-side function. 
-#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.listServersideFunctions} returns to the client-side #' a list containing all server-side functions separately for each study. #' Firstly lists assign and then aggregate functions. diff --git a/R/ds.lmerSLMA.R b/R/ds.lmerSLMA.R index c0f4444b3..b6d05c9bf 100644 --- a/R/ds.lmerSLMA.R +++ b/R/ds.lmerSLMA.R @@ -18,20 +18,20 @@ #' #' In \code{formula} most shortcut notation allowed by \code{lmer()} function is #' also allowed by \code{ds.lmerSLMA}. Many LMEs can be fitted very simply using a formula like: -#' \deqn{y~a+b+(1|c)} +#' \eqn{y ~ a + b + (1 | c)} #' which simply means fit an LME with \code{y} as the outcome variable with \code{a} and \code{b} #' as fixed effects, and \code{c} as a random effect or grouping factor. #' #' It is also possible to fit models with random slopes by specifying a model such as -#' \deqn{y~a+b+(1+b|c)} +#' \eqn{y ~ a + b + (1 + b | c)} #' where the effect of \code{b} can vary randomly between groups defined by \code{c}. -#' Implicit nesting can be specified with formulae such as \eqn{y~a+b+(1|c/d)} -#' or \eqn{y~a+b+(1|c)+(1|c:d)}. +#' Implicit nesting can be specified with formulae such as \eqn{y ~ a + b + (1 | c / d)} +#' or \eqn{y ~ a + b + (1 | c) + (1 | c : d)}. #' #' The \code{dataName} argument avoids you having to specify the name of the #' data frame in front of each covariate in the formula. #' For example, if the data frame is called \code{DataFrame} you avoid having to write: -#' \eqn{DataFrame$y~DataFrame$a+DataFrame$b+(1|DataFrame$c)}. 
+#' \eqn{DataFrame\$y ~ DataFrame\$a + DataFrame\$b + (1 | DataFrame\$c)}. #' #' The \code{checks} argument verifies that the variables in the model are all defined (exist) #' on the server-site at every study @@ -84,9 +84,9 @@ #' For more information see \strong{Details}. #' @param checks logical. If TRUE \code{ds.lmerSLMA} checks the structural integrity #' of the model. Default FALSE. For more information see \strong{Details}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @param REML logical. If TRUE the REstricted Maximum Likelihood (REML) #' is used for parameter optimization. #' If FALSE the parameters are optimized using standard ML (maximum likelihood). Default TRUE. @@ -124,7 +124,7 @@ #' @return \code{ds.lmerSLMA} returns a list of elements mentioned #' below separately for each study. #' @return \code{coefficients}: a matrix with 5 columns: -#' \itemize{ +#' \describe{ #' \item{First}{: the names of all of the regression parameters (coefficients) in the model} #' \item{second}{: the estimated values} #' \item{third}{: corresponding standard errors of the estimated values} @@ -192,7 +192,7 @@ #' #' # Fit the lmer #' -#' ds.lmerSLMA(formula = "BMI ~ incid_rate + diabetes + (1 | Male)", +#' ds.lmerSLMA(formula = "BMI ~ incid_rate + diabetes + (1 | Male)", #' dataName = "D.comp", #' datasources = connections) #' diff --git a/R/ds.log.R b/R/ds.log.R index faed3f10b..8c0b2e5d2 100644 --- a/R/ds.log.R +++ b/R/ds.log.R @@ -8,9 +8,9 @@ #' Default \code{exp(1)}. 
#' @param newobj a character string that provides the name for the output variable #' that is stored on the server-side. Default \code{log.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.log} returns a vector for each study of the transformed values for the numeric vector #' specified in the argument \code{x}. The created vectors are stored in the server-side. #' @author DataSHIELD Development Team diff --git a/R/ds.look.R b/R/ds.look.R index d45e50478..8dffa52f2 100644 --- a/R/ds.look.R +++ b/R/ds.look.R @@ -20,9 +20,9 @@ #' For more information see \strong{Details}. #' @param checks logical. If TRUE the optional checks are undertaken. #' Default FALSE to save time. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return the output from the specified server-side aggregate function to the client-side. #' @author DataSHIELD Development Team #' diff --git a/R/ds.ls.R b/R/ds.ls.R index 8cfbf3ca7..2f65a3c8f 100644 --- a/R/ds.ls.R +++ b/R/ds.ls.R @@ -41,7 +41,7 @@ #' Server function called: \code{lsDS}. 
#' #' @param search.filter character string (potentially including \code{*} symbol) specifying the filter -#' for the object name that you want to find in the enviroment. For more information see \strong{Details}. +#' for the object name that you want to find in the environment. For more information see \strong{Details}. #' @param env.to.search an integer (e.g. in \code{2} or \code{2L} format) specifying the position #' in the search path of the environment to be explored. \code{1L} is the current active analytic #' environment on the server-side and is the default value of \code{env.to.search}. @@ -51,9 +51,9 @@ #' set as a valid integer, \code{ds.ls} will list all objects in the server-side R environment #' identified by \code{env.to.search} in the search path. #' For more information see \strong{Details}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.ls} returns to the client-side a list containing: \cr #' (1) the name/details of the server-side R environment which \code{ds.ls} has searched;\cr #' (2) a vector of character strings giving the names of diff --git a/R/ds.lspline.R b/R/ds.lspline.R index 5c5cee793..e044005cb 100644 --- a/R/ds.lspline.R +++ b/R/ds.lspline.R @@ -10,13 +10,13 @@ #' in slope as compared to the previous segment. 
#' @param x the name of the input numeric variable #' @param knots numeric vector of knot positions -#' @param marginal logical, how to parametrize the spline, see Details +#' @param marginal logical, how to parametrise the spline, see Details #' @param names character, vector of names for constructed variables #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. Default \code{lspline.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return an object of class "lspline" and "matrix", which its name is specified by the #' \code{newobj} argument (or its default name "lspline.newobj"), is assigned on the serverside. #' @author Demetris Avraam for DataSHIELD Development Team @@ -54,4 +54,4 @@ ds.lspline <- function(x, knots = NULL, marginal = FALSE, names = NULL, newobj = calltext <- call("lsplineDS", x, knots, marginal, names) DSI::datashield.assign(datasources, newobj, calltext) -} \ No newline at end of file +} diff --git a/R/ds.make.R b/R/ds.make.R index eb341d8e7..07d14a830 100644 --- a/R/ds.make.R +++ b/R/ds.make.R @@ -64,9 +64,9 @@ #' @param toAssign a character string specifying the function or the arithmetic expression. #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. Default \code{make.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. 
If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.make} returns the new object which is written to the #' server-side. Also a validity message is returned to the client-side indicating whether the new object has been correctly #' created at each source. diff --git a/R/ds.matrix.R b/R/ds.matrix.R index d2e8f611f..b90356c59 100644 --- a/R/ds.matrix.R +++ b/R/ds.matrix.R @@ -27,7 +27,7 @@ #' #' @param mdata a character string specifying #' the name of a server-side scalar or vector. Also, a numeric value representing a -#' scalar specified from the client-side can be speficied. +#' scalar specified from the client-side can be specified. #' Zeros, negative values and NAs are all allowed. #' For more information see \strong{Details}. #' @param from a character string specifying the source and nature of \code{mdata}. @@ -45,9 +45,9 @@ #' the row and column names respectively. #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. Default \code{matrix.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.matrix} returns the created matrix which is written on the server-side. 
#' In addition, two validity messages are returned #' indicating whether the new matrix has been created in each data source and if so whether diff --git a/R/ds.matrixDet.R b/R/ds.matrixDet.R index 90ac58cf5..5fcd81a53 100644 --- a/R/ds.matrixDet.R +++ b/R/ds.matrixDet.R @@ -13,9 +13,9 @@ #' variable that is stored on the data servers. Default \code{matrixdet.newobj}. #' @param logarithm logical. If TRUE the logarithm of the modulus of the determinant #' is calculated. Default FALSE. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.matrixDet} returns the determinant of an existing matrix on the server-side. #' The created new object is stored on the server-side. #' Also, two validity messages are returned diff --git a/R/ds.matrixDet.report.R b/R/ds.matrixDet.report.R index 126e48a7a..be21aaa72 100644 --- a/R/ds.matrixDet.report.R +++ b/R/ds.matrixDet.report.R @@ -12,9 +12,9 @@ #' @param M1 a character string specifying the name of the matrix. #' @param logarithm logical. If TRUE the logarithm of the modulus of the determinant #' is calculated. Default FALSE. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. 
#' @return \code{ds.matrixDet.report} returns to the client-side #' the determinant of a matrix that is stored on the server-side. #' @author DataSHIELD Development Team diff --git a/R/ds.matrixDiag.R b/R/ds.matrixDiag.R index 9c9ec6e94..8c8ca877a 100644 --- a/R/ds.matrixDiag.R +++ b/R/ds.matrixDiag.R @@ -37,7 +37,7 @@ #' Server function called: \code{matrixDiagDS} #' @param x1 a character string specifying #' the name of a server-side scalar or vector. Also, a numeric value or vector -#' specified from the client-side can be speficied. This argument depends +#' specified from the client-side can be specified. This argument depends #' on the value specified in \code{aim}. #' For more information see \strong{Details}. #' @param aim a character string specifying the behaviour of the function. @@ -53,9 +53,9 @@ #' For more information see \strong{Details}. #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. Default \code{matrixdiag.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.matrixDiag} returns to the server-side the square matrix diagonal. #' Also, two validity messages are returned #' indicating whether the new object has been created in each data source and if so whether diff --git a/R/ds.matrixDimnames.R b/R/ds.matrixDimnames.R index d83762107..6f4a37ead 100644 --- a/R/ds.matrixDimnames.R +++ b/R/ds.matrixDimnames.R @@ -11,9 +11,9 @@ #' An empty list is treated as NULL. #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. 
Default \code{matrixdimnames.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.matrixDimnames} returns to the server-side #' the matrix with specified row and column names. #' Also, two validity messages are returned to the client-side diff --git a/R/ds.matrixInvert.R b/R/ds.matrixInvert.R index f7745ebb5..8cc3c447a 100644 --- a/R/ds.matrixInvert.R +++ b/R/ds.matrixInvert.R @@ -9,9 +9,9 @@ #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. #' Default \code{matrixinvert.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.matrixInvert} returns to the server-side the inverts square matrix. #' Also, two validity messages are returned to the client-side #' indicating whether the new object has been created in each data source and if so whether diff --git a/R/ds.matrixMult.R b/R/ds.matrixMult.R index 89ce42ab9..cf5349fe0 100644 --- a/R/ds.matrixMult.R +++ b/R/ds.matrixMult.R @@ -12,9 +12,9 @@ #' @param M2 a character string specifying the name of the second matrix. #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. 
Default \code{matrixmult.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.matrixMult} returns to the server-side #' the result of the two matrix multiplication. #' Also, two validity messages are returned to the client-side diff --git a/R/ds.matrixTranspose.R b/R/ds.matrixTranspose.R index 46556d46f..bbd73a1a8 100644 --- a/R/ds.matrixTranspose.R +++ b/R/ds.matrixTranspose.R @@ -12,9 +12,9 @@ #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. #' Default \code{matrixtranspose.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.matrixTranspose} returns to the server-side the transpose matrix. #' Also, two validity messages are returned to the client-side #' indicating whether the new object has been created in each data source and if so whether diff --git a/R/ds.mean.R b/R/ds.mean.R index e26ef636e..f23356d56 100644 --- a/R/ds.mean.R +++ b/R/ds.mean.R @@ -38,9 +38,9 @@ #' the number of valid (non-missing) observations will be saved on the data servers. #' Default FALSE. #' For more information see \strong{Details}. 
-#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.mean} returns to the client-side a list including: \cr #' #' \code{Mean.by.Study}: estimated mean, \code{Nmissing} diff --git a/R/ds.meanByClass.R b/R/ds.meanByClass.R index 279bf8919..538ef8c81 100644 --- a/R/ds.meanByClass.R +++ b/R/ds.meanByClass.R @@ -22,9 +22,9 @@ #' \code{type} can be set as: \code{'combine'} or \code{'split'}. #' Default \code{'combine'}. #' For more information see \strong{Details}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.meanByClass} returns to the client-side a table or a list of tables that #' hold the length of the numeric variable(s) and their mean #' and standard deviation in each subgroup (subset). diff --git a/R/ds.meanSdGp.R b/R/ds.meanSdGp.R index 14a3fb911..1bd60936b 100644 --- a/R/ds.meanSdGp.R +++ b/R/ds.meanSdGp.R @@ -61,9 +61,9 @@ #' are undertaken to ensure that the input objects are defined in all studies and that the #' variables are of equivalent class in each study. #' Default is FALSE to save time. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. 
If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.meanSdGp} returns to the client-side the mean, SD, Nvalid and SEM combined #' across studies and/or separately for each study, depending on the argument \code{type}. #' diff --git a/R/ds.merge.R b/R/ds.merge.R index ae7d9f001..4ac436fb5 100644 --- a/R/ds.merge.R +++ b/R/ds.merge.R @@ -45,9 +45,9 @@ #' For more information see \code{match} in native R \code{merge} function. #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. Default \code{merge.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.merge} returns the merged data frame that is written on the server-side. #' Also, two validity messages are returned to the client-side #' indicating whether the new object has been created in each data source and if so whether diff --git a/R/ds.message.R b/R/ds.message.R index 289c365be..eb88d8b95 100644 --- a/R/ds.message.R +++ b/R/ds.message.R @@ -19,9 +19,9 @@ #' Server function called: \code{messageDS} #' @param message.obj.name is a character string specifying the name of the list that #' contains the message. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. 
If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.message} returns a list object from each study, #' containing the message that has been written by #' DataSHIELD into \code{$studysideMessage}. diff --git a/R/ds.metadata.R b/R/ds.metadata.R index 214bde6ab..58f615b16 100644 --- a/R/ds.metadata.R +++ b/R/ds.metadata.R @@ -6,9 +6,9 @@ #' Server function \code{metadataDS} is called examines the attributes associated with the variable #' which are non-disclosive. #' @param x a character string specifying the name of the object. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.metadata} returns to the client-side the metadata of associated to an object #' held at the server. #' @author Stuart Wheater, DataSHIELD Development Team diff --git a/R/ds.mice.R b/R/ds.mice.R index f5e84f727..bcb473a4a 100644 --- a/R/ds.mice.R +++ b/R/ds.mice.R @@ -38,9 +38,9 @@ #' that are stored on the data servers. Default \code{imputationSet}. For example, if m=5, and #' newobj_df="imputationSet", then five imputed dataframes are saved on the servers with names #' imputationSet.1, imputationSet.2, imputationSet.3, imputationSet.4, imputationSet.5. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. 
If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return a list with three elements: the method, the predictorMatrix and the post. #' @author Demetris Avraam for DataSHIELD Development Team #' @export diff --git a/R/ds.names.R b/R/ds.names.R index d6dcd15f9..97ebbdfd7 100644 --- a/R/ds.names.R +++ b/R/ds.names.R @@ -6,16 +6,16 @@ #' for example, it only works to extract names that already exist, #' not to create new names for objects. The function is restricted to objects of #' type list, but this includes objects that have a primary class other than list but which -#' return TRUE to the native R function {is.list}. As an example this includes +#' return TRUE to the native R function \code{is.list}. As an example this includes #' the multi-component object created by fitting a generalized linear model #' using ds.glmSLMA. The resultant object saved on each server separately #' is formally of class "glm" and "ls" but responds TRUE to is.list(), #' @param xname a character string specifying the name of the list. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login that represent the particular data sources #' (studies) to be addressed by the function call. If the \code{datasources} #' argument is not specified the default set of connections will be used: -#' see \code{\link{datashield.connections_default}}. +#' see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.names} returns to the client-side the names #' of a list object stored on the server-side. 
#' @author Amadou Gaye, updated by Paul Burton for DataSHIELD development diff --git a/R/ds.ns.R b/R/ds.ns.R index cc11be6a8..e98643d4d 100644 --- a/R/ds.ns.R +++ b/R/ds.ns.R @@ -8,7 +8,7 @@ #' sequence of interior knots, and the natural boundary conditions. These enforce the constraint #' that the function is linear beyond the boundary knots, which can either be supplied or default #' to the extremes of the data. -#' A primary use is in modeling formula to directly specify a natural spline term in a model. +#' A primary use is in modelling formula to directly specify a natural spline term in a model. #' @param x the predictor variable. Missing values are allowed. #' @param df degrees of freedom. One can supply df rather than knots; ns() then chooses #' df - 1 - intercept knots at suitably chosen quantiles of x (which will ignore missing values). @@ -22,9 +22,9 @@ #' are supplied, the basis parameters do not depend on x. Data can extend beyond Boundary.knots. #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. Default \code{ns.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return A matrix of dimension length(x) * df where either df was supplied or if knots were #' supplied, df = length(knots) + 1 + intercept. Attributes are returned that correspond to the #' arguments to ns, and explicitly give the knots, Boundary.knots etc for use by predict.ns(). 
@@ -54,4 +54,4 @@ ds.ns <- function(x, df = NULL, knots = NULL, intercept = FALSE, Boundary.knots calltext <- call("nsDS", x, df, knots, intercept, Boundary.knots) DSI::datashield.assign(datasources, newobj, calltext) -} \ No newline at end of file +} diff --git a/R/ds.numNA.R b/R/ds.numNA.R index c1757c873..0bd75185a 100644 --- a/R/ds.numNA.R +++ b/R/ds.numNA.R @@ -7,9 +7,9 @@ #' #' Server function called: \code{numNaDS} #' @param x a character string specifying the name of the vector. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.numNA} returns to the client-side the number of missing values #' on a server-side vector. #' @author DataSHIELD Development Team diff --git a/R/ds.qlspline.R b/R/ds.qlspline.R index 0302ccc46..9839d9843 100644 --- a/R/ds.qlspline.R +++ b/R/ds.qlspline.R @@ -18,13 +18,13 @@ #' intervals along x or a vector of numbers in (0; 1) specifying the quantiles explicitly. #' @param na.rm logical, whether NA should be removed when calculating quantiles, passed #' to na.rm of quantile. Default set to TRUE -#' @param marginal logical, how to parametrize the spline, see Details +#' @param marginal logical, how to parametrise the spline, see Details #' @param names character, vector of names for constructed variables #' @param newobj a character string that provides the name for the output #' variable that is stored on the data servers. Default \code{qlspline.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. 
If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return an object of class "lspline" and "matrix", which its name is specified by the #' \code{newobj} argument (or its default name "qlspline.newobj"), is assigned on the serverside. #' @author Demetris Avraam for DataSHIELD Development Team @@ -62,4 +62,4 @@ ds.qlspline <- function(x, q, na.rm = TRUE, marginal = FALSE, names = NULL, newo calltext <- call("qlsplineDS", x, q, na.rm, marginal, names) DSI::datashield.assign(datasources, newobj, calltext) -} \ No newline at end of file +} diff --git a/R/ds.quantileMean.R b/R/ds.quantileMean.R index 43e32696f..48aa705b4 100644 --- a/R/ds.quantileMean.R +++ b/R/ds.quantileMean.R @@ -15,9 +15,9 @@ #' @param type a character that represents the type of graph to display. #' This can be set as \code{'combine'} or \code{'split'}. #' For more information see \strong{Details}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.quantileMean} returns to the client-side the quantiles and statistical mean #' of a server-side numeric vector. #' @author DataSHIELD Development Team diff --git a/R/ds.rBinom.R b/R/ds.rBinom.R index be5d92205..2f39f8b10 100644 --- a/R/ds.rBinom.R +++ b/R/ds.rBinom.R @@ -41,9 +41,9 @@ #' @param return.full.seed.as.set logical, if TRUE will return the full random number seed #' in each data source (a numeric vector of length 626). 
If FALSE it will only return the #' trigger seed value you have provided. Default is FALSE. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.rBinom} returns random number vectors #' with a Binomial distribution for each study, #' taking into account the values specified in each parameter of the function. @@ -216,10 +216,11 @@ single.integer.seed<-c(single.integer.seed,seed.as.integer.study.specific) if(seed.as.text=="NULL"){ cat("NO SEED SET IN STUDY",study.id,"\n\n") -} +} else { calltext <- paste0("setSeedDS(", seed.as.text, ")") ssDS.obj[[study.id]] <- DSI::datashield.aggregate(datasources[study.id], as.symbol(calltext)) } +} cat("\n\n") diff --git a/R/ds.rNorm.R b/R/ds.rNorm.R index 0f53ab92c..6100c8505 100644 --- a/R/ds.rNorm.R +++ b/R/ds.rNorm.R @@ -52,9 +52,9 @@ #' Default is FALSE. #' @param force.output.to.k.decimal.places an integer vector that #' forces the output random numbers vector to have k decimals. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.rNorm} returns random number vectors with a normal distribution for each #' study, taking into account the values specified in each parameter of the function. 
#' The output vector is written to the server-side. diff --git a/R/ds.rPois.R b/R/ds.rPois.R index 5fc262ff6..2d2c7f019 100644 --- a/R/ds.rPois.R +++ b/R/ds.rPois.R @@ -38,9 +38,9 @@ #' random number seed in each data source (a numeric vector of length 626). If #' FALSE it will only return the trigger seed value you have provided. #' Default is FALSE. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.rPois} returns random number vectors with a Poisson distribution for each study, #' taking into account the values specified in each parameter of the function. #' The created vectors are stored in the server-side. diff --git a/R/ds.rUnif.R b/R/ds.rUnif.R index 426914885..d98fa28f0 100644 --- a/R/ds.rUnif.R +++ b/R/ds.rUnif.R @@ -58,9 +58,9 @@ #' an integer vector that forces the output random #' numbers vector to have k decimals. #' -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.Unif} returns random number vectors with a uniform distribution for each study, #' taking into account the values specified in each parameter of the function. #' The created vectors are stored in the server-side. 
If requested, it also returned to the @@ -234,9 +234,10 @@ single.integer.seed<-c(single.integer.seed,seed.as.integer.study.specific) if(seed.as.text=="NULL"){ cat("NO SEED SET IN STUDY",study.id,"\n") -} +} else { calltext <- paste0("setSeedDS(", seed.as.text, ")") ssDS.obj[[study.id]] <- DSI::datashield.aggregate(datasources[study.id], as.symbol(calltext)) +} } diff --git a/R/ds.ranksSecure.R b/R/ds.ranksSecure.R index 5a3affcaf..1d9d98ed9 100644 --- a/R/ds.ranksSecure.R +++ b/R/ds.ranksSecure.R @@ -1,7 +1,7 @@ # ds.ranksSecure #' @title Secure ranking of a vector across all sources #' @description Securely generate the ranks of a numeric vector and estimate -#' true qlobal quantiles across all data sources simultaneously +#' true global quantiles across all data sources simultaneously #' @details ds.ranksSecure is a clientside function which calls a series of #' other clientside and serverside functions to securely generate the global #' ranks of a numeric vector "V2BR" (vector to be ranked) @@ -43,7 +43,7 @@ #' of the clusters of values that are being ranked such that some values are #' treated as being missing and the processing stops, then setting #' generate.quantiles to FALSE allows the generation of ranks to complete so -#' they can then be used for non-parameteric analysis, even if the key values +#' they can then be used for non-parametric analysis, even if the key values #' cannot be estimated. A real example of an unusual configuration was in a #' reasonably large dataset of survival times, where a substantial proportion #' of survival profiles were censored at precisely 10 years. This meant that diff --git a/R/ds.rbind.R b/R/ds.rbind.R index 85bca1d57..47e9165c2 100644 --- a/R/ds.rbind.R +++ b/R/ds.rbind.R @@ -22,9 +22,9 @@ #' specifies column names of the output object. #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Defaults \code{rbind.newobj}. 
-#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @param notify.of.progress specifies if console output should be produced to indicate #' progress. Default FALSE. #' @return \code{ds.rbind} returns a matrix combining the rows of the diff --git a/R/ds.reShape.R b/R/ds.reShape.R index 2a4840e99..f2214f559 100644 --- a/R/ds.reShape.R +++ b/R/ds.reShape.R @@ -29,9 +29,9 @@ #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. #' Default \code{reshape.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.reShape} returns to the server-side a reshaped data frame #' converted from 'long' to 'wide' format or from 'wide' to long' format. #' Also, two validity messages are returned to the client-side diff --git a/R/ds.recodeLevels.R b/R/ds.recodeLevels.R index 671401e48..a22d25b31 100644 --- a/R/ds.recodeLevels.R +++ b/R/ds.recodeLevels.R @@ -13,9 +13,9 @@ #' to the current number of levels. #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{recodelevels.newobj}. 
-#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.recodeLevels} returns to the server-side a variable of type factor #' with the replaces levels. #' @author DataSHIELD Development Team diff --git a/R/ds.recodeValues.R b/R/ds.recodeValues.R index 9643d33e0..184ccea2b 100644 --- a/R/ds.recodeValues.R +++ b/R/ds.recodeValues.R @@ -4,7 +4,7 @@ #' @details This function recodes individual values with new individual values. This can #' apply to numeric and character values, factor levels and NAs. One particular use of #' \code{ds.recodeValues} is to convert NAs to an explicit value. This value is specified -#' in the argument \code{missing}. If tthe user want to recode only missing values, then it +#' in the argument \code{missing}. If the user wants to recode only missing values, then it #' should also specify an identical vector of values in both arguments \code{values2replace.vector} #' and \code{new.values.vector} (see Example 2 below). #' Server function called: \code{recodeValuesDS} @@ -19,9 +19,9 @@ #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. #' Default \code{recodevalues.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. 
#' @param notify.of.progress logical. If TRUE console output should be produced to indicate #' progress. Default FALSE. #' @return Assigns to each server a new variable with the recoded values. diff --git a/R/ds.rep.R b/R/ds.rep.R index 0e5261493..2d2ce9515 100644 --- a/R/ds.rep.R +++ b/R/ds.rep.R @@ -23,9 +23,9 @@ #' the \code{x1} is a character. #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{seq.vect}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.rep} returns in the server-side a vector with the specified repetitive sequence. #' Also, two validity messages are returned to the client-side #' the name of \code{newobj} that has been created diff --git a/R/ds.replaceNA.R b/R/ds.replaceNA.R index 3345b7dee..28a51adb1 100644 --- a/R/ds.replaceNA.R +++ b/R/ds.replaceNA.R @@ -19,9 +19,9 @@ #' The length of the list or vector must be equal to the number of servers (studies). #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{replacena.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. 
+#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.replaceNA} returns to the server-side a new vector or table structure #' with the missing values replaced by the specified values. #' The class of the vector is the same as the initial vector. diff --git a/R/ds.rm.R b/R/ds.rm.R index 77b693db1..e977afe5c 100644 --- a/R/ds.rm.R +++ b/R/ds.rm.R @@ -14,9 +14,9 @@ #' #' Server function called: \code{rmDS} #' @param x.names a character string specifying the objects to be deleted. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return The \code{ds.rm} function deletes from the server-side #' the specified object. If this #' is successful the message \code{"Object(s) '' was deleted."} is returned @@ -94,4 +94,4 @@ ds.rm<-function(x.names=NULL, datasources=NULL){ return(output) } -#ds.rm \ No newline at end of file +#ds.rm diff --git a/R/ds.rowColCalc.R b/R/ds.rowColCalc.R index b63190bf9..d531cce47 100644 --- a/R/ds.rowColCalc.R +++ b/R/ds.rowColCalc.R @@ -14,9 +14,9 @@ #' \code{"rowSums"}, \code{"colSums"}, \code{"rowMeans"} or \code{"colMeans"}. #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Default \code{rowcolcalc.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
#' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.rowColCalc} returns to the server-side rows and columns sums and means. #' @author DataSHIELD Development Team #' @examples diff --git a/R/ds.sample.R b/R/ds.sample.R index 45a3d6afa..08c1b04c1 100644 --- a/R/ds.sample.R +++ b/R/ds.sample.R @@ -4,8 +4,8 @@ #' on the serverside #' or - as a special case - randomly permutes a vector, dataframe or matrix. #' @details Clientside function ds.sample calls serverside -#' assign function sampleDS. Based on the native R function {sample()} but deals -#' slightly differently with data.frames and matrices. Specifically the {sample()} +#' assign function sampleDS. Based on the native R function \code{sample()} but deals +#' slightly differently with data.frames and matrices. Specifically the \code{sample()} #' function in R identifies the length of an object and then samples n components #' of that length. But length(data.frame) in native R returns the number of columns #' not the number of rows. So if you have a data.frame with 71 rows and 10 columns, @@ -45,7 +45,7 @@ #' case with the default name 'newobj.sample) using ds.dataFrameSort with the #' 'sampling.order' vector as the sort key, the output object is rendered #' equivalent to PRWa but with the rows randomly permuted (so the column reflecting -#' the vector 'sample.order' now runs from 1:length of obejct, while the +#' the vector 'sample.order' now runs from 1:length of object, while the #' column reflecting 'ID.seq' denoting the original order is now randomly ordered. 
#' If you need to return to the original order you can simply us ds.dataFrameSort #' again using the column reflecting 'ID.seq' as the sort key: @@ -63,11 +63,11 @@ #' that is a randomly permuted sample of the vector 1:923, or (if [replace] #' = FALSE, a full random permutation of that same vector. For further details #' of using ds.sample with x set as an integer/numeric please see help for -#' the {sample} function in native R. But if x is set as a character string +#' the \code{sample} function in native R. But if x is set as a character string #' denoting a vector, matrix or data.frame on the serverside, please note -#' that although {ds.sample} effectively calls {sample} on the serverside -#' it behaves somewhat differently to {sample} - for the reasons identified -#' at the top of 'details' and so help for {sample} should be used as a guide +#' that although \code{ds.sample} effectively calls \code{sample} on the serverside +#' it behaves somewhat differently to \code{sample} - for the reasons identified +#' at the top of 'details' and so help for \code{sample} should be used as a guide #' only. #' @param size a numeric/integer scalar indicating the size of the sample to #' be drawn. If the [x] argument is a vector, matrix or data.frame on the @@ -102,18 +102,18 @@ #' @param replace a Boolean indicator (TRUE or FALSE) specifying whether the #' sample should be drawn with or without replacement. Default is FALSE so #' the sample is drawn without replacement. For further details see -#' help for {sample} in native R. +#' help for \code{sample} in native R. #' @param prob a character string containing the name of a numeric vector #' of probability weights on the serverside that is associated with each of the #' elements of the vector to be sampled enabling the drawing of a sample #' with some elements given higher probability of being drawn than others. -#' For further details see help for {sample} in native R. 
+#' For further details see help for \code{sample} in native R. #' @param newobj This a character string providing a name for the output #' data.frame which defaults to 'newobj.sample' if no name is specified. #' @param datasources specifies the particular opal object(s) to use. If the #' argument is not specified the default set of opals will be used. The default opals #' are called default.opals and the default can be set using the function -#' {ds.setDefaultOpals}. If the is to be specified, it should be set without +#' \code{ds.setDefaultOpals}. If the is to be specified, it should be set without #' inverted commas: e.g. datasources=opals.em or datasources=default.opals. If you wish to #' apply the function solely to e.g. the second opal server in a set of three, #' the argument can be specified as: e.g. datasources=opals.em[2]. diff --git a/R/ds.scatterPlot.R b/R/ds.scatterPlot.R index 81828648a..6c2c78058 100644 --- a/R/ds.scatterPlot.R +++ b/R/ds.scatterPlot.R @@ -6,7 +6,7 @@ #' permitted in DataSHIELD, this function allows the user to plot non-disclosive scatter plots. #' #' If the argument \code{method} is set to \code{'deterministic'}, the server-side function searches -#' for the \code{k-1} nearest neighbors of each single data point and calculates the centroid +#' for the \code{k-1} nearest neighbours of each single data point and calculates the centroid #' of such \code{k} points. #' The proximity is defined by the minimum Euclidean distances of z-score transformed data. #' @@ -52,7 +52,7 @@ #' This argument can be set as \code{'deteministic'} or \code{'probabilistic'}. #' Default \code{'deteministic'}. #' For more information see \strong{Details}. -#' @param k the number of the nearest neighbors for which their centroid is calculated. +#' @param k the number of the nearest neighbours for which their centroid is calculated. #' Default 3. #' For more information see \strong{Details}. 
#' @param noise the percentage of the initial variance that is used as the variance of the embedded @@ -64,9 +64,9 @@ #' For more information see \strong{Details}. #' @param return.coords a logical. If TRUE the coordinates of the anonymised data points are return #' to the Console. Default value is FALSE. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.scatterPlot} returns to the client-side one or more scatter #' plots depending on the argument \code{type}. #' @author DataSHIELD Development Team diff --git a/R/ds.seq.R b/R/ds.seq.R index 1f0b722ac..d21522a00 100644 --- a/R/ds.seq.R +++ b/R/ds.seq.R @@ -55,9 +55,9 @@ #' For more information see \strong{Details}. #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Default \code{seq.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.seq} returns to the server-side the generated sequence. 
#' Also, two validity messages are returned to the client-side #' indicating whether the new object has been created in each data source and if so whether diff --git a/R/ds.setSeed.R b/R/ds.setSeed.R index 20eabd5ba..ee84a1740 100644 --- a/R/ds.setSeed.R +++ b/R/ds.setSeed.R @@ -31,9 +31,9 @@ #' Server function called: \code{setSeedDS} #' @param seed.as.integer a numeric value or a NULL that primes the random seed #' in each data source. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return Sets the values of the vector of integers of length 626 known as #' \code{.Random.seed} on each data source that is the true current state of the #' random seed in each source. It also returns the value of the trigger diff --git a/R/ds.skewness.R b/R/ds.skewness.R index ed3b05d9b..0ef8d93d3 100644 --- a/R/ds.skewness.R +++ b/R/ds.skewness.R @@ -31,9 +31,9 @@ #' \code{type} can be set as: \code{'combine'}, \code{'split'} or \code{'both'}. For more information #' see \strong{Details}. #' The default value is set to \code{'both'}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. 
#' @return \code{ds.skewness} returns a matrix showing the skewness of the input numeric variable, #' the number of valid observations and the validity message. #' @author Demetris Avraam, for DataSHIELD Development Team diff --git a/R/ds.sqrt.R b/R/ds.sqrt.R index 22a039374..e78011def 100644 --- a/R/ds.sqrt.R +++ b/R/ds.sqrt.R @@ -10,9 +10,9 @@ #' @param x a character string providing the name of a numeric or an integer vector. #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Default name is set to \code{sqrt.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified the default set of connections will be -#' used: see \code{\link{datashield.connections_default}}. +#' used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.sqrt} assigns a vector for each study that includes the square root values of #' the input numeric or integer vector specified in the argument \code{x}. The created vectors #' are stored in the servers. 
@@ -59,7 +59,7 @@ #' # and get their square roots #' ds.make(toAssign='rep((1:10)^2, times=10)', newobj='squares.vector', datasources=connections) #' ds.sqrt(x='squares.vector', newobj='sqrt.vector', datasources=connections) -#' # check the behavior of that operation by comparing the tables of squares.vector and sqrt.vector +#' # check the behaviour of that operation by comparing the tables of squares.vector and sqrt.vector #' ds.table(rvar='squares.vector')$output.list$TABLE_rvar.by.study_counts #' ds.table(rvar='sqrt.vector')$output.list$TABLE_rvar.by.study_counts #' diff --git a/R/ds.subset.R b/R/ds.subset.R index 6f21bd08e..03b2e7344 100644 --- a/R/ds.subset.R +++ b/R/ds.subset.R @@ -1,10 +1,10 @@ #' #' @title Generates a valid subset of a table or a vector #' @description The function uses the R classical subsetting with squared brackets '[]' and allows also to -#' subset using a logical oprator and a threshold. The object to subset from must be a vector (factor, numeric -#' or charcater) or a table (data.frame or matrix). +#' subset using a logical operator and a threshold. The object to subset from must be a vector (factor, numeric +#' or character) or a table (data.frame or matrix). #' @details (1) If the input data is a table the user specifies the rows and/or columns to include in the subset; the columns can be -#' refered to by their names. Table subsetting can also be done using the name of a variable and a threshold (see example 3). +#' referred to by their names. Table subsetting can also be done using the name of a variable and a threshold (see example 3). #' (2) If the input data is a vector and the parameters 'rows', 'logical' and 'threshold' are all provided the last two are ignored #' (i.e. 'rows' has precedence over the other two parameters then). #' IMPORTANT NOTE: If the requested subset is not valid (i.e. contains less than the allowed number of observations) all the values are @@ -19,8 +19,8 @@ #' operator. 
This parameter is ignored if the input data is not a vector. #' @param threshold a numeric, the threshold to use in conjunction with the logical parameter. This parameter is ignored #' if the input data is not a vector. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @return no data are return to the user, the generated subset dataframe is stored on the server side. #' @author Gaye, A. #' @seealso \link{ds.subsetByClass} to subset by the classes of factor vector(s). diff --git a/R/ds.subsetByClass.R b/R/ds.subsetByClass.R index 2a2ab29a8..b3b14ec27 100644 --- a/R/ds.subsetByClass.R +++ b/R/ds.subsetByClass.R @@ -11,8 +11,8 @@ #' @param variables a vector of string characters, the name(s) of the variables to subset by. #' @param subsets the name of the output object, a list that holds the subset objects. If set to NULL #' the default name of this list is 'subClasses'. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @return a no data are return to the user but messages are printed out. #' @author Gaye, A. #' @seealso \link{ds.meanByClass} to compute mean and standard deviation across categories of a factor vectors. 
@@ -107,7 +107,7 @@ ds.subsetByClass <- function(x=NULL, subsets="subClasses", variables=NULL, datas } # call the server side function that does the job - # get the indices of the columns refered to by their names in the arguments + # get the indices of the columns referred to by their names in the arguments if(is.null(variables)){ cally <- paste0("subsetByClassDS('", x, "')") }else{ diff --git a/R/ds.summary.R b/R/ds.summary.R index b4d3061ce..2d86287b1 100644 --- a/R/ds.summary.R +++ b/R/ds.summary.R @@ -9,9 +9,9 @@ #' #' server functions called: \code{isValidDS}, \code{dimDS} and \code{colnamesDS} #' @param x a character string specifying the name of a numeric or factor variable. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.summary} returns to the client-side the class and #' size of the server-side object. #' Also other information is returned depending on the class of the object. diff --git a/R/ds.table.R b/R/ds.table.R index 5163e7168..780f9f862 100644 --- a/R/ds.table.R +++ b/R/ds.table.R @@ -36,7 +36,7 @@ #' table returned to the clientside, it means that the true count #' in that same cell is held as the 13th element of the true count #' vector saved on the serverside. This means that a data analyst -#' can still make use of the counts from a call to the {ds.table} +#' can still make use of the counts from a call to the \code{ds.table} #' function to drive their ongoing analysis even when one or #' more non-zero cell counts fall below the specified threshold #' for potential disclosure risk. 
@@ -55,7 +55,7 @@ #' #' In creating a 3-dimensional table the #' ('separate tables') argument identifies the variable that -#' indexes the set of two dimensional tables in the output {ds.table}. +#' indexes the set of two dimensional tables in the output \code{ds.table}. #' #' As a minor technicality, it should be noted that #' if a 1-dimensional table is required, one only need specify a value @@ -67,7 +67,7 @@ #' for one dimensional tables are actually two dimensional: with #' rows defined by and with one column for each of the studies. #' -#' The output list generated by {ds.table} contains tables based on counts +#' The output list generated by \code{ds.table} contains tables based on counts #' named "table.name_counts" and other tables reporting corresponding #' column proportions ("table.name_col.props") or row proportions #' ("table.name_row.props"). In one dimensional tables in the output the @@ -102,14 +102,14 @@ #' @param report.chisq.tests if TRUE, chi-squared tests #' are applied to every 2 dimensional table in the output and reported #' as "chisq.test_table.name". Default = FALSE. -#' @param exclude this argument is passed through to the {table} function in -#' native R which is called by {tableDS}. The help for {table} in native R +#' @param exclude this argument is passed through to the \code{table} function in +#' native R which is called by \code{tableDS}. The help for \code{table} in native R #' indicates that 'exclude' specifies any levels that should be deleted for #' all factors in rvar, cvar or stvar. If the argument #' does not include NA and if the argument is not specified, -#' it implies = "always" in DataSHIELD. If you read the help for {table} in native R +#' it implies = "always" in DataSHIELD. 
If you read the help for \code{table} in native R #' including the 'details' and the 'examples' (particularly 'd.patho') you -#' will see that the response of {table} to different combinations of the +#' will see that the response of \code{table} to different combinations of the #' and arguments can be non-intuitive. This is particularly #' so if there is more than one type of missing (e.g. missing by observation #' as well as missing because of an NaN response to a mathematical @@ -118,24 +118,24 @@ #' you cannot interpret the output that has been approached #' you might try: (1) making sure that the variable producing the strange results #' is of class factor rather than integer or numeric - although integers and -#' numerics are coerced to factors by {ds.table} they can occasionally behave less +#' numerics are coerced to factors by \code{ds.table} they can occasionally behave less #' well when the NA setting is complex; (2) specify both an argument #' e.g. exclude = c("NaN","3") and a argument e.g. useNA= "no"; #' (3) if you are excluding multiple levels e.g exclude = c("NA","3") #' then you can reduce this to one e.g. exclude = c("NA") and then remove #' the 3s by deleting rows of data, or converting the 3s to a different value. -#' @param useNA this argument is passed through to the {table} function in -#' native R which is called by {tableDS}. In DataSHIELD, this argument can take +#' @param useNA this argument is passed through to the \code{table} function in +#' native R which is called by \code{tableDS}. In DataSHIELD, this argument can take #' two values: "no" or "always" which indicate whether to include NA values in the table. #' For further information, please see the help for the argument (above) -#' and/or the help for the {table} function in native R. Default value is set to "always". +#' and/or the help for the \code{table} function in native R. Default value is set to "always". 
#' @param suppress.chisq.warnings if set to TRUE, the default warnings are -#' suppressed that would otherwise be produced by the {table} function in +#' suppressed that would otherwise be produced by the \code{table} function in #' native R whenever an expected cell count in one or more cells is less than 5. #' Default is FALSE. Further details can be found under 'details' and the #' help provided for the argument (above). #' @param table.assign is a Boolean argument set by default to FALSE. If it is -#' FALSE the {ds.table} function acts as a standard aggregate function - +#' FALSE the \code{ds.table} function acts as a standard aggregate function - #' it returns the table that is specified in its call to the clientside #' where it can be visualised and worked with by the analyst. But if #' is TRUE, the same table object is also written to @@ -149,8 +149,8 @@ #' If no explicit name for the table object is specified, but #' is nevertheless TRUE, the name for the serverside table object defaults #' to \code{table.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' If the is to be specified, it should be set without #' inverted commas: e.g. datasources=connections.em or datasources=default.connections. If you wish to #' apply the function solely to e.g. the second connection server in a set of three, @@ -161,7 +161,7 @@ #' a positive integer represented as a character string: e.g. "173". This #' the has the effect of the standard value of 'nfilter.tab' (often 1, 3, 5 or 10 #' depending what value the data custodian has selected for this particular -#' data set), to this new value (here, 173). 
CRUCIALLY, the {ds.table} function +#' data set), to this new value (here, 173). CRUCIALLY, the \code{ds.table} function #' only allows the standard value to be INCREASED. So if the standard value has #' been set as 5 (as one of the R options set in the serverside connection), "6" and #' "4981" would be allowable values for the argument but "4" or @@ -174,9 +174,9 @@ #' there is an error for some other reason). #' #' The clientside output from -#' {ds.table} includes error messages that identify when the creation of a +#' \code{ds.table} includes error messages that identify when the creation of a #' table from a particular study has failed and why. If table.assign=TRUE, -#' {ds.table} also writes the requested table as an object named by +#' \code{ds.table} also writes the requested table as an object named by #' the argument or set to 'newObj' by default. #' #' Further information diff --git a/R/ds.table1D.R b/R/ds.table1D.R index 76e131994..8a13afdc7 100644 --- a/R/ds.table1D.R +++ b/R/ds.table1D.R @@ -10,14 +10,14 @@ #' count. This way it is possible the know the total count and combine total counts across data sources but it #' is not possible to identify the cell(s) that had the small counts which render the table invalid. #' @param x a character, the name of a numerical vector with discrete values - usually a factor. -#' @param type a character which represent the type of table to ouput: pooled table or one table for each +#' @param type a character which represent the type of table to output: pooled table or one table for each #' data source. If \code{type} is set to 'combine', a pooled 1-dimensional table is returned; if If \code{type} #' is set to 'split' a 1-dimensional table is returned for each data source. -#' @param warningMessage a boolean, if set to TRUE (deafult) a warning is displayed if any returned table is invalid. Warning +#' @param warningMessage a boolean, if set to TRUE (default) a warning is displayed if any returned table is invalid. 
Warning #' messages are suppressed if this parameter is set to FALSE. However the analyst can still view 'validity' information #' which are stored in the output object 'validity' - see the list of output objects. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @return A list object containing the following items: #' \item{counts}{ table(s) that hold counts for each level/category. If some cells counts are invalid (see 'Details' #' section) only the total (outer) cell counts are displayed in the returned individual study tables or in the pooled diff --git a/R/ds.table2D.R b/R/ds.table2D.R index c8b882ac2..0dee00abd 100644 --- a/R/ds.table2D.R +++ b/R/ds.table2D.R @@ -10,15 +10,15 @@ #' identify the cell(s) that had the small counts which render the table invalid. #' @param x a character, the name of a numerical vector with discrete values - usually a factor. #' @param y a character, the name of a numerical vector with discrete values - usually a factor. -#' @param type a character which represent the type of table to ouput: pooled table or one table for each +#' @param type a character which represent the type of table to output: pooled table or one table for each #' data source or both. If \code{type} is set to 'combine', a pooled 2-dimensional table is returned; If \code{type} #' is set to 'split' a 2-dimensional table is returned for each data source. If \code{type} is set to 'both' (default) #' a pooled 2-dimensional table plus a 2-dimensional table for each data source are returned. -#' @param warningMessage a boolean, if set to TRUE (deafult) a warning is displayed if any returned table is invalid. 
Warning +#' @param warningMessage a boolean, if set to TRUE (default) a warning is displayed if any returned table is invalid. Warning #' messages are suppressed if this parameter is set to FALSE. However the analyst can still view 'validity' information #' which are stored in the output object 'validity' - see the list of output objects. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @return A list object containing the following items: #' \item{colPercent}{table(s) that hold column percentages for each level/category. Inner cells are reported as #' missing if one or more cells are 'invalid'.} diff --git a/R/ds.tapply.R b/R/ds.tapply.R index 457ecfe4d..e9805e1d5 100644 --- a/R/ds.tapply.R +++ b/R/ds.tapply.R @@ -68,9 +68,9 @@ #' \code{"N"} (or \code{"length"}), \code{"mean"},\code{"sd"}, \code{"sum"}, #' or \code{"quantile"}. #' For more information see \strong{Details}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.tapply} returns to the client-side an array of the summarized values. #' It has the same number of dimensions as INDEX. 
#' @examples diff --git a/R/ds.tapply.assign.R b/R/ds.tapply.assign.R index 6f506532f..be7b74081 100644 --- a/R/ds.tapply.assign.R +++ b/R/ds.tapply.assign.R @@ -70,9 +70,9 @@ #' For more information see \strong{Details}. #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Default \code{tapply.assign.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.tapply.assign} returns an array of the summarized values. #' The array is written to the server-side. It has the same number of #' dimensions as INDEX. diff --git a/R/ds.testObjExists.R b/R/ds.testObjExists.R index bf84065d7..5b7e946ae 100644 --- a/R/ds.testObjExists.R +++ b/R/ds.testObjExists.R @@ -8,9 +8,9 @@ #' #' Server function called: \code{testObjExistsDS} #' @param test.obj.name a character string specifying the name of the object to search. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.testObjExists} returns a list of messages specifying that the object exists #' on the server-side. 
#' If the specified object does not exist in at least one diff --git a/R/ds.unList.R b/R/ds.unList.R index 773246fd1..fa14a4f24 100644 --- a/R/ds.unList.R +++ b/R/ds.unList.R @@ -18,9 +18,9 @@ #' @param x.name a character string specifying the name of the input object to be unlisted. #' @param newobj a character string that provides the name for the output variable #' that is stored on the data servers. Default \code{unlist.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.unList} returns to the server-side the unlist object. #' Also, two validity messages are returned to the client-side #' indicating whether the new object has been created in each data source and if so whether diff --git a/R/ds.unique.R b/R/ds.unique.R index 29797deeb..8f2717054 100644 --- a/R/ds.unique.R +++ b/R/ds.unique.R @@ -4,12 +4,12 @@ #' @details Will create a vector or list which has no duplicate values. #' #' Server function called: \code{uniqueDS} -#' @param x.name a character string providing the name of the varable, in the server, to perform \code{unique} upon +#' @param x.name a character string providing the name of the variable, in the server, to perform \code{unique} upon #' @param newobj a character string that provides the name for the output object #' that is stored on the data servers. Default \code{unique.newobj}. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. 
If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.unique} returns the vector of unique R objects which are written to the server-side. #' @examples #' \dontrun{ diff --git a/R/ds.var.R b/R/ds.var.R index 0a74d4ebd..178dc4436 100644 --- a/R/ds.var.R +++ b/R/ds.var.R @@ -23,9 +23,9 @@ #' components will be undertaken. Default is FALSE to save time. #' It is suggested that checks #' should only be undertaken once the function call has failed. -#' @param datasources a list of \code{\link{DSConnection-class}} +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @return \code{ds.var} returns to the client-side a list including:\cr #' #' \code{Variance.by.Study}: estimated variance, \code{Nmissing} diff --git a/R/ds.vectorCalc.R b/R/ds.vectorCalc.R index bc090a87d..f8918aab4 100644 --- a/R/ds.vectorCalc.R +++ b/R/ds.vectorCalc.R @@ -3,18 +3,18 @@ #' @description Carries out a row-wise operation on two or more vector. The function calls no #' server side function; it uses the R operation symbols built in DataSHIELD. #' @details In DataSHIELD it is possible to perform an operation on vectors by just using the relevant -#' R symbols (e.g. '+' for addtion, '*' for multiplication, '-' for substraction and '/' for division). +#' R symbols (e.g. '+' for addition, '*' for multiplication, '-' for subtraction and '/' for division). #' This might however be inconvenient if the number of vectors to include in the operation is large. 
#' This function takes the names of two or more vectors and performs the desired operation which could be -#' an addition, a multiplication, a substraction or a division. If one or more vectors have a missing value +#' an addition, a multiplication, a subtraction or a division. If one or more vectors have a missing value #' at any one entry (i.e. observation), the operation returns a missing value ('NA') for that entry; the output #' vectors has, hence the same length as the input vectors. #' @param x a vector of characters, the names of the vectors to include in the operation. #' @param calc a character, a symbol that indicates the mathematical operation to carry out: #' '+' for addition, '/' for division, *' for multiplication and '-' for subtraction. #' @param newobj the name of the output object. By default the name is 'vectorcalc.newobj'. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @return no data are returned to user, the output vector is stored on the server side. #' @author Gaye, A. #' @export diff --git a/R/getPooledMean.R b/R/getPooledMean.R index ded7fae8a..dba45bf30 100644 --- a/R/getPooledMean.R +++ b/R/getPooledMean.R @@ -4,8 +4,8 @@ #' @details This function is called to avoid calling the client function 'ds.mean' #' which may stop the process due to some checks not required when computing a mean inside #' a function. -#' @param dtsources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. 
+#' @param dtsources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @param x a character, the name of a numeric vector #' @keywords internal #' @return a pooled mean diff --git a/R/getPooledVar.R b/R/getPooledVar.R index 9fc9bbcd2..0738d4bf5 100644 --- a/R/getPooledVar.R +++ b/R/getPooledVar.R @@ -4,8 +4,8 @@ #' @details This function is called to avoid calling the client function 'ds.var' #' which may stop the process due to some checks not required when computing a mean inside #' a function. -#' @param dtsources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param dtsources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @param x a character, the name of a numeric vector #' @keywords internal #' @return a pooled variance diff --git a/R/glmChecks.R b/R/glmChecks.R index 535764484..6dcfe2ee7 100644 --- a/R/glmChecks.R +++ b/R/glmChecks.R @@ -1,19 +1,19 @@ #' #' @title Checks if the elements in the glm model have the right characteristics #' @description This is an internal function required by the client function \code{ds.glm} -#' to verify all the variables and ensure the process does not halt inadvertanly. +#' to verify all the variables and ensure the process does not halt inadvertently. #' @details the variables are checked to ensure they are defined, not empty (i.e. are not missing -#' at complete) and evantually (if 'offset' or 'weights') are of 'numeric' with non negative value +#' at complete) and eventually (if 'offset' or 'weights') are of 'numeric' with non negative value #' (if 'weights').
#' @param formula a character, a regression formula given as a string character #' @param data a character, the name of an optional data frame containing the variables in #' in the \code{formula}. -#' @param offset null or a numreric vector that can be used to specify an a priori known component to be +#' @param offset null or a numeric vector that can be used to specify an a priori known component to be #' included in the linear predictor during fitting. #' @param weights a character, the name of an optional vector of 'prior weights' to be used in the fitting #' process. Should be NULL or a numeric vector. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @keywords internal #' @return an integer 0 if check was passed and 1 if failed #' @author Gaye, A. diff --git a/R/isAssigned.R b/R/isAssigned.R index c0f5150f5..0460cb779 100644 --- a/R/isAssigned.R +++ b/R/isAssigned.R @@ -4,8 +4,8 @@ #' @details After calling an assign function it is important #' to know whether or not the action has been completed by #' checking if the output actually exists on the server side. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @param newobj a character, the name the object to look for. 
#' @keywords internal #' @return nothing is return but the process is stopped if diff --git a/R/isDefined.R b/R/isDefined.R index a02062578..621b809a8 100644 --- a/R/isDefined.R +++ b/R/isDefined.R @@ -3,9 +3,9 @@ #' @description This is an internal function. #' @details In DataSHIELD an object included in analysis must be defined (i.e. exists) #' in all the studies. If not the process should halt. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. #' If the \code{datasources} argument is not specified, the default set of connections will be -#' used: see \code{\link{datashield.connections_default}}. +#' used: see \code{\link[DSI]{datashield.connections_default}}. #' @param obj a character vector, the name of the object(s) to look for. #' @param error.message a Boolean which specifies if the function should stop and return #' an error message when the input object is not defined in one or more studies or to diff --git a/R/logical2int.R b/R/logical2int.R index b2a6d20bb..575960c41 100644 --- a/R/logical2int.R +++ b/R/logical2int.R @@ -1,7 +1,7 @@ #' #' @title Turns a logical operator into an integer #' @description This is an internal function. -#' @details This function is called to turn a logical oprator given as a +#' @details This function is called to turn a logical operator given as a #' character into an integer: '>' is turned into 1, '>=' into 2, '<' into 3, #' '<=' into 4, '==' into 5 and '!=' into 6. #' @param obj a character, the logical parameter to turn into an integer diff --git a/R/meanByClassHelper0a.R b/R/meanByClassHelper0a.R index d94898ff3..c1c51c9b6 100644 --- a/R/meanByClassHelper0a.R +++ b/R/meanByClassHelper0a.R @@ -7,9 +7,9 @@ #' @param b a character, the name of a factor vector. #' @param type a character which represents the type of analysis to carry out. 
If \code{type} is set to #' 'combine', a pooled table of results is generated. If \code{type} is set to 'split', a table of results -#' is genrated for each study. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' is generated for each study. +#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @return a table or a list of tables that hold the length of the numeric variable and its mean #' and standard deviation in each subgroup (subset). #' @keywords internal diff --git a/R/meanByClassHelper0b.R b/R/meanByClassHelper0b.R index 21b66d440..89c1c17d6 100644 --- a/R/meanByClassHelper0b.R +++ b/R/meanByClassHelper0b.R @@ -2,15 +2,15 @@ #' @title Runs the computation if variables are within a table structure #' @description This is an internal function. #' @details This function is called by the function 'ds.meanByClass' to produce the final tables -#' if the user soecify a table structure. +#' if the user specifies a table structure. #' @param x a character, the name of the dataset to get the subsets from. #' @param outvar a character vector, the names of the continuous variables #' @param covar a character vector, the names of up to 3 categorical variables #' @param type a character which represents the type of analysis to carry out. If \code{type} is set to #' 'combine', a pooled table of results is generated. If \code{type} is set to 'split', a table of results -#' is genrated for each study. -#' @param datasources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' is generated for each study.
+#' @param datasources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @return a table or a list of tables that hold the length of the numeric variable(s) and their mean #' and standard deviation in each subgroup (subset). #' @keywords internal diff --git a/R/meanByClassHelper1.R b/R/meanByClassHelper1.R index 6010e58b0..4b98201e6 100644 --- a/R/meanByClassHelper1.R +++ b/R/meanByClassHelper1.R @@ -3,8 +3,8 @@ #' @description This is an internal function. #' @details This function is called by the function 'ds.meanByClass' to break down #' the initial table by the specified categorical variables. -#' @param dtsource a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param dtsource a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @param tables a character vector, the tables to breakdown #' @param variable a character, the variable to subset on #' @param categories a character vector, the classes in the variables to subset on diff --git a/R/meanByClassHelper2.R b/R/meanByClassHelper2.R index a0b0dfef4..55dca1c33 100644 --- a/R/meanByClassHelper2.R +++ b/R/meanByClassHelper2.R @@ -2,12 +2,12 @@ #' @title Generates a table for pooled results #' @description This is an internal function. #' @details This function is called by the function 'ds.meanByClass' to produce the final table -#' if the user sets the parmater 'type' to combine (the default behaviour of 'ds.meanByClass'). -#' @param dtsources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. 
+#' if the user sets the parameter 'type' to combine (the default behaviour of 'ds.meanByClass'). +#' @param dtsources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @param tablenames a character vector, the name of the subset tables #' @param variables a character vector, the names of the continuous variables to computes a mean for. -#' @param invalidrecorder a list, holds informations about invalid subsets in each study. +#' @param invalidrecorder a list, holds information about invalid subsets in each study. #' @keywords internal #' @return a matrix, a table which contains the length, mean and standard deviation of each of the #' specified 'variables' in each subset table. diff --git a/R/meanByClassHelper3.R b/R/meanByClassHelper3.R index 0b8200334..4c834b78a 100644 --- a/R/meanByClassHelper3.R +++ b/R/meanByClassHelper3.R @@ -2,12 +2,12 @@ #' @title Generates results tables for each study separately #' @description This is an internal function. #' @details This function is called by the function 'ds.meanByClass' to produce the final tables -#' if the user sets the parmater 'type' to 'split'. -#' @param dtsources a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' if the user sets the parameter 'type' to 'split'. +#' @param dtsources a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @param tablenames a character vector, the name of the subset tables #' @param variables a character vector, the names of the continuous variables to computes a mean for. 
-#' @param invalidrecorder a list, holds informations about invalid subsets in each study +#' @param invalidrecorder a list, holds information about invalid subsets in each study #' @keywords internal #' @return a list which one results table for each study. #' @author Gaye, A. @@ -19,7 +19,7 @@ meanByClassHelper3 <- function(dtsources, tablenames, variables, invalidrecorder finalist <- vector('list', length(dtsources)) for(s in 1:length(dtsources)){ - # now get the mean and SD for the continuous variables in each of tthe subset tables + # now get the mean and SD for the continuous variables in each of the subset tables finaltable <- matrix(numeric(0), ncol=numtables) finalrows <- c() for(z in 1:length(variables)){ diff --git a/R/meanByClassHelper4.R b/R/meanByClassHelper4.R index 88e827fce..9597a7e87 100644 --- a/R/meanByClassHelper4.R +++ b/R/meanByClassHelper4.R @@ -3,8 +3,8 @@ #' @description This is an internal function. #' @details This function is called by the function 'ds.meanByClass' to obtain 'loose' #' subset tables because the 'subsetByClass' function does not handle a table within a list. -#' @param dtsource a list of \code{\link{DSConnection-class}} objects obtained after login. If the -#' the default set of connections will be used: see \link{datashield.connections_default}. +#' @param dtsource a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +#' the default set of connections will be used: see \link[DSI]{datashield.connections_default}. #' @param alist the name of the list that holds the final subset tables #' @param initialtable a character the name of the table that the subset were generated from #' @param variable a character, the variable to subset on diff --git a/R/subsetHelper.R b/R/subsetHelper.R index c01529ac5..025a06803 100644 --- a/R/subsetHelper.R +++ b/R/subsetHelper.R @@ -7,9 +7,9 @@ #' This function is internal. 
#' #' Server function called: \code{dimDS} -#' @param dts a list of \code{\link{DSConnection-class}} +#' @param dts a list of \code{\link[DSI]{DSConnection-class}} #' objects obtained after login. If the \code{datasources} argument is not specified -#' the default set of connections will be used: see \code{\link{datashield.connections_default}}. +#' the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}. #' @param data a character string specifying the name of the data frame or #' the factor vector and the range of the subset. #' @param rs a vector of two integers specifying the indices of the rows de extract. diff --git a/README.md b/README.md index 936e913c2..c82341b5f 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,38 @@ -dsBaseClient -============ +## dsBaseClient: 'DataSHIELD' Client Side Base Functions -DataSHIELD client side base R library. +[![License](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0.html) +[![](https://www.r-pkg.org/badges/version/dsBaseClient?color=black)](https://cran.r-project.org/package=dsBaseClient) +[![R build +status](https://github.com/datashield/dsBaseClient/workflows/R-CMD-check/badge.svg)](https://github.com/datashield/dsBaseClient/actions) +[![Codecov test coverage](https://codecov.io/gh/datashield/dsBaseClient/graph/badge.svg)](https://app.codecov.io/gh/datashield/dsBaseClient) -| Branch | dsBase status | dsBase tests | dsBaseClient status | dsBaseClient tests | -| -------- | ------------- | ------------ | ------------------- | ------------------ | -| Master | [![Build Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBase?branchName=master)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=3&branchName=master) | | [![Build 
Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBaseClient?branchName=master)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=1&branchName=master) | [Tests](https://datashield.github.io/testStatus/dsBaseClient/master/latest/) | -| 6.0 | [![Build Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBase?branchName=v6.0)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=3&branchName=v6.0) | | [![Build Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBaseClient?branchName=v6.0)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=1&branchName=v6.0) | [Tests](https://datashield.github.io/testStatus/dsBaseClient/v6.0/latest/) | -| 6.0.1 | [![Build Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBase?branchName=v6.0.1)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=3&branchName=v6.0.1) | | [![Build Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBaseClient?branchName=v6.0.1)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=1&branchName=v6.0.1) | [Tests](https://datashield.github.io/testStatus/dsBaseClient/v6.0.1/latest/) | -| 6.1 | [![Build Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBase?branchName=v6.1)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=3&branchName=v6.1) | [Tests](https://datashield.github.io/testStatus/dsBase/v6.1/latest/) | [![Build Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBaseClient?branchName=v6.1)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=1&branchName=v6.1) | 
[Tests](https://datashield.github.io/testStatus/dsBaseClient/v6.1/latest/) | -| 6.1.1 | [![Build Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBase?branchName=v6.1.1)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=3&branchName=v6.1.1) | [Tests](https://datashield.github.io/testStatus/dsBase/v6.1.1/latest/) | [![Build Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBaseClient?branchName=v6.1.1)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=1&branchName=v6.1.1) | [Tests](https://datashield.github.io/testStatus/dsBaseClient/v6.1.1/latest/) | -| 6.2 | [![Build Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBase?branchName=v6.2)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=3&branchName=v6.2) | [Tests](https://datashield.github.io/testStatus/dsBase/v6.2/latest/) | [![Build Status](https://dev.azure.com/datashield-testing/datashield/_apis/build/status/datashield.dsBaseClient?branchName=v6.2)](https://dev.azure.com/datashield-testing/datashield/_build/latest?definitionId=1&branchName=v6.2) | [Tests](https://datashield.github.io/testStatus/dsBaseClient/v6.2/latest/) | +## Installation +You can install the released version of dsBaseClient from +[CRAN](https://cran.r-project.org/package=dsBaseClient) with: -[![License](https://img.shields.io/badge/license-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0.html) +``` r +install.packages("dsBaseClient") +``` -About -===== +And the development version from +[GitHub](https://github.com/datashield/dsBaseClient/) with: + -DataSHIELD is a software package which allows you to do non-disclosive federated analysis on sensitive data. Our website (https://www.datashield.org) has in depth descriptions of what it is, how it works and how to install it. 
A key point to highlight is that DataSHIELD has a client-server infrastructure, so the dsBase package (https://github.com/datashield/dsBase) needs to be used in conjuction with the dsBaseClient package (https://github.com/datashield/dsBaseClient) - trying to use one without the other makes no sense. +``` r +install.packages("remotes") +remotes::install_github("datashield/dsBaseClient", "") + +# Install v6.3.4 with the following +remotes::install_github("datashield/dsBaseClient", "6.3.4") +``` + +For a full list of development branches, checkout https://github.com/datashield/dsBaseClient/branches + + +## About + +DataSHIELD is a software package which allows you to do non-disclosive federated analysis on sensitive data. Our website (https://www.datashield.org) has in depth descriptions of what it is, how it works and how to install it. A key point to highlight is that DataSHIELD has a client-server infrastructure, so the dsBase package (https://github.com/datashield/dsBase) needs to be used in conjunction with the dsBaseClient package (https://github.com/datashield/dsBaseClient) - trying to use one without the other makes no sense. Detailed instructions on how to install DataSHIELD are at https://www.datashield.org/wiki. @@ -31,3 +45,40 @@ The code here is organised as: | obiba CRAN | Where you probably should install DataSHIELD from. | | releases | Stable releases. | | master branch | Mostly in sync with the latest release, changes rarely. | + +## References + +[1] Burton P, Wilson R, Butters O, Ryser-Welch P, Westerberg A, Abarrategui L, Villegas-Diaz R, + Avraam D, Marcon Y, Bishop T, Gaye A, Escribà Montagut X, Wheater S (2025). + _dsBaseClient: 'DataSHIELD' Client Side Base Functions_. R package version 6.3.4. 
+ +[2] Gaye A, Marcon Y, Isaeva J, LaFlamme P, Turner A, Jones E, Minion J, Boyd A, Newby C, Nuotio + M, Wilson R, Butters O, Murtagh B, Demir I, Doiron D, Giepmans L, Wallace S, Budin-Ljøsne I, + Oliver Schmidt C, Boffetta P, Boniol M, Bota M, Carter K, deKlerk N, Dibben C, Francis R, + Hiekkalinna T, Hveem K, Kvaløy K, Millar S, Perry I, Peters A, Phillips C, Popham F, Raab G, + Reischl E, Sheehan N, Waldenberger M, Perola M, van den Heuvel E, Macleod J, Knoppers B, + Stolk R, Fortier I, Harris J, Woffenbuttel B, Murtagh M, Ferretti V, Burton P (2014). + “DataSHIELD: taking the analysis to the data, not the data to the analysis.” _International + Journal of Epidemiology_, *43*(6), 1929-1944. . + +[3] Wilson R, W. Butters O, Avraam D, Baker J, Tedds J, Turner A, Murtagh M, R. Burton P (2017). + “DataSHIELD – New Directions and Dimensions.” _Data Science Journal_, *16*(21), 1-21. + . + +[4] Avraam D, Wilson R, Aguirre Chan N, Banerjee S, Bishop T, Butters O, Cadman T, Cederkvist L, + Duijts L, Escribà Montagut X, Garner H, Gonçalves G, González J, Haakma S, Hartlev M, + Hasenauer J, Huth M, Hyde E, Jaddoe V, Marcon Y, Mayrhofer M, Molnar-Gabor F, Morgan A, + Murtagh M, Nestor M, Nybo Andersen A, Parker S, Pinot de Moira A, Schwarz F, + Strandberg-Larsen K, Swertz M, Welten M, Wheater S, Burton P (2024). “DataSHIELD: + mitigating disclosure risk in a multi-site federated analysis platform.” _Bioinformatics + Advances_, *5*(1), 1-21. . + +> **_Note:_** Apple Mx architecture users, please be aware that there are some numerical limitations on this platform, which leads to unexpected results when using base R packages, like stats​. +> +> x <- c(0, 3, 7) +> +> 1 - cor(x, x)​ +> +> The above should result in a value of zero. 
+> +> _Also See:_ For more details see https://cran.r-project.org/doc/FAQ/R-FAQ.html#Why-doesn_0027t-R-think-these-numbers-are-equal_003f and the bug report: https://bugs.r-project.org/show_bug.cgi?id=18941 diff --git a/_pkgdown.yml b/_pkgdown.yml index 4c98f6e56..f46c2ebc7 100644 --- a/_pkgdown.yml +++ b/_pkgdown.yml @@ -1,3 +1,4 @@ template: + lang: en-GB params: bootswatch: simplex diff --git a/armadillo_azure-pipelines.yml b/armadillo_azure-pipelines.yml new file mode 100644 index 000000000..8439c8cef --- /dev/null +++ b/armadillo_azure-pipelines.yml @@ -0,0 +1,643 @@ +######################################################################################### +# DataSHIELD Azure test suite. +# Starts with a vanilla Armadillo docker composition, installs dsBase +# and dsBaseClient (as well as dependencies - including a fully functional +# Armadillo server). +# Does checks and tests then saves results to testStatus repo. +# +# Inside the root directory $(Pipeline.Workspace) will be a file tree like: +# /dsBaseClient <- Checked out version of datashield/dsBaseClient +# /testStatus <- Checked out version of datashield/testStatus +# /logs <- Where results of tests and lots are collated +# +# As of May 2020 this takes ~ 70 mins to run. +# As of Nov 2020 this takes ~ 120 mins to run. +# As of Mar 2024 this takes ~ 300+ mins to run! +# As of Mar 2024 this takes ~ 300+ mins to run! +# As of Jun 2024 this takes ~ 360+ mins to run! +# +# The only things that should ever be changed are the repo branches in the resources. +# +######################################################################################### + + +##################################################################################### +# These should all be constant, except test_filter. This can be used to test subsets +# of test files in the testthat directory. Options are like: +# '*' <- Run all tests +# 'ds.asNumeric*' <- Run all ds.asNumeric tests, i.e. all the arg, smk etc tests. 
+# '*_smk_*' <- Run all the smoke tests for all functions. +variables: + datetime: $[format('{0:yyyyMMddHHmmss}', pipeline.startTime)] + repoName: $(Build.Repository.Name) + projectName: 'dsBaseClient' + branchName: $(Build.SourceBranchName) + test_filter: '*' + _r_check_system_clock_: 0 + + +######################################################################################### +# Need to define all the GH repos and their access tokens, see: +# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml +resources: + repositories: + - repository: testStatusRepo + type: github + endpoint: datashield-testing + name: datashield/testStatus + ref: master + + +######################################################################################### +# When and under what condition to run the pipeline. +schedules: + - cron: "0 0 * * 0" + displayName: Weekly build - master + branches: + include: + - master + always: true + - cron: "0 2 * * *" + displayName: Nightly build - v6.3.4-dev + branches: + include: + - v6.3.4-dev + always: true + +######################################################################################### +# Jobs + +jobs: +- job: build_and_run_tests + timeoutInMinutes: 360 + pool: + vmImage: ubuntu-24.04 + + steps: + ##################################################################################### + # Checkout the source code to a subfolder. 
+ # This may give an error in the logs like: + # [warning]Unable move and reuse existing repository to required location + # This is an Azure bug - https://github.com/microsoft/azure-pipelines-yaml/issues/403 + - checkout: self + path: 'dsBaseClient' + + - checkout: testStatusRepo + path: 'testStatus' + persistCredentials: true + condition: and(eq(variables['Build.Repository.Name'], 'datashield/dsBaseClient'), ne(variables['Build.Reason'], 'PullRequest')) + + + ##################################################################################### + # The MySQL install that comes with the VM doesn't seem compatible with our set up + # so we delete it. + # If previous steps have failed then don't run. + - bash: | + + # Work-around for temporary Bazel's apt repository issue. + curl https://bazel.build/bazel-release.pub.gpg | sudo apt-key add - + + # Purge the default mysql installed on the VM as it is incompatible with our stuff. + sudo service mysql stop + sudo apt-get update + sudo apt-get remove --purge mysql-client mysql-server mysql-common -y + sudo apt-get purge mysql-client mysql-server mysql-common -y + sudo apt-get autoremove -y + sudo apt-get autoclean -y + sudo rm -rf /var/lib/mysql/ + + displayName: 'Uninstall default MySQL' + condition: succeeded() + + + ##################################################################################### + # The Azure VMs have 2 CPUs, so configure R to use multiple CPUs when compiling/installing packages. + # If previous steps have failed then don't run. + - bash: | + + echo "options(Ncpus=4)" >> ~/.Rprofile + + displayName: 'Tweak local R env using .Rprofile' + condition: succeeded() + + + ##################################################################################### + # Install R and all the dependencies dsBaseClient requires. + # If previous steps have failed then don't run.
+ - bash: | + sudo apt-get install --no-install-recommends software-properties-common dirmngr + wget -qO- https://cloud.r-project.org/bin/linux/ubuntu/marutter_pubkey.asc | sudo tee -a /etc/apt/trusted.gpg.d/cran_ubuntu_key.asc + sudo add-apt-repository "deb https://cloud.r-project.org/bin/linux/ubuntu $(lsb_release -cs)-cran40/" + sudo apt-get update -qq + sudo apt-get upgrade -y + + sudo apt-get install -qq libxml2-dev libcurl4-openssl-dev libssl-dev libgsl-dev libgit2-dev r-base -y + sudo apt-get install -qq libharfbuzz-dev libfribidi-dev libmagick++-dev -y + sudo R -q -e "install.packages(c('devtools','covr'), dependencies=TRUE, repos='https://cloud.r-project.org')" + sudo R -q -e "install.packages(c('fields','meta','metafor','ggplot2','gridExtra','data.table'), dependencies=TRUE, repos='https://cloud.r-project.org')" + sudo R -q -e "install.packages(c('DSI','DSOpal','DSLite'), dependencies=TRUE, repos='https://cloud.r-project.org')" + sudo R -q -e "install.packages(c('MolgenisAuth', 'MolgenisArmadillo', 'DSMolgenisArmadillo'), dependencies=TRUE, repos='https://cloud.r-project.org')" + sudo R -q -e "install.packages(c('DescTools','e1071'), dependencies=TRUE, repos='https://cloud.r-project.org')" + + sudo R -q -e "library('devtools'); devtools::install_github(repo='datashield/dsDangerClient', ref='v6.3.4-dev', dependencies = TRUE)" + + # XML grep for coverage report merging + sudo apt-get install -qq xml-twig-tools -y + + displayName: 'Install all dependencies for dsBaseClient' + condition: succeeded() + + + ##################################################################################### + # Check that the man files in the repo match what is in the function headers. i.e. has + # devtools::document() been run before commiting? + # If previous steps have failed then don't run. + # If this step fails still mark as failed, but don't stop the rest of the steps running. + - bash: | + + # Concatenate all the files in the man dir into one long string and md5sum it. 
+ orig_sum=$(find man -type f | sort -u | xargs cat | md5sum) + + # Rebuild the documentation. + R -e "devtools::document()" + + # Concatenate all the files in the man dir into one long string and md5sum it. + new_sum=$(find man -type f | sort -u | xargs cat | md5sum) + + if [ "$orig_sum" != "$new_sum" ]; then + echo "Your committed manual files (man/*.Rd) are out of sync with the documentation in the R files." + echo "Run devtools::document() locally then commit again." + exit 1 + else + echo "Documentation up to date." + exit 0 + fi + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + displayName: 'Check manual updated before being committed' + condition: succeeded() + continueOnError: true + + + ##################################################################################### + # Run devtools::check on the checked out source code. + # If previous steps have failed then don't run. + # If this step fails still mark as failed, but don't stop the rest of the steps running. + - bash: | + + R -q -e "library('devtools'); devtools::check(args = c('--no-examples', '--no-tests'))" | tee azure-pipelines_check.Rout + grep --quiet "^0 errors" azure-pipelines_check.Rout && grep --quiet " 0 warnings" azure-pipelines_check.Rout && grep --quiet " 0 notes" azure-pipelines_check.Rout + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + displayName: 'Devtools checks' + condition: succeeded() + continueOnError: true + + ##################################################################################### + # + # Armadillo phase + # + ##################################################################################### + + ##################################################################################### + # Deploy docker for Armadillo. + # If previous steps have failed then don't run.
+ - task: DockerCompose@1 + inputs: + action: Run Services + dockerComposeFile: ../dsBaseClient/docker-compose_armadillo.yml + projectName: dsbaseclient + qualifyImageNames: true + buildImages: true + abortOnContainerExit: true + detached: true + displayName: 'Install Armadillo servers (armadillo, rserver, minio)' + condition: succeeded() + + + ##################################################################################### + # Install test datasets. + # If previous steps have failed then don't run. + - bash: | + sleep 60 + + R -q -f "molgenis_armadillo-upload_testing_datasets.R" + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient/tests/testthat/data_files + displayName: 'Install test datasets to Armadillo' + condition: succeeded() + + + ##################################################################################### + # Install dsBase. + # If previous steps have failed then don't run. + - bash: | + + curl -u admin:admin -X GET http://localhost:8080/packages + + curl -u admin:admin --max-time 300 -v -H 'Content-Type: multipart/form-data' -F "file=@dsBase_6.3.4-permissive.tar.gz" -X POST http://localhost:8080/install-package + sleep 60 + + docker container restart dsbaseclient_armadillo_1 + sleep 30 + + curl -u admin:admin -X POST http://localhost:8080/whitelist/dsBase + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + displayName: 'Install dsBase to Armadillo' + condition: succeeded() + + + ##################################################################################### + # Essentially run devtools::test() on the checked out code. This is wrapped up with + # code coverage. The actual command is vary convoluted as it had to do some things + # which are not default behaviour: output the results to a JUnit xml file, not stop + # when a small number of errors have happened, run through the code coverage tool. + # TODO: Tidy up variable names - use timestamps here. + # TODO: Why is DSLite needed for this to run?! 
+ - bash: | + + # There is an issue with the way we are using packages. The wrapped up test command + # below fails in a way that implies that it is not installed. I cannot figure out + # why this is case. As a work around we can run some of the functions below. My + # best guess is that there is an implicit build or similar that happens. Although + # I cannot replicate that directly with build etc directly. + + sudo R --verbose -e 'devtools::reload()' + + mkdir $(Pipeline.Workspace)/logs + + # run the coverage tool and output to coveragelist.csv + # testthat::testpackage uses a MultiReporter, comprised of a ProgressReporter and JunitReporter + # R output and messages are redirected by sink() to test_console_output.txt + # junit reporter output is to test_results.xml + # + # "_-|arg-|smk-|datachk-|disc-|math-|expt-|expt_smk-" + # testthat::test_package("$(projectName)", filter = "_-|datachk-|smk-|arg-|disc-|perf-|smk_expt-|expt-|math-", reporter = multi_rep, stop_on_failure = FALSE) + sudo R -q -e ' + library(covr); + dsbase.res <- covr::package_coverage( + type = c("none"), + code = c( + '"'"' + library(testthat); + output_file <- file("test_console_output_dsbase.txt"); + sink(output_file); + sink(output_file, type = "message"); + library(testthat); + junit_rep <- JunitReporter$new(file = "test_results_dsbase.xml"); + progress_rep <- ProgressReporter$new(max_failures = 999999); + multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); + options("datashield.return_errors" = FALSE); + options("default_driver" = "ArmadilloDriver"); + testthat::test_package("$(projectName)", filter = "_-|datachk-|smk-|arg-|disc-|perf-|smk_expt-|expt-|math-", reporter = multi_rep, stop_on_failure = FALSE) + '"'"' + ) + ); + base::saveRDS(dsbase.res, "test_results_dsbase.rds")' + + # display the test console output + cat test_console_output_dsbase.txt + + grep --quiet " FAIL 0 " test_console_output_dsbase.txt + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + 
displayName: 'Code coverage and JUnit report output, with dsBase' + condition: succeeded() + + + ##################################################################################### + # Parse the JUnit file to see if there are any errors/warnings. If there are then + # echo them so finding bugs should be easier. + # This should run even if previous steps have failed. + - bash: | + + # Strip out when error and failure = 0 and count the number of times it does not. + issue_count=$(sed 's/failures="0" errors="0"//' test_results_dsbase.xml | sed 's/errors="0" failures="0"//' | grep --count errors=) + echo "Number of testsuites with issues: "$issue_count + echo "Testsuites with issues:" + sed 's/failures="0" errors="0"//' test_results_dsbase.xml | sed 's/errors="0" failures="0"//' | grep errors= > issues.log + cat issues.log + exit $issue_count + + workingDirectory: $(Pipeline.Workspace)/logs + displayName: 'Check for errors & Failures in JUnit file' + condition: succeededOrFailed() + + + ##################################################################################### + # Essentially run devtools::test() on the checked out code for discctrl reporting. + # The actual command is vary convoluted as it had to do some things + # which are not default behaviour: output the results to a JUnit xml file, not stop + # when a small number of errors have happened, run through the code coverage tool. + # TODO: Tidy up variable names - use timestamps here. 
+ #- bash: | + + # junit reporter output is to test_results_discctrl.xml + # sudo R -q -e ' + # library(testthat); + # output_file <- file("test_console_output_discctrl.txt"); + # sink(output_file); + # sink(output_file, type = "message"); + # junit_rep <- JunitReporter$new(file = "test_results_discctrl.xml"); + # progress_rep <- ProgressReporter$new(max_failures = 999999); + # multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); + # options("default_driver" = "ArmadilloDriver"); + # testthat::test_package("$(projectName)", filter = "_-|discctrl-", reporter = multi_rep, stop_on_failure = FALSE)' + + # cat test_console_output_discctrl.txt + + # if [ -e test_results_discctrl.xml ]; then + # mv test_results_discctrl.xml $(Pipeline.Workspace)/logs + # else + # touch $(Pipeline.Workspace)/logs/test_results_discctrl.xml + # fi + + # workingDirectory: $(Pipeline.Workspace)/dsBaseClient + # displayName: 'discctrl report output' + # condition: succeededOrFailed() + + + ##################################################################################### + # Install dsDanger on Opal server + # If previous steps have failed then don't run + - bash: | + curl -u admin:admin http://localhost:8080/whitelist + + curl -u admin:admin -v -H 'Content-Type: multipart/form-data' -F "file=@dsDanger_6.3.4.tar.gz" -X POST http://localhost:8080/install-package + + docker container restart dsbaseclient_armadillo_1 + sleep 60 + + curl -u admin:admin -X POST http://localhost:8080/whitelist/dsDanger + + curl -u admin:admin http://localhost:8080/whitelist + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + displayName: 'Install dsDanger package on Armadillo server' + condition: succeeded() + + + ##################################################################################### + # Essentially run devtools::test() on the checked out code. This is wrapped up with + # code coverage. 
The actual command is very convoluted as it had to do some things + # which are not default behaviour: output the results to a JUnit xml file, not stop + # when a small number of errors have happened, run through the code coverage tool. + # TODO: Tidy up variable names - use timestamps here. + - bash: | + + # See, 'Code coverage and JUnit report output' for issues with the approach and improvement needed. + sudo R --verbose -e 'devtools::reload()' + + pwd + mkdir $(Pipeline.Workspace)/logs + + # run the coverage tool and output to coveragelist.csv + # testthat::testpackage uses a MultiReporter, comprised of a ProgressReporter and JunitReporter + # R output and messages are redirected by sink() to test_console_output.txt + # junit reporter output is to test_results.xml + sudo R -q -e ' + library(covr); + dsdanger.res <- covr::package_coverage( + type = c("none"), + code = c( + '"'"' + library(testthat); + output_file <- file("test_console_output_dsdanger.txt"); + sink(output_file); + sink(output_file, type = "message"); + library(testthat); + junit_rep <- JunitReporter$new(file = "test_results_dsdanger.xml"); + progress_rep <- ProgressReporter$new(max_failures = 999999); + multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); + options("datashield.return_errors" = FALSE); + options("default_driver" = "ArmadilloDriver"); + testthat::test_package("$(projectName)", filter = "__dgr-|datachk_dgr-|smk_dgr-|arg_dgr-|disc_dgr-|smk_expt_dgr-|expt_dgr-|math_dgr-", reporter = multi_rep, stop_on_failure = FALSE) + '"'"' + ) + ); + base::saveRDS(dsdanger.res, "test_results_dsdanger.rds")' + + # Merge coverage results + cat test_console_output_dsbase.txt test_console_output_dsdanger.txt > $(Pipeline.Workspace)/logs/test_console_output.txt + xml_grep --pretty_print indented --wrap "testsuites" --descr "" --cond "testsuite" test_results_dsbase.xml test_results_dsdanger.xml > test_results.xml + + # Create 'coveragelist.csv' + sudo R -q -e ' + library(covr); + dsbase.res <-
base::readRDS("test_results_dsbase.rds") + write.csv( + coverage_to_list( + dsbase.res + ), + "coveragelist.csv" + )' + + # display the test console output + cat test_console_output_dsdanger.txt + + mv coveragelist.csv $(Pipeline.Workspace)/logs + mv test_results.xml $(Pipeline.Workspace)/logs + + grep --quiet " FAIL 0 " test_console_output_dsdanger.txt + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + displayName: 'Code coverage and JUnit report output, with dsBase and dsDanger' + condition: succeeded() + + + ##################################################################################### + # Parse the JUnit file to see if there are any errors/warnings. If there are then + # echo them so finding bugs should be easier. + # This should run even if previous steps have failed. + - bash: | + + # Strip out when error and failure = 0 and count the number of times it does not. + issue_count=$(sed 's/failures="0" errors="0"//' test_results.xml | sed 's/errors="0" failures="0"//' | grep --count errors=) + echo "Number of testsuites with issues: "$issue_count + echo "Testsuites with issues:" + sed 's/failures="0" errors="0"//' test_results.xml | sed 's/errors="0" failures="0"//' | grep errors= > issues.log + cat issues.log + exit $issue_count + + workingDirectory: $(Pipeline.Workspace)/logs + displayName: 'Check for errors & Failures in JUnit file' + condition: succeededOrFailed() + + + ##################################################################################### + # Essentially run devtools::test() on the checked out code for bug reporting. + # The actual command is vary convoluted as it had to do some things + # which are not default behaviour: output the results to a JUnit xml file, not stop + # when a small number of errors have happened, run through the code coverage tool. + # TODO: Tidy up variable names - use timestamps here. 
+# - bash: | + + # junit reporter output is to test_results_bug.xml + # sudo R -q -e ' + # library(testthat); + # output_file <- file("test_console_output_bug.txt"); + # sink(output_file); + # sink(output_file, type = "message"); + # junit_rep <- JunitReporter$new(file = "test_results_bug.xml"); + # progress_rep <- ProgressReporter$new(max_failures = 999999); + # multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); + # options("default_driver" = "ArmadilloDriver"); + # testthat::test_package("$(projectName)", filter = "__bug-|datachk_bug-|smk_bug-|arg_bug-|disc_bug-|smk_expt_bug-|expt_bug-|math_bug-", reporter = multi_rep, stop_on_failure = FALSE)' + + # cat test_console_output_bug.txt + + # if [ -e test_results_bug.xml ]; then + # mv test_results_bug.xml $(Pipeline.Workspace)/logs + # else + # touch $(Pipeline.Workspace)/logs/test_results_bug.xml + # fi + +# workingDirectory: $(Pipeline.Workspace)/dsBaseClient +# displayName: 'Bug report output' +# condition: succeededOrFailed() + + + ##################################################################################### + # Parse the JUnit file to see if there are any errors/warnings. If there are then + # echo them so finding bugs should be easier. + # This should run even if previous steps have failed. + - bash: | + + # Strip out when error and failure = 0 and count the number of times it does not. 
+ issue_count=$(sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep --count errors=) + echo "Number of testsuites with issues: "$issue_count + echo "Testsuites with issues:" + sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep errors= > issues.log + cat issues.log + no_issue_count=$(sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep -v --count errors=) + echo + echo "Number of testsuites with no issues: "$no_issue_count + echo "Testsuites with no issues:" + sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep -v errors= > no_issues.log + cat no_issues.log + exit 0 + + workingDirectory: $(Pipeline.Workspace)/logs + displayName: 'Bug summary report output' + condition: succeededOrFailed() + + ##################################################################################### + # Windup phase + ##################################################################################### + + ##################################################################################### + # Output some important version numbers to file. This gets added to the testStatus + # commit so it can be parsed and used on the status table. + - bash: | + + echo 'branch:'$(branchName) >> $(datetime).txt + echo 'os:'$(lsb_release -ds) >> $(datetime).txt + echo 'R:'$(R --version | head -n 1) >> $(datetime).txt + echo 'opal:'$(opal system --opal localhost:8443 --user administrator --password "datashield_test&" --version) >> $(datetime).txt + + workingDirectory: $(Pipeline.Workspace)/logs + displayName: 'Write versions to file' + condition: succeededOrFailed() + + + ##################################################################################### + # Checkout the testStatus repo, add the results from here, push back to GH. + # TODO: Automatically pull in better email/name info from somewhere.
+ # TODO: More debug info in commit message + - bash: | + + # Git needs some config set to be able to push to a repo. + git config --global user.email "you@example.com" + git config --global user.name "Azure pipeline" + + # This repo is checked out in detached head state, so reconnect it here. + git checkout master + + # It is possible that other commits have been made to the testStatus repo since it + # was checked out. i.e. other pipeline runs might have finished. + git pull + + # Make the directories if they don't already exist + mkdir --parents logs/$(projectName)/$(branchName) + mkdir --parents docs/$(projectName)/$(branchName)/latest + + cp $(Pipeline.Workspace)/logs/coveragelist.csv logs/$(projectName)/$(branchName)/ + cp $(Pipeline.Workspace)/logs/coveragelist.csv logs/$(projectName)/$(branchName)/$(datetime).csv + + cp $(Pipeline.Workspace)/logs/test_results.xml logs/$(projectName)/$(branchName)/ + cp $(Pipeline.Workspace)/logs/test_results.xml logs/$(projectName)/$(branchName)/$(datetime).xml + + cp $(Pipeline.Workspace)/logs/$(datetime).txt logs/$(projectName)/$(branchName)/ + + # Run the script to parse the results and build the html pages. 
+ # status.py JUnit_file.xml coverage_file.csv output_file.html local_repo_path remote_repo_name branch + source/status.py logs/$(projectName)/$(branchName)/$(datetime).xml logs/$(projectName)/$(branchName)/$(datetime).csv logs/$(projectName)/$(branchName)/$(datetime).txt status.html $(Pipeline.Workspace)/$(projectName) $(projectName) $(branchName) + + cp status.html docs/$(projectName)/$(branchName)/latest/index.html + git add logs/$(projectName)/$(branchName)/coveragelist.csv + git add logs/$(projectName)/$(branchName)/test_results.xml + git add logs/$(projectName)/$(branchName)/$(datetime).xml + git add logs/$(projectName)/$(branchName)/$(datetime).csv + git add logs/$(projectName)/$(branchName)/$(datetime).txt + git add docs/$(projectName)/$(branchName)/latest/index.html + + git commit -m "Azure auto test for $(projectName)/$(branchName) @ $(datetime)" -m "Debug info:\nProjectName:$(projectName)\nBranchName:$(branchName)\nDataTime:$(datetime)" + git push + exit 0 + + workingDirectory: $(Pipeline.Workspace)/testStatus + displayName: 'Parse test results' + condition: and(eq(variables['Build.Repository.Name'], 'datashield/dsBaseClient'), ne(variables['Build.Reason'], 'PullRequest')) + + + ##################################################################################### + # Output the environment information to the console. This is useful for debugging. + # Always do this, even if some of the above has failed or the job has been cancelled. 
+ - bash: | + + echo 'BranchName: '$(branchName) + echo 'ProjectName: '$(projectName) + echo 'RepoName: '$(repoName) + + echo -e "\n#############################" + echo -e "ls /: ######################" + ls $(Pipeline.Workspace) + + echo -e "\n#############################" + echo -e "lscpu: ######################" + lscpu + + echo -e "\n#############################" + echo -e "memory: #####################" + free -m + + echo -e "\n#############################" + echo -e "env: ########################" + env + + echo -e "\n#############################" + echo -e "Puppet version: #############" + /opt/puppetlabs/bin/puppet --version + /opt/puppetlabs/puppet/bin/r10k version + + echo -e "\n#############################" + echo -e "Rprofile: ###################" + cat $(Pipeline.Workspace)/dsBaseClient/.Rprofile + + echo -e "\n#############################" + echo -e "R installed.packages(): #####" + R -e 'installed.packages()' + + echo -e "\n#############################" + echo -e "R sessionInfo(): ############" + R -e 'sessionInfo()' + + sudo apt install tree -y + pwd + echo -e "\n#############################" + echo -e "File tree: ##################" + tree $(Pipeline.Workspace) + + displayName: 'Environment info' + condition: always() diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 3d5b466e4..6c6103d4a 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -1,8 +1,8 @@ ######################################################################################### # DataSHIELD Azure test suite. -# Starts with a vanilla Opal and Armadillo docker composition, installs dsBase +# Starts with a vanilla Opal docker composition, installs dsBase # and dsBaseClient (as well as dependencies - including a fully functional -# Opal and Armadillo server). +# Opal server). # Does checks and tests then saves results to testStatus repo. 
# # Inside the root directory $(Pipeline.Workspace) will be a file tree like: @@ -12,6 +12,9 @@ # # As of May 2020 this takes ~ 70 mins to run. # As of Nov 2020 this takes ~ 120 mins to run. +# As of Mar 2024 this takes ~ 300+ mins to run! +# As of Mar 2024 this takes ~ 300+ mins to run! +# As of Jun 2024 this takes ~ 360+ mins to run! # # The only things that should ever be changed are the repo branches in the resources. # @@ -55,10 +58,10 @@ schedules: - master always: true - cron: "0 2 * * *" - displayName: Nightly build - v6.3.0-dev + displayName: Nightly build - v6.3.4-dev branches: include: - - v6.3.0-dev + - v6.3.4-dev always: true ######################################################################################### @@ -66,9 +69,9 @@ schedules: jobs: - job: build_and_run_tests - timeoutInMinutes: 300 + timeoutInMinutes: 360 pool: - vmImage: ubuntu-20.04 + vmImage: ubuntu-24.04 steps: ##################################################################################### @@ -122,21 +125,22 @@ jobs: # Install R and all the dependencies dsBaseClient requires. # If previous steps have failed then don't run. 
- bash: | - - sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys E298A3A825C0D65DFD57CBB651716619E084DAB9 - sudo add-apt-repository "deb https://cloud.r-project.org/bin/linux/ubuntu focal-cran40/" - sudo add-apt-repository ppa:cran/libgit2 - sudo apt-get update + sudo apt-get install --no-install-recommends software-properties-common dirmngr + wget -qO- https://cloud.r-project.org/bin/linux/ubuntu/marutter_pubkey.asc | sudo tee -a /etc/apt/trusted.gpg.d/cran_ubuntu_key.asc + sudo add-apt-repository "deb https://cloud.r-project.org/bin/linux/ubuntu $(lsb_release -cs)-cran40/" + sudo apt-get update -qq + sudo apt-get upgrade -y sudo apt-get install -qq libxml2-dev libcurl4-openssl-dev libssl-dev libgsl-dev libgit2-dev r-base -y - sudo apt-get install -qq libharfbuzz-dev libfribidi-dev -y + sudo apt-get install -qq libharfbuzz-dev libfribidi-dev libmagick++-dev libudunits2-dev -y + sudo R -q -e "install.packages(c('curl','httr'), dependencies=TRUE, repos='https://cloud.r-project.org')" sudo R -q -e "install.packages(c('devtools','covr'), dependencies=TRUE, repos='https://cloud.r-project.org')" - sudo R -q -e "install.packages(c('fields','meta','metafor','ggplot2','gridExtra','data.table','panelaggregation'), dependencies=TRUE, repos='https://cloud.r-project.org')" + sudo R -q -e "install.packages(c('fields','meta','metafor','ggplot2','gridExtra','data.table'), dependencies=TRUE, repos='https://cloud.r-project.org')" sudo R -q -e "install.packages(c('DSI','DSOpal','DSLite'), dependencies=TRUE, repos='https://cloud.r-project.org')" sudo R -q -e "install.packages(c('MolgenisAuth', 'MolgenisArmadillo', 'DSMolgenisArmadillo'), dependencies=TRUE, repos='https://cloud.r-project.org')" sudo R -q -e "install.packages(c('DescTools','e1071'), dependencies=TRUE, repos='https://cloud.r-project.org')" - sudo R -q -e "library('devtools'); devtools::install_github(repo='datashield/dsDangerClient', ref='v6.3.0-dev', dependencies = TRUE)" + sudo R -q -e "library('devtools'); 
devtools::install_github(repo='datashield/dsDangerClient', ref='6.3.4', dependencies = TRUE)" # XML grep for coverage report merging sudo apt-get install -qq xml-twig-tools -y @@ -182,7 +186,7 @@ jobs: # If this step fails still mark as failed, but don't stop the rest of the steps running. - bash: | - R -q -e "library('devtools'); devtools::check(args = c('--no-examples'))" | tee azure-pipelines_check.Rout + R -q -e "library('devtools'); devtools::check(args = c('--no-examples', '--no-tests'))" | tee azure-pipelines_check.Rout grep --quiet "^0 errors" azure-pipelines_check.Rout && grep --quiet " 0 warnings" azure-pipelines_check.Rout && grep --quiet " 0 notes" azure-pipelines_check.Rout workingDirectory: $(Pipeline.Workspace)/dsBaseClient @@ -199,11 +203,11 @@ jobs: ##################################################################################### # Deploy docker for Opal. # If previous steps have failed then don't run. - - task: DockerCompose@0 + - task: DockerCompose@1 inputs: action: Run Services dockerComposeFile: ../dsBaseClient/docker-compose_opal.yml - projectName: dsBaseClient + projectName: dsbaseclient qualifyImageNames: true buildImages: true abortOnContainerExit: true @@ -216,7 +220,7 @@ jobs: # Install test datasets. # If previous steps have failed then don't run. - bash: | - sleep 240 + sleep 60 R -q -f "obiba_opal-upload_testing_datasets.R" @@ -229,7 +233,11 @@ jobs: # Install dsBase. # If previous steps have failed then don't run. 
- bash: | - R -q -e "library(opalr); opal <- opal.login('administrator','datashield_test&', url='https://localhost:8443/', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); dsadmin.install_github_package(opal, 'dsBase', username = 'datashield', ref = 'v6.3.0-dev'); opal.logout(opal)" + R -q -e "library(opalr); opal <- opal.login(username = 'administrator', password = 'datashield_test&', url = 'https://localhost:8443', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); opal.put(opal, 'system', 'conf', 'general', '_rPackage'); opal.logout(opal)" + + R -q -e "library(opalr); opal <- opal.login('administrator','datashield_test&', url='https://localhost:8443/', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); dsadmin.install_github_package(opal, 'dsBase', username = 'datashield', ref = 'v6.3.4-dev'); opal.logout(opal)" + + sleep 60 R -q -e "library(opalr); opal <- opal.login('administrator','datashield_test&', url='https://localhost:8443/', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); dsadmin.set_option(opal, 'default.datashield.privacyControlLevel', 'permissive'); opal.logout(opal)" @@ -253,7 +261,7 @@ # best guess is that there is an implicit build or similar that happens. Although # I cannot replicate that directly with build etc directly. 
- sudo R --verbose -e 'devtools::check()' + sudo R --verbose -e 'devtools::reload()' mkdir $(Pipeline.Workspace)/logs @@ -263,7 +271,7 @@ # junit reporter output is to test_results.xml # # "_-|arg-|smk-|datachk-|disc-|math-|expt-|expt_smk-" - # testthat::test_package("$(projectName)", filter = "_-|datachk-|smk-|arg-|disc-|smk_expt-|expt-|math-", reporter = multi_rep, stop_on_failure = FALSE) + # testthat::test_package("$(projectName)", filter = "_-|datachk-|smk-|arg-|disc-|perf-|smk_expt-|expt-|math-", reporter = multi_rep, stop_on_failure = FALSE) sudo R -q -e ' library(covr); dsbase.res <- covr::package_coverage( @@ -278,8 +286,9 @@ junit_rep <- JunitReporter$new(file = "test_results_dsbase.xml"); progress_rep <- ProgressReporter$new(max_failures = 999999); multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); + options("datashield.return_errors" = FALSE); options("default_driver" = "OpalDriver"); - testthat::test_package("$(projectName)", filter = "_-|datachk-|smk-|arg-|disc-|smk_expt-|expt-|math-", reporter = multi_rep, stop_on_failure = FALSE) + testthat::test_package("$(projectName)", filter = "_-|datachk-|smk-|arg-|disc-|perf-|smk_expt-|expt-|math-", reporter = multi_rep, stop_on_failure = FALSE) '"'"' ) ); @@ -352,7 +361,9 @@ # If previous steps have failed then don't run - bash: | - R -q -e "library(opalr); opal <- opal.login('administrator','datashield_test&', url='https://localhost:8443/', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); dsadmin.install_github_package(opal, 'dsDanger', username = 'datashield', ref = 'v6.3.0-dev'); opal.logout(opal)" + R -q -e "library(opalr); opal <- opal.login(username = 'administrator', password = 'datashield_test&', url = 'https://localhost:8443', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); opal.put(opal, 'system', 'conf', 'general', '_rPackage'); opal.logout(opal)" + + R -q -e "library(opalr); opal <- opal.login('administrator','datashield_test&', 
url='https://localhost:8443/', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); dsadmin.install_github_package(opal, 'dsDanger', username = 'datashield', ref = '6.3.4'); opal.logout(opal)" workingDirectory: $(Pipeline.Workspace)/dsBaseClient displayName: 'Install dsDanger package on Opal server' @@ -368,9 +379,8 @@ jobs: - bash: | # See, 'Code coverage and JUnit report output' for issues with the approach and improvement needed. - sudo R --verbose -e 'devtools::check()' + sudo R --verbose -e 'devtools::reload()' - pwd mkdir $(Pipeline.Workspace)/logs # run the coverage tool and output to coveragelist.csv @@ -391,6 +401,7 @@ jobs: junit_rep <- JunitReporter$new(file = "test_results_dsdanger.xml"); progress_rep <- ProgressReporter$new(max_failures = 999999); multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); + options("datashield.return_errors" = FALSE); options("default_driver" = "OpalDriver"); testthat::test_package("$(projectName)", filter = "__dgr-|datachk_dgr-|smk_dgr-|arg_dgr-|disc_dgr-|smk_expt_dgr-|expt_dgr-|math_dgr-", reporter = multi_rep, stop_on_failure = FALSE) '"'"' @@ -513,333 +524,6 @@ jobs: displayName: 'Down Opal Docker Composition' condition: succeeded() - ##################################################################################### - # - # Armadillo phase - # - ##################################################################################### - - ##################################################################################### - # Deploy docker for Opal. - # If previous steps have failed then don't run. 
- - task: DockerCompose@0 - inputs: - action: Run Services - dockerComposeFile: ../dsBaseClient/docker-compose_armadillo.yml - projectName: dsBaseClient - qualifyImageNames: true - buildImages: true - abortOnContainerExit: true - detached: true - displayName: 'Install Armadillo servers (armadillo, rserver, minio)' - condition: succeeded() - - - ##################################################################################### - # Install test datasets. - # If previous steps have failed then don't run. - - bash: | - sleep 240 - - R -q -f "molgenis_armadillo-upload_testing_datasets.R" - - workingDirectory: $(Pipeline.Workspace)/dsBaseClient/tests/testthat/data_files - displayName: 'Install test datasets to Armadillo' - condition: succeeded() - - - ##################################################################################### - # Install dsBase. - # If previous steps have failed then don't run. - - bash: | - curl -u admin:admin http://localhost:8080/whitelist - - curl -u admin:admin -v -H 'Content-Type: multipart/form-data' -F "file=@dsBase_6.3.0-permissive.tar.gz" -X POST http://localhost:8080/install-package - - docker container restart dsbaseclient_armadillo_1 - sleep 120 - - curl -u admin:admin -X POST http://localhost:8080/whitelist/dsBase - - curl -u admin:admin http://localhost:8080/whitelist - - workingDirectory: $(Pipeline.Workspace)/dsBaseClient - displayName: 'Install dsBase to Armadillo' - condition: succeeded() - - - ##################################################################################### - # Essentially run devtools::test() on the checked out code. This is wrapped up with - # code coverage. The actual command is vary convoluted as it had to do some things - # which are not default behaviour: output the results to a JUnit xml file, not stop - # when a small number of errors have happened, run through the code coverage tool. - # TODO: Tidy up variable names - use timestamps here. - # TODO: Why is DSLite needed for this to run?! 
- - bash: | - - # There is an issue with the way we are using packages. The wrapped up test command - # below fails in a way that implies that it is not installed. I cannot figure out - # why this is case. As a work around we can run some of the functions below. My - # best guess is that there is an implicit build or similar that happens. Although - # I cannot replicate that directly with build etc directly. - - sudo R --verbose -e 'devtools::check()' - - mkdir $(Pipeline.Workspace)/logs - - # run the coverage tool and output to coveragelist.csv - # testthat::testpackage uses a MultiReporter, comprised of a ProgressReporter and JunitReporter - # R output and messages are redirected by sink() to test_console_output.txt - # junit reporter output is to test_results.xml - # - # "_-|arg-|smk-|datachk-|disc-|math-|expt-|expt_smk-" - # testthat::test_package("$(projectName)", filter = "_-|datachk-|smk-|arg-|disc-|smk_expt-|expt-|math-", reporter = multi_rep, stop_on_failure = FALSE) - sudo R -q -e ' - library(covr); - dsbase.res <- covr::package_coverage( - type = c("none"), - code = c( - '"'"' - library(testthat); - output_file <- file("test_console_output_dsbase.txt"); - sink(output_file); - sink(output_file, type = "message"); - library(testthat); - junit_rep <- JunitReporter$new(file = "test_results_dsbase.xml"); - progress_rep <- ProgressReporter$new(max_failures = 999999); - multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); - options("default_driver" = "ArmadilloDriver"); - testthat::test_package("$(projectName)", filter = "_-|datachk-|smk-|arg-|disc-|smk_expt-|expt-|math-", reporter = multi_rep, stop_on_failure = FALSE) - '"'"' - ) - ); - base::saveRDS(dsbase.res, "test_results_dsbase.rds")' - - # display the test console output - cat test_console_output_dsbase.txt - - grep --quiet " FAIL 0 " test_console_output_dsbase.txt - - workingDirectory: $(Pipeline.Workspace)/dsBaseClient - displayName: 'Code coverage and JUnit report output, with 
dsBase' - condition: succeeded() - - - ##################################################################################### - # Parse the JUnit file to see if there are any errors/warnings. If there are then - # echo them so finding bugs should be easier. - # This should run even if previous steps have failed. - - bash: | - - # Strip out when error and failure = 0 and count the number of times it does not. - issue_count=$(sed 's/failures="0" errors="0"//' test_results_dsbase.xml | sed 's/errors="0" failures="0"//' | grep --count errors=) - echo "Number of testsuites with issues: "$issue_count - echo "Testsuites with issues:" - sed 's/failures="0" errors="0"//' test_results_dsbase.xml | sed 's/errors="0" failures="0"//' | grep errors= > issues.log - cat issues.log - exit $issue_count - - workingDirectory: $(Pipeline.Workspace)/logs - displayName: 'Check for errors & Failures in JUnit file' - condition: succeededOrFailed() - - - ##################################################################################### - # Essentially run devtools::test() on the checked out code for discctrl reporting. - # The actual command is vary convoluted as it had to do some things - # which are not default behaviour: output the results to a JUnit xml file, not stop - # when a small number of errors have happened, run through the code coverage tool. - # TODO: Tidy up variable names - use timestamps here. 
- #- bash: | - - # junit reporter output is to test_results_discctrl.xml - # sudo R -q -e ' - # library(testthat); - # output_file <- file("test_console_output_discctrl.txt"); - # sink(output_file); - # sink(output_file, type = "message"); - # junit_rep <- JunitReporter$new(file = "test_results_discctrl.xml"); - # progress_rep <- ProgressReporter$new(max_failures = 999999); - # multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); - # options("default_driver" = "ArmadilloDriver"); - # testthat::test_package("$(projectName)", filter = "_-|discctrl-", reporter = multi_rep, stop_on_failure = FALSE)' - - # cat test_console_output_discctrl.txt - - # if [ -e test_results_discctrl.xml ]; then - # mv test_results_discctrl.xml $(Pipeline.Workspace)/logs - # else - # touch $(Pipeline.Workspace)/logs/test_results_discctrl.xml - # fi - - # workingDirectory: $(Pipeline.Workspace)/dsBaseClient - # displayName: 'discctrl report output' - # condition: succeededOrFailed() - - - ##################################################################################### - # Install dsDanger on Opal server - # If previous steps have failed then don't run - - bash: | - curl -u admin:admin http://localhost:8080/whitelist - - curl -u admin:admin -v -H 'Content-Type: multipart/form-data' -F "file=@dsDanger_6.3.0.tar.gz" -X POST http://localhost:8080/install-package - - docker container restart dsbaseclient_armadillo_1 - sleep 120 - - curl -u admin:admin -X POST http://localhost:8080/whitelist/dsDanger - - curl -u admin:admin http://localhost:8080/whitelist - - workingDirectory: $(Pipeline.Workspace)/dsBaseClient - displayName: 'Install dsDanger package on Armadillo server' - condition: succeeded() - - - ##################################################################################### - # Essentially run devtools::test() on the checked out code. This is wrapped up with - # code coverage. 
The actual command is vary convoluted as it had to do some things - # which are not default behaviour: output the results to a JUnit xml file, not stop - # when a small number of errors have happened, run through the code coverage tool. - # TODO: Tidy up variable names - use timestamps here. - - bash: | - - # See, 'Code coverage and JUnit report output' for issues with the approach and improvement needed. - sudo R --verbose -e 'devtools::check()' - - pwd - mkdir $(Pipeline.Workspace)/logs - - # run the coverage tool and output to coveragelist.csv - # testthat::testpackage uses a MultiReporter, comprised of a ProgressReporter and JunitReporter - # R output and messages are redirected by sink() to test_console_output.txt - # junit reporter output is to test_results.xml - sudo R -q -e ' - library(covr); - dsdanger.res <- covr::package_coverage( - type = c("none"), - code = c( - '"'"' - library(testthat); - output_file <- file("test_console_output_dsdanger.txt"); - sink(output_file); - sink(output_file, type = "message"); - library(testthat); - junit_rep <- JunitReporter$new(file = "test_results_dsdanger.xml"); - progress_rep <- ProgressReporter$new(max_failures = 999999); - multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); - options("default_driver" = "ArmadilloDriver"); - testthat::test_package("$(projectName)", filter = "__dgr-|datachk_dgr-|smk_dgr-|arg_dgr-|disc_dgr-|smk_expt_dgr-|expt_dgr-|math_dgr-", reporter = multi_rep, stop_on_failure = FALSE) - '"'"' - ) - ); - base::saveRDS(dsdanger.res, "test_results_dsdanger.rds")' - - # Merge coverage results - cat test_results_dsbase.txt test_results_dsdanger.txt > $(Pipeline.Workspace)/logs/test_console_output.txt - xml_grep --pretty_print indented --wrap "testsuites" --descr "" --cond "testsuite" test_results_dsbase.xml test_results_dsdanger.xml > test_results.xml - - # Create 'coveragelist.csv' - sudo R -q -e ' - library(covr); - dsbase.res <- base::readRDS("test_results_dsbase.rds") - 
write.csv( - coverage_to_list( - dsbase.res - ), - "coveragelist.csv" - )' - - # display the test console output - cat test_console_output_dsdanger.txt - - mv coveragelist.csv $(Pipeline.Workspace)/logs - mv test_results.xml $(Pipeline.Workspace)/logs - - grep --quiet " FAIL 0 " test_console_output_dsdanger.txt - - workingDirectory: $(Pipeline.Workspace)/dsBaseClient - displayName: 'Code coverage and JUnit report output, with dsBase and dsDanger' - condition: succeeded() - - - ##################################################################################### - # Parse the JUnit file to see if there are any errors/warnings. If there are then - # echo them so finding bugs should be easier. - # This should run even if previous steps have failed. - - bash: | - - # Strip out when error and failure = 0 and count the number of times it does not. - issue_count=$(sed 's/failures="0" errors="0"//' test_results.xml | sed 's/errors="0" failures="0"//' | grep --count errors=) - echo "Number of testsuites with issues: "$issue_count - echo "Testsuites with issues:" - sed 's/failures="0" errors="0"//' test_results.xml | sed 's/errors="0" failures="0"//' | grep errors= > issues.log - cat issues.log - exit $issue_count - - workingDirectory: $(Pipeline.Workspace)/logs - displayName: 'Check for errors & Failures in JUnit file' - condition: succeededOrFailed() - - - ##################################################################################### - # Essentially run devtools::test() on the checked out code for bug reporting. - # The actual command is vary convoluted as it had to do some things - # which are not default behaviour: output the results to a JUnit xml file, not stop - # when a small number of errors have happened, run through the code coverage tool. - # TODO: Tidy up variable names - use timestamps here. 
-# - bash: | - - # junit reporter output is to test_results_bug.xml - # sudo R -q -e ' - # library(testthat); - # output_file <- file("test_console_output_bug.txt"); - # sink(output_file); - # sink(output_file, type = "message"); - # junit_rep <- JunitReporter$new(file = "test_results_bug.xml"); - # progress_rep <- ProgressReporter$new(max_failures = 999999); - # multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); - # options("default_driver" = "ArmadilloDriver"); - # testthat::test_package("$(projectName)", filter = "__bug-|datachk_bug-|smk_bug-|arg_bug-|disc_bug-|smk_expt_bug-|expt_bug-|math_bug-", reporter = multi_rep, stop_on_failure = FALSE)' - - # cat test_console_output_bug.txt - - # if [ -e test_results_bug.xml ]; then - # mv test_results_bug.xml $(Pipeline.Workspace)/logs - # else - # touch $(Pipeline.Workspace)/logs/test_results_bug.xml - # fi - -# workingDirectory: $(Pipeline.Workspace)/dsBaseClient -# displayName: 'Bug report output' -# condition: succeededOrFailed() - - - ##################################################################################### - # Parse the JUnit file to see if there are any errors/warnings. If there are then - # echo them so finding bugs should be easier. - # This should run even if previous steps have failed. - - bash: | - - # Strip out when error and failure = 0 and count the number of times it does not. 
- issue_count=$(sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep --count errors=) - echo "Number of testsuites with issues: "$issue_count - echo "Testsuites with issues:" - sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep errors= > issues.log - cat issues.log - no_issue_count=$(sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep -v --count errors=) - echo - echo "Number of testsuites with no issues: "$no_issue_count - echo "Testsuites with issues:" - sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep " no_issues.log - cat no_issues.log - exit 0 - - workingDirectory: $(Pipeline.Workspace)/logs - displayName: 'Bug summary report output' - condition: succeededOrFailed() - ##################################################################################### # Windup phase ##################################################################################### diff --git a/docker-compose_armadillo.yml b/docker-compose_armadillo.yml index 4d352a7ac..7a791d39f 100644 --- a/docker-compose_armadillo.yml +++ b/docker-compose_armadillo.yml @@ -1,18 +1,21 @@ -version: "3.4" - services: armadillo: hostname: armadillo - image: datashield/armadillo3:test - environment: - RSERVE_HOST: rserver - DEBUG: "FALSE" ports: - 8080:8080 - rserver: - hostname: rserver - image: datashield/armadillo-rserver_caravan-yak:1.0.0 + image: datashield/armadillo_citest:latest + environment: + LOGGING_CONFIG: 'classpath:logback-file.xml' + AUDIT_LOG_PATH: '/app/logs/audit.log' + SPRING_SECURITY_USER_PASSWORD: 'admin' + volumes: + - ./tests/docker/armadillo/standard/logs:/logs + - ./tests/docker/armadillo/standard/data:/data + - ./tests/docker/armadillo/standard/config:/config + - /var/run/docker.sock:/var/run/docker.sock + + default: + hostname: default + image: datashield/rock-omicron-karma-permissive:devel 
environment: DEBUG: "FALSE" - ports: - - 6311:6311 diff --git a/docker-compose_opal.yml b/docker-compose_opal.yml index 3a0f40355..40b3bf268 100644 --- a/docker-compose_opal.yml +++ b/docker-compose_opal.yml @@ -1,10 +1,8 @@ -version: '3' - services: opal: - image: obiba/opal:4.5 + image: datashield/opal_citest:latest ports: - - "8443:8443" + - 8443:8443 links: - mongo - rock @@ -22,4 +20,6 @@ services: - MONGO_INITDB_ROOT_USERNAME=root - MONGO_INITDB_ROOT_PASSWORD=foobar rock: - image: obiba/rock:1.0-R4.2 + image: datashield/rock-omicron-karma-permissive:devel + environment: + DEBUG: "FALSE" diff --git a/docs/404.html b/docs/404.html index 7960bb6cc..761ee0b96 100644 --- a/docs/404.html +++ b/docs/404.html @@ -1,12 +1,12 @@ - + Page not found (404) • dsBaseClient - + @@ -18,7 +18,7 @@ - +
@@ -50,7 +50,7 @@
- +
@@ -73,21 +73,21 @@

Page not found (404)

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/LICENSE.html b/docs/LICENSE.html index 4f02bf67f..b495f5487 100644 --- a/docs/LICENSE.html +++ b/docs/LICENSE.html @@ -1,9 +1,9 @@ -NA • dsBaseClientNA • dsBaseClient - +
@@ -29,7 +29,7 @@
- +
@@ -38,10 +38,10 @@

NA

-
                GNU GENERAL PUBLIC LICENSE
-                   Version 3, 29 June 2007
+
                GNU GENERAL PUBLIC LICENSE
+                   Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. http://fsf.org/ Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.

-
                        Preamble
+
                        Preamble

The GNU General Public License is a free, copyleft license for software and other kinds of works.

The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program–to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too.

When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things.

@@ -52,8 +52,9 @@

NA

Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users’ freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users.

Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free.

The precise terms and conditions for copying, distribution and modification follow.

-
                   TERMS AND CONDITIONS
-
  1. Definitions.

“This License” refers to version 3 of the GNU General Public License.

+
                   TERMS AND CONDITIONS
+
  1. Definitions.
  2. +

“This License” refers to version 3 of the GNU General Public License.

“Copyright” also means copyright-like laws that apply to other kinds of works, such as semiconductor masks.

“The Program” refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”. “Licensees” and “recipients” may be individuals or organizations.

To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work or a work “based on” the earlier work.

@@ -61,125 +62,136 @@

NA

To “propagate” a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well.

To “convey” a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying.

An interactive user interface displays “Appropriate Legal Notices” to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion.

-
  1. Source Code.

The “source code” for a work means the preferred form of the work for making modifications to it. “Object code” means any non-source form of a work.

+
  1. Source Code.
  2. +

The “source code” for a work means the preferred form of the work for making modifications to it. “Object code” means any non-source form of a work.

A “Standard Interface” means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language.

The “System Libraries” of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A “Major Component”, in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it.

The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work’s System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work.

The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source.

The Corresponding Source for a work in source code form is that same work.

-
  1. Basic Permissions.

All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law.

+
  1. Basic Permissions.
  2. +

All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law.

You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you.

Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary.

-
  1. Protecting Users’ Legal Rights From Anti-Circumvention Law.

No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures.

+
  1. Protecting Users’ Legal Rights From Anti-Circumvention Law.
  2. +

No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures.

When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work’s users, your or third parties’ legal rights to forbid circumvention of technological measures.

-
  1. Conveying Verbatim Copies.

You may convey verbatim copies of the Program’s source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program.

+
  1. Conveying Verbatim Copies.
  2. +

You may convey verbatim copies of the Program’s source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program.

You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee.

-
  1. Conveying Modified Source Versions.

You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions:

-
a) The work must carry prominent notices stating that you modified
-it, and giving a relevant date.
-
-b) The work must carry prominent notices stating that it is
-released under this License and any conditions added under section
-7.  This requirement modifies the requirement in section 4 to
-"keep intact all notices".
-
-c) You must license the entire work, as a whole, under this
-License to anyone who comes into possession of a copy.  This
-License will therefore apply, along with any applicable section 7
-additional terms, to the whole of the work, and all its parts,
-regardless of how they are packaged.  This License gives no
-permission to license the work in any other way, but it does not
-invalidate such permission if you have separately received it.
-
-d) If the work has interactive user interfaces, each must display
-Appropriate Legal Notices; however, if the Program has interactive
-interfaces that do not display Appropriate Legal Notices, your
-work need not make them do so.
+
  1. Conveying Modified Source Versions.
  2. +

You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions:

+
a) The work must carry prominent notices stating that you modified
+it, and giving a relevant date.
+
+b) The work must carry prominent notices stating that it is
+released under this License and any conditions added under section
+7.  This requirement modifies the requirement in section 4 to
+"keep intact all notices".
+
+c) You must license the entire work, as a whole, under this
+License to anyone who comes into possession of a copy.  This
+License will therefore apply, along with any applicable section 7
+additional terms, to the whole of the work, and all its parts,
+regardless of how they are packaged.  This License gives no
+permission to license the work in any other way, but it does not
+invalidate such permission if you have separately received it.
+
+d) If the work has interactive user interfaces, each must display
+Appropriate Legal Notices; however, if the Program has interactive
+interfaces that do not display Appropriate Legal Notices, your
+work need not make them do so.

A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an “aggregate” if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation’s users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate.

-
  1. Conveying Non-Source Forms.

You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways:

-
a) Convey the object code in, or embodied in, a physical product
-(including a physical distribution medium), accompanied by the
-Corresponding Source fixed on a durable physical medium
-customarily used for software interchange.
-
-b) Convey the object code in, or embodied in, a physical product
-(including a physical distribution medium), accompanied by a
-written offer, valid for at least three years and valid for as
-long as you offer spare parts or customer support for that product
-model, to give anyone who possesses the object code either (1) a
-copy of the Corresponding Source for all the software in the
-product that is covered by this License, on a durable physical
-medium customarily used for software interchange, for a price no
-more than your reasonable cost of physically performing this
-conveying of source, or (2) access to copy the
-Corresponding Source from a network server at no charge.
-
-c) Convey individual copies of the object code with a copy of the
-written offer to provide the Corresponding Source.  This
-alternative is allowed only occasionally and noncommercially, and
-only if you received the object code with such an offer, in accord
-with subsection 6b.
-
-d) Convey the object code by offering access from a designated
-place (gratis or for a charge), and offer equivalent access to the
-Corresponding Source in the same way through the same place at no
-further charge.  You need not require recipients to copy the
-Corresponding Source along with the object code.  If the place to
-copy the object code is a network server, the Corresponding Source
-may be on a different server (operated by you or a third party)
-that supports equivalent copying facilities, provided you maintain
-clear directions next to the object code saying where to find the
-Corresponding Source.  Regardless of what server hosts the
-Corresponding Source, you remain obligated to ensure that it is
-available for as long as needed to satisfy these requirements.
-
-e) Convey the object code using peer-to-peer transmission, provided
-you inform other peers where the object code and Corresponding
-Source of the work are being offered to the general public at no
-charge under subsection 6d.
+
  1. Conveying Non-Source Forms.
  2. +

You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways:

+
a) Convey the object code in, or embodied in, a physical product
+(including a physical distribution medium), accompanied by the
+Corresponding Source fixed on a durable physical medium
+customarily used for software interchange.
+
+b) Convey the object code in, or embodied in, a physical product
+(including a physical distribution medium), accompanied by a
+written offer, valid for at least three years and valid for as
+long as you offer spare parts or customer support for that product
+model, to give anyone who possesses the object code either (1) a
+copy of the Corresponding Source for all the software in the
+product that is covered by this License, on a durable physical
+medium customarily used for software interchange, for a price no
+more than your reasonable cost of physically performing this
+conveying of source, or (2) access to copy the
+Corresponding Source from a network server at no charge.
+
+c) Convey individual copies of the object code with a copy of the
+written offer to provide the Corresponding Source.  This
+alternative is allowed only occasionally and noncommercially, and
+only if you received the object code with such an offer, in accord
+with subsection 6b.
+
+d) Convey the object code by offering access from a designated
+place (gratis or for a charge), and offer equivalent access to the
+Corresponding Source in the same way through the same place at no
+further charge.  You need not require recipients to copy the
+Corresponding Source along with the object code.  If the place to
+copy the object code is a network server, the Corresponding Source
+may be on a different server (operated by you or a third party)
+that supports equivalent copying facilities, provided you maintain
+clear directions next to the object code saying where to find the
+Corresponding Source.  Regardless of what server hosts the
+Corresponding Source, you remain obligated to ensure that it is
+available for as long as needed to satisfy these requirements.
+
+e) Convey the object code using peer-to-peer transmission, provided
+you inform other peers where the object code and Corresponding
+Source of the work are being offered to the general public at no
+charge under subsection 6d.

A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work.

A “User Product” is either (1) a “consumer product”, which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, “normally used” refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product.

“Installation Information” for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made.

If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM).

The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network.

Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying.

-
  1. Additional Terms.

“Additional permissions” are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions.

+
  1. Additional Terms.
  2. +

“Additional permissions” are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions.

When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission.

Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms:

-
a) Disclaiming warranty or limiting liability differently from the
-terms of sections 15 and 16 of this License; or
-
-b) Requiring preservation of specified reasonable legal notices or
-author attributions in that material or in the Appropriate Legal
-Notices displayed by works containing it; or
-
-c) Prohibiting misrepresentation of the origin of that material, or
-requiring that modified versions of such material be marked in
-reasonable ways as different from the original version; or
-
-d) Limiting the use for publicity purposes of names of licensors or
-authors of the material; or
-
-e) Declining to grant rights under trademark law for use of some
-trade names, trademarks, or service marks; or
-
-f) Requiring indemnification of licensors and authors of that
-material by anyone who conveys the material (or modified versions of
-it) with contractual assumptions of liability to the recipient, for
-any liability that these contractual assumptions directly impose on
-those licensors and authors.
+
a) Disclaiming warranty or limiting liability differently from the
+terms of sections 15 and 16 of this License; or
+
+b) Requiring preservation of specified reasonable legal notices or
+author attributions in that material or in the Appropriate Legal
+Notices displayed by works containing it; or
+
+c) Prohibiting misrepresentation of the origin of that material, or
+requiring that modified versions of such material be marked in
+reasonable ways as different from the original version; or
+
+d) Limiting the use for publicity purposes of names of licensors or
+authors of the material; or
+
+e) Declining to grant rights under trademark law for use of some
+trade names, trademarks, or service marks; or
+
+f) Requiring indemnification of licensors and authors of that
+material by anyone who conveys the material (or modified versions of
+it) with contractual assumptions of liability to the recipient, for
+any liability that these contractual assumptions directly impose on
+those licensors and authors.

All other non-permissive additional terms are considered “further restrictions” within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying.

If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms.

Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way.

-
  1. Termination.

You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11).

+
  1. Termination.
  2. +

You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11).

However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation.

Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice.

Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10.

-
  1. Acceptance Not Required for Having Copies.

You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so.

-
  1. Automatic Licensing of Downstream Recipients.

Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License.

+
  1. Acceptance Not Required for Having Copies.
  2. +

You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so.

+
  1. Automatic Licensing of Downstream Recipients.
  2. +

Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License.

An “entity transaction” is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party’s predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts.

You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it.

-
  1. Patents.

A “contributor” is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor’s “contributor version”.

+
  1. Patents.
  2. +

A “contributor” is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor’s “contributor version”.

A contributor’s “essential patent claims” are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, “control” includes the right to grant patent sublicenses in a manner consistent with the requirements of this License.

Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor’s essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version.

In the following three paragraphs, a “patent license” is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To “grant” such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party.

@@ -187,41 +199,47 @@

NA

If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it.

A patent license is “discriminatory” if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007.

Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law.

-
  1. No Surrender of Others’ Freedom.

If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program.

-
  1. Use with the GNU Affero General Public License.

Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such.

-
  1. Revised Versions of this License.

The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.

+
  1. No Surrender of Others’ Freedom.
  2. +

If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program.

+
  1. Use with the GNU Affero General Public License.
  2. +

Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such.

+
  1. Revised Versions of this License.
  2. +

The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.

Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation.

If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy’s public statement of acceptance of a version permanently authorizes you to choose that version for the Program.

Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version.

-
  1. Disclaimer of Warranty.

THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

-
  1. Limitation of Liability.

IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

-
  1. Interpretation of Sections 15 and 16.

If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee.

-
                 END OF TERMS AND CONDITIONS
-
-        How to Apply These Terms to Your New Programs
+
  1. Disclaimer of Warranty.
  2. +

THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

+
  1. Limitation of Liability.
  2. +

IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

+
  1. Interpretation of Sections 15 and 16.
  2. +

If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee.

+
                 END OF TERMS AND CONDITIONS
+
+        How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found.

-
{one line to give the program's name and a brief idea of what it does.}
-Copyright (C) {year}  {name of author}
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
{one line to give the program's name and a brief idea of what it does.}
+Copyright (C) {year}  {name of author}
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program.  If not, see <http://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:

-
{project}  Copyright (C) {year}  {fullname}
-This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
-This is free software, and you are welcome to redistribute it
-under certain conditions; type `show c' for details.
+
{project}  Copyright (C) {year}  {fullname}
+This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+This is free software, and you are welcome to redistribute it
+under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program’s commands might be different; for a GUI interface, you would use an “about box”.

You should also get your employer (if you work as a programmer) or school, if any, to sign a “copyright disclaimer” for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see http://www.gnu.org/licenses/.

The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read http://www.gnu.org/philosophy/why-not-lgpl.html.

@@ -238,19 +256,19 @@

NA

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/authors.html b/docs/authors.html index 2d3366e54..177847f60 100644 --- a/docs/authors.html +++ b/docs/authors.html @@ -1,9 +1,9 @@ -Authors and Citation • dsBaseClientAuthors and Citation • dsBaseClient - +
@@ -29,38 +29,136 @@
- +
- +
  • -

    DataSHIELD Developers. Maintainer. +

    Paul Burton. Author. +

    +
  • +
  • +

    Rebecca Wilson. Author. +

    +
  • +
  • +

    Olly Butters. Author. +

    +
  • +
  • +

    Patricia Ryser-Welch. Author. +

    +
  • +
  • +

    Alex Westerberg. Author. +

    +
  • +
  • +

    Leire Abarrategui. Author. +

    +
  • +
  • +

    Roberto Villegas-Diaz. Author. +

    +
  • +
  • +

    Demetris Avraam. Author. +

    +
  • +
  • +

    Demetris Avraam. Author. +

    +
  • +
  • +

    Yannick Marcon. Author. +

    +
  • +
  • +

    Tom Bishop. Author. +

    +
  • +
  • +

    Amadou Gaye. Author. +

    +
  • +
  • +

    Xavier Escribà-Montagut. Author. +

    +
  • +
  • +

    Stuart Wheater. Author, maintainer.

Citation

- +
-

Developers D (2023). -dsBaseClient: DataSHIELD Client Functions. -R package version 6.3.0. +

Burton P, Wilson R, Butters O, Ryser-Welch P, Westerberg A, Abarrategui L, Villegas-Diaz R, Avraam D, Avraam D, Marcon Y, Bishop T, Gaye A, Escribà-Montagut X, Wheater S (2025). +dsBaseClient: 'DataSHIELD' Client Side Base Functions. +R package version 6.3.4.

@Manual{,
-  title = {dsBaseClient: DataSHIELD Client Functions},
-  author = {DataSHIELD Developers},
-  year = {2023},
-  note = {R package version 6.3.0},
+  title = {dsBaseClient: 'DataSHIELD' Client Side Base Functions},
+  author = {Paul Burton and Rebecca Wilson and Olly Butters and Patricia Ryser-Welch and Alex Westerberg and Leire Abarrategui and Roberto Villegas-Diaz and Demetris Avraam and Demetris Avraam and Yannick Marcon and Tom Bishop and Amadou Gaye and Xavier Escribà-Montagut and Stuart Wheater},
+  note = {R package version 6.3.4},
+}
+

Gaye A, Marcon Y, Isaeva J, LaFlamme P, Turner A, Jones E, Minion J, Boyd A, Newby C, Nuotio M, Wilson R, Butters O, Murtagh B, Demir I, Doiron D, Giepmans L, Wallace S, Budin-Ljøsne I, Schmidt C, Boffetta P, Boniol M, Bota M, Carter K, deKlerk N, Dibben C, Francis R, Hiekkalinna T, Hveem K, Kvaløy K, Millar S, Perry I, Peters A, Phillips C, Popham F, Raab G, Reischl E, Sheehan N, Waldenberger M, Perola M, van den Heuvel E, Macleod J, Knoppers B, Stolk R, Fortier I, Harris J, Woffenbuttel B, Murtagh M, Ferretti V, Burton P (2014). +“DataSHIELD: taking the analysis to the data, not the data to the analysis.” +International Journal of Epidemiology, 43(6), 1929–1944. +doi:10.1093/ije/dyu188. +

+
@Article{,
+  title = {{DataSHIELD: taking the analysis to the data, not the data to the analysis}},
+  author = {Amadou Gaye and Yannick Marcon and Julia Isaeva and Philippe {LaFlamme} and Andrew Turner and Elinor M Jones and Joel Minion and Andrew W Boyd and Christopher J Newby and Marja-Liisa Nuotio and Rebecca Wilson and Oliver Butters and Barnaby Murtagh and Ipek Demir and Dany Doiron and Lisette Giepmans and Susan E Wallace and Isabelle Budin-Lj{\o}sne and Carsten O. Schmidt and Paolo Boffetta and Mathieu Boniol and Maria Bota and Kim W Carter and Nick {deKlerk} and Chris Dibben and Richard W Francis and Tero Hiekkalinna and Kristian Hveem and Kirsti Kval{\o}y and Sean Millar and Ivan J Perry and Annette Peters and Catherine M Phillips and Frank Popham and Gillian Raab and Eva Reischl and Nuala Sheehan and Melanie Waldenberger and Markus Perola and Edwin {{van den Heuvel}} and John Macleod and Bartha M Knoppers and Ronald P Stolk and Isabel Fortier and Jennifer R Harris and Bruce H R Woffenbuttel and Madeleine J Murtagh and Vincent Ferretti and Paul R Burton},
+  journal = {International Journal of Epidemiology},
+  year = {2014},
+  volume = {43},
+  number = {6},
+  pages = {1929--1944},
+  doi = {10.1093/ije/dyu188},
+}
+

Wilson R, Butters O, Avraam D, Baker J, Tedds J, Turner A, Murtagh M, Burton P (2017). +“DataSHIELD – New Directions and Dimensions.” +Data Science Journal, 16(21), 1–21. +doi:10.5334/dsj-2017-021. +

+
@Article{,
+  title = {{DataSHIELD – New Directions and Dimensions}},
+  author = {Rebecca C. Wilson and Oliver W. Butters and Demetris Avraam and James Baker and Jonathan A. Tedds and Andrew Turner and Madeleine Murtagh and Paul R. Burton},
+  journal = {Data Science Journal},
+  year = {2017},
+  volume = {16},
+  number = {21},
+  pages = {1--21},
+  doi = {10.5334/dsj-2017-021},
+}
+

Avraam D, Wilson R, Aguirre Chan N, Banerjee S, Bishop T, Butters O, Cadman T, Cederkvist L, Duijts L, Escribà Montagut X, Garner H, Gonçalves G, González J, Haakma S, Hartlev M, Hasenauer J, Huth M, Hyde E, Jaddoe V, Marcon Y, Mayrhofer M, Molnar-Gabor F, Morgan A, Murtagh M, Nestor M, Nybo Andersen A, Parker S, Pinot de Moira A, Schwarz F, Strandberg-Larsen K, Swertz M, Welten M, Wheater S, Burton P (2024). +“DataSHIELD: mitigating disclosure risk in a multi-site federated analysis platform.” +Bioinformatics Advances, 5(1), 1–21. +doi:10.1093/bioadv/vbaf046. +

+
@Article{,
+  title = {{DataSHIELD: mitigating disclosure risk in a multi-site federated analysis platform}},
+  author = {Demetris Avraam and Rebecca C Wilson and Noemi {{Aguirre Chan}} and Soumya Banerjee and Tom R P Bishop and Olly Butters and Tim Cadman and Luise Cederkvist and Liesbeth Duijts and Xavier {{Escrib{\a`a} Montagut}} and Hugh Garner and Gon{\c c}alo {Gon{\c c}alves} and Juan R Gonz{\a'a}lez and Sido Haakma and Mette Hartlev and Jan Hasenauer and Manuel Huth and Eleanor Hyde and Vincent W V Jaddoe and Yannick Marcon and Michaela Th Mayrhofer and Fruzsina Molnar-Gabor and Andrei Scott Morgan and Madeleine Murtagh and Marc Nestor and Anne-Marie {{Nybo Andersen}} and Simon Parker and Angela {{Pinot de Moira}} and Florian Schwarz and Katrine Strandberg-Larsen and Morris A Swertz and Marieke Welten and Stuart Wheater and Paul R Burton},
+  journal = {Bioinformatics Advances},
+  year = {2024},
+  volume = {5},
+  number = {1},
+  pages = {1--21},
+  doi = {10.1093/bioadv/vbaf046},
+  editor = {Thomas Lengauer},
+  publisher = {Oxford University Press (OUP)},
 }
@@ -70,19 +168,19 @@

Citation

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/index.html b/docs/index.html index 2a1e21284..cc91e1494 100644 --- a/docs/index.html +++ b/docs/index.html @@ -1,25 +1,25 @@ - + -DataSHIELD Client Functions • dsBaseClient - +DataSHIELD Client Side Base Functions • dsBaseClient + - - + + - +
@@ -51,78 +51,43 @@
- +
-
- -

DataSHIELD client side base R library.

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
BranchdsBase statusdsBase testsdsBaseClient statusdsBaseClient tests
MasterBuild StatusBuild StatusTests
6.0Build StatusBuild StatusTests
6.0.1Build StatusBuild StatusTests
6.1Build StatusTestsBuild StatusTests
6.1.1Build StatusTestsBuild StatusTests
6.2Build StatusTestsBuild StatusTests
-

License

+
+

dsBaseClient: ‘DataSHIELD’ Client Side Base Functions +

+

License R build status Codecov test coverage

+
+
+

Installation +

+

You can install the released version of dsBaseClient from CRAN with:

+
+install.packages("dsBaseClient")
+

And the development version from GitHub with:

+
+install.packages("remotes")
+remotes::install_github("datashield/dsBaseClient", "<BRANCH>")
+
+# Install v6.3.4 with the following
+remotes::install_github("datashield/dsBaseClient", "6.3.4")
+

For a full list of development branches, checkout https://github.com/datashield/dsBaseClient/branches

-
-

About -

-

DataSHIELD is a software package which allows you to do non-disclosive federated analysis on sensitive data. Our website (https://www.datashield.org) has in depth descriptions of what it is, how it works and how to install it. A key point to highlight is that DataSHIELD has a client-server infrastructure, so the dsBase package (https://github.com/datashield/dsBase) needs to be used in conjuction with the dsBaseClient package (https://github.com/datashield/dsBaseClient) - trying to use one without the other makes no sense.

+
+

About +

+

DataSHIELD is a software package which allows you to do non-disclosive federated analysis on sensitive data. Our website (https://www.datashield.org) has in depth descriptions of what it is, how it works and how to install it. A key point to highlight is that DataSHIELD has a client-server infrastructure, so the dsBase package (https://github.com/datashield/dsBase) needs to be used in conjunction with the dsBaseClient package (https://github.com/datashield/dsBaseClient) - trying to use one without the other makes no sense.

Detailed instructions on how to install DataSHIELD are at https://www.datashield.org/wiki.

Discussion and help with using DataSHIELD can be obtained from The DataSHIELD Forum https://datashield.discourse.group/

The code here is organised as:

++++ @@ -142,6 +107,21 @@

About

Location What is it?
+
+
+

References +

+

[1] Burton P, Wilson R, Butters O, Ryser-Welch P, Westerberg A, Abarrategui L, Villegas-Diaz R, Avraam D, Marcon Y, Bishop T, Gaye A, Escribà Montagut X, Wheater S (2025). dsBaseClient: ‘DataSHIELD’ Client Side Base Functions. R package version 6.3.4.

+

[2] Gaye A, Marcon Y, Isaeva J, LaFlamme P, Turner A, Jones E, Minion J, Boyd A, Newby C, Nuotio M, Wilson R, Butters O, Murtagh B, Demir I, Doiron D, Giepmans L, Wallace S, Budin-Ljøsne I, Oliver Schmidt C, Boffetta P, Boniol M, Bota M, Carter K, deKlerk N, Dibben C, Francis R, Hiekkalinna T, Hveem K, Kvaløy K, Millar S, Perry I, Peters A, Phillips C, Popham F, Raab G, Reischl E, Sheehan N, Waldenberger M, Perola M, van den Heuvel E, Macleod J, Knoppers B, Stolk R, Fortier I, Harris J, Woffenbuttel B, Murtagh M, Ferretti V, Burton P (2014). “DataSHIELD: taking the analysis to the data, not the data to the analysis.” International Journal of Epidemiology, 43(6), 1929-1944. https://doi.org/10.1093/ije/dyu188.

+

[3] Wilson R, W. Butters O, Avraam D, Baker J, Tedds J, Turner A, Murtagh M, R. Burton P (2017). “DataSHIELD – New Directions and Dimensions.” Data Science Journal, 16(21), 1-21. https://doi.org/10.5334/dsj-2017-021.

+

[4] Avraam D, Wilson R, Aguirre Chan N, Banerjee S, Bishop T, Butters O, Cadman T, Cederkvist L, Duijts L, Escribà Montagut X, Garner H, Gonçalves G, González J, Haakma S, Hartlev M, Hasenauer J, Huth M, Hyde E, Jaddoe V, Marcon Y, Mayrhofer M, Molnar-Gabor F, Morgan A, Murtagh M, Nestor M, Nybo Andersen A, Parker S, Pinot de Moira A, Schwarz F, Strandberg-Larsen K, Swertz M, Welten M, Wheater S, Burton P (2024). “DataSHIELD: mitigating disclosure risk in a multi-site federated analysis platform.” Bioinformatics Advances, 5(1), 1-21. https://doi.org/10.1093/bioadv/vbaf046.

+
+

Note: Apple Mx architecture users, please be aware that there are some numerical limitations on this platform, which leads to unexpected results when using base R packages, like stats​.

+

x <- c(0, 3, 7)

+

1 - cor(x, x)​

+

The above should result in a value of zero.

+

Also See: For more details see https://cran.r-project.org/doc/FAQ/R-FAQ.html#Why-doesn_0027t-R-think-these-numbers-are-equal_003f and the bug report: https://bugs.r-project.org/show_bug.cgi?id=18941

+
@@ -166,7 +146,20 @@

Citation

Developers

    -
  • DataSHIELD Developers
    Maintainer
  • +
  • Paul Burton
    Author
  • +
  • Rebecca Wilson
    Author
  • +
  • Olly Butters
    Author
  • +
  • Patricia Ryser-Welch
    Author
  • +
  • Alex Westerberg
    Author
  • +
  • Leire Abarrategui
    Author
  • +
  • Roberto Villegas-Diaz
    Author
  • +
  • Demetris Avraam
    Author
  • +
  • Demetris Avraam
    Author
  • +
  • Yannick Marcon
    Author
  • +
  • Tom Bishop
    Author
  • +
  • Amadou Gaye
    Author
  • +
  • Xavier Escribà-Montagut
    Author
  • +
  • Stuart Wheater
    Author, maintainer
@@ -178,21 +171,21 @@

Developers

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/pkgdown.yml b/docs/pkgdown.yml index d053a0a61..ee4120e3d 100644 --- a/docs/pkgdown.yml +++ b/docs/pkgdown.yml @@ -1,6 +1,5 @@ -pandoc: 2.9.2.1 -pkgdown: 2.0.7 +pandoc: 3.1.3 +pkgdown: 2.1.3 pkgdown_sha: ~ articles: {} -last_built: 2023-05-16T20:52Z - +last_built: 2025-09-17T15:10Z diff --git a/docs/reference/checkClass.html b/docs/reference/checkClass.html index 81194bb4a..db54dc45a 100644 --- a/docs/reference/checkClass.html +++ b/docs/reference/checkClass.html @@ -1,9 +1,9 @@ -Checks that an object has the same class in all studies — checkClass • dsBaseClientChecks that an object has the same class in all studies — checkClass • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,25 +44,25 @@

Checks that an object has the same class in all studies

-
checkClass(datasources = NULL, obj = NULL)
+
checkClass(datasources = NULL, obj = NULL)

Arguments

-
datasources
-

a list of DSConnection-class objects obtained after login. If the <datasources> -the default set of connections will be used: see datashield.connections_default.

-
obj
+
datasources
+

a list of DSConnection-class objects obtained after login. If the <datasources> +the default set of connections will be used: see datashield.connections_default.

+ + +
obj

a string character, the name of the object to check for.

Value

- - -

a message or the class of the object if the object has the same class in all studies.

+

a message or the class of the object if the object has the same class in all studies.

Details

@@ -78,19 +78,19 @@

Details

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/colPercent.html b/docs/reference/colPercent.html index aa8115eb2..f17b1a1fe 100644 --- a/docs/reference/colPercent.html +++ b/docs/reference/colPercent.html @@ -1,9 +1,9 @@ -Produces column percentages — colPercent • dsBaseClientProduces column percentages — colPercent • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,20 +44,20 @@

Produces column percentages

-
colPercent(dataframe)
+
colPercent(dataframe)

Arguments

-
dataframe
+ + +
dataframe

a data frame

Value

- - -

a data frame

+

a data frame

Details

@@ -76,19 +76,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/computeWeightedMeans.html b/docs/reference/computeWeightedMeans.html new file mode 100644 index 000000000..6aed4d331 --- /dev/null +++ b/docs/reference/computeWeightedMeans.html @@ -0,0 +1,102 @@ + +Compute Weighted Mean by Group — computeWeightedMeans • dsBaseClient + + +
+
+ + + +
+
+ + +
+

This function is originally from the panelaggregation package. +It has been ported here in order to bypass the package being +kicked off CRAN.

+
+ +
+
computeWeightedMeans(data_table, variables, weight, by)
+
+ +
+

Arguments

+ + +
data_table
+

a data.table

+ + +
variables
+

character name of the variable(s) to focus on. The variables must be in the data.table

+ + +
weight
+

character name of the data.table column that contains a weight.

+ + +
by
+

character vector of the columns to group by

+ +
+
+

Author

+

Matthias Bannert, Gabriel Bucur

+
+ +
+ +
+ + +
+ +
+

Site built with pkgdown 2.1.3.

+
+ +
+ + + + + + + + diff --git a/docs/reference/ds.Boole.html b/docs/reference/ds.Boole.html index caa3646b6..b3e3b55e8 100644 --- a/docs/reference/ds.Boole.html +++ b/docs/reference/ds.Boole.html @@ -1,12 +1,12 @@ -Converts a server-side R object into Boolean indicators — ds.Boole • dsBaseClientConverts a server-side R object into Boolean indicators — ds.Boole • dsBaseClient - +
@@ -32,81 +32,81 @@
- +
-

It compares R objects using the standard set of +

It compares R objects using the standard set of Boolean operators (==, !=, >, >=, <, <=) to create a -vector with Boolean indicators that can be of class logical (TRUE/FALSE) +vector with Boolean indicators that can be of class logical (TRUE/FALSE) or numeric (1/0).

-
ds.Boole(
-  V1 = NULL,
-  V2 = NULL,
-  Boolean.operator = NULL,
-  numeric.output = TRUE,
-  na.assign = "NA",
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.Boole(
+  V1 = NULL,
+  V2 = NULL,
+  Boolean.operator = NULL,
+  numeric.output = TRUE,
+  na.assign = "NA",
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
V1
+ + +
V1

A character string specifying the name of the vector to which the Boolean operator is to be applied.

-
V2
+
V2

A character string specifying the name of the vector to compare with V1.

-
Boolean.operator
+
Boolean.operator

A character string specifying one of six possible Boolean operators: '==', '!=', '>', '>=', '<' and '<='.

-
numeric.output
+
numeric.output

logical. If TRUE the output variable should be of class numeric (1/0). -If FALSE the output variable should be of class logical (TRUE/FALSE). +If FALSE the output variable should be of class logical (TRUE/FALSE). Default TRUE.

-
na.assign
+
na.assign

A character string taking values 'NA','1' or '0'. Default 'NA'. For more information see details.

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output object that is stored on the data servers. Default boole.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.Boole returns the object specified by the newobj argument +

ds.Boole returns the object specified by the newobj argument which is written to the server-side. Also, two validity messages are returned -to the client-side indicating the name of the newobj which -has been created in each data source and if +to the client-side indicating the name of the newobj which +has been created in each data source and if it is in a valid form.

@@ -116,10 +116,10 @@

Details

binary/Boolean vectors together. In this way, observations taking the value 1 in every vector will then take the value 1 in the final vector (after multiplication) while all others will take the value 0. Instead the combination using OR operator -can be obtained by the sum of two or more vectors and applying +can be obtained by the sum of two or more vectors and applying ds.Boole using the operator >= 1.

-

In na.assign if 'NA' is specified, the missing values -remain as NAs in the output vector. If '1' or '0' is specified the +

In na.assign if 'NA' is specified, the missing values +remain as NAs in the output vector. If '1' or '0' is specified the missing values are converted to 1 or 0 respectively or TRUE or FALSE depending on the argument numeric.output.

@@ -133,63 +133,63 @@

Author

Examples

-

-if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  # Generating Boolean indicators
-  ds.Boole(V1 = "D$LAB_TSC",
-           V2 = "D$LAB_TRIG",
-           Boolean.operator = ">",
-           numeric.output = TRUE, #Output vector of 0 and 1
-           na.assign = "NA",      
-           newobj = "Boole.vec",
-           datasources = connections[1]) #only the first server is used ("study1")
-           
-  ds.Boole(V1 = "D$LAB_TSC",
-           V2 = "D$LAB_TRIG",
-           Boolean.operator = "<",
-           numeric.output = FALSE, #Output vector of TRUE and FALSE 
-           na.assign = "1", #NA values are converted to TRUE
-           newobj = "Boole.vec",
-           datasources = connections[2]) #only the second server is used ("study2") 
-                      
-  ds.Boole(V1 = "D$LAB_TSC",
-           V2 = "D$LAB_TRIG",
-           Boolean.operator = ">",
-           numeric.output = TRUE, #Output vector of 0 and 1
-           na.assign = "0", #NA values are converted to 0      
-           newobj = "Boole.vec",
-           datasources = connections) #All servers are used
-  
-  # Clear the Datashield R sessions and logout           
-  datashield.logout(connections)
-}
- 
+    

+if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  # Generating Boolean indicators
+  ds.Boole(V1 = "D$LAB_TSC",
+           V2 = "D$LAB_TRIG",
+           Boolean.operator = ">",
+           numeric.output = TRUE, #Output vector of 0 and 1
+           na.assign = "NA",      
+           newobj = "Boole.vec",
+           datasources = connections[1]) #only the first server is used ("study1")
+           
+  ds.Boole(V1 = "D$LAB_TSC",
+           V2 = "D$LAB_TRIG",
+           Boolean.operator = "<",
+           numeric.output = FALSE, #Output vector of TRUE and FALSE 
+           na.assign = "1", #NA values are converted to TRUE
+           newobj = "Boole.vec",
+           datasources = connections[2]) #only the second server is used ("study2") 
+                      
+  ds.Boole(V1 = "D$LAB_TSC",
+           V2 = "D$LAB_TRIG",
+           Boolean.operator = ">",
+           numeric.output = TRUE, #Output vector of 0 and 1
+           na.assign = "0", #NA values are converted to 0      
+           newobj = "Boole.vec",
+           datasources = connections) #All servers are used
+  
+  # Clear the Datashield R sessions and logout           
+  datashield.logout(connections)
+} # }
+ 
 
@@ -200,19 +200,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.abs.html b/docs/reference/ds.abs.html index f053757f6..d986834af 100644 --- a/docs/reference/ds.abs.html +++ b/docs/reference/ds.abs.html @@ -1,10 +1,10 @@ -Computes the absolute values of a variable — ds.abs • dsBaseClientComputes the absolute values of a variable — ds.abs • dsBaseClient - +
@@ -30,55 +30,55 @@
- +
-

Computes the absolute values for a specified numeric or integer vector. +

Computes the absolute values for a specified numeric or integer vector. This function is similar to R function abs.

-
ds.abs(x = NULL, newobj = NULL, datasources = NULL)
+
ds.abs(x = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string providing the name of a numeric or an integer vector.

-
newobj
+
newobj

a character string that provides the name for the output variable that is stored on the data servers. Default name is set to abs.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.abs assigns a vector for each study that includes the absolute values of +

ds.abs assigns a vector for each study that includes the absolute values of the input numeric or integer vector specified in the argument x. The created vectors are stored in the servers.

Details

-

The function calls the server-side function absDS that computes the -absolute values of the elements of a numeric or integer vector and assigns a new vector -with those absolute values on the server-side. The name of the new generated vector is +

The function calls the server-side function absDS that computes the +absolute values of the elements of a numeric or integer vector and assigns a new vector +with those absolute values on the server-side. The name of the new generated vector is specified by the user through the argument newobj, otherwise is named by default to abs.newobj.

@@ -89,57 +89,57 @@

Author

Examples

-
if (FALSE) {
-
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-                 
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Example 1: Generate a normally distributed variable with zero mean and variance equal
-  #  to one and then get their absolute values
-  ds.rNorm(samp.size=100, mean=0, sd=1, newobj='var.norm', datasources=connections)
-  # check the quantiles
-  ds.summary(x='var.norm', datasources=connections)
-  ds.abs(x='var.norm', newobj='var.norm.abs', datasources=connections)
-  # check now the changes in the quantiles
-  ds.summary(x='var.norm.abs', datasources=connections)  
-
-  # Example 2: Generate a sequence of negative integer numbers from -200 to -100
-  # and then get their absolute values
-  ds.seq(FROM.value.char = '-200', TO.value.char = '-100', BY.value.char = '1', 
-         newobj='negative.integers', datasources=connections)
-  # check the quantiles
-  ds.summary(x='negative.integers', datasources=connections)
-  ds.abs(x='negative.integers', newobj='positive.integers', datasources=connections)
-  # check now the changes in the quantiles
-  ds.summary(x='positive.integers', datasources=connections)
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+                 
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Example 1: Generate a normally distributed variable with zero mean and variance equal
+  #  to one and then get their absolute values
+  ds.rNorm(samp.size=100, mean=0, sd=1, newobj='var.norm', datasources=connections)
+  # check the quantiles
+  ds.summary(x='var.norm', datasources=connections)
+  ds.abs(x='var.norm', newobj='var.norm.abs', datasources=connections)
+  # check now the changes in the quantiles
+  ds.summary(x='var.norm.abs', datasources=connections)  
+
+  # Example 2: Generate a sequence of negative integer numbers from -200 to -100
+  # and then get their absolute values
+  ds.seq(FROM.value.char = '-200', TO.value.char = '-100', BY.value.char = '1', 
+         newobj='negative.integers', datasources=connections)
+  # check the quantiles
+  ds.summary(x='negative.integers', datasources=connections)
+  ds.abs(x='negative.integers', newobj='positive.integers', datasources=connections)
+  # check now the changes in the quantiles
+  ds.summary(x='positive.integers', datasources=connections)
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+
+} # }
+
 
@@ -150,19 +150,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.asCharacter.html b/docs/reference/ds.asCharacter.html index 4eeedc6d7..9b2b1fdaf 100644 --- a/docs/reference/ds.asCharacter.html +++ b/docs/reference/ds.asCharacter.html @@ -1,10 +1,10 @@ -Converts a server-side R object into a character class — ds.asCharacter • dsBaseClientConverts a server-side R object into a character class — ds.asCharacter • dsBaseClient - +
@@ -30,50 +30,50 @@
- +
-

Converts the input object into a character class. +

Converts the input object into a character class. This function is based on the native R function as.character.

-
ds.asCharacter(x.name = NULL, newobj = NULL, datasources = NULL)
+
ds.asCharacter(x.name = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x.name
+ + +
x.name

a character string providing the name of the input object to be coerced to class character.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default ascharacter.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.asCharacter returns the object converted into a class character +

ds.asCharacter returns the object converted into a class character that is written to the server-side. Also, two validity messages are returned to the client-side -indicating the name of the newobj which has been created in each data source and if +indicating the name of the newobj which has been created in each data source and if it is in a valid form.

@@ -87,41 +87,41 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Converting the R object into a class character
-  ds.asCharacter(x.name = "D$LAB_TSC",
-                 newobj = "char.obj",
-                 datasources = connections[1]) #only the first Opal server is used ("study1")
-                 
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Converting the R object into a class character
+  ds.asCharacter(x.name = "D$LAB_TSC",
+                 newobj = "char.obj",
+                 datasources = connections[1]) #only the first Opal server is used ("study1")
+                 
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
 
@@ -132,19 +132,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.asDataMatrix.html b/docs/reference/ds.asDataMatrix.html index 5cb0ce258..eb7b66e38 100644 --- a/docs/reference/ds.asDataMatrix.html +++ b/docs/reference/ds.asDataMatrix.html @@ -1,10 +1,10 @@ -Converts a server-side R object into a matrix — ds.asDataMatrix • dsBaseClientConverts a server-side R object into a matrix — ds.asDataMatrix • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,36 +46,36 @@

Converts a server-side R object into a matrix

-
ds.asDataMatrix(x.name = NULL, newobj = NULL, datasources = NULL)
+
ds.asDataMatrix(x.name = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x.name
-

a character string providing the name of the input object to be coerced to + + +

x.name
+

a character string providing the name of the input object to be coerced to a matrix.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default asdatamatrix.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.asDataMatrix returns the object converted into a matrix -that is written to the server-side. Also, two validity messages are returned +

ds.asDataMatrix returns the object converted into a matrix +that is written to the server-side. Also, two validity messages are returned to the client-side -indicating the name of the newobj which -has been created in each data source and if +indicating the name of the newobj which +has been created in each data source and if it is in a valid form.

@@ -90,41 +90,41 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Converting the R object into a matrix
-  ds.asDataMatrix(x.name = "D",
-                  newobj = "mat.obj",
-                  datasources = connections[1]) #only the first Opal server is used ("study1")
-                 
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Converting the R object into a matrix
+  ds.asDataMatrix(x.name = "D",
+                  newobj = "mat.obj",
+                  datasources = connections[1]) #only the first Opal server is used ("study1")
+                 
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
 
@@ -135,19 +135,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.asFactor.html b/docs/reference/ds.asFactor.html index 4d015764f..a2db6d261 100644 --- a/docs/reference/ds.asFactor.html +++ b/docs/reference/ds.asFactor.html @@ -1,9 +1,9 @@ -Converts a server-side numeric vector into a factor — ds.asFactor • dsBaseClientConverts a server-side numeric vector into a factor — ds.asFactor • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,60 +44,60 @@

Converts a server-side numeric vector into a factor

-
ds.asFactor(
-  input.var.name = NULL,
-  newobj.name = NULL,
-  forced.factor.levels = NULL,
-  fixed.dummy.vars = FALSE,
-  baseline.level = 1,
-  datasources = NULL
-)
+
ds.asFactor(
+  input.var.name = NULL,
+  newobj.name = NULL,
+  forced.factor.levels = NULL,
+  fixed.dummy.vars = FALSE,
+  baseline.level = 1,
+  datasources = NULL
+)

Arguments

-
input.var.name
-

a character string which provides + + +

input.var.name
+

a character string which provides the name of the variable to be converted to a factor.

-
newobj.name
-

a character string that provides the name for the output variable +

newobj.name
+

a character string that provides the name for the output variable that is stored on the data servers. Default asfactor.newobj.

-
forced.factor.levels
-

the levels that the user wants to split the input variable. +

forced.factor.levels
+

the levels that the user wants to split the input variable. If NULL (default) a vector with all unique levels from all studies are created.

-
fixed.dummy.vars
-

boolean. If TRUE the input variable is converted to a factor -but presented as a matrix of dummy variables. +

fixed.dummy.vars
+

boolean. If TRUE the input variable is converted to a factor +but presented as a matrix of dummy variables. If FALSE (default) the input variable is converted to a factor and assigned as a vector.

-
baseline.level
-

an integer indicating the baseline level -to be used in the creation of the matrix with dummy variables. +

baseline.level
+

an integer indicating the baseline level +to be used in the creation of the matrix with dummy variables. If the fixed.dummy.vars is set to FALSE then any value of the baseline level is not taken into account.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.asFactor returns the unique levels of the converted -variable in ascending order and a validity -message with the name of the created object on the client-side and +

ds.asFactor returns the unique levels of the converted +variable in ascending order and a validity +message with the name of the created object on the client-side and the output matrix or vector in the server-side.

@@ -140,30 +140,30 @@

Details

variable to a factor. Because you do not need to share unique factor levels between sources, there is then no disclosure issue.

To understand how the matrix of the dummy variable is created let's assume that we have the vector -(1, 2, 1, 3, 4, 4, 1, 3, 4, 5) of ten integer numbers. +(1, 2, 1, 3, 4, 4, 1, 3, 4, 5) of ten integer numbers. If we set the argument fixed.dummy.vars = TRUE, -baseline.level = 1 and forced.factor.levels = c(1,2,3,4,5). +baseline.level = 1 and forced.factor.levels = c(1,2,3,4,5). The input vector is converted to the following matrix of dummy variables:

DV2DV3DV4DV50
00010
00000
00100
00100
01000
00010
00010

For the same example if the baseline.level = 3 then the matrix is:

-

DV1DV2DV4DV5
1000
0100
1000
0000
0010
0010
1000
0000
0010
0001

In the first instance the first row of the matrix has zeros in all entries indicating -that the first data point belongs to level 1 (as the baseline level is equal to 1). +

DV1DV2DV4DV5
1000
0100
1000
0000
0010
0010
1000
0000
0010
0001

In the first instance the first row of the matrix has zeros in all entries indicating +that the first data point belongs to level 1 (as the baseline level is equal to 1). The second row has 1 at the first (DV2) column and zeros elsewhere, -indicating that the second data point belongs to level 2. -In the second instance (second matrix) where the baseline level is equal to 3, +indicating that the second data point belongs to level 2. +In the second instance (second matrix) where the baseline level is equal to 3, the first row of the matrix has 1 at the -first (DV1) column and zeros elsewhere, +first (DV1) column and zeros elsewhere, indicating again that the first data point belongs to level 1. -Also as we can see the fourth row of the second matrix has all its elements equal +Also as we can see the fourth row of the second matrix has all its elements equal to zero indicating that the fourth data point belongs to level 3 (as the baseline level, in that case, is 3).

-

If the baseline.level is set to be equal to a value +

If the baseline.level is set to be equal to a value that is not one of the levels of the factor then a matrix of dummy variables is created having as many columns as the number of levels. -In that case in each row there is a unique entry equal to 1 at a +In that case in each row there is a unique entry equal to 1 at a certain column indicating the level of each data point. So, for the above example where the vector has five levels if we set the baseline.level equal to a value that does not -belong to those five levels (baseline.level=8) +belong to those five levels (baseline.level=8) the matrix of dummy variables is:

DV1DV2DV3DV4DV5
10000
01000
10000
00100
00010
00010
10000
00100
00010
00001

Server functions called: asFactorDS1 and asFactorDS2

@@ -174,51 +174,51 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  ds.asFactor(input.var.name = "D$PM_BMI_CATEGORICAL", 
-              newobj.name = "fact.obj", 
-              forced.factor.levels = NULL, #a vector with all unique levels 
-                                           #from all studies is created
-              fixed.dummy.vars = TRUE, #create a matrix of dummy variables
-              baseline.level = 1,
-              datasources = connections)#all the Opal servers are used, in this case 3 
-                                        #(see above the connection to the servers) 
-  ds.asFactor(input.var.name = "D$PM_BMI_CATEGORICAL", 
-              newobj.name = "fact.obj", 
-              forced.factor.levels = c(2,3), #the variable is split in 2 levels
-              fixed.dummy.vars = TRUE, #create a matrix of dummy variables
-              baseline.level = 1,
-              datasources = connections[1])#only the first Opal server is used ("study1")
-
-   # Clear the Datashield R sessions and logout  
-   datashield.logout(connections) 
-}
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  ds.asFactor(input.var.name = "D$PM_BMI_CATEGORICAL", 
+              newobj.name = "fact.obj", 
+              forced.factor.levels = NULL, #a vector with all unique levels 
+                                           #from all studies is created
+              fixed.dummy.vars = TRUE, #create a matrix of dummy variables
+              baseline.level = 1,
+              datasources = connections)#all the Opal servers are used, in this case 3 
+                                        #(see above the connection to the servers) 
+  ds.asFactor(input.var.name = "D$PM_BMI_CATEGORICAL", 
+              newobj.name = "fact.obj", 
+              forced.factor.levels = c(2,3), #the variable is split in 2 levels
+              fixed.dummy.vars = TRUE, #create a matrix of dummy variables
+              baseline.level = 1,
+              datasources = connections[1])#only the first Opal server is used ("study1")
+
+   # Clear the Datashield R sessions and logout  
+   datashield.logout(connections) 
+} # }
 
@@ -229,19 +229,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.asFactorSimple.html b/docs/reference/ds.asFactorSimple.html index 85d13d8c5..32283795b 100644 --- a/docs/reference/ds.asFactorSimple.html +++ b/docs/reference/ds.asFactorSimple.html @@ -1,10 +1,10 @@ -Converts a numeric vector into a factor — ds.asFactorSimple • dsBaseClientConverts a numeric vector into a factor — ds.asFactorSimple • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,42 +46,42 @@

Converts a numeric vector into a factor

-
ds.asFactorSimple(
-  input.var.name = NULL,
-  newobj.name = NULL,
-  datasources = NULL
-)
+
ds.asFactorSimple(
+  input.var.name = NULL,
+  newobj.name = NULL,
+  datasources = NULL
+)

Arguments

-
input.var.name
-

a character string which provides + + +

input.var.name
+

a character string which provides the name of the variable to be converted to a factor.

-
newobj.name
-

a character string that provides the name for the output variable +

newobj.name
+

a character string that provides the name for the output variable that is stored on the data servers. Default asfactor.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

an output vector of class factor to the serverside. In addition, returns a validity +

an output vector of class factor to the serverside. In addition, returns a validity message with the name of the created object on the client-side and if creation fails an error message which can be viewed using datashield.errors().

Details

-

The function converts the input variable into a factor. Unlike +

The function converts the input variable into a factor. Unlike ds.asFactor and its serverside functions, ds.asFactorSimple does no more than coerce the class of a variable to make it a factor on the serverside in each data source. It does not check for or enforce consistency of factor levels across sources or allow you to @@ -103,19 +103,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.asInteger.html b/docs/reference/ds.asInteger.html index 19017ac54..0bb6cce03 100644 --- a/docs/reference/ds.asInteger.html +++ b/docs/reference/ds.asInteger.html @@ -1,10 +1,10 @@ -Converts a server-side R object into an integer class — ds.asInteger • dsBaseClientConverts a server-side R object into an integer class — ds.asInteger • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,45 +46,45 @@

Converts a server-side R object into an integer class

-
ds.asInteger(x.name = NULL, newobj = NULL, datasources = NULL)
+
ds.asInteger(x.name = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x.name
-

a character string providing the name of the input object to be coerced to + + +

x.name
+

a character string providing the name of the input object to be coerced to an integer.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default asinteger.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.asInteger returns the R object converted into an integer +

ds.asInteger returns the R object converted into an integer that is written to the server-side. Also, two validity messages are returned to the -client-side indicating the name of the newobj which -has been created in each data source and if +client-side indicating the name of the newobj which +has been created in each data source and if it is in a valid form.

Details

-

This function is based on the native R function as.integer. - The only difference is that the DataSHIELD -function first converts the values of the input object into characters and then convert -those to integers. This addition, +

This function is based on the native R function as.integer. + The only difference is that the DataSHIELD +function first converts the values of the input object into characters and then convert +those to integers. This addition, it is important for the case where the input object is of class -factor having integers as levels. +factor having integers as levels. In that case, the native R as.integer function returns the underlying level codes and not the values as integers. For example as.integer in R @@ -102,42 +102,42 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Converting the R object into an integer
-  ds.asInteger(x.name = "D$LAB_TSC",
-                  newobj = "int.obj",
-                  datasources = connections[1]) #only the first Opal server is used ("study1")
-  ds.class(x = "int.obj", datasources = connections[1])   
-  
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Converting the R object into an integer
+  ds.asInteger(x.name = "D$LAB_TSC",
+                  newobj = "int.obj",
+                  datasources = connections[1]) #only the first Opal server is used ("study1")
+  ds.class(x = "int.obj", datasources = connections[1])   
+  
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
 
@@ -148,19 +148,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.asList.html b/docs/reference/ds.asList.html index 200fa81c2..45437c4b3 100644 --- a/docs/reference/ds.asList.html +++ b/docs/reference/ds.asList.html @@ -1,10 +1,10 @@ -Converts a server-side R object into a list — ds.asList • dsBaseClientConverts a server-side R object into a list — ds.asList • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,34 +46,34 @@

Converts a server-side R object into a list

-
ds.asList(x.name = NULL, newobj = NULL, datasources = NULL)
+
ds.asList(x.name = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x.name
-

a character string providing the name of the input object to be coerced to + + +

x.name
+

a character string providing the name of the input object to be coerced to a list.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default aslist.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.asList returns the R object converted into a list +

ds.asList returns the R object converted into a list which is written to the server-side. Also, two validity messages are returned to the -client-side indicating the name of the newobj which has been created in each data +client-side indicating the name of the newobj which has been created in each data source and if it is in a valid form.

@@ -87,42 +87,42 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Converting the R object into a List
-  ds.asList(x.name = "D",
-  newobj = "D.asList", 
-  datasources = connections[1]) #only the first Opal server is used ("study1")
-  ds.class(x = "D.asList", datasources = connections[1])   
-              
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Converting the R object into a List
+  ds.asList(x.name = "D",
+  newobj = "D.asList", 
+  datasources = connections[1]) #only the first Opal server is used ("study1")
+  ds.class(x = "D.asList", datasources = connections[1])   
+              
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
 
@@ -133,19 +133,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.asLogical.html b/docs/reference/ds.asLogical.html index a6f3006dd..58658647e 100644 --- a/docs/reference/ds.asLogical.html +++ b/docs/reference/ds.asLogical.html @@ -1,10 +1,10 @@ -Converts a server-side R object into a logical class — ds.asLogical • dsBaseClientConverts a server-side R object into a logical class — ds.asLogical • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,35 +46,35 @@

Converts a server-side R object into a logical class

-
ds.asLogical(x.name = NULL, newobj = NULL, datasources = NULL)
+
ds.asLogical(x.name = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x.name
-

a character string providing the name of the + + +

x.name
+

a character string providing the name of the input object to be coerced to a logical.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default aslogical.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.asLogical returns the R object converted into a logical +

ds.asLogical returns the R object converted into a logical that is written to the server-side. Also, two validity messages are returned -to the client-side indicating the name of the newobj which -has been created in each data source and if +to the client-side indicating the name of the newobj which +has been created in each data source and if it is in a valid form.

@@ -88,42 +88,42 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Converting the R object into a logical
-  ds.asLogical(x.name = "D$LAB_TSC", 
-               newobj = "logical.obj", 
-               datasources =connections[1]) #only the first Opal server is used ("study1")
-  ds.class(x = "logical.obj", datasources = connections[1])  
-               
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Converting the R object into a logical
+  ds.asLogical(x.name = "D$LAB_TSC", 
+               newobj = "logical.obj", 
+               datasources =connections[1]) #only the first Opal server is used ("study1")
+  ds.class(x = "logical.obj", datasources = connections[1])  
+               
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
 
@@ -134,19 +134,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.asMatrix.html b/docs/reference/ds.asMatrix.html index fb183f53e..c961e2584 100644 --- a/docs/reference/ds.asMatrix.html +++ b/docs/reference/ds.asMatrix.html @@ -1,10 +1,10 @@ -Converts a server-side R object into a matrix — ds.asMatrix • dsBaseClientConverts a server-side R object into a matrix — ds.asMatrix • dsBaseClient - +
@@ -30,55 +30,55 @@
- +
-

Coerces an R object into a matrix. +

Coerces an R object into a matrix. This converts all columns into character class.

-
ds.asMatrix(x.name = NULL, newobj = NULL, datasources = NULL)
+
ds.asMatrix(x.name = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x.name
-

a character string providing the name of the input object to be coerced to + + +

x.name
+

a character string providing the name of the input object to be coerced to a matrix.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default asmatrix.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.asMatrix returns the object converted into a matrix +

ds.asMatrix returns the object converted into a matrix that is written to the server-side. Also, two validity messages are returned -to the client-side indicating the name of the newobj which +to the client-side indicating the name of the newobj which has been created in each data source and if it is in a valid form.

Details

-

This function is based on the native R function as.matrix. +

This function is based on the native R function as.matrix. If this function is applied to a data frame, all columns are converted into a character class. If you wish to convert a data frame to a matrix but maintain all data columns in their original class you should use the function ds.asDataMatrix.

@@ -91,41 +91,41 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Converting the R object into a matrix
-  ds.asMatrix(x.name = "D",
-              newobj = "mat.obj",
-              datasources = connections[1]) #only the first Opal server is used ("study1")
-                 
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-} 
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Converting the R object into a matrix
+  ds.asMatrix(x.name = "D",
+              newobj = "mat.obj",
+              datasources = connections[1]) #only the first Opal server is used ("study1")
+                 
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # } 
 
@@ -136,19 +136,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.asNumeric.html b/docs/reference/ds.asNumeric.html index 73e33afbf..76ec51363 100644 --- a/docs/reference/ds.asNumeric.html +++ b/docs/reference/ds.asNumeric.html @@ -1,10 +1,10 @@ -Converts a server-side R object into a numeric class — ds.asNumeric • dsBaseClientConverts a server-side R object into a numeric class — ds.asNumeric • dsBaseClient - +
@@ -30,67 +30,67 @@
- +
-

Coerces an R object into a numeric class. +

Coerces an R object into a numeric class. This function is based on the native R function as.numeric.

-
ds.asNumeric(x.name = NULL, newobj = NULL, datasources = NULL)
+
ds.asNumeric(x.name = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x.name
-

a character string providing the name of the input object to be coerced to + + +

x.name
+

a character string providing the name of the input object to be coerced to a numeric.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default asnumeric.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.asNumeric returns the R object converted into a numeric class -that is written to the server-side. Also, two validity messages are returned -to the client-side indicating the name of the newobj which -has been created in each data source and if +

ds.asNumeric returns the R object converted into a numeric class +that is written to the server-side. Also, two validity messages are returned +to the client-side indicating the name of the newobj which +has been created in each data source and if it is in a valid form.

Details

This function is based on the native R function as.numeric. -However, it behaves differently with some specific classes of variables. For example, if the input +However, it behaves differently with some specific classes of variables. For example, if the input object is of class factor, it first converts its values into characters and then convert those to -numerics. This behavior is important for the case where the input object is of class factor having -numbers as levels. In that case, the native R -as.numeric function returns the underlying level codes and not the values as numbers. +numerics. This behaviour is important for the case where the input object is of class factor having +numbers as levels. In that case, the native R +as.numeric function returns the underlying level codes and not the values as numbers. For example as.numeric in R converts the factor vector:
0 1 1 2 1 0 1 0 2 2 2 1
Levels: 0 1 2
to the following numeric vector: 1 2 2 3 2 1 2 1 3 3 3 2
-In contrast DataSHIELD converts an input factor with numeric +In contrast DataSHIELD converts an input factor with numeric levels to its original numeric values.

Server function called: asNumericDS

@@ -101,42 +101,42 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Converting the R object into a numeric class
-  ds.asNumeric(x.name = "D$LAB_TSC",
-                  newobj = "num.obj",
-                  datasources = connections[1]) #only the first Opal server is used ("study1")
-  ds.class(x = "num.obj", datasources = connections[1]) 
-                
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Converting the R object into a numeric class
+  ds.asNumeric(x.name = "D$LAB_TSC",
+                  newobj = "num.obj",
+                  datasources = connections[1]) #only the first Opal server is used ("study1")
+  ds.class(x = "num.obj", datasources = connections[1]) 
+                
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
 
@@ -147,19 +147,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.assign.html b/docs/reference/ds.assign.html index 64fd38ac7..af6b3e020 100644 --- a/docs/reference/ds.assign.html +++ b/docs/reference/ds.assign.html @@ -1,9 +1,9 @@ -Assigns an R object to a name in the server-side — ds.assign • dsBaseClientAssigns an R object to a name in the server-side — ds.assign • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,38 +44,38 @@

Assigns an R object to a name in the server-side

-
ds.assign(toAssign = NULL, newobj = NULL, datasources = NULL)
+
ds.assign(toAssign = NULL, newobj = NULL, datasources = NULL)

Arguments

-
toAssign
+ + +
toAssign

a character string providing the object to assign.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default assign.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.assign returns the R object assigned to a name +

ds.assign returns the R object assigned to a name that is written to the server-side.

Details

The new object is stored on the server-side.

-

ds.assign causes a remote assignment by using DSI::datashield.assign. -The toAssign argument is checked at the server and +

ds.assign causes a remote assignment by using DSI::datashield.assign. +The toAssign argument is checked at the server and assigned the variable called newobj on the server-side.

@@ -85,42 +85,42 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Assign a variable to a name
-  ds.assign(toAssign = "D$LAB_TSC",
-            newobj = "labtsc",
-            datasources = connections[1]) #only the first Opal server is used ("study1")
-                
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
-
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Assign a variable to a name
+  ds.assign(toAssign = "D$LAB_TSC",
+            newobj = "labtsc",
+            datasources = connections[1]) #only the first Opal server is used ("study1")
+                
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
+
 
@@ -131,19 +131,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.auc.html b/docs/reference/ds.auc.html index c41390649..2c93a3083 100644 --- a/docs/reference/ds.auc.html +++ b/docs/reference/ds.auc.html @@ -1,10 +1,10 @@ -Calculates the Area under the curve (AUC) — ds.auc • dsBaseClientCalculates the Area under the curve (AUC) — ds.auc • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,31 +46,31 @@

Calculates the Area under the curve (AUC)

-
ds.auc(pred = NULL, y = NULL, datasources = NULL)
+
ds.auc(pred = NULL, y = NULL, datasources = NULL)

Arguments

-
pred
+ + +
pred

the name of the vector of the predicted values

-
y
-

the name of the outcome variable. Note that this variable should include +

y
+

the name of the outcome variable. Note that this variable should include the complete cases that are used in the regression model.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

returns the AUC and its standard error

+

returns the AUC and its standard error

Details

@@ -89,19 +89,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.boxPlot.html b/docs/reference/ds.boxPlot.html index ba2490d31..c7b9a1e0b 100644 --- a/docs/reference/ds.boxPlot.html +++ b/docs/reference/ds.boxPlot.html @@ -1,10 +1,10 @@ -Draw boxplot — ds.boxPlot • dsBaseClientDraw boxplot — ds.boxPlot • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,64 +46,64 @@

Draw boxplot

-
ds.boxPlot(
-  x,
-  variables = NULL,
-  group = NULL,
-  group2 = NULL,
-  xlabel = "x axis",
-  ylabel = "y axis",
-  type = "pooled",
-  datasources = NULL
-)
+
ds.boxPlot(
+  x,
+  variables = NULL,
+  group = NULL,
+  group2 = NULL,
+  xlabel = "x axis",
+  ylabel = "y axis",
+  type = "pooled",
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

character Name of the data frame (or numeric vector) on the server side that holds the information to be plotted

-
variables
+
variables

character vector Name of the column(s) of the data frame to include on the boxplot

-
group
+
group

character (default NULL) Name of the first grouping variable.

-
group2
+
group2

character (default NULL) Name of the second grouping variable.

-
xlabel
+
xlabel

caracter (default "x axis") Label to put on the x axis of the plot

-
ylabel
+
ylabel

caracter (default "y axis") Label to put on the y axis of the plot

-
type
+
type

character Return a pooled plot ("pooled") or a split plot (one for each study server "split")

-
datasources
+
datasources

a list of DSConnection-class (default NULL) objects obtained after login

Value

- - -

ggplot object

+

ggplot object

Examples

- +
- - + + diff --git a/docs/reference/ds.boxPlotGG.html b/docs/reference/ds.boxPlotGG.html index eb55056c0..1d5456390 100644 --- a/docs/reference/ds.boxPlotGG.html +++ b/docs/reference/ds.boxPlotGG.html @@ -1,10 +1,10 @@ -Renders boxplot — ds.boxPlotGG • dsBaseClientRenders boxplot — ds.boxPlotGG • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,21 +46,23 @@

Renders boxplot

-
ds.boxPlotGG(
-  x,
-  group = NULL,
-  group2 = NULL,
-  xlabel = "x axis",
-  ylabel = "y axis",
-  type = "pooled",
-  datasources = NULL
-)
+
ds.boxPlotGG(
+  x,
+  group = NULL,
+  group2 = NULL,
+  xlabel = "x axis",
+  ylabel = "y axis",
+  type = "pooled",
+  datasources = NULL
+)

Arguments

-
x
-

character Name on the server side of the data frame to form a boxplot. Structure on the server + + +

x
+

character Name on the server side of the data frame to form a boxplot. Structure on the server of this object must be:

Column 'x': Names on the X axis of the boxplot, aka variables to plot
Column 'value': Values for that variable (raw data of columns rbinded)
@@ -68,36 +70,34 @@

Arguments

Column 'group2': (Optional) Values of the second grouping variable

-
group
+
group

character (default NULL) Name of the first grouping variable.

-
group2
+
group2

character (default NULL) Name of the second grouping variable.

-
xlabel
+
xlabel

caracter (default "x axis") Label to put on the x axis of the plot

-
ylabel
+
ylabel

caracter (default "y axis") Label to put on the y axis of the plot

-
type
+
type

character Return a pooled plot ("pooled") or a split plot (one for each study server "split")

-
datasources
+
datasources

a list of DSConnection-class (default NULL) objects obtained after login

Value

- - -

ggplot object

+

ggplot object

@@ -108,19 +108,19 @@

Value

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.boxPlotGG_data_Treatment.html b/docs/reference/ds.boxPlotGG_data_Treatment.html index 3c89a8c20..4ad0dbdc6 100644 --- a/docs/reference/ds.boxPlotGG_data_Treatment.html +++ b/docs/reference/ds.boxPlotGG_data_Treatment.html @@ -1,9 +1,9 @@ -Take a data frame on the server side an arrange it to pass it to the boxplot function — ds.boxPlotGG_data_Treatment • dsBaseClientTake a data frame on the server side an arrange it to pass it to the boxplot function — ds.boxPlotGG_data_Treatment • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,58 +44,47 @@

Take a data frame on the server side an arrange it to pass it to the boxplot

-
ds.boxPlotGG_data_Treatment(
-  table,
-  variables,
-  group = NULL,
-  group2 = NULL,
-  datasources = NULL
-)
+
ds.boxPlotGG_data_Treatment(
+  table,
+  variables,
+  group = NULL,
+  group2 = NULL,
+  datasources = NULL
+)

Arguments

-
table
+ + +
table

character Name of the table on the server side that holds the information to be plotted later

-
variables
+
variables

character vector Name of the column(s) of the data frame to include on the boxplot

-
group
+
group

character (default NULL) Name of the first grouping variable.

-
group2
+
group2

character (default NULL) Name of the second grouping variable.

-
datasources
+
datasources

a list of DSConnection-class (default NULL) objects obtained after login

Value

- - -

Does not return nothing, it creates the table "boxPlotRawData" on the server arranged to be passed to the +

Does not return nothing, it creates the table "boxPlotRawData" on the server arranged to be passed to the ggplot boxplot function. Structure of the created table:

- - - -

Column 'x': Names on the X axis of the boxplot, aka variables to plot

- - -

Column 'value': Values for that variable (raw data of columns rbinded)

- - -

Column 'group': (Optional) Values of the grouping variable

- - -

Column 'group2': (Optional) Values of the second grouping variable

- - +

Column 'x': Names on the X axis of the boxplot, aka variables to plot
+ Column 'value': Values for that variable (raw data of columns rbinded)
+ Column 'group': (Optional) Values of the grouping variable
+ Column 'group2': (Optional) Values of the second grouping variable

@@ -106,19 +95,19 @@

Value

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.boxPlotGG_data_Treatment_numeric.html b/docs/reference/ds.boxPlotGG_data_Treatment_numeric.html index e3a2e7251..4e760a434 100644 --- a/docs/reference/ds.boxPlotGG_data_Treatment_numeric.html +++ b/docs/reference/ds.boxPlotGG_data_Treatment_numeric.html @@ -1,9 +1,9 @@ -Take a vector on the server side an arrange it to pass it to the boxplot function — ds.boxPlotGG_data_Treatment_numeric • dsBaseClientTake a vector on the server side an arrange it to pass it to the boxplot function — ds.boxPlotGG_data_Treatment_numeric • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,34 +44,27 @@

Take a vector on the server side an arrange it to pass it to the boxplot fun

-
ds.boxPlotGG_data_Treatment_numeric(vector, datasources = NULL)
+
ds.boxPlotGG_data_Treatment_numeric(vector, datasources = NULL)

Arguments

-
vector
+ + +
vector

character Name of the table on the server side that holds the information to be plotted later

-
datasources
+
datasources

a list of DSConnection-class (default NULL) objects obtained after login

Value

- - -

Does not return nothing, it creates the table "boxPlotRawDataNumeric" on the server arranged to be passed to the +

Does not return nothing, it creates the table "boxPlotRawDataNumeric" on the server arranged to be passed to the ggplot boxplot function. Structure of the created table:

- - - -

Column 'x': Names on the X axis of the boxplot, aka name of the vector (vector argument)

- - -

Column 'value': Values for that variable

- - +

Column 'x': Names on the X axis of the boxplot, aka name of the vector (vector argument)
+ Column 'value': Values for that variable

@@ -82,19 +75,19 @@

Value

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.boxPlotGG_numeric.html b/docs/reference/ds.boxPlotGG_numeric.html index 0e0f87e37..a2f69f988 100644 --- a/docs/reference/ds.boxPlotGG_numeric.html +++ b/docs/reference/ds.boxPlotGG_numeric.html @@ -1,9 +1,9 @@ -Draw boxplot with information from a numeric vector — ds.boxPlotGG_numeric • dsBaseClientDraw boxplot with information from a numeric vector — ds.boxPlotGG_numeric • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,43 +44,43 @@

Draw boxplot with information from a numeric vector

-
ds.boxPlotGG_numeric(
-  x,
-  xlabel = "x axis",
-  ylabel = "y axis",
-  type = "pooled",
-  datasources = NULL
-)
+
ds.boxPlotGG_numeric(
+  x,
+  xlabel = "x axis",
+  ylabel = "y axis",
+  type = "pooled",
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

character Name of the numeric vector on the server side that holds the information to be plotted

-
xlabel
+
xlabel

caracter (default "x axis") Label to put on the x axis of the plot

-
ylabel
+
ylabel

caracter (default "y axis") Label to put on the y axis of the plot

-
type
+
type

character Return a pooled plot ("pooled") or a split plot (one for each study server "split")

-
datasources
+
datasources

a list of DSConnection-class (default NULL) objects obtained after login

Value

- - -

ggplot object

+

ggplot object

@@ -91,19 +91,19 @@

Value

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.boxPlotGG_table.html b/docs/reference/ds.boxPlotGG_table.html index e06bcbe5f..46f7ed910 100644 --- a/docs/reference/ds.boxPlotGG_table.html +++ b/docs/reference/ds.boxPlotGG_table.html @@ -1,9 +1,9 @@ -Draw boxplot with information from a data frame — ds.boxPlotGG_table • dsBaseClientDraw boxplot with information from a data frame — ds.boxPlotGG_table • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,58 +44,58 @@

Draw boxplot with information from a data frame

-
ds.boxPlotGG_table(
-  x,
-  variables,
-  group = NULL,
-  group2 = NULL,
-  xlabel = "x axis",
-  ylabel = "y axis",
-  type = "pooled",
-  datasources = NULL
-)
+
ds.boxPlotGG_table(
+  x,
+  variables,
+  group = NULL,
+  group2 = NULL,
+  xlabel = "x axis",
+  ylabel = "y axis",
+  type = "pooled",
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

character Name of the table on the server side that holds the information to be plotted

-
variables
+
variables

character vector Name of the column(s) of the data frame to include on the boxplot

-
group
+
group

character (default NULL) Name of the first grouping variable.

-
group2
+
group2

character (default NULL) Name of the second grouping variable.

-
xlabel
+
xlabel

caracter (default "x axis") Label to put on the x axis of the plot

-
ylabel
+
ylabel

caracter (default "y axis") Label to put on the y axis of the plot

-
type
+
type

character Return a pooled plot ("pooled") or a split plot (one for each study server "split")

-
datasources
+
datasources

a list of DSConnection-class (default NULL) objects obtained after login

Value

- - -

ggplot object

+

ggplot object

@@ -106,19 +106,19 @@

Value

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.bp_standards.html b/docs/reference/ds.bp_standards.html index 85fcebb6d..e3ab71fd8 100644 --- a/docs/reference/ds.bp_standards.html +++ b/docs/reference/ds.bp_standards.html @@ -1,12 +1,12 @@ -Calculates Blood pressure z-scores — ds.bp_standards • dsBaseClientCalculates Blood pressure z-scores — ds.bp_standards • dsBaseClient - +
@@ -32,78 +32,78 @@
- +

The function calculates blood pressure z-scores in two steps: -Step 1. Calculates z-score of height according to CDC growth chart (Not the +Step 1. Calculates z-score of height according to CDC growth chart (Not the WHO growth chart!). Step 2. Calculates z-score of BP according to the fourth report on BP management, USA

-
ds.bp_standards(
-  sex = NULL,
-  age = NULL,
-  height = NULL,
-  bp = NULL,
-  systolic = TRUE,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.bp_standards(
+  sex = NULL,
+  age = NULL,
+  height = NULL,
+  bp = NULL,
+  systolic = TRUE,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
sex
+ + +
sex

the name of the sex variable. The variable should be coded as 1 for males -and 2 for females. If it is coded differently (e.g. 0/1), then you can use the -ds.recodeValues function to recode the categories to 1/2 before the use of +and 2 for females. If it is coded differently (e.g. 0/1), then you can use the +ds.recodeValues function to recode the categories to 1/2 before the use of ds.bp_standards

-
age
+
age

the name of the age variable in years.

-
height
+
height

the name of the height variable in cm.

-
bp
+
bp

the name of the blood pressure variable.

-
systolic
-

logical. If TRUE (default) the function assumes conversion of -systolic blood pressure. If FALSE the function assumes conversion of diastolic +

systolic
+

logical. If TRUE (default) the function assumes conversion of +systolic blood pressure. If FALSE the function assumes conversion of diastolic blood pressure.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default name is set to bp.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

assigns a new object on the server-side. The assigned object is a list +

assigns a new object on the server-side. The assigned object is a list with two elements: the 'Zbp' which is the zscores of the blood pressure and 'perc' which is the percentiles of the BP zscores.

@@ -126,19 +126,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.c.html b/docs/reference/ds.c.html index 24c264350..d15251bad 100644 --- a/docs/reference/ds.c.html +++ b/docs/reference/ds.c.html @@ -1,9 +1,9 @@ -Combines values into a vector or list in the server-side — ds.c • dsBaseClientCombines values into a vector or list in the server-side — ds.c • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,38 +44,38 @@

Combines values into a vector or list in the server-side

-
ds.c(x = NULL, newobj = NULL, datasources = NULL)
+
ds.c(x = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a vector of character string providing the names of the objects to be combined.

-
newobj
-

a character string that provides the name for the output object +

newobj
+

a character string that provides the name for the output object that is stored on the data servers. Default c.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.c returns the vector of concatenating R +

ds.c returns the vector of concatenating R objects which are written to the server-side.

Details

-

To avoid combining the character names and not the -vectors on the client-side, the names are coerced into a list -and the server-side function loops through that list to +

To avoid combining the character names and not the +vectors on the client-side, the names are coerced into a list +and the server-side function loops through that list to concatenate the list's elements into a vector.

Server function called: cDS

@@ -86,42 +86,42 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Create a vector with combined objects
-  myvect <- c("D$LAB_TSC", "D$LAB_HDL")
-  ds.c(x = myvect,
-       newobj = "new.vect",
-       datasources = connections[1]) #only the first Opal server is used ("study1")
-                
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}    
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Create a vector with combined objects
+  myvect <- c("D$LAB_TSC", "D$LAB_HDL")
+  ds.c(x = myvect,
+       newobj = "new.vect",
+       datasources = connections[1]) #only the first Opal server is used ("study1")
+                
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }    
 
@@ -132,19 +132,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.cbind.html b/docs/reference/ds.cbind.html index f49ce00c0..91ef0d49a 100644 --- a/docs/reference/ds.cbind.html +++ b/docs/reference/ds.cbind.html @@ -1,10 +1,10 @@ -Combines R objects by columns in the server-side — ds.cbind • dsBaseClientCombines R objects by columns in the server-side — ds.cbind • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,23 +46,25 @@

Combines R objects by columns in the server-side

-
ds.cbind(
-  x = NULL,
-  DataSHIELD.checks = FALSE,
-  force.colnames = NULL,
-  newobj = NULL,
-  datasources = NULL,
-  notify.of.progress = FALSE
-)
+
ds.cbind(
+  x = NULL,
+  DataSHIELD.checks = FALSE,
+  force.colnames = NULL,
+  newobj = NULL,
+  datasources = NULL,
+  notify.of.progress = FALSE
+)

Arguments

-
x
+ + +
x

a character vector with the name of the objects to be combined.

-
DataSHIELD.checks
+
DataSHIELD.checks

logical. if TRUE does four checks:
1. the input object(s) is(are) defined in all the studies.
2. the input object(s) is(are) of the same legal class in all the studies.
@@ -71,45 +73,41 @@

Arguments

Default FALSE.

-
force.colnames
-

can be NULL (recommended) or a vector of characters that specifies -column names of the output object. If it is not NULL the user should take some caution. +

force.colnames
+

can be NULL (recommended) or a vector of characters that specifies +column names of the output object. If it is not NULL the user should take some caution. For more information see Details.

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Defaults cbind.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

-
notify.of.progress
+
notify.of.progress

specifies if console output should be produced to indicate progress. Default FALSE.

Value

- - -

ds.cbind returns a data frame combining the columns of the R -objects specified in the function which is written to the server-side. -It also returns to the client-side two messages with the name of newobj

- - -

that has been created in each data source and DataSHIELD.checks result.

+

ds.cbind returns a data frame combining the columns of the R +objects specified in the function which is written to the server-side. +It also returns to the client-side two messages with the name of newobj +that has been created in each data source and DataSHIELD.checks result.

Details

A sequence of vector, matrix or data-frame arguments is combined column by column to produce a data-frame that is written to the server-side.

This function is similar to the native R function cbind.

-

In DataSHIELD.checks the checks are relatively slow. +

In DataSHIELD.checks the checks are relatively slow. Default DataSHIELD.checks value is FALSE.

If force.colnames is NULL (which is recommended), the column names are inferred from the names or column names of the first object specified in the x argument. @@ -128,76 +126,76 @@

Author

Examples

-

-if (FALSE) {
-  ## Version 6, for version 5 see the Wiki 
-  
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  # Example 1: Assign the exponent of a numeric variable at each server and cbind it 
-  # to the data frame D
-  
-  ds.exp(x = "D$LAB_HDL",
-         newobj = "LAB_HDL.exp",
-         datasources = connections) 
-         
-  ds.cbind(x = c("D", "LAB_HDL.exp"),
-           DataSHIELD.checks = FALSE,
-           newobj = "D.cbind.1",
-           datasources = connections)
-             
-  # Example 2: If there are duplicated column names in the input objects the function adds
-  # a suffix '.k' to the kth replicate". If also the argument DataSHIELD.checks is set to TRUE
-  # the function returns a warning message notifying the user for the existence of any duplicated
-  # column names in each study
-  
-  ds.cbind(x = c("LAB_HDL.exp", "LAB_HDL.exp"), 
-           DataSHIELD.checks = TRUE,
-           newobj = "D.cbind.2",
-           datasources = connections)
-           
-  ds.colnames(x = "D.cbind.2",
-              datasources = connections)            
-             
-  # Example 3: Generate a random normally distributed variable of length 100 at each study,
-  # and cbind it to the data frame D. This example fails and  returns an error as the length
-  # of the generated variable "norm.var" is not the same as the number of rows in the data frame D
-  
-  ds.rNorm(samp.size = 100,
-           newobj = "norm.var",
-           datasources = connections) 
-           
-  ds.cbind(x = c("D", "norm.var"), 
-           DataSHIELD.checks = FALSE,
-           newobj = "D.cbind.3", 
-           datasources = connections)                 
-                   
-  # Clear the Datashield R sessions and logout  
-  datashield.logout(connections) 
-  }
-
+    

+if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki 
+  
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  # Example 1: Assign the exponent of a numeric variable at each server and cbind it 
+  # to the data frame D
+  
+  ds.exp(x = "D$LAB_HDL",
+         newobj = "LAB_HDL.exp",
+         datasources = connections) 
+         
+  ds.cbind(x = c("D", "LAB_HDL.exp"),
+           DataSHIELD.checks = FALSE,
+           newobj = "D.cbind.1",
+           datasources = connections)
+             
+  # Example 2: If there are duplicated column names in the input objects the function adds
+  # a suffix '.k' to the kth replicate". If also the argument DataSHIELD.checks is set to TRUE
+  # the function returns a warning message notifying the user for the existence of any duplicated
+  # column names in each study
+  
+  ds.cbind(x = c("LAB_HDL.exp", "LAB_HDL.exp"), 
+           DataSHIELD.checks = TRUE,
+           newobj = "D.cbind.2",
+           datasources = connections)
+           
+  ds.colnames(x = "D.cbind.2",
+              datasources = connections)            
+             
+  # Example 3: Generate a random normally distributed variable of length 100 at each study,
+  # and cbind it to the data frame D. This example fails and  returns an error as the length
+  # of the generated variable "norm.var" is not the same as the number of rows in the data frame D
+  
+  ds.rNorm(samp.size = 100,
+           newobj = "norm.var",
+           datasources = connections) 
+           
+  ds.cbind(x = c("D", "norm.var"), 
+           DataSHIELD.checks = FALSE,
+           newobj = "D.cbind.3", 
+           datasources = connections)                 
+                   
+  # Clear the Datashield R sessions and logout  
+  datashield.logout(connections) 
+  } # }
+
 
@@ -208,19 +206,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.changeRefGroup.html b/docs/reference/ds.changeRefGroup.html index 61a41f2a3..30b39d214 100644 --- a/docs/reference/ds.changeRefGroup.html +++ b/docs/reference/ds.changeRefGroup.html @@ -1,11 +1,11 @@ -Changes the reference level of a factor in the server-side — ds.changeRefGroup • dsBaseClientChanges the reference level of a factor in the server-side — ds.changeRefGroup • dsBaseClient - +
@@ -31,72 +31,72 @@
- +
-

Change the reference level of a factor, by putting +

Change the reference level of a factor, by putting the reference group first.

This function is similar to R function relevel.

-
ds.changeRefGroup(
-  x = NULL,
-  ref = NULL,
-  newobj = NULL,
-  reorderByRef = FALSE,
-  datasources = NULL
-)
+
ds.changeRefGroup(
+  x = NULL,
+  ref = NULL,
+  newobj = NULL,
+  reorderByRef = FALSE,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character string providing the name of the input vector of type factor.

-
ref
+
ref

the reference level.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the server-side. Default changerefgroup.newobj.

-
reorderByRef
+
reorderByRef

logical, if TRUE the new vector should be ordered by the reference group (i.e. putting the reference group first). The default is to not re-order (see the reasons in the details).

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.changeRefGroup returns a new vector with the specified level as a reference +

ds.changeRefGroup returns a new vector with the specified level as a reference which is written to the server-side.

Details

This function allows the user to re-order the vector, putting the reference -group first. It should be mentioned that by default the reference is -the first level in the vector of levels. +group first. It should be mentioned that by default the reference is +the first level in the vector of levels. If the user chooses the re-order a warning is issued as this can introduce a mismatch of values if the vector is put back into a table that is not reordered in the same way. Such mismatch @@ -118,77 +118,77 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  # Changing the reference group in the server-side
- 
-    # Example 1: rename the categories and change the reference with re-ordering
-      # print out the levels of the initial vector
-      ds.levels(x= "D$PM_BMI_CATEGORICAL",
-                datasources = connections)
-
-      # define a vector with the new levels and recode the initial levels
-      newNames <- c("normal", "overweight", "obesity")
-      ds.recodeLevels(x = "D$PM_BMI_CATEGORICAL",
-                      newCategories = newNames,
-                      newobj = "bmi_new",
-                      datasources = connections)
-
-      # print out the levels of the new vector
-      ds.levels(x = "bmi_new",
-                datasources = connections)
-
-      # Set the reference to "obesity" without changing the order (default)
-      ds.changeRefGroup(x = "bmi_new",
-                        ref = "obesity",
-                        newobj = "bmi_ob",
-                        datasources = connections)
-
-      # print out the levels; the first listed level (i.e. the reference) is now 'obesity'
-      ds.levels(x = "bmi_ob",
-                datasources = connections)
-
-    # Example 2: change the reference and re-order by the reference level
-      # If re-ordering is sought, the action is completed but a warning is issued
-      ds.recodeLevels(x = "D$PM_BMI_CATEGORICAL",
-                      newCategories = newNames,
-                      newobj = "bmi_new",
-                     datasources = connections)
-      ds.changeRefGroup(x = "bmi_new",
-                        ref = "obesity",
-                        newobj = "bmi_ob",
-                        reorderByRef = TRUE,
-                        datasources = connections)
-
-           
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-}
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  # Changing the reference group in the server-side
+ 
+    # Example 1: rename the categories and change the reference with re-ordering
+      # print out the levels of the initial vector
+      ds.levels(x= "D$PM_BMI_CATEGORICAL",
+                datasources = connections)
+
+      # define a vector with the new levels and recode the initial levels
+      newNames <- c("normal", "overweight", "obesity")
+      ds.recodeLevels(x = "D$PM_BMI_CATEGORICAL",
+                      newCategories = newNames,
+                      newobj = "bmi_new",
+                      datasources = connections)
+
+      # print out the levels of the new vector
+      ds.levels(x = "bmi_new",
+                datasources = connections)
+
+      # Set the reference to "obesity" without changing the order (default)
+      ds.changeRefGroup(x = "bmi_new",
+                        ref = "obesity",
+                        newobj = "bmi_ob",
+                        datasources = connections)
+
+      # print out the levels; the first listed level (i.e. the reference) is now 'obesity'
+      ds.levels(x = "bmi_ob",
+                datasources = connections)
+
+    # Example 2: change the reference and re-order by the reference level
+      # If re-ordering is sought, the action is completed but a warning is issued
+      ds.recodeLevels(x = "D$PM_BMI_CATEGORICAL",
+                      newCategories = newNames,
+                      newobj = "bmi_new",
+                     datasources = connections)
+      ds.changeRefGroup(x = "bmi_new",
+                        ref = "obesity",
+                        newobj = "bmi_ob",
+                        reorderByRef = TRUE,
+                        datasources = connections)
+
+           
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+} # }
 
@@ -199,19 +199,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.class.html b/docs/reference/ds.class.html index 705d3bd2c..7906dc578 100644 --- a/docs/reference/ds.class.html +++ b/docs/reference/ds.class.html @@ -1,10 +1,10 @@ -Class of the R object in the server-side — ds.class • dsBaseClientClass of the R object in the server-side — ds.class • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,26 +46,26 @@

Class of the R object in the server-side

-
ds.class(x = NULL, datasources = NULL)
+
ds.class(x = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string providing the name of the input R object.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.class returns the type of the R object.

+

ds.class returns the type of the R object.

Details

@@ -83,43 +83,43 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  # Getting the class of the R objects stored in the server-side
-  ds.class(x = "D", #whole dataset
-           datasources = connections[1]) #only the first server ("study1") is used
-
-  ds.class(x = "D$LAB_TSC", #select a variable
-           datasources = connections[1]) #only the first server ("study1") is used
-           
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-}
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  # Getting the class of the R objects stored in the server-side
+  ds.class(x = "D", #whole dataset
+           datasources = connections[1]) #only the first server ("study1") is used
+
+  ds.class(x = "D$LAB_TSC", #select a variable
+           datasources = connections[1]) #only the first server ("study1") is used
+           
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+} # }
 
@@ -130,19 +130,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.colnames.html b/docs/reference/ds.colnames.html index f8d94bb79..64853c41a 100644 --- a/docs/reference/ds.colnames.html +++ b/docs/reference/ds.colnames.html @@ -1,10 +1,10 @@ -Produces column names of the R object in the server-side — ds.colnames • dsBaseClientProduces column names of the R object in the server-side — ds.colnames • dsBaseClient - +
@@ -30,42 +30,42 @@
- +
-

Retrieves column names of an R object on the server-side. +

Retrieves column names of an R object on the server-side. This function is similar to R function colnames.

-
ds.colnames(x = NULL, datasources = NULL)
+
ds.colnames(x = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string providing the name of the input data frame or matrix.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.colnames returns the column names of +

ds.colnames returns the column names of the specified server-side data frame or matrix.

@@ -84,39 +84,39 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  # Getting column names of the R objects stored in the server-side
-  ds.colnames(x = "D",
-              datasources = connections[1]) #only the first server ("study1") is used
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-}
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  # Getting column names of the R objects stored in the server-side
+  ds.colnames(x = "D",
+              datasources = connections[1]) #only the first server ("study1") is used
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+} # }
 
@@ -127,19 +127,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.completeCases.html b/docs/reference/ds.completeCases.html index cc57ca71c..6fa86c83f 100644 --- a/docs/reference/ds.completeCases.html +++ b/docs/reference/ds.completeCases.html @@ -1,10 +1,10 @@ -Identifies complete cases in server-side R objects — ds.completeCases • dsBaseClientIdentifies complete cases in server-side R objects — ds.completeCases • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,42 +46,42 @@

Identifies complete cases in server-side R objects

-
ds.completeCases(x1 = NULL, newobj = NULL, datasources = NULL)
+
ds.completeCases(x1 = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x1
+ + +
x1

a character denoting the name of the input object which can be a data frame, matrix or vector.

-
newobj
+
newobj

a character string that provides the name for the complete-cases object -that is stored on the data servers. If the user does not specify a name, then the function -generates a name for the generated object that is the name of the input object with the +that is stored on the data servers. If the user does not specify a name, then the function +generates a name for the generated object that is the name of the input object with the suffix "_complete.cases"

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified, the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.completeCases generates a modified data frame, matrix or vector from which +

ds.completeCases generates a modified data frame, matrix or vector from which all rows containing at least one NA have been deleted. The output object is stored on the server-side. Only two validity messages are returned to the client-side indicating the name of the newobj that has been created in each data source and if it is in a valid form.

Details

-

In the case of a data frame or matrix, ds.completeCases deletes -all rows containing one or more missing values. However ds.completeCases +

In the case of a data frame or matrix, ds.completeCases deletes +all rows containing one or more missing values. However ds.completeCases in vectors only deletes the observation recorded as NA.

Server function called: completeCasesDS

@@ -92,51 +92,51 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  # Select complete cases from different R objects
-
-  ds.completeCases(x1 = "D", #data frames in the Opal servers 
-                             #(see above the connection to the Opal servers)
-                   newobj = "D.completeCases", # name for the output object 
-                                               # that is stored in the Opal servers
-                   datasources = connections)  # All Opal servers are used 
-                                               # (see above the connection to the Opal servers)
-                 
-  ds.completeCases(x1 = "D$LAB_TSC", #vector (variable) of the data frames in the Opal servers 
-                                     #(see above the connection to the Opal servers)
-                   newobj = "LAB_TSC.completeCases", #name for the output variable 
-                                                     #that is stored in the Opal servers
-                   datasources = connections[2]) #only the second Opal server is used ("study2")
-                   
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-  }
-  
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  # Select complete cases from different R objects
+
+  ds.completeCases(x1 = "D", #data frames in the Opal servers 
+                             #(see above the connection to the Opal servers)
+                   newobj = "D.completeCases", # name for the output object 
+                                               # that is stored in the Opal servers
+                   datasources = connections)  # All Opal servers are used 
+                                               # (see above the connection to the Opal servers)
+                 
+  ds.completeCases(x1 = "D$LAB_TSC", #vector (variable) of the data frames in the Opal servers 
+                                     #(see above the connection to the Opal servers)
+                   newobj = "LAB_TSC.completeCases", #name for the output variable 
+                                                     #that is stored in the Opal servers
+                   datasources = connections[2]) #only the second Opal server is used ("study2")
+                   
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+  } # }
+  
 
@@ -147,19 +147,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.contourPlot.html b/docs/reference/ds.contourPlot.html index c6d904352..2c4cd97d2 100644 --- a/docs/reference/ds.contourPlot.html +++ b/docs/reference/ds.contourPlot.html @@ -1,10 +1,10 @@ -Generates a contour plot — ds.contourPlot • dsBaseClientGenerates a contour plot — ds.contourPlot • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,46 +46,48 @@

Generates a contour plot

-
ds.contourPlot(
-  x = NULL,
-  y = NULL,
-  type = "combine",
-  show = "all",
-  numints = 20,
-  method = "smallCellsRule",
-  k = 3,
-  noise = 0.25,
-  datasources = NULL
-)
+
ds.contourPlot(
+  x = NULL,
+  y = NULL,
+  type = "combine",
+  show = "all",
+  numints = 20,
+  method = "smallCellsRule",
+  k = 3,
+  noise = 0.25,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character string providing the name of a numerical vector.

-
y
+
y

a character string providing the name of a numerical vector.

-
type
+
type

a character string that represents the type of graph to display. If type is set to 'combine', a combined contour plot displayed and if type is set to 'split', each contour is plotted separately.

-
show
+
show

a character that represents where the plot should focus. If show is set to 'all', the ranges of the variables are used as plot limits. If show is set to 'zoomed', the plot is zoomed to the region where the actual data are.

-
numints
+
numints

number of intervals for a density grid object.

-
method
+
method

a character that defines which contour will be created. If method is set to 'smallCellsRule' (default), the contour plot of the actual variables is created but grids with low counts are replaced with grids with zero counts. If method is @@ -94,31 +96,29 @@

Arguments

method is set to 'probabilistic', then the contour of 'noisy' variables is generated.

-
k
+
k

the number of the nearest neighbours for which their centroid is calculated. For more information see details.

-
noise
+
noise

the percentage of the initial variance that is used as the variance of the embedded noise if the argument method is set to 'probabilistic'. For more information see details.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.contourPlot returns a contour plot to the client-side.

+

ds.contourPlot returns a contour plot to the client-side.

Details

-

The ds.contourPlot function first generates +

The ds.contourPlot function first generates a density grid and uses it to plot the graph. The cells of the grid density matrix that hold a count of less than the filter set by DataSHIELD (usually 5) are considered invalid and turned into 0 to avoid potential @@ -126,19 +126,19 @@

Details

The ranges returned by each study and used in the process of getting the grid density matrix are not the exact minimum and maximum values but rather close approximates of the real minimum and maximum value. This was done to reduce the risk of potential disclosure.

-

In the k parameter the user can choose any value for k equal to or greater -than the pre-specified threshold used as a disclosure control for this method -and lower than the number of observations minus the value of this threshold. -k default value is 3 (we suggest k to be equal to, or bigger than, 3). +

In the k parameter the user can choose any value for k equal to or greater +than the pre-specified threshold used as a disclosure control for this method +and lower than the number of observations minus the value of this threshold. +k default value is 3 (we suggest k to be equal to, or bigger than, 3). Note that the function fails if the user -uses the default value but the study has set a bigger threshold. -The value of k is used only if the argument method is set to 'deterministic'. +uses the default value but the study has set a bigger threshold. +The value of k is used only if the argument method is set to 'deterministic'. Any value of k is ignored if the argument method is set to 'probabilistic' or 'smallCellsRule'.

In noise any value of noise is ignored if the argument method is set to 'deterministic' or 'smallCellsRule'. The user can choose any value for noise equal to or greater than the pre-specified threshold 'nfilter.noise'. -Default noise value is 0.25. +Default noise value is 0.25. The added noise follows a normal distribution with zero mean and variance equal to a percentage of the initial variance of each input variable.

Server functions called: heatmapPlotDS, rangeDS and densityGridDS

@@ -150,49 +150,49 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Generating contour plots
-
-  ds.contourPlot(x = "D$LAB_TSC",
-                 y = "D$LAB_HDL",
-                 type = "combine", 
-                 show = "all",
-                 numints = 20,
-                 method = "smallCellsRule",  
-                 k = 3, 
-                 noise = 0.25,
-                 datasources = connections)
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-
-}
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Generating contour plots
+
+  ds.contourPlot(x = "D$LAB_TSC",
+                 y = "D$LAB_HDL",
+                 type = "combine", 
+                 show = "all",
+                 numints = 20,
+                 method = "smallCellsRule",  
+                 k = 3, 
+                 noise = 0.25,
+                 datasources = connections)
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+
+} # }
 
@@ -203,19 +203,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.cor.html b/docs/reference/ds.cor.html index bc4579bcd..19251198b 100644 --- a/docs/reference/ds.cor.html +++ b/docs/reference/ds.cor.html @@ -1,10 +1,10 @@ -Calculates the correlation of R objects in the server-side — ds.cor • dsBaseClientCalculates the correlation of R objects in the server-side — ds.cor • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,37 +46,37 @@

Calculates the correlation of R objects in the server-side

-
ds.cor(x = NULL, y = NULL, type = "split", datasources = NULL)
+
ds.cor(x = NULL, y = NULL, type = "split", datasources = NULL)

Arguments

-
x
+ + +
x

a character string providing the name of the input vector, data frame or matrix.

-
y
+
y

a character string providing the name of the input vector, data frame or matrix. Default NULL.

-
type
-

a character string that represents the type of analysis to carry out. +

type
+

a character string that represents the type of analysis to carry out. This must be set to 'split' or 'combine'. Default 'split'. For more information see details.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.cor returns a list containing the number of missing values in each variable, -the number of missing variables casewise, the correlation matrix, +

ds.cor returns a list containing the number of missing values in each variable, +the number of missing variables casewise, the correlation matrix, the number of used complete cases. The function applies two disclosure controls. The first disclosure control checks that the number of variables is not bigger than a percentage of the individual-level records (the allowed percentage is pre-specified by the 'nfilter.glm'). The second disclosure control checks that none of them is dichotomous @@ -88,7 +88,7 @@

Details

number of complete cases and a table outlining the number of missing values to allow the user to decide the 'relevance' of the correlation based on the number of complete cases included in the correlation calculations.

-

If the argument y is not NULL, the dimensions of the object have to be +

If the argument y is not NULL, the dimensions of the object have to be compatible with the argument x.

The function calculates the pairwise correlations based on casewise complete cases which means that it omits all the rows in the input data frame that include at least one cell with a missing value, @@ -97,7 +97,7 @@

Details

variance-correlation matrix of an input data frame and the number of complete cases and missing values are returned for every single study. If type is set to 'combine', the pooled correlation, the total number of complete cases and the total number of missing values aggregated -from all the involved studies, are returned.

+from all the involved studies, are returned.

Server function called: corDS

@@ -107,45 +107,45 @@

Author

Examples

-
if (FALSE) {
-
-## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Example 1: Get the correlation matrix of two continuous variables
-  ds.cor(x="D$LAB_TSC", y="D$LAB_TRIG", type="combine", datasources = connections)
-  
-  # Example 2: Get the correlation matrix of the variables in a dataframe
-  ds.dataFrame(x=c("D$LAB_TSC", "D$LAB_TRIG", "D$LAB_HDL", "D$PM_BMI_CONTINUOUS"), 
-               newobj="D.new", check.names=FALSE, datasources=connections)
-  ds.cor("D.new", type="combine", datasources = connections)
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-
-}
+    
if (FALSE) { # \dontrun{
+
+## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Example 1: Get the correlation matrix of two continuous variables
+  ds.cor(x="D$LAB_TSC", y="D$LAB_TRIG", type="combine", datasources = connections)
+  
+  # Example 2: Get the correlation matrix of the variables in a dataframe
+  ds.dataFrame(x=c("D$LAB_TSC", "D$LAB_TRIG", "D$LAB_HDL", "D$PM_BMI_CONTINUOUS"), 
+               newobj="D.new", check.names=FALSE, datasources=connections)
+  ds.cor("D.new", type="combine", datasources = connections)
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+
+} # }
 
@@ -156,19 +156,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.corTest.html b/docs/reference/ds.corTest.html index 78221f110..fdd369b9c 100644 --- a/docs/reference/ds.corTest.html +++ b/docs/reference/ds.corTest.html @@ -1,9 +1,9 @@ -Tests for correlation between paired samples in the server-side — ds.corTest • dsBaseClientTests for correlation between paired samples in the server-side — ds.corTest • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,63 +44,63 @@

Tests for correlation between paired samples in the server-side

-
ds.corTest(
-  x = NULL,
-  y = NULL,
-  method = "pearson",
-  exact = NULL,
-  conf.level = 0.95,
-  type = "split",
-  datasources = NULL
-)
+
ds.corTest(
+  x = NULL,
+  y = NULL,
+  method = "pearson",
+  exact = NULL,
+  conf.level = 0.95,
+  type = "split",
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character string providing the name of a numerical vector.

-
y
+
y

a character string providing the name of a numerical vector.

-
method
+
method

a character string indicating which correlation coefficient is to be -used for the test. One of "pearson", "kendall", or "spearman", can be abbreviated. +used for the test. One of "pearson", "kendall", or "spearman", can be abbreviated. Default is set to "pearson".

-
exact
+
exact

a logical indicating whether an exact p-value should be computed. Used for -Kendall's tau and Spearman's rho. See ‘Details’ of R stats function cor.test for +Kendall's tau and Spearman's rho. See Details of R stats function cor.test for the meaning of NULL (the default).

-
conf.level
+
conf.level

confidence level for the returned confidence interval. Currently only used for the Pearson product moment correlation coefficient if there are at least 4 complete pairs of observations. Default is set to 0.95.

-
type
-

a character string that represents the type of analysis to carry out. -This must be set to 'split' or 'combine'. Default is set to 'split'. If -type is set to "combine" then an approximated pooled correlation is estimated based on +

type
+

a character string that represents the type of analysis to carry out. +This must be set to 'split' or 'combine'. Default is set to 'split'. If +type is set to "combine" then an approximated pooled correlation is estimated based on Fisher's z transformation.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.corTest returns to the client-side the results of the correlation test.

+

ds.corTest returns to the client-side the results of the correlation test.

Details

@@ -115,43 +115,43 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # test for correlation
-  ds.corTest(x = "D$LAB_TSC",
-             y = "D$LAB_HDL",
-             datasources = connections[1]) #Only first server is used ("study1")
-                
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # test for correlation
+  ds.corTest(x = "D$LAB_TSC",
+             y = "D$LAB_HDL",
+             datasources = connections[1]) #Only first server is used ("study1")
+                
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
+
 
@@ -162,19 +162,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.cov.html b/docs/reference/ds.cov.html index 22792ef3b..3637997c7 100644 --- a/docs/reference/ds.cov.html +++ b/docs/reference/ds.cov.html @@ -1,10 +1,10 @@ -Calculates the covariance of R objects in the server-side — ds.cov • dsBaseClientCalculates the covariance of R objects in the server-side — ds.cov • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,53 +46,53 @@

Calculates the covariance of R objects in the server-side

-
ds.cov(
-  x = NULL,
-  y = NULL,
-  naAction = "pairwise.complete",
-  type = "split",
-  datasources = NULL
-)
+
ds.cov(
+  x = NULL,
+  y = NULL,
+  naAction = "pairwise.complete",
+  type = "split",
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character string providing the name of the input vector, data frame or matrix.

-
y
-

a character string providing the name of the input vector, +

y
+

a character string providing the name of the input vector, data frame or matrix. Default NULL.

-
naAction
+
naAction

a character string giving a method for computing covariances in the presence of missing values. This must be set to 'casewise.complete' or 'pairwise.complete'. Default 'pairwise.complete'. For more information see details.

-
type
-

a character string that represents the type of analysis to carry out. +

type
+

a character string that represents the type of analysis to carry out. This must be set to 'split' or 'combine'. Default 'split'. For more information see details.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.cov returns a list containing the number of missing values in each variable, the number of missing values +

ds.cov returns a list containing the number of missing values in each variable, the number of missing values casewise or pairwise depending on the argument naAction, the covariance matrix, the number of used complete cases and an error message which indicates whether or not the input variables pass the disclosure controls. The first disclosure control checks that the number of variables is not bigger than a percentage of the individual-level records (the allowed percentage is pre-specified by the 'nfilter.glm'). The second disclosure control checks that none of them is dichotomous -with a level having fewer counts than the pre-specified 'nfilter.tab' threshold. If any of the input variables do not pass +with a level having fewer counts than the pre-specified 'nfilter.tab' threshold. If any of the input variables do not pass the disclosure controls then all the output values are replaced with NAs. If all the variables are valid and pass the controls, then the output matrices are returned and also an error message is returned but it is replaced by NA.

@@ -102,20 +102,20 @@

Details

number of complete cases and a table outlining the number of missing values to allow for the user to decide about the 'relevance' of the covariance based on the number of complete cases included in the covariance calculations.

-

If the argument y is not NULL, the dimensions of the object have to be +

If the argument y is not NULL, the dimensions of the object have to be compatible with the argument x.

If naAction is set to 'casewise.complete', then the function omits all the rows in the whole data frame that include at least one cell with a missing value before the calculation of covariances. If naAction is set to 'pairwise.complete' (default), -then the function divides the input data frame to -subset data frames formed by each pair between two variables +then the function divides the input data frame to +subset data frames formed by each pair between two variables (all combinations are considered) and omits the rows with missing values at each pair separately and then calculates the covariances of those pairs.

If type is set to 'split' (default), the covariance of two variables or the -variance-covariance matrix of an input data frame and the number of -complete cases and missing values are returned for every single study. -If type is set to 'combine', the pooled covariance, the total number of complete cases -and the total number of missing values aggregated from all the involved studies, are returned.

+variance-covariance matrix of an input data frame and the number of +complete cases and missing values are returned for every single study. +If type is set to 'combine', the pooled covariance, the total number of complete cases +and the total number of missing values aggregated from all the involved studies, are returned.

Server function called: covDS

@@ -125,52 +125,52 @@

Author

Examples

-
if (FALSE) {
-
-## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Calculate the covariance between two vectors
-  ds.assign(newobj='labhdl', toAssign='D$LAB_HDL', datasources = connections)
-  ds.assign(newobj='labtsc', toAssign='D$LAB_TSC', datasources = connections)
-  ds.assign(newobj='gender', toAssign='D$GENDER', datasources = connections)
-  ds.cov(x = 'labhdl',
-         y = 'labtsc',
-         naAction = 'pairwise.complete',
-         type = 'combine',
-         datasources = connections)
-  ds.cov(x = 'labhdl',
-         y = 'gender',
-         naAction = 'pairwise.complete',
-         type = 'combine',
-         datasources = connections[1]) #only the first Opal server is used ("study1")
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-
-}
+    
if (FALSE) { # \dontrun{
+
+## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Calculate the covariance between two vectors
+  ds.assign(newobj='labhdl', toAssign='D$LAB_HDL', datasources = connections)
+  ds.assign(newobj='labtsc', toAssign='D$LAB_TSC', datasources = connections)
+  ds.assign(newobj='gender', toAssign='D$GENDER', datasources = connections)
+  ds.cov(x = 'labhdl',
+         y = 'labtsc',
+         naAction = 'pairwise.complete',
+         type = 'combine',
+         datasources = connections)
+  ds.cov(x = 'labhdl',
+         y = 'gender',
+         naAction = 'pairwise.complete',
+         type = 'combine',
+         datasources = connections[1]) #only the first Opal server is used ("study1")
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+
+} # }
 
@@ -181,19 +181,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.dataFrame.html b/docs/reference/ds.dataFrame.html index 491b5f521..791095af2 100644 --- a/docs/reference/ds.dataFrame.html +++ b/docs/reference/ds.dataFrame.html @@ -1,10 +1,10 @@ -Generates a data frame object in the server-side — ds.dataFrame • dsBaseClientGenerates a data frame object in the server-side — ds.dataFrame • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,55 +46,57 @@

Generates a data frame object in the server-side

-
ds.dataFrame(
-  x = NULL,
-  row.names = NULL,
-  check.rows = FALSE,
-  check.names = TRUE,
-  stringsAsFactors = TRUE,
-  completeCases = FALSE,
-  DataSHIELD.checks = FALSE,
-  newobj = NULL,
-  datasources = NULL,
-  notify.of.progress = FALSE
-)
+
ds.dataFrame(
+  x = NULL,
+  row.names = NULL,
+  check.rows = FALSE,
+  check.names = TRUE,
+  stringsAsFactors = TRUE,
+  completeCases = FALSE,
+  DataSHIELD.checks = FALSE,
+  newobj = NULL,
+  datasources = NULL,
+  notify.of.progress = FALSE
+)

Arguments

-
x
+ + +
x

a character string that provides the name of the objects to be combined.

-
row.names
+
row.names

NULL, integer or character string that provides the row names of the output data frame.

-
check.rows
+
check.rows

logical. If TRUE then the rows are checked for consistency of length and names. Default is FALSE.

-
check.names
-

logical. If TRUE the column names +

check.names
+

logical. If TRUE the column names in the data frame are checked to ensure that is unique. Default is TRUE.

-
stringsAsFactors
+
stringsAsFactors

logical. If true the character vectors are converted to factors. Default TRUE.

-
completeCases
-

logical. If TRUE rows with one or more +

completeCases
+

logical. If TRUE rows with one or more missing values will be deleted from the output data frame. Default is FALSE.

-
DataSHIELD.checks
-

logical. Default FALSE. If TRUE undertakes all DataSHIELD checks +

DataSHIELD.checks
+

logical. Default FALSE. If TRUE undertakes all DataSHIELD checks (time-consuming) which are:
1. the input object(s) is(are) defined in all the studies
2. the input object(s) is(are) of the same legal class in all the studies
@@ -103,27 +105,25 @@

Arguments

are the same

-
newobj
+
newobj

a character string that provides the name for the output data frame that is stored on the data servers. Default dataframe.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. -If the datasources argument is not specified +

datasources
+

a list of DSConnection-class objects obtained after login. +If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

-
notify.of.progress
+
notify.of.progress

specifies if console output should be produced to indicate progress. Default is FALSE.

Value

- - -

ds.dataFrame returns the object specified by the newobj argument +

ds.dataFrame returns the object specified by the newobj argument which is written to the serverside. Also, two validity messages are returned to the client-side indicating the name of the newobj that has been created in each data source and if it is in a valid form.

@@ -132,8 +132,8 @@

Value

Details

It creates a data frame by combining pre-existing data frames, matrices or variables.

-

The length of all component variables and the number of rows -of the data frames or matrices must be the same. The output +

The length of all component variables and the number of rows +of the data frames or matrices must be the same. The output data frame will have the same number of rows.

Server functions called: classDS, colnamesDS, dataFrameDS

@@ -144,51 +144,51 @@

Author

Examples

-

-if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki 
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-                 
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Create a new data frame
-  ds.dataFrame(x = c("D$LAB_TSC","D$GENDER","D$PM_BMI_CATEGORICAL"),
-               row.names = NULL,
-               check.rows = FALSE,
-               check.names = TRUE,
-               stringsAsFactors = TRUE, #character variables are converted to a factor 
-               completeCases = TRUE, #only rows with not missing values are selected
-               DataSHIELD.checks = FALSE,
-               newobj = "df1",
-               datasources = connections[1], #only the first Opal server is used ("study1")
-               notify.of.progress = FALSE)
-
-
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-}
+    

+if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki 
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+                 
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Create a new data frame
+  ds.dataFrame(x = c("D$LAB_TSC","D$GENDER","D$PM_BMI_CATEGORICAL"),
+               row.names = NULL,
+               check.rows = FALSE,
+               check.names = TRUE,
+               stringsAsFactors = TRUE, #character variables are converted to a factor 
+               completeCases = TRUE, #only rows with not missing values are selected
+               DataSHIELD.checks = FALSE,
+               newobj = "df1",
+               datasources = connections[1], #only the first Opal server is used ("study1")
+               notify.of.progress = FALSE)
+
+
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+} # }
 
@@ -199,19 +199,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.dataFrameFill.html b/docs/reference/ds.dataFrameFill.html index 79c0ab9b8..7278740e1 100644 --- a/docs/reference/ds.dataFrameFill.html +++ b/docs/reference/ds.dataFrameFill.html @@ -1,9 +1,9 @@ -Creates missing values columns in the server-side — ds.dataFrameFill • dsBaseClientCreates missing values columns in the server-side — ds.dataFrameFill • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,32 +44,32 @@

Creates missing values columns in the server-side

-
ds.dataFrameFill(df.name = NULL, newobj = NULL, datasources = NULL)
+
ds.dataFrameFill(df.name = NULL, newobj = NULL, datasources = NULL)

Arguments

-
df.name
+ + +
df.name

a character string representing the name of the input data frame that will be filled with extra columns of missing values.

-
newobj
+
newobj

a character string that provides the name for the output data frame that is stored on the data servers. Default value is "dataframefill.newobj".

-
datasources
-

a list of DSConnection-class objects obtained after login. -If the datasources argument is not specified +

datasources
+

a list of DSConnection-class objects obtained after login. +If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.dataFrameFill returns the object specified by the newobj argument which +

ds.dataFrameFill returns the object specified by the newobj argument which is written to the server-side. Also, two validity messages are returned to the client-side indicating the name of the newobj that has been created in each data source and if it is in a valid form.

@@ -79,8 +79,8 @@

Details

This function checks if the input data frames have the same variables (i.e. the same column names) in all of the used studies. When a study does not have some of the variables, the function generates those variables as vectors of missing values and combines them as columns to -the input data frame. If any of the generated variables are of class factor, the function -assigns to those the corresponding levels of the factors given from the studies where such +the input data frame. If any of the generated variables are of class factor, the function +assigns to those the corresponding levels of the factors given from the studies where such factors exist.

Server function called: dataFrameFillDS

@@ -91,55 +91,55 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki 
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-                 
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Create two data frames with one different column
-  
-  ds.dataFrame(x = c("D$LAB_TSC","D$LAB_TRIG","D$LAB_HDL",
-                     "D$LAB_GLUC_ADJUSTED","D$PM_BMI_CONTINUOUS"),
-               newobj = "df1",
-               datasources = connections[1])
-               
-  ds.dataFrame(x = c("D$LAB_TSC","D$LAB_TRIG","D$LAB_HDL","D$LAB_GLUC_ADJUSTED"),
-               newobj = "df1",
-               datasources = connections[2])
-  
-  # Fill the data frame with NA columns
-  
-  ds.dataFrameFill(df.name = "df1",
-                   newobj = "D.Fill",
-                   datasources = connections[c(1,2)]) # Two servers are used
-
-
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-}
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki 
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+                 
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Create two data frames with one different column
+  
+  ds.dataFrame(x = c("D$LAB_TSC","D$LAB_TRIG","D$LAB_HDL",
+                     "D$LAB_GLUC_ADJUSTED","D$PM_BMI_CONTINUOUS"),
+               newobj = "df1",
+               datasources = connections[1])
+               
+  ds.dataFrame(x = c("D$LAB_TSC","D$LAB_TRIG","D$LAB_HDL","D$LAB_GLUC_ADJUSTED"),
+               newobj = "df1",
+               datasources = connections[2])
+  
+  # Fill the data frame with NA columns
+  
+  ds.dataFrameFill(df.name = "df1",
+                   newobj = "D.Fill",
+                   datasources = connections[c(1,2)]) # Two servers are used
+
+
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+} # }
 
@@ -150,19 +150,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.dataFrameSort.html b/docs/reference/ds.dataFrameSort.html index c4830af66..99e75c797 100644 --- a/docs/reference/ds.dataFrameSort.html +++ b/docs/reference/ds.dataFrameSort.html @@ -1,9 +1,9 @@ -Sorts data frames in the server-side — ds.dataFrameSort • dsBaseClientSorts data frames in the server-side — ds.dataFrameSort • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,71 +44,71 @@

Sorts data frames in the server-side

-
ds.dataFrameSort(
-  df.name = NULL,
-  sort.key.name = NULL,
-  sort.descending = FALSE,
-  sort.method = "default",
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.dataFrameSort(
+  df.name = NULL,
+  sort.key.name = NULL,
+  sort.descending = FALSE,
+  sort.method = "default",
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
df.name
+ + +
df.name

a character string providing the name of the data frame to be sorted.

-
sort.key.name
+
sort.key.name

a character string providing the name for the sort key.

-
sort.descending
+
sort.descending

logical, if TRUE the data frame will be sorted. by the sort key in descending order. Default = FALSE (sort order ascending).

-
sort.method
-

a character string that specifies the method to be used -to sort the data frame. This can be set as +

sort.method
+

a character string that specifies the method to be used +to sort the data frame. This can be set as "alphabetic","a" or "numeric", "n".

-
newobj
-

a character string that provides the name for the output data frame -that is stored on the data servers. Default dataframesort.newobj. +

newobj
+

a character string that provides the name for the output data frame +that is stored on the data servers. Default dataframesort.newobj. where df.name is the first argument of ds.dataFrameSort().

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.dataFrameSort returns the sorted data frame is written to the server-side. +

ds.dataFrameSort returns the sorted data frame is written to the server-side. Also, two validity messages are returned to the client-side -indicating the name of the newobj which -has been created in each data source and if +indicating the name of the newobj which +has been created in each data source and if it is in a valid form.

Details

It sorts a specified data.frame on the serverside using a sort key also on the server-side. The -sort key can either sit in the data.frame or outside it. +sort key can either sit in the data.frame or outside it. The sort key can be forced to be interpreted as alphabetic or numeric.

-

When a numeric vector is sorted alphabetically, the order can look confusing. -For example, if we have a numeric vector to sort:
+

When a numeric vector is sorted alphabetically, the order can look confusing. +For example, if we have a numeric vector to sort:
vector.2.sort = c(-192, 76, 841, NA, 1670, 163, 147, 101, -112, -231, -9, 119, 112, NA)

When sorting numbers in an ascending (default) manner, -the largest negative numbers get ordered first +the largest negative numbers get ordered first leading up to the largest positive numbers and finally (by default in R) NAs being positioned at the end of the vector:
numeric.sort = c(-231, -192, -112, -9, 76, 101, 112, 119, 147, 163, 841, 1670, NA, NA)

@@ -123,44 +123,44 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Sorting the data frame
-  ds.dataFrameSort(df.name = "D",
-                   sort.key.name = "D$LAB_TSC",
-                   sort.descending = TRUE,
-                   sort.method = "numeric",
-                   newobj = "df.sort",
-                   datasources = connections[1]) #only the first Opal server is used ("study1")
-                   
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Sorting the data frame
+  ds.dataFrameSort(df.name = "D",
+                   sort.key.name = "D$LAB_TSC",
+                   sort.descending = TRUE,
+                   sort.method = "numeric",
+                   newobj = "df.sort",
+                   datasources = connections[1]) #only the first Opal server is used ("study1")
+                   
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
 
@@ -171,19 +171,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.dataFrameSubset.html b/docs/reference/ds.dataFrameSubset.html index 75d1a504b..73448ea53 100644 --- a/docs/reference/ds.dataFrameSubset.html +++ b/docs/reference/ds.dataFrameSubset.html @@ -1,9 +1,9 @@ -Sub-sets data frames in the server-side — ds.dataFrameSubset • dsBaseClientSub-sets data frames in the server-side — ds.dataFrameSubset • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,94 +44,94 @@

Sub-sets data frames in the server-side

-
ds.dataFrameSubset(
-  df.name = NULL,
-  V1.name = NULL,
-  V2.name = NULL,
-  Boolean.operator = NULL,
-  keep.cols = NULL,
-  rm.cols = NULL,
-  keep.NAs = NULL,
-  newobj = NULL,
-  datasources = NULL,
-  notify.of.progress = FALSE
-)
+
ds.dataFrameSubset(
+  df.name = NULL,
+  V1.name = NULL,
+  V2.name = NULL,
+  Boolean.operator = NULL,
+  keep.cols = NULL,
+  rm.cols = NULL,
+  keep.NAs = NULL,
+  newobj = NULL,
+  datasources = NULL,
+  notify.of.progress = FALSE
+)

Arguments

-
df.name
-

a character string providing the name of the data frame to be subseted.

-
V1.name
-

A character string specifying the name of the vector +

df.name
+

a character string providing the name of the data frame to be subset.

+ + +
V1.name
+

A character string specifying the name of the vector to which the Boolean operator is to be applied to define the subset. For more information see details.

-
V2.name
-

A character string specifying the name of the vector to compare +

V2.name
+

A character string specifying the name of the vector to compare with V1.name.

-
Boolean.operator
+
Boolean.operator

A character string specifying one of six possible Boolean operators: '==', '!=', '>', '>=', '<' and '<='.

-
keep.cols
+
keep.cols

a numeric vector specifying the numbers of the columns to be kept in the final subset.

-
rm.cols
-

a numeric vector specifying the numbers of the columns to be removed from +

rm.cols
+

a numeric vector specifying the numbers of the columns to be removed from the final subset.

-
keep.NAs
-

logical, if TRUE the missing values are included in the subset. +

keep.NAs
+

logical, if TRUE the missing values are included in the subset. If FALSE or NULL all rows with at least one missing values are removed from the subset.

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output object that is stored on the data servers. Default dataframesubset.newobj.

-
datasources
+
datasources

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

-
notify.of.progress
+
notify.of.progress

specifies if console output should be produced to indicate progress. Default FALSE.

Value

- - -

ds.dataFrameSubset returns +

ds.dataFrameSubset returns the object specified by the newobj argument -which is written to the server-side. +which is written to the server-side. Also, two validity messages are returned to the client-side indicating the name of the newobj which has been created in each data source and if it is in a valid form.

Details

-

Subset a pre-existing data frame using the standard -set of Boolean operators (==, !=, >, >=, <, <=). +

Subset a pre-existing data frame using the standard +set of Boolean operators (==, !=, >, >=, <, <=). The subsetting is made by rows, but it is also possible to select columns to keep or remove. Instead, if you wish to keep all rows in the subset (e.g. if the primary plan is to subset by columns -and not by rows) the V1.name and V2.name parameters can be used +and not by rows) the V1.name and V2.name parameters can be used to specify a vector of the same length -as the data frame to be subsetted in each study in which every element is 1 and +as the data frame to be subsetted in each study in which every element is 1 and there are no missing values. For more information see the example 2 below.

Server functions called: dataFrameSubsetDS1 and dataFrameSubsetDS2

@@ -142,70 +142,70 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Subsetting a data frame
-  #Example 1: Include some rows and all columns in the subset
-  ds.dataFrameSubset(df.name = "D",
-                     V1.name = "D$LAB_TSC",
-                     V2.name = "D$LAB_TRIG",
-                     Boolean.operator = ">",
-                     keep.cols = NULL, #All columns are included in the new subset
-                     rm.cols = NULL, #All columns are included in the new subset
-                     keep.NAs = FALSE, #All rows with NAs are removed
-                     newobj = "new.subset",
-                     datasources = connections[1],#only the first server is used ("study1")
-                     notify.of.progress = FALSE)
-  #Example 2: Include all rows and some columns in the new subset
-    #Select complete cases (rows without NA)
-    ds.completeCases(x1 = "D",
-                     newobj = "complet",
-                     datasources = connections)
-    #Create a vector with all ones
-    ds.make(toAssign = "complet$LAB_TSC-complet$LAB_TSC+1",
-            newobj = "ONES",
-            datasources = connections) 
-    #Subset the data
-    ds.dataFrameSubset(df.name = "complet",
-                       V1.name = "ONES",
-                       V2.name = "ONES",
-                       Boolean.operator = "==",
-                       keep.cols = c(1:4,10), #only columns 1, 2, 3, 4 and 10 are selected
-                       rm.cols = NULL,
-                       keep.NAs = FALSE,
-                       newobj = "subset.all.rows",
-                       datasources = connections, #all servers are used
-                       notify.of.progress = FALSE)                
-                     
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Subsetting a data frame
+  #Example 1: Include some rows and all columns in the subset
+  ds.dataFrameSubset(df.name = "D",
+                     V1.name = "D$LAB_TSC",
+                     V2.name = "D$LAB_TRIG",
+                     Boolean.operator = ">",
+                     keep.cols = NULL, #All columns are included in the new subset
+                     rm.cols = NULL, #All columns are included in the new subset
+                     keep.NAs = FALSE, #All rows with NAs are removed
+                     newobj = "new.subset",
+                     datasources = connections[1],#only the first server is used ("study1")
+                     notify.of.progress = FALSE)
+  #Example 2: Include all rows and some columns in the new subset
+    #Select complete cases (rows without NA)
+    ds.completeCases(x1 = "D",
+                     newobj = "complet",
+                     datasources = connections)
+    #Create a vector with all ones
+    ds.make(toAssign = "complet$LAB_TSC-complet$LAB_TSC+1",
+            newobj = "ONES",
+            datasources = connections) 
+    #Subset the data
+    ds.dataFrameSubset(df.name = "complet",
+                       V1.name = "ONES",
+                       V2.name = "ONES",
+                       Boolean.operator = "==",
+                       keep.cols = c(1:4,10), #only columns 1, 2, 3, 4 and 10 are selected
+                       rm.cols = NULL,
+                       keep.NAs = FALSE,
+                       newobj = "subset.all.rows",
+                       datasources = connections, #all servers are used
+                       notify.of.progress = FALSE)                
+                     
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
 
@@ -216,19 +216,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.densityGrid.html b/docs/reference/ds.densityGrid.html index 2ade85f90..4390d2b43 100644 --- a/docs/reference/ds.densityGrid.html +++ b/docs/reference/ds.densityGrid.html @@ -1,10 +1,10 @@ -Generates a density grid in the client-side — ds.densityGrid • dsBaseClientGenerates a density grid in the client-side — ds.densityGrid • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,49 +46,49 @@

Generates a density grid in the client-side

-
ds.densityGrid(
-  x = NULL,
-  y = NULL,
-  numints = 20,
-  type = "combine",
-  datasources = NULL
-)
+
ds.densityGrid(
+  x = NULL,
+  y = NULL,
+  numints = 20,
+  type = "combine",
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character string providing the name of the input numerical vector.

-
y
+
y

a character string providing the name of the input numerical vector.

-
numints
-

an integer, the number of intervals for the grid density object. +

numints
+

an integer, the number of intervals for the grid density object. The default value is 20.

-
type
-

a character string that represents the type of graph to display. +

type
+

a character string that represents the type of graph to display. If type is set to -'combine', a pooled grid density matrix is generated, +'combine', a pooled grid density matrix is generated, instead if type is set to 'split' one grid density matrix is generated. Default 'combine'.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.densityGrid returns a grid density matrix.

+

ds.densityGrid returns a grid density matrix.

Details

@@ -96,10 +96,10 @@

Details

and the count is set to 0.

In DataSHIELD the user does not have access to the micro-data so extreme values such as the maximum and the minimum are potentially disclosive so this function does not allow -for the user to set the limits of the density grid and +for the user to set the limits of the density grid and the minimum and maximum values of the x -and y vectors. These elements are set by the server-side function -densityGridDS to 'valid' values +and y vectors. These elements are set by the server-side function +densityGridDS to 'valid' values (i.e. values that do not lead to leakage of micro-data to the user).

Server function called: densityGridDS

@@ -110,7 +110,7 @@

Author

Examples

- +
- - + + diff --git a/docs/reference/ds.dim.html b/docs/reference/ds.dim.html index ae078f535..037e87c70 100644 --- a/docs/reference/ds.dim.html +++ b/docs/reference/ds.dim.html @@ -1,10 +1,10 @@ -Retrieves the dimension of a server-side R object — ds.dim • dsBaseClientRetrieves the dimension of a server-side R object — ds.dim • dsBaseClient - +
@@ -30,67 +30,67 @@
- +
-

Gives the dimensions of an R object on the server-side. +

Gives the dimensions of an R object on the server-side. This function is similar to R function dim.

-
ds.dim(x = NULL, type = "both", checks = FALSE, datasources = NULL)
+
ds.dim(x = NULL, type = "both", checks = FALSE, datasources = NULL)

Arguments

-
x
+ + +
x

a character string providing the name of the input object.

-
type
-

a character string that represents the type of analysis to carry out. +

type
+

a character string that represents the type of analysis to carry out. If type is set to 'combine', 'combined', 'combines' or 'c', - the global dimension is returned. -If type is set to 'split', 'splits' or 's', + the global dimension is returned. +If type is set to 'split', 'splits' or 's', the dimension is returned separately for each study. If type is set to 'both' or 'b', both sets of outputs are produced. Default 'both'.

-
checks
+
checks

logical. If TRUE undertakes all DataSHIELD checks (time-consuming). Default FALSE.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.dim retrieves to the client-side the dimension of the object +

ds.dim retrieves to the client-side the dimension of the object in the form of a vector where the first element indicates the number of rows and the second element indicates the number of columns.

Details

-

The function returns the dimension of the server-side +

The function returns the dimension of the server-side input object (e.g. array, matrix or data frame) -from every single study and the pooled dimension of the object by summing up the individual +from every single study and the pooled dimension of the object by summing up the individual dimensions returned from each study.

-

In checks parameter is suggested that checks should only be undertaken once the +

For the checks parameter it is suggested that checks should only be undertaken once the function call has failed.

Server function called: dimDS

@@ -109,54 +109,54 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-
-  # Calculate the dimension
-  ds.dim(x="D", 
-         type="combine", #global dimension
-         checks = FALSE,
-         datasources = connections)#all opal servers are used
-  ds.dim(x="D",
-         type = "both",#separate dimension for each study
-                       #and the pooled dimension (default) 
-         checks = FALSE,
-         datasources = connections)#all opal servers are used
-  ds.dim(x="D", 
-         type="split", #separate dimension for each study
-         checks = FALSE,
-         datasources = connections[1])#only the first opal server is used ("study1")
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+
+  # Calculate the dimension
+  ds.dim(x="D", 
+         type="combine", #global dimension
+         checks = FALSE,
+         datasources = connections)#all opal servers are used
+  ds.dim(x="D",
+         type = "both",#separate dimension for each study
+                       #and the pooled dimension (default) 
+         checks = FALSE,
+         datasources = connections)#all opal servers are used
+  ds.dim(x="D", 
+         type="split", #separate dimension for each study
+         checks = FALSE,
+         datasources = connections[1])#only the first opal server is used ("study1")
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+
+} # }
+
 
@@ -167,19 +167,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.dmtC2S.html b/docs/reference/ds.dmtC2S.html index 5644c269a..b0c531322 100644 --- a/docs/reference/ds.dmtC2S.html +++ b/docs/reference/ds.dmtC2S.html @@ -1,10 +1,10 @@ -Copy a clientside data.frame, matrix or tibble to the serverside — ds.dmtC2S • dsBaseClientCopy a clientside data.frame, matrix or tibble to the serverside — ds.dmtC2S • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,23 +46,25 @@

Copy a clientside data.frame, matrix or tibble to the serverside

-
ds.dmtC2S(dfdata = NA, newobj = NULL, datasources = NULL)
+
ds.dmtC2S(dfdata = NA, newobj = NULL, datasources = NULL)

Arguments

-
dfdata
-

is a character string that specifies the name of the DMT + + +

dfdata
+

is a character string that specifies the name of the DMT to be copied from the clientside to the serverside

-
newobj
-

A character string specifying the name of the DMT on the serverside +

newobj
+

A character string specifying the name of the DMT on the serverside to which the output is to be written. If no <newobj> argument is specified or it is NULL the name of the copied DMT defaults to "dmt.copied.C2S".

-
datasources
+
datasources

specifies the particular 'connection object(s)' to use. e.g. if you have several data sets in the sources you are working with called opals.a, opals.w2, and connection.xyz, you can choose which of @@ -72,7 +74,7 @@

Arguments

wish to change the connections you wish to use by default the call datashield.connections_default('opals.a') will set 'default.connections' to be 'opals.a' and so in the absence of specific instructions to the contrary -(e.g. by specifiying a particular dataset to be used via the <datasources> +(e.g. by specifying a particular dataset to be used via the <datasources> argument) all subsequent function calls will be to the datasets held in opals.a. If the <datasources> argument is specified, it should be set without inverted commas: e.g. datasources=opals.a or datasources=default.connections. @@ -85,9 +87,7 @@

Arguments

Value

- - -

the object specified by the <newobj> argument (or default name "dmt.copied.C2S") +

the object specified by the <newobj> argument (or default name "dmt.copied.C2S") which is written as a data.frame/matrix/tibble to the serverside.

@@ -96,7 +96,7 @@

Details

function simple (though less flexible), a number of the parameters specifying the DMT to be generated on the serverside are fixed by the characteristics of the DMT to be copied rather than explicitly -specifying them as selected arguments. In consequence, +specifying them as selected arguments. In consequence, they have been removed from the list of arguments and are instead given invariant values in the first few lines of code. These include: from="clientside.dmt", nrows.scalar=NULL, ncols.scalar=NULL, byrow = FALSE. The specific value @@ -120,19 +120,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.elspline.html b/docs/reference/ds.elspline.html index 45f76f36d..b2a8d5d83 100644 --- a/docs/reference/ds.elspline.html +++ b/docs/reference/ds.elspline.html @@ -1,12 +1,12 @@ -Basis for a piecewise linear spline with meaningful coefficients — ds.elspline • dsBaseClientBasis for a piecewise linear spline with meaningful coefficients — ds.elspline • dsBaseClient - +
@@ -32,13 +32,13 @@
- +
@@ -50,51 +50,51 @@

Basis for a piecewise linear spline with meaningful coefficients

-
ds.elspline(
-  x,
-  n,
-  marginal = FALSE,
-  names = NULL,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.elspline(
+  x,
+  n,
+  marginal = FALSE,
+  names = NULL,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

the name of the input numeric variable

-
n
+
n

integer greater than 2, knots are computed such that they cut n equally-spaced intervals along the range of x

-
marginal
-

logical, how to parametrize the spline, see Details

+
marginal
+

logical, how to parametrise the spline, see Details

-
names
+
names

character, vector of names for constructed variables

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default elspline.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

an object of class "lspline" and "matrix", which its name is specified by the +

an object of class "lspline" and "matrix", which its name is specified by the newobj argument (or its default name "elspline.newobj"), is assigned on the serverside.

@@ -119,19 +119,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.exists.html b/docs/reference/ds.exists.html index f131de48e..f5db40ba5 100644 --- a/docs/reference/ds.exists.html +++ b/docs/reference/ds.exists.html @@ -1,10 +1,10 @@ -Checks if an object is defined on the server-side — ds.exists • dsBaseClientChecks if an object is defined on the server-side — ds.exists • dsBaseClient - +
@@ -30,49 +30,49 @@
- +
-

Looks if an R object of the given name is defined on the server-side. +

Looks if an R object of the given name is defined on the server-side. This function is similar to the R function exists.

-
ds.exists(x = NULL, datasources = NULL)
+
ds.exists(x = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string providing the name of the object to look for.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.exists returns a logical object. +

ds.exists returns a logical object. TRUE if the object is on the server-side and FALSE otherwise.

Details

In DataSHIELD it is not possible to see the data on the servers of the collaborating studies. It is only possible to get summaries of objects stored on the -server-side. +server-side. It is however important to know if an object is defined (i.e. exists) on the server-side. This function checks if an object does exist on the server-side.

Server function called: exists

@@ -90,44 +90,44 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Check if the object exist in the server-side
-  ds.exists(x = "D", 
-            datasources = connections) #All opal servers are used
-  ds.exists(x = "D", 
-            datasources = connections[1]) #Only the first Opal server is used (study1)
-            
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Check if the object exist in the server-side
+  ds.exists(x = "D", 
+            datasources = connections) #All opal servers are used
+  ds.exists(x = "D", 
+            datasources = connections[1]) #Only the first Opal server is used (study1)
+            
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+
+} # }
+
 
@@ -138,19 +138,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.exp.html b/docs/reference/ds.exp.html index 54fee6fd3..eff914133 100644 --- a/docs/reference/ds.exp.html +++ b/docs/reference/ds.exp.html @@ -1,10 +1,10 @@ -Computes the exponentials in the server-side — ds.exp • dsBaseClientComputes the exponentials in the server-side — ds.exp • dsBaseClient - +
@@ -30,47 +30,47 @@
- +
-

Computes the exponential values for a specified numeric vector. +

Computes the exponential values for a specified numeric vector. This function is similar to R function exp.

-
ds.exp(x = NULL, newobj = NULL, datasources = NULL)
+
ds.exp(x = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string providing the name of a numerical vector.

-
newobj
+
newobj

a character string that provides the name for the output variable that is stored on the data servers. Default exp.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. -If the datasources argument is not specified +

datasources
+

a list of DSConnection-class objects obtained after login. +If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.exp returns a vector for each study of the exponential values for the numeric vector +

ds.exp returns a vector for each study of the exponential values for the numeric vector specified in the argument x. The created vectors are stored in the server-side.

@@ -84,44 +84,44 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki 
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-                 
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # compute exponential function of the 'PM_BMI_CONTINUOUS' variable
-  ds.exp(x = "D$PM_BMI_CONTINUOUS",
-         newobj = "exp.PM_BMI_CONTINUOUS",
-         datasources = connections[1]) #only the first Opal server is used (study1)
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki 
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+                 
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # compute exponential function of the 'PM_BMI_CONTINUOUS' variable
+  ds.exp(x = "D$PM_BMI_CONTINUOUS",
+         newobj = "exp.PM_BMI_CONTINUOUS",
+         datasources = connections[1]) #only the first Opal server is used (study1)
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+
+} # }
+
 
@@ -132,19 +132,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.extractQuantiles.html b/docs/reference/ds.extractQuantiles.html index dbc7b1d5d..0984ace64 100644 --- a/docs/reference/ds.extractQuantiles.html +++ b/docs/reference/ds.extractQuantiles.html @@ -1,7 +1,5 @@ -Secure ranking of a vector across all sources and use of these ranks -to estimate global quantiles across all studies — ds.extractQuantiles • dsBaseClientSecure ranking of a vector across all sources and use of these ranks to estimate global quantiles across all studies — ds.extractQuantiles • dsBaseClient - +
@@ -40,14 +38,13 @@
- +
@@ -65,18 +62,20 @@

Secure ranking of a vector across all sources and use of these ranks

-
ds.extractQuantiles(
-  extract.quantiles,
-  extract.summary.output.ranks.df,
-  extract.ranks.sort.by,
-  extract.rm.residual.objects,
-  extract.datasources = NULL
-)
+
ds.extractQuantiles(
+  extract.quantiles,
+  extract.summary.output.ranks.df,
+  extract.ranks.sort.by,
+  extract.rm.residual.objects,
+  extract.datasources = NULL
+)

Arguments

-
extract.quantiles
+ + +
extract.quantiles

one of a restricted set of character strings. The value of this argument is set in choosing the value of the argument <quantiles.for.estimation> in ds.ranksSecure. In summary: to mitigate @@ -84,7 +83,7 @@

Arguments

generated: c(0.025,0.05,0.10,0.20,0.25,0.30,0.3333,0.40,0.50,0.60,0.6667, 0.70,0.75,0.80,0.90,0.95,0.975). The allowable formats for the argument are of the general form: "0.025-0.975" where the first number is the lowest -quantile to be estimated and the second number is the equivalent highest +quantile to be estimated and the second number is the equivalent highest quantile to estimate. These two quantiles are then estimated along with all allowable quantiles in between. The allowable argument values are then: "0.025-0.975", "0.05-0.95", "0.10-0.90", "0.20-0.80". Two alternative values @@ -93,8 +92,8 @@

Arguments

"secure.global.ranking.docx". Also see the header file for ds.ranksSecure.

-
extract.summary.output.ranks.df
-

a character string which specifies +

extract.summary.output.ranks.df
+

a character string which specifies the optional name for the summary data.frame written to the serverside on each data source that contains 5 of the key output variables from the ranking procedure pertaining to that particular data source. If no name has been @@ -105,12 +104,12 @@

Arguments

called by ds.ranksSecure and almost the final command of ds.extractQuantiles to print out the name of the data frame containing the summarised ranking information generated by ds.ranksSecure and the order in which the -data frame is laid out. This therefore appears as the last output produced +data frame is laid out. This therefore appears as the last output produced when ds.ranksSecure is run, and when this happens it is clear this relates to the main output of ds.ranksSecure not of ds.extractQuantiles.

-
extract.ranks.sort.by
+
extract.ranks.sort.by

a character string taking two possible values. These are "ID.orig" and "vals.orig". This is set via the argument <ranks.sort.by> in ds.ranksSecure. For more details see the associated @@ -118,7 +117,7 @@

Arguments

file for ds.ranksSecure.

-
extract.rm.residual.objects
+
extract.rm.residual.objects

logical value. Default = TRUE: at the beginning and end of each run of ds.ranksSecure delete all extraneous objects that are otherwise left behind. These are not usually needed, but could be of value @@ -126,7 +125,7 @@

Arguments

the residual objects

-
extract.datasources
+
extract.datasources

specifies the particular opal object(s) to use. This is set via the argument<datasources> in ds.ranksSecure. For more details see the associated document entitled "secure.global.ranking.docx". Also see @@ -135,10 +134,8 @@

Arguments

Value

- - -

the final main output of ds.extractQuantiles is a data frame object -named "final.quantile.df". This contains two vectors. The first named +

the final main output of ds.extractQuantiles is a data frame object +named "final.quantile.df". This contains two vectors. The first named "evaluation.quantiles" lists the full set of quantiles you have requested for evaluation as specified by the argument "quantiles.for.estimation" in ds.ranksSecure and explained in more detail above under the information for @@ -158,7 +155,7 @@

Details

be called from within the clientside function ds.ranksSecure.If you try to call ds.extractQuantiles directly(i.e. not by running ds.ranksSecure) you are almost certainly going to have to set up quite a few vectors and scalars -that are normally set by ds.ranksSecure and this is likely to be difficult. +that are normally set by ds.ranksSecure and this is likely to be difficult. ds.extractQuantiles itself calls two serverside functions extractQuantilesDS1 and extractQuantilesDS2. For more details about the cluster of functions that collectively enable secure global ranking and estimation of global quantiles @@ -179,19 +176,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.forestplot.html b/docs/reference/ds.forestplot.html index dc00b99d0..8ad597e4d 100644 --- a/docs/reference/ds.forestplot.html +++ b/docs/reference/ds.forestplot.html @@ -1,10 +1,10 @@ -Forestplot for SLMA models — ds.forestplot • dsBaseClientForestplot for SLMA models — ds.forestplot • dsBaseClient - +
@@ -30,82 +30,84 @@
- +
-

Draws a foresplot of the coefficients for Study-Level Meta-Analysis performed with +

Draws a forestplot of the coefficients for Study-Level Meta-Analysis performed with DataSHIELD

-
ds.forestplot(mod, variable = NULL, method = "ML", layout = "JAMA")
+
ds.forestplot(mod, variable = NULL, method = "ML", layout = "JAMA")

Arguments

-
mod
-

list List outputed by any of the SLMA models of DataSHIELD (ds.glmerSLMA, + + +

mod
+

list List outputted by any of the SLMA models of DataSHIELD (ds.glmerSLMA, ds.glmSLMA, ds.lmerSLMA)

-
variable
-

character (default NULL) Variable to meta-analyze and visualize, by setting this +

variable
+

character (default NULL) Variable to meta-analyse and visualise, by setting this argument to NULL (default) the first independent variable will be used.

-
method
-

character (Default "ML") Method to estimate the between study variance. +

method
+

character (Default "ML") Method to estimate the between study variance. See details from ?meta::metagen for the different options.

-
layout
-

character (default "JAMA") Layout of the plot. +

layout
+

character (default "JAMA") Layout of the plot. See details from ?meta::metagen for the different options.

Examples

-
if (FALSE) {
-  # Run a logistic regression
-  
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Fit the logistic regression model
-
-  mod <- ds.glmSLMA(formula = "DIS_DIAB~GENDER+PM_BMI_CONTINUOUS+LAB_HDL",
-                data = "D",
-                family = "binomial",
-                datasources = connections)
-                
-  # Plot the results of the model
-  ds.forestplot(mod)
-}
-
+    
if (FALSE) { # \dontrun{
+  # Run a logistic regression
+  
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Fit the logistic regression model
+
+  mod <- ds.glmSLMA(formula = "DIS_DIAB~GENDER+PM_BMI_CONTINUOUS+LAB_HDL",
+                data = "D",
+                family = "binomial",
+                datasources = connections)
+                
+  # Plot the results of the model
+  ds.forestplot(mod)
+} # }
+
 
@@ -116,19 +118,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.gamlss.html b/docs/reference/ds.gamlss.html index a7e2ca3f7..6ea9d0609 100644 --- a/docs/reference/ds.gamlss.html +++ b/docs/reference/ds.gamlss.html @@ -1,15 +1,15 @@ -Generalized Additive Models for Location Scale and Shape — ds.gamlss • dsBaseClient - +
@@ -35,166 +35,166 @@
- +
-

This function calls the gamlssDS that is a wrapper function from -the gamlss R package. The function returns an object of class "gamlss", which -is a generalized additive model for location, scale and shape (GAMLSS). The -function also saves the residuals as an object on the server-side with a name -specified by the newobj argument. In addition, if the argument centiles is set -to TRUE, the function calls the centiles function from the gamlss package and +

This function calls the gamlssDS that is a wrapper function from +the gamlss R package. The function returns an object of class "gamlss", which +is a generalized additive model for location, scale and shape (GAMLSS). The +function also saves the residuals as an object on the server-side with a name +specified by the newobj argument. In addition, if the argument centiles is set +to TRUE, the function calls the centiles function from the gamlss package and returns the sample percentages below each centile curve.

-
ds.gamlss(
-  formula = NULL,
-  sigma.formula = "~1",
-  nu.formula = "~1",
-  tau.formula = "~1",
-  family = "NO()",
-  data = NULL,
-  method = "RS",
-  mu.fix = FALSE,
-  sigma.fix = FALSE,
-  nu.fix = FALSE,
-  tau.fix = FALSE,
-  control = c(0.001, 20, 1, 1, 1, 1, Inf),
-  i.control = c(0.001, 50, 30, 0.001),
-  centiles = FALSE,
-  xvar = NULL,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.gamlss(
+  formula = NULL,
+  sigma.formula = "~1",
+  nu.formula = "~1",
+  tau.formula = "~1",
+  family = "NO()",
+  data = NULL,
+  method = "RS",
+  mu.fix = FALSE,
+  sigma.fix = FALSE,
+  nu.fix = FALSE,
+  tau.fix = FALSE,
+  control = c(0.001, 20, 1, 1, 1, 1, Inf),
+  i.control = c(0.001, 50, 30, 0.001),
+  centiles = FALSE,
+  xvar = NULL,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
formula
-

a formula object, with the response on the left of an ~ operator, + + +

formula
+

a formula object, with the response on the left of an ~ operator, and the terms, separated by + operators, on the right. Nonparametric smoothing -terms are indicated by pb() for penalised beta splines, cs for smoothing splines, -lo for loess smooth terms and random or ra for random terms, +terms are indicated by pb() for penalised beta splines, cs for smoothing splines, +lo for loess smooth terms and random or ra for random terms, e.g. 'y~cs(x,df=5)+x1+x2*x3'.

-
sigma.formula
+
sigma.formula

a formula object for fitting a model to the sigma parameter, as in the formula above, e.g. sigma.formula='~cs(x,df=5)'.

-
nu.formula
-

a formula object for fitting a model to the nu parameter, +

nu.formula
+

a formula object for fitting a model to the nu parameter, e.g. nu.formula='~x'.

-
tau.formula
-

a formula object for fitting a model to the tau parameter, +

tau.formula
+

a formula object for fitting a model to the tau parameter, e.g. tau.formula='~cs(x,df=2)'.

-
family
-

a gamlss.family object, which is used to define the distribution -and the link functions of the various parameters. The distribution families -supported by gamlss() can be found in gamlss.family. Functions such as 'BI()' +

family
+

a gamlss.family object, which is used to define the distribution +and the link functions of the various parameters. The distribution families +supported by gamlss() can be found in gamlss.family. Functions such as 'BI()' (binomial) produce a family object. Also can be given without the parentheses i.e. 'BI'. Family functions can take arguments, as in 'BI(mu.link=probit)'.

-
data
-

a data frame containing the variables occurring in the formula. +

data
+

a data frame containing the variables occurring in the formula. If this is missing, the variables should be on the parent environment.

-
method
+
method

a character indicating the algorithm for GAMLSS. Can be either -'RS', 'CG' or 'mixed'. If method='RS' the function will use the Rigby and -Stasinopoulos algorithm, if method='CG' the function will use the Cole and +'RS', 'CG' or 'mixed'. If method='RS' the function will use the Rigby and +Stasinopoulos algorithm, if method='CG' the function will use the Cole and Green algorithm, and if method='mixed' the function will use the RS algorithm twice before switching to the Cole and Green algorithm for up to 10 extra iterations.

-
mu.fix
+
mu.fix

logical, indicate whether the mu parameter should be kept fixed in the fitting processes.

-
sigma.fix
+
sigma.fix

logical, indicate whether the sigma parameter should be kept fixed in the fitting processes.

-
nu.fix
-

logical, indicate whether the nu parameter should be kept fixed +

nu.fix
+

logical, indicate whether the nu parameter should be kept fixed in the fitting processes.

-
tau.fix
+
tau.fix

logical, indicate whether the tau parameter should be kept fixed in the fitting processes.

-
control
-

this sets the control parameters of the outer iterations algorithm -using the gamlss.control function. This is a vector of 7 numeric values: (i) c.crit -(the convergence criterion for the algorithm), (ii) n.cyc (the number of cycles of -the algorithm), (iii) mu.step (the step length for the parameter mu), (iv) sigma.step +

control
+

this sets the control parameters of the outer iterations algorithm +using the gamlss.control function. This is a vector of 7 numeric values: (i) c.crit +(the convergence criterion for the algorithm), (ii) n.cyc (the number of cycles of +the algorithm), (iii) mu.step (the step length for the parameter mu), (iv) sigma.step (the step length for the parameter sigma), (v) nu.step (the step length for the parameter nu), (vi) tau.step (the step length for the parameter tau), (vii) gd.tol -(global deviance tolerance level). The default values for these 7 parameters are +(global deviance tolerance level). The default values for these 7 parameters are set to c(0.001, 20, 1, 1, 1, 1, Inf).

-
i.control
-

this sets the control parameters of the inner iterations of the -RS algorithm using the glim.control function. This is a vector of 4 numeric values: -(i) cc (the convergence criterion for the algorithm), (ii) cyc (the number of -cycles of the algorithm), (iii) bf.cyc (the number of cycles of the backfitting -algorithm), (iv) bf.tol (the convergence criterion (tolerance level) for the -backfitting algorithm). The default values for these 4 parameters are set to +

i.control
+

this sets the control parameters of the inner iterations of the +RS algorithm using the glim.control function. This is a vector of 4 numeric values: +(i) cc (the convergence criterion for the algorithm), (ii) cyc (the number of +cycles of the algorithm), (iii) bf.cyc (the number of cycles of the backfitting +algorithm), (iv) bf.tol (the convergence criterion (tolerance level) for the +backfitting algorithm). The default values for these 4 parameters are set to c(0.001, 50, 30, 0.001).

-
centiles
-

logical, indicating whether the function centiles() will be used to +

centiles
+

logical, indicating whether the function centiles() will be used to tabulate the sample percentages below each centile curve. Default is set to FALSE.

-
xvar
-

the unique explanatory variable used in the centiles() function. This +

xvar
+

the unique explanatory variable used in the centiles() function. This variable is used only if the centiles argument is set to TRUE. A restriction in the centiles function is that it applies to models with one explanatory variable only.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default gamlss_res.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

a gamlss object with all components as in the native R gamlss function. -Individual-level information like the components y (the response response) and -residuals (the normalised quantile residuals of the model) are not disclosed to +

a gamlss object with all components as in the native R gamlss function. +Individual-level information like the components y (the response response) and +residuals (the normalised quantile residuals of the model) are not disclosed to the client-side.

@@ -215,19 +215,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.getWGSR.html b/docs/reference/ds.getWGSR.html index 8e94dad6e..2b697ccb4 100644 --- a/docs/reference/ds.getWGSR.html +++ b/docs/reference/ds.getWGSR.html @@ -1,10 +1,10 @@ -Computes the WHO Growth Reference z-scores of anthropometric data — ds.getWGSR • dsBaseClientComputes the WHO Growth Reference z-scores of anthropometric data — ds.getWGSR • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,28 +46,30 @@

Computes the WHO Growth Reference z-scores of anthropometric data

-
ds.getWGSR(
-  sex = NULL,
-  firstPart = NULL,
-  secondPart = NULL,
-  index = NULL,
-  standing = NA,
-  thirdPart = NA,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.getWGSR(
+  sex = NULL,
+  firstPart = NULL,
+  secondPart = NULL,
+  index = NULL,
+  standing = NA,
+  thirdPart = NA,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
sex
+ + +
sex

the name of the binary variable that indicates the sex of the subject. This must be coded as 1 = male and 2 = female. If in your project the variable sex has different -levels, you should recode the levels to 1 for males and 2 for females using the +levels, you should recode the levels to 1 for males and 2 for females using the ds.recodeValues DataSHIELD function before the use of the ds.getWGSR.

-
firstPart
+
firstPart

Name of variable specifying:
Weight (kg) for BMI/A, W/A, W/H, or W/L
Head circumference (cm) for HC/A
@@ -80,7 +82,7 @@

Arguments

height, length, head circumference, and MUAC in cm; skinfolds in mm).

-
secondPart
+
secondPart

Name of variable specifying:
Age (days) for H/A, HC/A, L/A, MUAC/A, SSF/A, or TSF/A
Height (cm) for BMI/A, or W/H
@@ -89,7 +91,7 @@

Arguments

height and length in cm).

-
index
+
index

The index to be calculated and added to data. One of:
bfa BMI for age
hca Head circumference for age
@@ -104,14 +106,14 @@

Arguments

Give a quoted index name as in (e.g.) "wfh".

-
standing
+
standing

Variable specifying how stature was measured. If NA (default) then age (for "hfa" or "lfa") or height rules (for "wfh" or "wfl") will be applied. This must be coded as -1 = Standing; 2 = Supine; 3 = Unknown. Missing values will be recoded to 3 = Unknown. +1 = Standing; 2 = Supine; 3 = Unknown. Missing values will be recoded to 3 = Unknown. Give a single value (e.g."1"). If no value is specified then height and age rules will be applied.

-
thirdPart
+
thirdPart

Name of variable specifying age (in days) for BMI/A. Give a quoted variable name as in (e.g.) "age". Be careful with units (age in days). If age is given in different units you should convert it in age in days using the ds.make DataSHIELD function before the use @@ -119,32 +121,30 @@

Arguments

by the formula $age_days=age_months*(365.25/12)$.

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Defaults getWGSR.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.getWGSR assigns a vector for each study that includes the z-scores for the +

ds.getWGSR assigns a vector for each study that includes the z-scores for the specified index. The created vectors are stored in the servers.

Details

-

The function calls the server-side function getWGSRDS that computes the WHO +

The function calls the server-side function getWGSRDS that computes the WHO Growth Reference z-scores of anthropometric data for weight, height or length, MUAC (middle upper arm circumference), head circumference, sub-scapular skinfold and triceps skinfold. Note that the function might fail or return NAs when the variables are outside the ranges given in the WGS (WHO Child Growth Standards) reference (i.e. 45 to 120 cm for height and -0 to 60 months for age). It is up to the user to check the ranges and the units of their +0 to 60 months for age). It is up to the user to check the ranges and the units of their data.

@@ -154,46 +154,46 @@

Author

Examples

-
if (FALSE) {
-
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "ANTHRO.anthro1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "ANTHRO.anthro2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "ANTHRO.anthro3", driver = "OpalDriver")
-                 
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Example 1: Generate the weight-for-height (wfh) index
-  ds.getWGSR(sex = "D$sex", firstPart = "D$weight", secondPart = "D$height",
-           index = "wfh", newobj = "wfh_index", datasources = connections)
-
-  # Example 2: Generate the BMI for age (bfa) index
-  ds.getWGSR(sex = "D$sex", firstPart = "D$weight", secondPart = "D$height",
-           index = "bfa", thirdPart = "D$age", newobj = "bfa_index", datasources = connections)
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "ANTHRO.anthro1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "ANTHRO.anthro2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "ANTHRO.anthro3", driver = "OpalDriver")
+                 
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Example 1: Generate the weight-for-height (wfh) index
+  ds.getWGSR(sex = "D$sex", firstPart = "D$weight", secondPart = "D$height",
+           index = "wfh", newobj = "wfh_index", datasources = connections)
+
+  # Example 2: Generate the BMI for age (bfa) index
+  ds.getWGSR(sex = "D$sex", firstPart = "D$weight", secondPart = "D$height",
+           index = "bfa", thirdPart = "D$age", newobj = "bfa_index", datasources = connections)
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+
+} # }
+
 
@@ -204,19 +204,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.glm.html b/docs/reference/ds.glm.html index 7c0d27281..651c41e04 100644 --- a/docs/reference/ds.glm.html +++ b/docs/reference/ds.glm.html @@ -1,10 +1,10 @@ -Fits Generalized Linear Model — ds.glm • dsBaseClientFits Generalized Linear Model — ds.glm • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,165 +46,145 @@

Fits Generalized Linear Model

-
ds.glm(
-  formula = NULL,
-  data = NULL,
-  family = NULL,
-  offset = NULL,
-  weights = NULL,
-  checks = FALSE,
-  maxit = 20,
-  CI = 0.95,
-  viewIter = FALSE,
-  viewVarCov = FALSE,
-  viewCor = FALSE,
-  datasources = NULL
-)
+
ds.glm(
+  formula = NULL,
+  data = NULL,
+  family = NULL,
+  offset = NULL,
+  weights = NULL,
+  checks = FALSE,
+  maxit = 20,
+  CI = 0.95,
+  viewIter = FALSE,
+  viewVarCov = FALSE,
+  viewCor = FALSE,
+  datasources = NULL
+)

Arguments

-
formula
+ + +
formula

an object of class formula describing -the model to be fitted. For more information see +the model to be fitted. For more information see Details.

-
data
+
data

a character string specifying the name of an (optional) data frame that contains all of the variables in the GLM formula.

-
family
+
family

identifies the error distribution function to use in -the model. -This can be set as "gaussian", "binomial" and "poisson". +the model. +This can be set as "gaussian", "binomial" and "poisson". For more information see Details.

-
offset
+
offset

a character string specifying the name of a variable to be used as an offset. ds.glm does not allow an offset vector to be written directly into the GLM formula. For more information see Details.

-
weights
+
weights

a character string specifying the name of a variable containing -prior regression weights for the fitting process. +prior regression weights for the fitting process. ds.glm does not allow a weights vector to be written directly into the GLM formula.

-
checks
-

logical. If TRUE ds.glm checks the structural integrity +

checks
+

logical. If TRUE ds.glm checks the structural integrity of the model. Default FALSE. For more information see Details.

-
maxit
+
maxit

a numeric scalar denoting the maximum number of iterations that are permitted before ds.glm declares that the model has failed to converge.

-
CI
+
CI

a numeric value specifying the confidence interval. Default 0.95.

-
viewIter
-

logical. If TRUE the results of the intermediate iterations are +

viewIter
+

logical. If TRUE the results of the intermediate iterations are printed. If FALSE only final results are shown. Default FALSE.

-
viewVarCov
+
viewVarCov

logical. If TRUE the variance-covariance matrix of parameter estimates is returned. Default FALSE.

-
viewCor
+
viewCor

logical. If TRUE the correlation matrix of parameter estimates is returned. Default FALSE.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

Many of the elements of the output list returned by ds.glm are +

Many of the elements of the output list returned by ds.glm are equivalent to those returned by the glm() function in native R. However, potentially disclosive elements -such as individual-level residuals and linear predictor values are blocked. +such as individual-level residuals and linear predictor values are blocked. In this case, only non-disclosive elements are returned from each study separately.

- -

The list of elements returned by ds.glm is mentioned below:

- -

Nvalid: total number of valid observational units across all studies.

- -

Nmissing: total number of observational units across all studies with at least one data item missing.

- - -

Ntotal: total of observational units across all studies, the +

Ntotal: total of observational units across all studies, the sum of valid and missing units.

- -

disclosure.risk: risk of disclosure, - the value 1 indicates that one of the disclosure traps + the value 1 indicates that one of the disclosure traps has been triggered in that study.

- -

errorMessage: explanation for any errors or disclosure risks identified.

- - -

nsubs: total number of observational units used by ds.glm function.

-

-

nb usually is the same as nvalid.

- - +

nsubs: total number of observational units used by ds.glm function. + nb usually is the same as nvalid.

iter: total number of iterations before convergence achieved.

- -

family: error family and link function.

- -

formula: model formula, see description of formula as an input parameter (above).

+

coefficients: a matrix with 5 columns:

First
+

: the names of all of the regression parameters (coefficients) in the model

+
second
+

: the estimated values

-

coefficients: a matrix with 5 columns:

-

-
  • First: the names of all of the regression parameters (coefficients) in the model

  • -
  • second: the estimated values

  • -
  • third: corresponding standard errors of the estimated values

  • -
  • fourth: the ratio of estimate/standard error.

  • -
  • fifth: the p-value treating that as a standardised normal deviate

  • -

dev: residual deviance.

+
third
+

: corresponding standard errors of the estimated values

+
fourth
+

: the ratio of estimate/standard error

+. +
fifth
+

: the p-value treating that as a standardised normal deviate

+ +

dev: residual deviance.

df: residual degrees of freedom. nb residual degrees of freedom + number of parameters in model = nsubs.

- - -

output.information: reminder to the user that there +

output.information: reminder to the user that there is more information at the top of the output.

- -

Also, the estimated coefficients and standard errors expanded with estimated confidence intervals - with % coverage specified by ci argument are returned. + with % coverage specified by ci argument are returned. For the poisson model, - the output is generated on the scale of the linear predictor (log rates and log rate ratios) + the output is generated on the scale of the linear predictor (log rates and log rate ratios) and the natural scale after exponentiation (rates and rate ratios).

Details

Fits a GLM on data from a single source or multiple sources on the server-side. -In the latter case, the data are co-analysed (when using ds.glm) +In the latter case, the data are co-analysed (when using ds.glm) by using an approach that is mathematically equivalent to placing all individual-level data from all sources in one central warehouse and analysing those data using the conventional glm() function in R. In this situation marked heterogeneity between sources should be corrected @@ -216,44 +196,51 @@

Details

In formula most shortcut notation for formulas allowed under R's standard glm() function is also allowed by ds.glm.

Many GLMs can be fitted very simply using a formula such as:

-

$$y~a+b+c+d$$

-

which simply means fit a GLM with y as the outcome variable and -a, b, c and d as covariates. +

\(y~a+b+c+d\)

+

which simply means fit a GLM with y as the outcome variable and +a, b, c and d as covariates. By default all such models also include an intercept (regression constant) term.

Instead, if you need to fit a more complex model, for example:

-

$$EVENT~1+TID+SEXF*AGE.60$$

-

In the above model the outcome variable is EVENT -and the covariates -TID (factor variable with level values between 1 and 6 denoting the period time), +

\(EVENT~1+TID+SEXF*AGE.60\)

+

In the above model the outcome variable is EVENT +and the covariates +TID (factor variable with level values between 1 and 6 denoting the period time), SEXF (factor variable denoting sex) -and AGE.60 (quantitative variable representing age-60 in years). +and AGE.60 (quantitative variable representing age-60 in years). The term 1 forces -the model to include an intercept term, in contrast if you use the term 0 the +the model to include an intercept term, in contrast if you use the term 0 the intercept term is removed. The * symbol between SEXF and AGE.60 means fit all possible main effects and interactions for and between those two covariates. - This takes the value 0 in all males 0 * AGE.60 + This takes the value 0 in all males 0 * AGE.60 and in females 1 * AGE.60. - This model is in example 1 of the section Examples. In this case the logarithm of - the survival time is added as an offset (log(survtime)).

+ This model is in example 1 of the section Examples. In this case the logarithm of + the survival time is added as an offset (log(survtime)).

In the family argument can be specified three types of models to fit:

-

  • "gaussian": conventional linear model with normally distributed errors

  • -
  • "binomial": conventional unconditional logistic regression model

  • -
  • "poisson": Poisson regression model which is the most used in survival analysis. +

    "gaussian"
    +

    : conventional linear model with normally distributed errors

    + +
    "binomial"
    +

    : conventional unconditional logistic regression model

    + +
    "poisson"
    +

    : Poisson regression model which is the most used in survival analysis. The model used Piecewise Exponential Regression (PER) which typically closely approximates - Cox regression in its main estimates and standard errors.

  • -

At present the gaussian family is automatically coupled with + Cox regression in its main estimates and standard errors.

+ + +

At present the gaussian family is automatically coupled with an identity link function, the binomial family with a logistic link function and the poisson family with a log link function.

The data argument avoids you having to specify the name of the -data frame in front of each covariate in the formula. +data frame in front of each covariate in the formula. For example, if the data frame is called DataFrame you -avoid having to write: \(DataFrame$y~DataFrame$a+DataFrame$b+DataFrame$c+DataFrame$d\)

-

The checks argument verifies that the variables in the model are all defined (exist) +avoid having to write: \(DataFrame\$y ~ DataFrame\$a + DataFrame\$b + DataFrame\$c + DataFrame\$d\)

+

The checks argument verifies that the variables in the model are all defined (exist) on the server-side at every study -and that they have the correct characteristics required to fit the model. +and that they have the correct characteristics required to fit the model. It is suggested to make checks argument TRUE if an unexplained problem in the model fit is encountered because the running process takes several minutes.

In maxit Logistic regression and Poisson regression @@ -261,7 +248,7 @@

Details

regression constant is far away from its actual value that the GLM is trying to estimate. In consequence we often set maxit=30 but depending on the nature of the models you wish to fit, you may wish -to be alerted much more quickly than this if there is a delay in convergence, +to be alerted much more quickly than this if there is a delay in convergence, or you may wish to allow more iterations.

Privacy protected iterative fitting of a GLM is explained here:

@@ -322,114 +309,114 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see Wiki
-  # Connecting to the Opal servers
-  
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-  
-  # Example 1: Fitting GLM for survival analysis
-  # For this analysis we need to load survival data from the server 
-  
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Fit the GLM 
-  
-  # make sure that the outcome is numeric 
-  ds.asNumeric(x.name = "D$cens",
-               newobj = "EVENT",
-               datasources = connections)
-               
-  # convert time id variable to a factor 
-               
-  ds.asFactor(input.var.name = "D$time.id",
-              newobj = "TID",
-              datasources = connections)
-              
-  # create in the server-side the log(survtime) variable
-         
-  ds.log(x = "D$survtime",
-         newobj = "log.surv",
-         datasources = connections)
-  
-  ds.glm(formula = EVENT ~ 1 + TID + female * age.60,
-         data = "D",
-         family = "poisson", 
-         offset = "log.surv",
-         weights = NULL,
-         checks = FALSE,
-         maxit = 20,
-         CI = 0.95,
-         viewIter = FALSE,
-         viewVarCov = FALSE,
-         viewCor = FALSE,
-         datasources = connections)
-         
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-  
-  # Example 2: run a logistic regression without interaction
-  # For this example we are going to load another dataset  
-  
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Fit the logistic regression model
-
-  mod <- ds.glm(formula = "DIS_DIAB~GENDER+PM_BMI_CONTINUOUS+LAB_HDL",
-                data = "D",
-                family = "binomial",
-                datasources = connections)
-                
-  mod #visualize the results of the model
-
-# Example 3: fit a standard Gaussian linear model with an interaction
-# We are using the same data as in example 2. 
-
-mod <- ds.glm(formula = "PM_BMI_CONTINUOUS~DIS_DIAB*GENDER+LAB_HDL",
-              data = "D",
-              family = "gaussian",
-              datasources = connections)
-mod
-
-# Clear the Datashield R sessions and logout
-datashield.logout(connections) 
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see Wiki
+  # Connecting to the Opal servers
+  
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+  
+  # Example 1: Fitting GLM for survival analysis
+  # For this analysis we need to load survival data from the server 
+  
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Fit the GLM 
+  
+  # make sure that the outcome is numeric 
+  ds.asNumeric(x.name = "D$cens",
+               newobj = "EVENT",
+               datasources = connections)
+               
+  # convert time id variable to a factor 
+               
+  ds.asFactor(input.var.name = "D$time.id",
+              newobj = "TID",
+              datasources = connections)
+              
+  # create in the server-side the log(survtime) variable
+         
+  ds.log(x = "D$survtime",
+         newobj = "log.surv",
+         datasources = connections)
+  
+  ds.glm(formula = EVENT ~ 1 + TID + female * age.60,
+         data = "D",
+         family = "poisson", 
+         offset = "log.surv",
+         weights = NULL,
+         checks = FALSE,
+         maxit = 20,
+         CI = 0.95,
+         viewIter = FALSE,
+         viewVarCov = FALSE,
+         viewCor = FALSE,
+         datasources = connections)
+         
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+  
+  # Example 2: run a logistic regression without interaction
+  # For this example we are going to load another dataset  
+  
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Fit the logistic regression model
+
+  mod <- ds.glm(formula = "DIS_DIAB~GENDER+PM_BMI_CONTINUOUS+LAB_HDL",
+                data = "D",
+                family = "binomial",
+                datasources = connections)
+                
+  mod #visualize the results of the model
+
+# Example 3: fit a standard Gaussian linear model with an interaction
+# We are using the same data as in example 2. 
+
+mod <- ds.glm(formula = "PM_BMI_CONTINUOUS~DIS_DIAB*GENDER+LAB_HDL",
+              data = "D",
+              family = "gaussian",
+              datasources = connections)
+mod
+
+# Clear the Datashield R sessions and logout
+datashield.logout(connections) 
+} # }
+
 
@@ -440,19 +427,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.glmPredict.html b/docs/reference/ds.glmPredict.html index c5ced6201..2d4434a01 100644 --- a/docs/reference/ds.glmPredict.html +++ b/docs/reference/ds.glmPredict.html @@ -1,10 +1,10 @@ -Applies predict.glm() to a serverside glm object — ds.glmPredict • dsBaseClientApplies predict.glm() to a serverside glm object — ds.glmPredict • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,28 +46,30 @@

Applies predict.glm() to a serverside glm object

-
ds.glmPredict(
-  glmname = NULL,
-  newdataname = NULL,
-  output.type = "response",
-  se.fit = FALSE,
-  dispersion = NULL,
-  terms = NULL,
-  na.action = "na.pass",
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.glmPredict(
+  glmname = NULL,
+  newdataname = NULL,
+  output.type = "response",
+  se.fit = FALSE,
+  dispersion = NULL,
+  terms = NULL,
+  na.action = "na.pass",
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
glmname
+ + +
glmname

is a character string identifying the glm object on serverside to which predict.glm is to be applied. Equivalent to <object> argument in native R's predict.glm which is described as: a fitted object of class inheriting from 'glm'.

-
newdataname
+
newdataname

is a character string identifying an (optional) dataframe on the serverside in which to look for new covariate values with which to predict. If omitted, @@ -76,7 +78,7 @@

Arguments

predict.glm function in native R.

-
output.type
+
output.type

a character string taking the values 'response', 'link' or 'terms'. The value 'response' generates predictions on the scale of the original outcome, e.g. as proportions in a logistic regression. These @@ -95,7 +97,7 @@

Arguments

native R's predict.glm function.

-
se.fit
+
se.fit

logical if standard errors for the fitted predictions are required. Defaults to FALSE when the output contains only a vector (or vectors) of predicted values. If TRUE, the output also contains corresponding vectors for the standard @@ -105,7 +107,7 @@

Arguments

argument is equivalent to the <type> argument in native R's predict.glm function.

-
dispersion
+
dispersion

numeric value specifying the dispersion of the GLM fit to be assumed in computing the standard errors. If omitted, that returned by summary applied to the glm object is used. e.g. if <dispersion> is unspecified @@ -116,14 +118,14 @@

Arguments

is precisely equivalent to the corresponding argument in predict.glm in native R.

-
terms
+
terms

a character vector specifying a subset of terms to return in the prediction. Only applies if output.type='terms'. ds.glmPredict's <terms> argument is precisely equivalent to the corresponding argument in predict.glm in native R.

-
na.action
+
na.action

character string determining what should be done with missing values in the data.frame identified by <newdataname>. Default is na.pass which predicts from the specified new data.frame with all NAs left in place. na.omit @@ -131,14 +133,14 @@

Arguments

NAs anywhere in the data.frame. For further details see help in native R.

-
newobj
+
newobj

a character string specifying the name of the serverside object to which the output object from the call to ds.glmPredict is to be written in each study. If no <newobj> argument is specified, the output object on the serverside defaults to the name "predict_glm".

-
datasources
+
datasources

specifies the particular 'connection object(s)' to use. e.g. if you have several data sets in the sources you are working with called opals.a, opals.w2, and connection.xyz, you can choose which of @@ -148,7 +150,7 @@

Arguments

wish to change the connections you wish to use by default the call datashield.connections_default('opals.a') will set 'default.connections' to be 'opals.a' and so in the absence of specific instructions to the contrary -(e.g. by specifiying a particular dataset to be used via the <datasources> +(e.g. by specifying a particular dataset to be used via the <datasources> argument) all subsequent function calls will be to the datasets held in opals.a. If the <datasources> argument is specified, it should be set without inverted commas: e.g. datasources=opals.a or datasources=default.connections. @@ -161,9 +163,7 @@

Arguments

Value

- - -

ds.glmPredict calls the serverside assign function glmPredictDS.as +

ds.glmPredict calls the serverside assign function glmPredictDS.as which writes a new object to the serverside containing output precisely equivalent to predict.glm in native R. The name for this serverside object is given by the newobj argument or if that argument is missing or null it is called "predict_glm". @@ -174,14 +174,12 @@

Value

in 'fit'. The non-disclosive summary statistics for the vector(s) include: length, the total number of valid (non-missing) values, the number of missing values, the mean and standard deviation of the valid -values and the 5

- - -

the output always includes: the name of the serverside glm object being predicted from, +values and the 5 +the output always includes: the name of the serverside glm object being predicted from, the name - if one was specified - of the dataframe being used as the basis for predictions, the output.type specified ('link', 'response' or 'terms'), the value of the dispersion parameter if one had been specified and the residual scale parameter (which is -multipled by sqrt(dispersion parameter) if one has been set). If output.type = 'terms', +multiplied by sqrt(dispersion parameter) if one has been set). If output.type = 'terms', the summary statistics for the fit and se.fit vectors are replaced by equivalent summary statistics for each column in fit and se.fit matrices which each have k columns if k terms are being summarised.

@@ -204,7 +202,7 @@

Details

can then be applied to the newobj to interpret the output. For example, it could be used as the basis for regression diagnostic plots. Second, the call to the aggregate function creates a non-disclosive summary of all the -information held in the newobj created by the assign function +information held in the newobj created by the assign function and returns this summary to the clientside. For example, the full list of predicted/fitted values generated by the model could be disclosive. So although the newobj holds the full vector of fitted values, only the @@ -229,19 +227,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.glmSLMA.html b/docs/reference/ds.glmSLMA.html index 296aaebc2..d5ab72ff0 100644 --- a/docs/reference/ds.glmSLMA.html +++ b/docs/reference/ds.glmSLMA.html @@ -1,10 +1,10 @@ -Fit a Generalized Linear Model (GLM) with pooling via Study Level Meta-Analysis (SLMA) — ds.glmSLMA • dsBaseClientFit a Generalized Linear Model (GLM) with pooling via Study Level Meta-Analysis (SLMA) — ds.glmSLMA • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,93 +46,93 @@

Fit a Generalized Linear Model (GLM) with pooling via Study Level Meta-Analy

-
ds.glmSLMA(
-  formula = NULL,
-  family = NULL,
-  offset = NULL,
-  weights = NULL,
-  combine.with.metafor = TRUE,
-  newobj = NULL,
-  dataName = NULL,
-  checks = FALSE,
-  maxit = 30,
-  notify.of.progress = FALSE,
-  datasources = NULL
-)
+
ds.glmSLMA(
+  formula = NULL,
+  family = NULL,
+  offset = NULL,
+  weights = NULL,
+  combine.with.metafor = TRUE,
+  newobj = NULL,
+  dataName = NULL,
+  checks = FALSE,
+  maxit = 30,
+  notify.of.progress = FALSE,
+  datasources = NULL
+)

Arguments

-
formula
+ + +
formula

an object of class formula describing -the model to be fitted. For more information see +the model to be fitted. For more information see Details.

-
family
+
family

identifies the error distribution function to use in the model.

-
offset
+
offset

a character string specifying the name of a variable to be used as an offset. ds.glmSLMA does not allow an offset vector to be written directly into the GLM formula.

-
weights
+
weights

a character string specifying the name of a variable containing prior regression weights for the fitting process. ds.glmSLMA does not allow a weights vector to be written directly into the GLM formula.

-
combine.with.metafor
+
combine.with.metafor

logical. If TRUE the estimates and standard errors for each regression coefficient are pooled across studies using random-effects meta-analysis under maximum likelihood (ML), restricted maximum likelihood (REML) or fixed-effects meta-analysis (FE). Default TRUE.

-
newobj
+
newobj

a character string specifying the name of the object to which the glm object representing the model fit on the serverside in each study is to be written. If no <newobj> argument is specified, the output object defaults to "new.glm.obj".

-
dataName
+
dataName

a character string specifying the name of an (optional) data frame that contains all of the variables in the GLM formula.

-
checks
-

logical. If TRUE ds.glmSLMA checks the structural integrity +

checks
+

logical. If TRUE ds.glmSLMA checks the structural integrity of the model. Default FALSE. For more information see Details.

-
maxit
+
maxit

a numeric scalar denoting the maximum number of iterations that -are permitted before ds.glmSLMA declares that the model has failed to converge. +are permitted before ds.glmSLMA declares that the model has failed to converge. For more information see Details.

-
notify.of.progress
+
notify.of.progress

specifies if console output should be produced to indicate progress. Default FALSE.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

The serverside aggregate functions glmSLMADS1 and glmSLMADS2 return +

The serverside aggregate functions glmSLMADS1 and glmSLMADS2 return output to the clientside, while the assign function glmSLMADS.assign simply writes the glm object to the serverside created by the model fit on a given server as a permanent object on that same server. @@ -145,117 +145,68 @@

Value

ds.glmSLMA include: a mixture of non-disclosive elements of the glm object reported separately by study included in a list object called output.summary; and a series of other list objects that represent inferences aggregated across studies.

- -

the study specific items include:

+

coefficients: a matrix with 5 columns:

First
+

: the names of all of the regression parameters (coefficients) in the model

+
second
+

: the estimated values

-

coefficients: a matrix with 5 columns:

-

-
  • First: the names of all of the regression parameters (coefficients) in the model

  • -
  • second: the estimated values

  • -
  • third: corresponding standard errors of the estimated values

  • -
  • fourth: the ratio of estimate/standard error

  • -
  • fifth: the p-value treating that as a standardised normal deviate

  • -

family: indicates the error distribution and link function used -in the GLM.

+
third
+

: corresponding standard errors of the estimated values

+
fourth
+

: the ratio of estimate/standard error

-

formula: model formula, see description of formula as an input parameter (above).

+
fifth
+

: the p-value treating that as a standardised normal deviate

+

family: indicates the error distribution and link function used +in the GLM.

+

formula: model formula, see description of formula as an input parameter (above).

df.resid: the residual degrees of freedom around the model.

- -

deviance.resid: the residual deviance around the model.

- -

df.null: the degrees of freedom around the null model (with just an intercept).

- -

dev.null: the deviance around the null model (with just an intercept).

- -

CorrMatrix: the correlation matrix of parameter estimates.

- -

VarCovMatrix: the variance-covariance matrix of parameter estimates.

- -

weights: the name of the vector (if any) holding regression weights.

- -

offset: the name of the vector (if any) holding an offset (enters glm with a coefficient of 1.00).

- -

cov.scaled: equivalent to VarCovMatrix.

- -

cov.unscaled: equivalent to VarCovMatrix but assuming dispersion (scale) parameter is 1.

- -

Nmissing: the number of missing observations in the given study.

- -

Nvalid: the number of valid (non-missing) observations in the given study.

- - -

Ntotal: the total number of observations in the given study +

Ntotal: the total number of observations in the given study (Nvalid + Nmissing).

- -

data: equivalent to input parameter dataName (above).

- -

dispersion: the estimated dispersion parameter: deviance.resid/df.resid for a gaussian family multiple regression model, 1.00 for logistic and poisson regression.

- -

call: summary of key elements of the call to fit the model.

- - -

na.action: chosen method of dealing with missing values. This is +

na.action: chosen method of dealing with missing values. This is usually, na.action = na.omit - see help in native R.

- -

iter: the number of iterations required to achieve convergence of the glm model in each separate study.

- - -

Once the study-specific output has been returned, ds.glmSLMA

- - -

returns a series of lists relating to the aggregated inferences across studies. +

Once the study-specific output has been returned, ds.glmSLMA +returns a series of lists relating to the aggregated inferences across studies. These include the following:

- -

num.valid.studies: the number of studies with valid output included in the combined analysis

- -

betamatrix.all: matrix with a row for each regression coefficient and a column for each study reporting the estimated regression coefficients by study.

- -

sematrix.all: matrix with a row for each regression coefficient and a column for each study reporting the standard errors of the estimated regression coefficients by study.

- -

betamatrix.valid: matrix with a row for each regression coefficient and a column for each study reporting the estimated regression coefficients by study but only for studies with valid output (e.g. not violating disclosure traps)

- -

sematrix.valid: matrix with a row for each regression coefficient and a column for each study reporting the standard errors of the estimated regression coefficients by study but only for studies with valid output (e.g. not violating disclosure traps)

- -

SLMA.pooled.estimates.matrix: a matrix with a row for each regression coefficient and six columns. The first two columns contain the pooled estimate of each regression coefficients and its standard error with @@ -266,19 +217,17 @@

Value

argument combine.with.metafor is set to TRUE. Otherwise, users can take the betamatrix.valid and sematrix.valid matrices and enter them into their meta-analysis package of choice.

- -

is.object.created and validity.check are standard items returned by an assign function when the designated newobj appears to have -been successfuly created on the serverside at each study. This output is +been successfully created on the serverside at each study. This output is produced specifically by the assign function glmSLMADS.assign that writes out the glm object on the serverside

Details

-

ds.glmSLMA specifies the structure of a Generalized Linear Model +

ds.glmSLMA specifies the structure of a Generalized Linear Model to be fitted separately on each study or data source. Calls serverside functions -glmSLMADS1 (aggregate),glmSLMADS2 (aggregate) and glmSLMADS.assign (assign). +glmSLMADS1 (aggregate),glmSLMADS2 (aggregate) and glmSLMADS.assign (assign). From a mathematical perspective, the SLMA approach (using ds.glmSLMA) differs fundamentally from the alternative approach using ds.glm. ds.glm fits the model iteratively across all studies together. At each @@ -332,78 +281,92 @@

Details

In formula Most shortcut notation for formulas allowed under R's standard glm() function is also allowed by ds.glmSLMA.

Many glms can be fitted very simply using a formula such as:

-

$$y~a+b+c+d$$

-

which simply means fit a glm with y as the outcome variable and -a, b, c and d as covariates. +

\(y~a+b+c+d\)

+

which simply means fit a glm with y as the outcome variable and +a, b, c and d as covariates. By default all such models also include an intercept (regression constant) term.

Instead, if you need to fit a more complex model, for example:

-

$$EVENT~1+TID+SEXF*AGE.60$$

-

In the above model the outcome variable is EVENT -and the covariates -TID (factor variable with level values between 1 and 6 denoting the period time), +

\(EVENT~1+TID+SEXF*AGE.60\)

+

In the above model the outcome variable is EVENT +and the covariates +TID (factor variable with level values between 1 and 6 denoting the period time), SEXF (factor variable denoting sex) -and AGE.60 (quantitative variable representing age-60 in years). +and AGE.60 (quantitative variable representing age-60 in years). The term 1 forces -the model to include an intercept term, in contrast if you use the term 0 the +the model to include an intercept term, in contrast if you use the term 0 the intercept term is removed. The * symbol between SEXF and AGE.60 means fit all possible main effects and interactions for and between those two covariates. - This takes the value 0 in all males 0 * AGE.60 - and in females 1 * AGE.60. - This model is in example 1 of the section Examples. In this case the logarithm of + This takes the value 0 in all males 0 * AGE.60 + and in females 1 * AGE.60. + This model is in example 1 of the section Examples. In this case the logarithm of the survival time is added as an offset (log(survtime)).

In the family argument a range of model types can be fitted. This range has recently been extended to include a number of model types that are non-standard but are used relatively widely.

-

The standard models include:

  • "gaussian": conventional linear model with normally distributed errors

  • -
  • "binomial": conventional unconditional logistic regression model

  • -
  • "poisson": Poisson regression model which is often used in epidemiological +

    The standard models include:

    "gaussian"
    +

    : conventional linear model with normally distributed errors

    + +
    "binomial"
    +

    : conventional unconditional logistic regression model

    + +
    "poisson"
    +

    : Poisson regression model which is often used in epidemiological analysis of counts and rates and is also used in survival analysis. The Piecewise Exponential Regression (PER) model typically provides a close approximation - to the Cox regression model in its main estimates and standard errors.

  • -
  • "gamma": a family of models for outcomes characterised by a constant - coefficient of variation, i.e. the variance increases with the square of the expected mean

    -

    The extended range includes:

  • -
  • "quasipoisson": a model with a Poisson variance function - variance + to the Cox regression model in its main estimates and standard errors.

    + +
    "gamma"
    +

    : a family of models for outcomes characterised by a constant + coefficient of variation, i.e. the variance increases with the square of the expected mean

    + + +The extended range includes: +
    "quasipoisson"
    +

    : a model with a Poisson variance function - variance equals expected mean - but the residual variance which is fixed to be 1.00 in a standard Poisson model can then take any value. This is achieved by a dispersion parameter which is estimated during the model fit and if it takes the value K it means that the expected variance is K x the expected mean, which implies that all standard errors will be sqrt(K) times larger than in a standard Poisson model - fitted to the same data. This allows for the extra uncertainty which is associated + fitted to the same data. This allows for the extra uncertainty which is associated with 'overdispersion' that occurs very commonly with Poisson distributed data, and typically arises when the count/rate data being modelled occur in blocks which exhibit heterogeneity of underlying risk which is not being fully modelled, either by including the blocks themselves as a factor or by including covariates for all the determinants that are relevant to that underlying risk. If there is no overdispersion (K=1) the estimates and standard errors from the - quasipoisson model will be almost identical to those from a standard poisson model.

  • -
  • "quasibinomial": a model with a binomial variance function - if P + quasipoisson model will be almost identical to those from a standard poisson model.

    + + +
    "quasibinomial"
    +

    : a model with a binomial variance function - if P is the expected proportion of successes, and N is the number of "trials" (always 1 if analysing binary data which are formally described as having a Bernoulli distribution (binomial distribution with N=1) the variance function is N*(P)*(1-P). But the residual variance which is fixed to be 1.00 in a binomial model can take any value. This is achieved by a dispersion parameter - which is estimated during the model fit (see quasipoisson information above).

  • -

Each class of models has a "canonical link" which represents the link function that + which is estimated during the model fit (see quasipoisson information above).

+ +

Each class of models has a "canonical link" which represents the link function that maximises the information extraction by the model. The gaussian family uses the identity link, the poisson family the log link, the binomial/Bernoulli family the logit link and the gamma family the reciprocal link.

The dataName argument avoids you having to specify the name of the -data frame in front of each covariate in the formula. +data frame in front of each covariate in the formula. For example, if the data frame is called DataFrame you -avoid having to write: \(DataFrame$y~DataFrame$a+DataFrame$b+DataFrame$c+DataFrame$d\)

-

The checks argument verifies that the variables in the model are all defined (exist) +avoid having to write: \(DataFrame\$y ~ DataFrame\$a + DataFrame\$b + DataFrame\$c + DataFrame\$d\)

+

The checks argument verifies that the variables in the model are all defined (exist) on the server-site at every study -and that they have the correct characteristics required to fit the model. +and that they have the correct characteristics required to fit the model. It is suggested to make checks argument TRUE only if an unexplained - problem in the model fit is encountered because the running process takes several minutes.

+ problem in the model fit is encountered because the running process takes several minutes.

In maxit Logistic regression and Poisson regression models can require many iterations, particularly if the starting value of the regression constant is far away from its actual value that the GLM is trying to estimate. In consequence we often set maxit=30 but depending on the nature of the models you wish to fit, you may wish -to be alerted much more quickly than this if there is a delay in convergence, +to be alerted much more quickly than this if there is a delay in convergence, or you may wish to allow more iterations.

Server functions called: glmSLMADS1, glmSLMADS2, glmSLMADS.assign

@@ -415,111 +378,111 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see Wiki
-  # Connecting to the Opal servers
-  
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-  
-  # Example 1: Fitting GLM for survival analysis
-  # For this analysis we need to load survival data from the server 
-  
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Fit the GLM 
-  
-  # make sure that the outcome is numeric 
-  ds.asNumeric(x.name = "D$cens",
-               newobj = "EVENT",
-               datasources = connections)
-               
-  # convert time id variable to a factor 
-               
-  ds.asFactor(input.var.name = "D$time.id",
-              newobj = "TID",
-              datasources = connections)
-              
-  # create in the server-side the log(survtime) variable
-         
-  ds.log(x = "D$survtime",
-         newobj = "log.surv",
-         datasources = connections)
-  
-  ds.glmSLMA(formula = EVENT ~ 1 + TID + female * age.60,
-         dataName = "D",
-         family = "poisson", 
-         offset = "log.surv",
-         weights = NULL,
-         checks = FALSE,
-         maxit = 20,
-         datasources = connections)
-         
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-  
-  # Example 2: run a logistic regression without interaction
-  # For this example we are going to load another type of data  
-  
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Fit the logistic regression model
-
-  mod <- ds.glmSLMA(formula = "DIS_DIAB~GENDER+PM_BMI_CONTINUOUS+LAB_HDL",
-                dataName = "D",
-                family = "binomial",
-                datasources = connections)
-                
-  mod #visualize the results of the model
-
-# Example 3: fit a standard Gaussian linear model with an interaction
-# We are using the same data as in example 2. It is not necessary to
-# connect again to the server 
-
-mod <- ds.glmSLMA(formula = "PM_BMI_CONTINUOUS~DIS_DIAB*GENDER+LAB_HDL",
-              dataName = "D",
-              family = "gaussian",
-              datasources = connections)
-mod
-
-# Clear the Datashield R sessions and logout
-datashield.logout(connections) 
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see Wiki
+  # Connecting to the Opal servers
+  
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+  
+  # Example 1: Fitting GLM for survival analysis
+  # For this analysis we need to load survival data from the server 
+  
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Fit the GLM 
+  
+  # make sure that the outcome is numeric 
+  ds.asNumeric(x.name = "D$cens",
+               newobj = "EVENT",
+               datasources = connections)
+               
+  # convert time id variable to a factor 
+               
+  ds.asFactor(input.var.name = "D$time.id",
+              newobj = "TID",
+              datasources = connections)
+              
+  # create in the server-side the log(survtime) variable
+         
+  ds.log(x = "D$survtime",
+         newobj = "log.surv",
+         datasources = connections)
+  
+  ds.glmSLMA(formula = EVENT ~ 1 + TID + female * age.60,
+         dataName = "D",
+         family = "poisson", 
+         offset = "log.surv",
+         weights = NULL,
+         checks = FALSE,
+         maxit = 20,
+         datasources = connections)
+         
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+  
+  # Example 2: run a logistic regression without interaction
+  # For this example we are going to load another type of data  
+  
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Fit the logistic regression model
+
+  mod <- ds.glmSLMA(formula = "DIS_DIAB~GENDER+PM_BMI_CONTINUOUS+LAB_HDL",
+                dataName = "D",
+                family = "binomial",
+                datasources = connections)
+                
+  mod #visualize the results of the model
+
+# Example 3: fit a standard Gaussian linear model with an interaction
+# We are using the same data as in example 2. It is not necessary to
+# connect again to the server 
+
+mod <- ds.glmSLMA(formula = "PM_BMI_CONTINUOUS~DIS_DIAB*GENDER+LAB_HDL",
+              dataName = "D",
+              family = "gaussian",
+              datasources = connections)
+mod
+
+# Clear the Datashield R sessions and logout
+datashield.logout(connections) 
+} # }
+
 
@@ -530,19 +493,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.glmSummary.html b/docs/reference/ds.glmSummary.html index 71fdb8aba..5bacf7ad5 100644 --- a/docs/reference/ds.glmSummary.html +++ b/docs/reference/ds.glmSummary.html @@ -1,12 +1,12 @@ -Summarize a glm object on the serverside — ds.glmSummary • dsBaseClientSummarize a glm object on the serverside — ds.glmSummary • dsBaseClient - +
@@ -32,42 +32,44 @@
- +
-

Summarize a glm object on the serverside to create a -summary_glm object. Also identify and return components of +

Summarize a glm object on the serverside to create a +summary_glm object. Also identify and return components of both the glm object and the summary_glm object that can safely be sent to the clientside without a risk of disclosure

-
ds.glmSummary(x.name, newobj = NULL, datasources = NULL)
+
ds.glmSummary(x.name, newobj = NULL, datasources = NULL)

Arguments

-
x.name
+ + +
x.name

a character string providing the name of a glm object on the serverside that has previously been created e.g. using ds.glmSLMA

-
newobj
+
newobj

a character string specifying the name of the object to which the summary_glm object representing the output of summary(glm object) in each study is to be written. If no <newobj> argument is specified, the output object on the serverside defaults to "summary_glm.newobj".

-
datasources
+
datasources

specifies the particular 'connection object(s)' to use. e.g. if you have several data sets in the sources you are working with called opals.a, opals.w2, and connection.xyz, you can choose which of @@ -77,7 +79,7 @@

Arguments

wish to change the connections you wish to use by default the call datashield.connections_default('opals.a') will set 'default.connections' to be 'opals.a' and so in the absence of specific instructions to the contrary -(e.g. by specifiying a particular dataset to be used via the <datasources> +(e.g. by specifying a particular dataset to be used via the <datasources> argument) all subsequent function calls will be to the datasets held in opals.a. If the <datasources> argument is specified, it should be set without inverted commas: e.g. datasources=opals.a or datasources=default.connections. @@ -90,9 +92,7 @@

Arguments

Value

- - -

ds.glmSummary writes a new object to the serverside with name given by +

ds.glmSummary writes a new object to the serverside with name given by the newobj argument or if that argument is missing or null it is called "summary_glm.newobj". In addition, ds.glmSummary returns an object containing two lists to the clientside the two lists are named "glm.obj" and "glm.summary.obj" which contain all of the @@ -100,7 +100,7 @@

Value

but with all potentially disclosive components set to NA or masked in another way see "details" above. The elements that are returned with a non-NA value in the glm.obj list object are: "coefficients", "rank", "family", "deviance", "aic", -"null.deviance", "iter", "df.residual", "df.null", "converged", "boundary", +"null.deviance", "iter", "df.residual", "df.null", "converged", "boundary", "call", "formula", "terms", "data", "control", "method", "contrasts", "xlevels". The elements that are returned with a non-NA value in the glm.summary.obj list object are: "call", "terms", "family", "deviance", @@ -123,7 +123,7 @@

Details

disclosive. They are all therefore set to NA and so convey no information when returned to the clientside: 1.residuals, 2.fitted.values, 3.effects, 4.R, 5.qr, 6.linear.predictors, -7.weights, 8.prior.weights, 9.y, 10.model, 11. na.action, 12.x, 13. offset. +7.weights, 8.prior.weights, 9.y, 10.model, 11. na.action, 12.x, 13. offset. In addition the list element "data" which identifies a data.frame that was identified as containing all of the variables required for the model is also disclosive because it doesn't list @@ -154,19 +154,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.glmerSLMA.html b/docs/reference/ds.glmerSLMA.html index 3bf10c098..ccc7ec131 100644 --- a/docs/reference/ds.glmerSLMA.html +++ b/docs/reference/ds.glmerSLMA.html @@ -1,10 +1,10 @@ -Fits Generalized Linear Mixed-Effect Models via Study-Level Meta-Analysis — ds.glmerSLMA • dsBaseClientFits Generalized Linear Mixed-Effect Models via Study-Level Meta-Analysis — ds.glmerSLMA • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,246 +46,228 @@

Fits Generalized Linear Mixed-Effect Models via Study-Level Meta-Analysis
-
ds.glmerSLMA(
-  formula = NULL,
-  offset = NULL,
-  weights = NULL,
-  combine.with.metafor = TRUE,
-  dataName = NULL,
-  checks = FALSE,
-  datasources = NULL,
-  family = NULL,
-  control_type = NULL,
-  control_value = NULL,
-  nAGQ = 1L,
-  verbose = 0,
-  start_theta = NULL,
-  start_fixef = NULL,
-  notify.of.progress = FALSE,
-  assign = FALSE,
-  newobj = NULL
-)
+
ds.glmerSLMA(
+  formula = NULL,
+  offset = NULL,
+  weights = NULL,
+  combine.with.metafor = TRUE,
+  dataName = NULL,
+  checks = FALSE,
+  datasources = NULL,
+  family = NULL,
+  control_type = NULL,
+  control_value = NULL,
+  nAGQ = 1L,
+  verbose = 0,
+  start_theta = NULL,
+  start_fixef = NULL,
+  notify.of.progress = FALSE,
+  assign = FALSE,
+  newobj = NULL
+)

Arguments

-
formula
-

an object of class formula describing the model to be fitted. + + +

formula
+

an object of class formula describing the model to be fitted. For more information see Details.

-
offset
+
offset

a character string specifying the name of a variable to be used as an offset.

-
weights
+
weights

a character string specifying the name of a variable containing prior regression weights for the fitting process.

-
combine.with.metafor
+
combine.with.metafor

logical. If TRUE the estimates and standard errors for each regression coefficient are pooled across studies using random-effects meta-analysis under maximum likelihood (ML), restricted maximum likelihood (REML) or fixed-effects meta-analysis (FE). Default TRUE.

-
dataName
+
dataName

a character string specifying the name of a data frame that contains all of the variables in the GLME formula. For more information see Details.

-
checks
-

logical. If TRUE ds.glmerSLMA checks the structural integrity +

checks
+

logical. If TRUE ds.glmerSLMA checks the structural integrity of the model. Default FALSE. For more information see Details.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

-
family
+
family

a character string specifying the distribution of the observed value of the outcome variable around the predictions generated by the linear predictor. -This can be set as "binomial" or "poisson". +This can be set as "binomial" or "poisson". For more information see Details.

-
control_type
+
control_type

an optional character string vector specifying the nature of a parameter (or parameters) to be modified in the convergence control options which can be viewed or -modified via the glmerControl function of the package lme4. +modified via the glmerControl function of the package lme4. For more information see Details.

-
control_value
+
control_value

numeric representing the new value which you want to allocate the -control parameter corresponding to the control-type. +control parameter corresponding to the control-type. For more information see Details.

-
nAGQ
-

an integer value indicating the number of points per axis for evaluating the adaptive +

nAGQ
+

an integer value indicating the number of points per axis for evaluating the adaptive Gauss-Hermite approximation to the log-likelihood. Defaults 1, corresponding to the Laplace approximation. For more information see R glmer function help.

-
verbose
+
verbose

an integer value. If \(verbose > 0\) the output is generated during the optimization of -the parameter estimates. If \(verbose > 1\) the output is generated during the individual penalized -iteratively reweighted least squares (PIRLS) steps. Default verbose +the parameter estimates. If \(verbose > 1\) the output is generated during the individual penalized +iteratively reweighted least squares (PIRLS) steps. Default verbose value is 0 which means no additional output.

-
start_theta
+
start_theta

a numeric vector of length equal to the number of random effects. Specify to retain more control over the optimisation. See glmer() for more details.

-
start_fixef
-

a numeric vector of length equal to the number of fixed effects (NB including the intercept). +

start_fixef
+

a numeric vector of length equal to the number of fixed effects (NB including the intercept). Specify to retain more control over the optimisation. See glmer() for more details.

-
notify.of.progress
+
notify.of.progress

specifies if console output should be produced to indicate progress. Default FALSE.

-
assign
+
assign

a logical, indicates whether the function will call a second server-side function (an assign) in order to save the regression outcomes (i.e. a glmerMod object) on each server. Default FALSE.

-
newobj
+
newobj

a character string specifying the name of the object to which the glmerMod object -representing the model fit on the serverside in each study is to be written. This argument is +representing the model fit on the serverside in each study is to be written. This argument is used only when the argument assign is set to TRUE. If no <newobj> argument is specified, the output object defaults to "new.glmer.obj".

Value

- - -

Many of the elements of the output list returned by ds.glmerSLMA are +

Many of the elements of the output list returned by ds.glmerSLMA are equivalent to those returned by the glmer() function in native R. However, potentially disclosive elements -such as individual-level residuals and linear predictor values are blocked. +such as individual-level residuals and linear predictor values are blocked. In this case, only non-disclosive elements are returned from each study separately.

- -

The list of elements returned by ds.glmerSLMA is mentioned below:

+

coefficients: a matrix with 5 columns:

First
+

: the names of all of the regression parameters (coefficients) in the model

+
second
+

: the estimated values

-

coefficients: a matrix with 5 columns:

-

-
  • First: the names of all of the regression parameters (coefficients) in the model

  • -
  • second: the estimated values

  • -
  • third: corresponding standard errors of the estimated values

  • -
  • fourth: the ratio of estimate/standard error

  • -
  • fifth: the p-value treating that as a standardised normal deviate

  • -

CorrMatrix: the correlation matrix of parameter estimates.

+
third
+

: corresponding standard errors of the estimated values

+
fourth
+

: the ratio of estimate/standard error

-

VarCovMatrix: the variance-covariance matrix of parameter estimates.

+
fifth
+

: the p-value treating that as a standardised normal deviate

+

CorrMatrix: the correlation matrix of parameter estimates.

+

VarCovMatrix: the variance-covariance matrix of parameter estimates.

weights: the vector (if any) holding regression weights.

- -

offset: the vector (if any) holding an offset.

- -

cov.scaled: equivalent to VarCovMatrix.

- -

Nmissing: the number of missing observations in the given study.

- -

Nvalid: the number of valid (non-missing) observations in the given study.

- - -

Ntotal: the total number of observations +

Ntotal: the total number of observations in the given study (Nvalid + Nmissing).

- -

data: equivalent to input parameter dataName (above).

- -

call: summary of key elements of the call to fit the model.

- -

Once the study-specific output has been returned, the function returns the number of elements relating to the pooling of estimates across studies via study-level meta-analysis. These are as follows:

- -

input.beta.matrix.for.SLMA: a matrix containing the vector of coefficient estimates from each study.

- -

input.se.matrix.for.SLMA: a matrix containing the vector of standard error estimates for coefficients from each study.

- -

SLMA.pooled.estimates: a matrix containing pooled estimates for each regression coefficient across all studies with pooling under SLMA via random-effects meta-analysis under maximum likelihood (ML), restricted maximum likelihood (REML) or via fixed-effects meta-analysis (FE).

- -

convergence.error.message: reports for each study whether the model converged. If it did not some information about the reason for this is reported.

Details

-

ds.glmerSLMA fits a generalized linear mixed-effects model (GLME) +

ds.glmerSLMA fits a generalized linear mixed-effects model (GLME) - e.g. a logistic or Poisson regression model including both fixed and random effects - - on data from single or multiple sources.

-

This function is similar to glmer function from lme4 package in native R.

-

When there are multiple data sources, the GLME is fitted to convergence + on data from single or multiple sources.

+

This function is similar to glmer function from lme4 package in native R.

+

When there are multiple data sources, the GLME is fitted to convergence in each data source independently. The estimates and standard errors returned to the client-side which enable cross-study pooling using Study-Level Meta-Analysis (SLMA). - The SLMA used by default metafor package - but as the SLMA occurs on the client-side (a standard R environment), the user can choose - any approach to meta-analysis. Additional information about fitting GLMEs - using glmer function can be obtained using R help for glmer and the lme4 package.

+ The SLMA used by default metafor package + but as the SLMA occurs on the client-side (a standard R environment), the user can choose + any approach to meta-analysis. Additional information about fitting GLMEs + using glmer function can be obtained using R help for glmer and the lme4 package.

In formula most shortcut notation allowed by glmer() function is -also allowed by ds.glmerSLMA. +also allowed by ds.glmerSLMA. Many GLMEs can be fitted very simply using a formula like: -$$y~a+b+(1|c)$$ -which simply means fit an GLME with y as the outcome variable (e.g. +\(y~a+b+(1|c)\) +which simply means fit an GLME with y as the outcome variable (e.g. a binary case-control using a logistic regression model or a count or a survival time using a Poisson regression model), a and b as fixed effects, and c as a random effect or grouping factor.

-

It is also possible to fit models with random slopes by specifying a model such as -$$y~a+b+(1+b|c)$$ +

It is also possible to fit models with random slopes by specifying a model such as +\(y~a+b+(1+b|c)\) where the effect of b can vary randomly between groups defined by c. Implicit nesting can be specified with formulas such as: \(y~a+b+(1|c/d)\) or \(y~a+b+(1|c)+(1|c:d)\).

The dataName argument avoids you having to specify the name of the -data frame in front of each covariate in the formula. -For example, if the data frame is called DataFrame you avoid having to write: -\(DataFrame$y~DataFrame$a+DataFrame$b+(1|DataFrame$c)\).

-

The checks argument verifies that the variables in the model are all defined (exist) +data frame in front of each covariate in the formula. +For example, if the data frame is called DataFrame you avoid having to write: +\(DataFrame\$y ~ DataFrame\$a + DataFrame\$b + (1 | DataFrame\$c)\).

+

The checks argument verifies that the variables in the model are all defined (exist) on the server-site at every study -and that they have the correct characteristics required to fit the model. +and that they have the correct characteristics required to fit the model. It is suggested to make checks argument TRUE if an unexplained problem in the model fit is encountered because the running process takes several minutes.

-

In the family argument can be specified two types of models to fit:

  • "binomial": logistic regression models

  • -
  • "poisson": poisson regression models

  • -

Note if you are fitting a gaussian model (a standard linear mixed -model) you should use ds.lmerSLMA and not ds.glmerSLMA. +

In the family argument can be specified two types of models to fit:

"binomial"
+

: logistic regression models

+ +
"poisson"
+

: poisson regression models

+ + +

Note if you are fitting a gaussian model (a standard linear mixed +model) you should use ds.lmerSLMA and not ds.glmerSLMA. For more information you can see R help for lmer and glmer.

In control_type at present only one such parameter can be modified, -namely the tolerance of the convergence criterion to the gradient of the log-likelihood +namely the tolerance of the convergence criterion to the gradient of the log-likelihood at the maximum likelihood achieved. We have enabled this because our practical experience suggests that in situations where the model looks to have converged with sensible parameter values but formal convergence is not being declared if we allow the model to be more @@ -312,81 +294,81 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see Wiki
-  # Connecting to the Opal servers
-  
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-  
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Select all rows without missing values
-  
-  ds.completeCases(x1 = "D", newobj = "D.comp", datasources = connections)
-  
-  # Fit a Poisson regression model
-  
-  ds.glmerSLMA(formula = "LAB_TSC ~ LAB_HDL + (1 | GENDER)",
-               offset = NULL,
-               dataName = "D.comp",
-               datasources = connections,
-               family = "poisson")
-               
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections)
-  
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CLUSTER.CLUSTER_SLO1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CLUSTER.CLUSTER_SLO2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CLUSTER.CLUSTER_SLO3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-   # Log onto the remote Opal training servers
-   connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
-                
-                
-     # Fit a Logistic regression model
-  
-  ds.glmerSLMA(formula = "Male ~  incid_rate +diabetes + (1 | age)",
-               dataName = "D",
-               datasources = connections[2],#only the second server is used (study2)
-               family = "binomial")
-  
-  
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-  }
-  
-
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see Wiki
+  # Connecting to the Opal servers
+  
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+  
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Select all rows without missing values
+  
+  ds.completeCases(x1 = "D", newobj = "D.comp", datasources = connections)
+  
+  # Fit a Poisson regression model
+  
+  ds.glmerSLMA(formula = "LAB_TSC ~ LAB_HDL + (1 | GENDER)",
+               offset = NULL,
+               dataName = "D.comp",
+               datasources = connections,
+               family = "poisson")
+               
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections)
+  
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CLUSTER.CLUSTER_SLO1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CLUSTER.CLUSTER_SLO2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CLUSTER.CLUSTER_SLO3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+   # Log onto the remote Opal training servers
+   connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
+                
+                
+     # Fit a Logistic regression model
+  
+  ds.glmerSLMA(formula = "Male ~  incid_rate +diabetes + (1 | age)",
+               dataName = "D",
+               datasources = connections[2],#only the second server is used (study2)
+               family = "binomial")
+  
+  
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+  } # }
+  
+
+
 
@@ -397,19 +379,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.heatmapPlot.html b/docs/reference/ds.heatmapPlot.html index 4817850d7..b45a3e6f6 100644 --- a/docs/reference/ds.heatmapPlot.html +++ b/docs/reference/ds.heatmapPlot.html @@ -1,9 +1,9 @@ -Generates a Heat Map plot — ds.heatmapPlot • dsBaseClientGenerates a Heat Map plot — ds.heatmapPlot • dsBaseClient - +
@@ -29,13 +29,13 @@

- +
@@ -44,85 +44,85 @@

Generates a Heat Map plot

-
ds.heatmapPlot(
-  x = NULL,
-  y = NULL,
-  type = "combine",
-  show = "all",
-  numints = 20,
-  method = "smallCellsRule",
-  k = 3,
-  noise = 0.25,
-  datasources = NULL
-)
+
ds.heatmapPlot(
+  x = NULL,
+  y = NULL,
+  type = "combine",
+  show = "all",
+  numints = 20,
+  method = "smallCellsRule",
+  k = 3,
+  noise = 0.25,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character string specifying the name of a numerical vector.

-
y
+
y

a character string specifying the name of a numerical vector.

-
type
+
type

a character string that represents the type of graph to display. -type argument can be set as 'combine' or 'split'. +type argument can be set as 'combine' or 'split'. Default 'combine'. For more information see Details.

-
show
-

a character string that represents where the plot should be focused. -show argument can be set as 'all' or 'zoomed'. -Default 'all'. +

show
+

a character string that represents where the plot should be focused. +show argument can be set as 'all' or 'zoomed'. +Default 'all'. For more information see Details.

-
numints
-

the number of intervals for a density grid object. +

numints
+

the number of intervals for a density grid object. Default numints value is 20.

-
method
-

a character string that defines which heat map will be created. -The method argument can be set as 'smallCellsRule', -'deterministic' or 'probabilistic'. -Default 'smallCellsRule'. +

method
+

a character string that defines which heat map will be created. +The method argument can be set as 'smallCellsRule', +'deterministic' or 'probabilistic'. +Default 'smallCellsRule'. For more information see Details.

-
k
-

the number of the nearest neighbours for which their centroid is calculated. -Default k value is 3. +

k
+

the number of the nearest neighbours for which their centroid is calculated. +Default k value is 3. For more information see Details.

-
noise
+
noise

the percentage of the initial variance that is used as the variance of the embedded -noise if the argument method is set to 'probabilistic'. +noise if the argument method is set to 'probabilistic'. Default noise value is 0.25. For more information see Details.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.heatmapPlot returns to the client-side a heat map plot and a message specifying +

ds.heatmapPlot returns to the client-side a heat map plot and a message specifying the number of invalid cells in each study.

Details

-

The ds.heatmapPlot function first generates a density grid +

The ds.heatmapPlot function first generates a density grid and uses it to plot the graph. Cells of the grid density matrix that hold a count of less than the filter set by DataSHIELD (usually 5) are considered invalid and turned into 0 to avoid potential @@ -130,37 +130,55 @@

Details

The ranges returned by each study and used in the process of getting the grid density matrix are not the exact minimum and maximum values but rather close approximates of the real minimum and maximum value. This was done to reduce the risk of potential disclosure.

-

In the argument type can be specified two types of graphics to display:

  • 'combine': a combined heat map plot is displayed

  • -
  • 'split': each heat map is plotted separately

  • -

In the argument show can be specified two options:

  • 'all': the ranges of the variables are used as plot limits

  • -
  • 'zoomed': the plot is zoomed to the region where the actual data are

  • -

In the argument method can be specified 3 different heat map to be created:

  • 'smallCellsRule': the heat map of the actual variables is - created but grids with low counts are replaced with grids with zero counts

  • -
  • 'deterministic': the heat map of the scaled centroids of each +

    In the argument type can be specified two types of graphics to display:

    'combine'
    +

    : a combined heat map plot is displayed

    + +
    'split'
    +

    : each heat map is plotted separately

    + + +

    In the argument show can be specified two options:

    'all'
    +

    : the ranges of the variables are used as plot limits

    + +
    'zoomed'
    +

    : the plot is zoomed to the region where the actual data are

    + + +

    In the argument method can be specified 3 different heat map to be created:

    'smallCellsRule'
    +

    : the heat map of the actual variables is + created but grids with low counts are replaced with grids with zero counts

    + +
    'deterministic'
    +

    : the heat map of the scaled centroids of each k nearest neighbours of the - original variables are created, where the value of k is set by the user

  • -
  • 'probabilistic': the heat map of 'noisy' variables is generated. - The added noise follows a normal distribution with + original variables are created, where the value of k is set by the user

    + +
    'probabilistic'
    +

    : the heat map of 'noisy' variables is generated. + The added noise follows a normal distribution with zero mean and variance equal to a percentage of - the initial variance of each input variable. + the initial variance of each input variable. This percentage is specified by the user in the - argument noise

  • -

In the k argument the user can choose any value for + argument noise

+ + + +

In the k argument the user can choose any value for k equal to or greater than the pre-specified threshold used as a disclosure control for this method and lower than the number of observations minus the value of this threshold. By default the value of k is set to be equal to 3 (we suggest k to be equal to, or bigger than, 3). Note that the function fails if the user -uses the default value but the study has set a bigger threshold. +uses the default value but the study has set a bigger threshold. The value of k is used only -if the argument method is set to 'deterministic'. +if the argument method is set to 'deterministic'. Any value of k is ignored if the argument method is set to 'probabilistic' or 'smallCellsRule'.

-

The value of noise is used only if the argument +

The value of noise is used only if the argument method is set to 'probabilistic'. -Any value of noise is ignored if the argument -method is set to 'deterministic' or 'smallCellsRule'. -The user can choose any value for noise equal +Any value of noise is ignored if the argument +method is set to 'deterministic' or 'smallCellsRule'. +The user can choose any value for noise equal to or greater than the pre-specified threshold 'nfilter.noise'.

Server function called: heatmapPlotDS

@@ -171,7 +189,7 @@

Author

Examples

- +
- - + + diff --git a/docs/reference/ds.hetcor.html b/docs/reference/ds.hetcor.html index 1cd051c6e..2373f7117 100644 --- a/docs/reference/ds.hetcor.html +++ b/docs/reference/ds.hetcor.html @@ -1,9 +1,9 @@ -Heterogeneous Correlation Matrix — ds.hetcor • dsBaseClientHeterogeneous Correlation Matrix — ds.hetcor • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,65 +44,65 @@

Heterogeneous Correlation Matrix

-
ds.hetcor(
-  data = NULL,
-  ML = TRUE,
-  std.err = TRUE,
-  bins = 4,
-  pd = TRUE,
-  use = "complete.obs",
-  datasources = NULL
-)
+
ds.hetcor(
+  data = NULL,
+  ML = TRUE,
+  std.err = TRUE,
+  bins = 4,
+  pd = TRUE,
+  use = "complete.obs",
+  datasources = NULL
+)

Arguments

-
data
+ + +
data

the name of a data frame consisting of factors, ordered factors, logical variables, character variables, and/or numeric variables, or the first of several variables.

-
ML
+
ML

if TRUE, compute maximum-likelihood estimates; if FALSE (default), compute quick two-step estimates.

-
std.err
+
std.err

if TRUE (default), compute standard errors.

-
bins
+
bins

number of bins to use for continuous variables in testing bivariate normality; the default is 4.

-
pd
+
pd

if TRUE (default) and if the correlation matrix is not positive-definite, an attempt will be made to adjust it to a positive-definite matrix, using the nearPD function in the Matrix package. Note that default arguments to nearPD are used (except corr=TRUE); for more control call nearPD directly.

-
use
+
use

if "complete.obs", remove observations with any missing data; if "pairwise.complete.obs", compute each correlation using all observations with valid data for that pair of variables.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

Returns an object of class "hetcor" from each study, with the following components: the +

Returns an object of class "hetcor" from each study, with the following components: the correlation matrix; the type of each correlation: "Pearson", "Polychoric", or "Polyserial"; the standard errors of the correlations, if requested; the number (or numbers) of observations on which -the correlations are based; p-values for tests of bivariate normality for each pair of variables; -the method by which any missing data were handled: "complete.obs" or "pairwise.complete.obs"; TRUE +the correlations are based; p-values for tests of bivariate normality for each pair of variables; +the method by which any missing data were handled: "complete.obs" or "pairwise.complete.obs"; TRUE for ML estimates, FALSE for two-step estimates.

@@ -124,19 +124,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.histogram.html b/docs/reference/ds.histogram.html index 5e55cee7c..329bbf7aa 100644 --- a/docs/reference/ds.histogram.html +++ b/docs/reference/ds.histogram.html @@ -1,9 +1,9 @@ -Generates a histogram plot — ds.histogram • dsBaseClientGenerates a histogram plot — ds.histogram • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,118 +44,134 @@

Generates a histogram plot

-
ds.histogram(
-  x = NULL,
-  type = "split",
-  num.breaks = 10,
-  method = "smallCellsRule",
-  k = 3,
-  noise = 0.25,
-  vertical.axis = "Frequency",
-  datasources = NULL
-)
+
ds.histogram(
+  x = NULL,
+  type = "split",
+  num.breaks = 10,
+  method = "smallCellsRule",
+  k = 3,
+  noise = 0.25,
+  vertical.axis = "Frequency",
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character string specifying the name of a numerical vector.

-
type
+
type

a character string that represents the type of graph to display. -The type argument can be set as 'combine' or 'split'. +The type argument can be set as 'combine' or 'split'. Default 'split'. For more information see Details.

-
num.breaks
+
num.breaks

a numeric specifying the number of breaks of the histogram. Default value is 10.

-
method
+
method

a character string that defines which histogram will be created. -The method argument can be set as 'smallCellsRule', -'deterministic' or 'probabilistic'. -Default 'smallCellsRule'. +The method argument can be set as 'smallCellsRule', +'deterministic' or 'probabilistic'. +Default 'smallCellsRule'. For more information see Details.

-
k
-

the number of the nearest neighbours for which their centroid is calculated. +

k
+

the number of the nearest neighbours for which their centroid is calculated. Default k value is 3. For more information see Details.

-
noise
+
noise

the percentage of the initial variance that is used as the variance of the embedded -noise if the argument method is set to 'probabilistic'. +noise if the argument method is set to 'probabilistic'. Default noise value is 0.25. For more information see Details.

-
vertical.axis,
+
vertical.axis,

a character string that defines what is shown in the vertical axis of the plot. The vertical.axis argument can be set as 'Frequency' or 'Density'. -Default 'Frequency'. +Default 'Frequency'. For more information see Details.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

one or more histogram objects and plots depending on the argument type

- - +

one or more histogram objects and plots depending on the argument type

Details

ds.histogram function allows the user to plot distinct histograms (one for each study) or a combined histogram that merges the single plots.

-

In the argument type can be specified two types of graphics to display:

  • 'combine': a histogram that merges the single plot is displayed.

  • -
  • 'split': each histogram is plotted separately.

  • -

In the argument method can be specified 3 different histograms to be created:

  • 'smallCellsRule': the histogram of the actual variable is - created but bins with low counts are removed.

  • -
  • 'deterministic': the histogram of the scaled centroids of each +

    In the argument type can be specified two types of graphics to display:

    'combine'
    +

    : a histogram that merges the single plot is displayed.

    + +
    'split'
    +

    : each histogram is plotted separately.

    + + +

    In the argument method can be specified 3 different histograms to be created:

    'smallCellsRule'
    +

    : the histogram of the actual variable is + created but bins with low counts are removed.

    + +
    'deterministic'
    +

    : the histogram of the scaled centroids of each k nearest neighbours of the original variable - where the value of k is set by the user.

  • -
  • 'probabilistic': the histogram shows the original distribution disturbed + where the value of k is set by the user.

    + +
    'probabilistic'
    +

    : the histogram shows the original distribution disturbed by the addition of random stochastic noise. The added noise follows a normal distribution with zero mean and - variance equal to a percentage of the initial variance of the input variable. - This percentage is specified by the user in the argument noise.

  • -

In the k argument the user can choose any value for k equal + variance equal to a percentage of the initial variance of the input variable. + This percentage is specified by the user in the argument noise.

+ + + +

In the k argument the user can choose any value for k equal to or greater than the pre-specified threshold used as a disclosure control for this method and lower than the number of observations minus the value of this threshold. By default the value of k is set to be equal to 3 (we suggest k to be equal to, or bigger than, 3). Note that the function fails if the user -uses the default value but the study has set a bigger threshold. -The value of k is used only if the argument -method is set to 'deterministic'. +uses the default value but the study has set a bigger threshold. +The value of k is used only if the argument +method is set to 'deterministic'. Any value of k is ignored if the argument method is set to 'probabilistic' or 'smallCellsRule'.

-

In the noise argument the percentage of the initial variance +

In the noise argument the percentage of the initial variance that is used as the variance of the embedded -noise if the argument method is set to 'probabilistic'. -Any value of noise is ignored if the argument -method is set to 'deterministic' or 'smallCellsRule'. -The user can choose any value for noise equal to or greater +noise if the argument method is set to 'probabilistic'. +Any value of noise is ignored if the argument +method is set to 'deterministic' or 'smallCellsRule'. +The user can choose any value for noise equal to or greater than the pre-specified threshold 'nfilter.noise'. By default the value of noise is set to be equal to 0.25.

-

In the argument vertical.axis can be specified two types of histograms:

  • 'Frequency': the histogram of the frequencies - is returned.

  • -
  • 'Density': the histogram of the densities - is returned.

  • -

Server function called: histogramDS2

+

In the argument vertical.axis can be specified two types of histograms:

'Frequency'
+

: the histogram of the frequencies + is returned.

+ +
'Density'
+

: the histogram of the densities + is returned.

+ + +

Server function called: histogramDS2

Author

@@ -164,7 +180,7 @@

Author

Examples

- +
- - + + diff --git a/docs/reference/ds.igb_standards.html b/docs/reference/ds.igb_standards.html index 35b830ecf..1f04c7bbb 100644 --- a/docs/reference/ds.igb_standards.html +++ b/docs/reference/ds.igb_standards.html @@ -1,9 +1,9 @@ -Converts birth measurements to intergrowth z-scores/centiles — ds.igb_standards • dsBaseClientConverts birth measurements to intergrowth z-scores/centiles — ds.igb_standards • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,84 +44,90 @@

Converts birth measurements to intergrowth z-scores/centiles

-
ds.igb_standards(
-  gagebrth = NULL,
-  z = 0,
-  p = 50,
-  val = NULL,
-  var = NULL,
-  sex = NULL,
-  fun = "igb_value2zscore",
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.igb_standards(
+  gagebrth = NULL,
+  z = 0,
+  p = 50,
+  val = NULL,
+  var = NULL,
+  sex = NULL,
+  fun = "igb_value2zscore",
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
gagebrth
+ + +
gagebrth

the name of the "gestational age at birth in days" variable.

-
z
+
z

z-score(s) to convert (must be between 0 and 1). Default value is 0. This value is used only if fun is set to "igb_zscore2value".

-
p
-

centile(s) to convert (must be between 0 and 100). Default value is p=50. +

p
+

centile(s) to convert (must be between 0 and 100). Default value is p=50. This value is used only if fun is set to "igb_centile2value".

-
val
+
val

the name of the anthropometric variable to convert.

-
var
+
var

the name of the measurement to convert ("lencm", "wtkg", "hcircm", "wlr").

-
sex
+
sex

the name of the sex factor variable. The variable should be coded as Male/Female. -If it is coded differently (e.g. 0/1), then you can use the ds.recodeValues function to +If it is coded differently (e.g. 0/1), then you can use the ds.recodeValues function to recode the categories to Male/Female before the use of ds.igb_standards.

-
fun
+
fun

the name of the function to be used. This can be one of: "igb_centile2value", "igb_zscore2value", "igb_value2zscore" (default), "igb_value2centile".

-
newobj
+
newobj

a character string that provides the name for the output variable that is stored on the data servers. Default name is set to igb.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

assigns the converted measurement as a new object on the server-side

+

assigns the converted measurement as a new object on the server-side

Note

-

For gestational ages between 24 and 33 weeks, the INTERGROWTH very early preterm +

For gestational ages between 24 and 33 weeks, the INTERGROWTH very early preterm standard is used.

References

-

International standards for newborn weight, length, and head circumference by -gestational age and sex: the Newborn Cross-Sectional Study of the INTERGROWTH-21st Project -Villar, José et al. The Lancet, Volume 384, Issue 9946, 857-868 -INTERGROWTH-21st very preterm size at birth reference charts. Lancet 2016 -doi.org/10.1016/S0140-6736(16) 00384-6. Villar, José et al.

-
+ +
  • Villar, J., Ismail, L.C., Victora, C.G., Ohuma, E.O., Bertino, E., + Altman, D.G., Lambert, A., Papageorghiou, A.T., Carvalho, M., Jaffer, Y.A., + Gravett, M.G., Purwar, M., Frederick, I.O., Noble, A.J., Pang, R., Barros, + F.C., Chumlea, C., Bhutta, Z.A., Kennedy, S.H., 2014. International + standards for newborn weight, length, and head circumference by gestational + age and sex: the Newborn Cross-Sectional Study of the INTERGROWTH-21st + Project. The Lancet 384, 857–868. https://doi.org/10.1016/S0140-6736(14)60932-6

  • +
  • Villar, J., Giuliani, F., Fenton, T.R., Ohuma, E.O., Ismail, L.C., + Kennedy, S.H., 2016. INTERGROWTH-21st very preterm size at birth reference + charts. The Lancet 387, 844–845. https://doi.org/10.1016/S0140-6736(16)00384-6

  • +

Author

Demetris Avraam for DataSHIELD Development Team

@@ -135,19 +141,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.isNA.html b/docs/reference/ds.isNA.html index 10b1266a2..7fa98292c 100644 --- a/docs/reference/ds.isNA.html +++ b/docs/reference/ds.isNA.html @@ -1,10 +1,10 @@ -Checks if a server-side vector is empty — ds.isNA • dsBaseClientChecks if a server-side vector is empty — ds.isNA • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,26 +46,26 @@

Checks if a server-side vector is empty

-
ds.isNA(x = NULL, datasources = NULL)
+
ds.isNA(x = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the name of the vector to check.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.isNA returns a boolean. If it is TRUE the vector is empty +

ds.isNA returns a boolean. If it is TRUE the vector is empty (all values are NA), FALSE otherwise.

@@ -82,45 +82,45 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # check if all the observation of the variable 'LAB_HDL' are missing (NA)
-  ds.isNA(x = 'D$LAB_HDL',
-          datasources = connections) #all servers are used
-  ds.isNA(x = 'D$LAB_HDL',
-          datasources = connections[1]) #only the first server is used (study1) 
- 
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # check if all the observation of the variable 'LAB_HDL' are missing (NA)
+  ds.isNA(x = 'D$LAB_HDL',
+          datasources = connections) #all servers are used
+  ds.isNA(x = 'D$LAB_HDL',
+          datasources = connections[1]) #only the first server is used (study1) 
+ 
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+
+} # }
+
 
@@ -131,19 +131,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.isValid.html b/docs/reference/ds.isValid.html index 72b269eb7..3ad491a01 100644 --- a/docs/reference/ds.isValid.html +++ b/docs/reference/ds.isValid.html @@ -1,10 +1,10 @@ -Checks if a server-side object is valid — ds.isValid • dsBaseClientChecks if a server-side object is valid — ds.isValid • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,26 +46,26 @@

Checks if a server-side object is valid

-
ds.isValid(x = NULL, datasources = NULL)
+
ds.isValid(x = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the name of a vector, dataframe or matrix.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.isValid returns a boolean. If it is TRUE input object is valid, FALSE otherwise.

+

ds.isValid returns a boolean. If it is TRUE input object is valid, FALSE otherwise.

Details

@@ -83,44 +83,44 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Check if the dataframe assigned above is valid
-  ds.isValid(x = 'D',
-             datasources = connections) #all servers are used
-  ds.isValid(x = 'D',
-             datasources = connections[2]) #only the second server is used (study2)
- 
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Check if the dataframe assigned above is valid
+  ds.isValid(x = 'D',
+             datasources = connections) #all servers are used
+  ds.isValid(x = 'D',
+             datasources = connections[2]) #only the second server is used (study2)
+ 
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+
+} # }
+
 
@@ -131,19 +131,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.kurtosis.html b/docs/reference/ds.kurtosis.html index 347a29c9a..8c2012bea 100644 --- a/docs/reference/ds.kurtosis.html +++ b/docs/reference/ds.kurtosis.html @@ -1,9 +1,9 @@ -Calculates the kurtosis of a numeric variable — ds.kurtosis • dsBaseClientCalculates the kurtosis of a numeric variable — ds.kurtosis • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,44 +44,44 @@

Calculates the kurtosis of a numeric variable

-
ds.kurtosis(x = NULL, method = 1, type = "both", datasources = NULL)
+
ds.kurtosis(x = NULL, method = 1, type = "both", datasources = NULL)

Arguments

-
x
+ + +
x

a string character, the name of a numeric variable.

-
method
+
method

an integer between 1 and 3 selecting one of the algorithms for computing kurtosis detailed below. The default value is set to 1.

-
type
-

a character which represents the type of analysis to carry out. -If type is set to 'combine', 'combined', 'combines' or 'c', the global kurtosis is returned +

type
+

a character which represents the type of analysis to carry out. +If type is set to 'combine', 'combined', 'combines' or 'c', the global kurtosis is returned if type is set to 'split', 'splits' or 's', the kurtosis is returned separately for each study. if type is set to 'both' or 'b', both sets of outputs are produced. The default value is set to 'both'.

-
datasources
-

a list of DSConnection-class objects obtained after login. -If the datasources argument is not specified +

datasources
+

a list of DSConnection-class objects obtained after login. +If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

a matrix showing the kurtosis of the input numeric variable, the number of valid observations and +

a matrix showing the kurtosis of the input numeric variable, the number of valid observations and the validity message.

Details

-

The function calculates the kurtosis of an input variable x with three different methods. +

The function calculates the kurtosis of an input variable x with three different methods. The method is specified by the argument method. If x contains any missings, the function removes those before the calculation of the kurtosis. If method is set to 1 the following formula is used \( kurtosis= \frac{\sum_{i=1}^{N} (x_i - \bar(x))^4 /N}{(\sum_{i=1}^{N} ((x_i - \bar(x))^2) /N)^(2) } - 3\), @@ -103,19 +103,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.length.html b/docs/reference/ds.length.html index bea301fca..f71fef07e 100644 --- a/docs/reference/ds.length.html +++ b/docs/reference/ds.length.html @@ -1,11 +1,11 @@ -Gets the length of an object in the server-side — ds.length • dsBaseClientGets the length of an object in the server-side — ds.length • dsBaseClient - +
@@ -31,60 +31,60 @@
- +

This function gets the length of a vector -or list that is stored on the server-side. +or list that is stored on the server-side. This function is similar to the R function length.

-
ds.length(x = NULL, type = "both", checks = "FALSE", datasources = NULL)
+
ds.length(x = NULL, type = "both", checks = "FALSE", datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the name of a vector or list.

-
type
+
type

a character that represents the type of analysis to carry out. If type is set to 'combine', 'combined', 'combines' or 'c', a global length is returned -if type is set to 'split', 'splits' or 's', +if type is set to 'split', 'splits' or 's', the length is returned separately for each study. -if type is set to 'both' or 'b', -both sets of outputs are produced. +if type is set to 'both' or 'b', +both sets of outputs are produced. Default 'both'.

-
checks
-

logical. If TRUE the model components are checked. +

checks
+

logical. If TRUE the model components are checked. Default FALSE to save time. It is suggested that checks should only be undertaken once the function call has failed.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.length returns to the client-side the pooled length of a vector or a list, +

ds.length returns to the client-side the pooled length of a vector or a list, or the length of a vector or a list for each study separately.

@@ -98,55 +98,55 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Example 1: Get the total number of observations of the vector of
-  # variable 'LAB_TSC' across all the studies
-  ds.length(x = 'D$LAB_TSC', 
-            type = 'combine',
-            datasources = connections)
-
-  # Example 2: Get the number of observations of the vector of variable
-  # 'LAB_TSC' for each study separately
-  ds.length(x = 'D$LAB_TSC',
-            type = 'split',
-            datasources = connections)
-
-  # Example 3: Get the number of observations on each study and the total
-  # number of observations across all the studies for the variable 'LAB_TSC'
-  ds.length(x = 'D$LAB_TSC',
-            type = 'both',
-            datasources = connections)
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Example 1: Get the total number of observations of the vector of
+  # variable 'LAB_TSC' across all the studies
+  ds.length(x = 'D$LAB_TSC', 
+            type = 'combine',
+            datasources = connections)
+
+  # Example 2: Get the number of observations of the vector of variable
+  # 'LAB_TSC' for each study separately
+  ds.length(x = 'D$LAB_TSC',
+            type = 'split',
+            datasources = connections)
+
+  # Example 3: Get the number of observations on each study and the total
+  # number of observations across all the studies for the variable 'LAB_TSC'
+  ds.length(x = 'D$LAB_TSC',
+            type = 'both',
+            datasources = connections)
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
+
 
@@ -157,19 +157,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.levels.html b/docs/reference/ds.levels.html index 34ed926f3..c2f42b3c1 100644 --- a/docs/reference/ds.levels.html +++ b/docs/reference/ds.levels.html @@ -1,11 +1,11 @@ -Produces levels attributes of a server-side factor — ds.levels • dsBaseClientProduces levels attributes of a server-side factor — ds.levels • dsBaseClient - +
@@ -31,43 +31,43 @@
- +

This function provides access to the level attribute of -a factor variable stored on the server-side. +a factor variable stored on the server-side. This function is similar to R function levels.

-
ds.levels(x = NULL, datasources = NULL)
+
ds.levels(x = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the name of a factor variable.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.levels returns to the client-side the levels of a factor +

ds.levels returns to the client-side the levels of a factor class variable stored in the server-side.

@@ -81,48 +81,48 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Example 1: Get the levels of the PM_BMI_CATEGORICAL variable
-  ds.levels(x = 'D$PM_BMI_CATEGORICAL',
-            datasources = connections)#all servers are used
-  ds.levels(x = 'D$PM_BMI_CATEGORICAL',
-            datasources = connections[2])#only the second server is used (study2)
-
-  # Example 2: Get the levels of the LAB_TSC variable
-  # This example should not work because LAB_TSC is a continuous variable
-  ds.levels(x = 'D$LAB_TSC',
-            datasources = connections)
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-
-}
-
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Example 1: Get the levels of the PM_BMI_CATEGORICAL variable
+  ds.levels(x = 'D$PM_BMI_CATEGORICAL',
+            datasources = connections)#all servers are used
+  ds.levels(x = 'D$PM_BMI_CATEGORICAL',
+            datasources = connections[2])#only the second server is used (study2)
+
+  # Example 2: Get the levels of the LAB_TSC variable
+  # This example should not work because LAB_TSC is a continuous variable
+  ds.levels(x = 'D$LAB_TSC',
+            datasources = connections)
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+
+} # }
+
 
@@ -133,19 +133,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.lexis.html b/docs/reference/ds.lexis.html index f9eb977a5..2f5e87db8 100644 --- a/docs/reference/ds.lexis.html +++ b/docs/reference/ds.lexis.html @@ -1,11 +1,11 @@ -Represents follow-up in multiple states on multiple time scales — ds.lexis • dsBaseClientRepresents follow-up in multiple states on multiple time scales — ds.lexis • dsBaseClient - +
@@ -31,13 +31,13 @@
- +
@@ -48,72 +48,72 @@

Represents follow-up in multiple states on multiple time scales

-
ds.lexis(
-  data = NULL,
-  intervalWidth = NULL,
-  idCol = NULL,
-  entryCol = NULL,
-  exitCol = NULL,
-  statusCol = NULL,
-  variables = NULL,
-  expandDF = NULL,
-  datasources = NULL
-)
+
ds.lexis(
+  data = NULL,
+  intervalWidth = NULL,
+  idCol = NULL,
+  entryCol = NULL,
+  exitCol = NULL,
+  statusCol = NULL,
+  variables = NULL,
+  expandDF = NULL,
+  datasources = NULL
+)

Arguments

-
data
+ + +
data

a character string specifying the name of a data frame containing the survival data to be expanded.

-
intervalWidth
+
intervalWidth

a numeric vector specifying the length of each interval. For more information see Details.

-
idCol
+
idCol

a character string denoting the column name that holds the individual IDs of the subjects. For more information see Details.

-
entryCol
+
entryCol

a character string denoting the column name that holds the entry times (i.e. start of follow up). For more information see Details.

-
exitCol
+
exitCol

a character string denoting the column name that holds the exit times (i.e. end of follow up). For more information see Details.

-
statusCol
+
statusCol

a character string denoting the column name that holds the failure/censoring status of each subject. For more information see Details.

-
variables
+
variables

a vector of character strings denoting the column names of additional variables to include in the final expanded table. For more information see Details.

-
expandDF
+
expandDF

a character string denoting the name of the new data frame containing the expanded data set. Default lexis.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. -If the datasources argument is not specified +

datasources
+

a list of DSConnection-class objects obtained after login. +If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.lexis returns to the server-side a data frame for each study with +

ds.lexis returns to the server-side a data frame for each study with the expanded version of the input table.

@@ -124,10 +124,10 @@

Details

interval in which a given individual is followed - regardless of how short or long that period may be. Each row includes:
(1) CENSOR: a variable indicating failure status for a particular -interval in that interval also known as censoring status. +interval in that interval also known as censoring status. This variable can take two values: 1 representing that the patient has died, relapsed or developed a -disease. 0 representing the lost-to-follow-up +disease. 0 representing the lost-to-follow-up or passed right through the interval without failing.
(2) SURVTIME an exposure-time variable indicating the duration of exposure-to-risk-of-failure the corresponding individual experienced in that interval before he/she failed or was censored.

@@ -140,11 +140,11 @@

Details

failure-status variable will take the value 0 in both intervals.
(3) UID.expanded the expanded data set also includes a unique ID in a form such as 77.13 which identifies that row of the -dataset as relating to the 77th individual in the input data set and his/her experience +dataset as relating to the 77th individual in the input data set and his/her experience (exposure-time and failure status)in the -14th interval. Note that .N indicates the (N+1)th interval because +14th interval. Note that .N indicates the (N+1)th interval because interval 1 has no suffix.
-(4) IDSEQ the first part of UID.expanded (before the '.'). +(4) IDSEQ the first part of UID.expanded (before the '.'). The value of this variable is repeated in every row to which the corresponding individual contributes data (i.e. to every row corresponding to an interval in which that individual was followed).
@@ -154,7 +154,7 @@

Details

the sum of the exposure times across all intervals) and the ultimate failure-status in the final interval to which they were exposed. The value of each of these variables is also repeated in every row corresponding to an interval in which that individual was followed.

-

In intervalWidth argument if the total sum of the duration across all intervals is less +

In intervalWidth argument if the total sum of the duration across all intervals is less than the maximum follow-up of any individual in any contributing study, a final interval will be added by ds.lexis extending from the end of the last interval specified to the maximum follow-up time. If a single numeric value is specified @@ -162,39 +162,39 @@

Details

maximum follow-up time in any single study is exceeded. This argument is subject to disclosure checks.

The idCol argument must be a numeric or character. Note that when a particular variable is -identified as being the main ID to the data repository when the data are first transferred +identified as being the main ID to the data repository when the data are first transferred to the data repository (i.e. before DataSHIELD is used), that ID often ends up being of class character and will then be sorted in -alphabetic order (treating each digit as a character) rather than numeric. +alphabetic order (treating each digit as a character) rather than numeric. For example, containing the sequential IDs 1-1000, the order of the IDs will be:
1,10,100,101,102,103,104,105,106,107,108,109,11 ...
In an alphabetic listing: NOT to the expected order:
1,2,3,4,5,6,7,8,9,10,11,12,13 ...

This alphabetic order or the ID listing will then carry forward to the -expanded dataset. But the nature and order of the original ID +expanded dataset. But the nature and order of the original ID variable held in idCol doesn't -matter to ds.lexis. Provided every individual appears only once -in the original data set (before expansion) the order does not matter because +matter to ds.lexis. Provided every individual appears only once +in the original data set (before expansion) the order does not matter because ds.lexis works on its unique numeric vector -that is allocated from 1:M (where there are M individuals) +that is allocated from 1:M (where there are M individuals) in whatever order they appear in the original dataset.

in entryCol argument rather than using a total survival time variable to identify the -intervals to which any given individual is exposed, ds.lexis -requires an initial entry time and a final exit time. If the data you wish to expand +intervals to which any given individual is exposed, ds.lexis +requires an initial entry time and a final exit time. If the data you wish to expand contain only a total survival time variable and every individual starts follow-up at time 0, the entry times should all -be specified as zero, and the exit times as the total survival time. -So, entryCol should either be the name of the column +be specified as zero, and the exit times as the total survival time. +So, entryCol should either be the name of the column holding the entry time of each individual or else if no entryCol is -specified it will be defaulted to zero anyway and put into a variable +specified it will be defaulted to zero anyway and put into a variable called starttime in the expanded data set.

-

In exitCol argument, if the entry times (entryCol) are set, +

In exitCol argument, if the entry times (entryCol) are set, or defaulted, to zero, the exitCol variable should contain the total survival times.

If variables argument is not set (is -null) but the data argument is set, the expanded data -set will contain all variables in the data frame identified by the data argument. -If neither the data or -variables arguments are set, the expanded data set will only include the ID, +null) but the data argument is set, the expanded data +set will contain all variables in the data frame identified by the data argument. +If neither the data or +variables arguments are set, the expanded data set will only include the ID, exposure time and failure/censoring status variables which may still be useful for plotting survival data once these become available.

This function is particularly meant to be used in preparing data for a piecewise @@ -207,10 +207,10 @@

Details

number of intervals will inevitably improve the approximation to the true baseline hazard, but the addition of many more unnecessary time intervals slows the analysis and can become disclosive and yet will not improve the fit of the model.

-

If the number of failures in one or more +

If the number of failures in one or more periods in a given study is less than the specified disclosure filter determining minimum -acceptable cell size in a table (nfilter.tab) -then the expanded data frame is not created in that study, and a study-side message +acceptable cell size in a table (nfilter.tab) +then the expanded data frame is not created in that study, and a study-side message to this effect is made available in that study via ds.message() function.

Server functions called: lexisDS1, lexisDS2 and lexisDS3

@@ -226,68 +226,68 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see Wiki
-  # Connecting to the Opal servers
-  
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-  
-  # Example 1: Fitting GLM for survival analysis
-  # For this analysis we need to load survival data from the server 
-  
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Example 1: Create the expanded data frame. 
-  #The survival time intervals are to be 0<t<=2.5; 2.5<t<=5.0, 5.0<t<=7.5, 
-  #up to the final interval of duration 2.5
-  #that includes the maximum survival time. 
-
-  ds.lexis(data = "D", 
-           intervalWidth = 2.5,
-           idCol = "D$id",
-           entryCol = "D$starttime",
-           exitCol = "D$endtime",
-           statusCol = "D$cens",
-           expandDF = "EM.new",
-           datasources = connections)
-           
-  #Confirm that the expanded data frame has been ceated
-  ds.ls(datasources = connections) 
-  #Example 2: Create the expanded data frame. 
-  #The survival time intervals are to be 0<t<=1; 1<t<=2.0, 2.0<t<=5.0, 5.0<t<=11.0,
-  
-  ds.lexis(data = "D",
-           intervalWidth = c(1,1,3,6), 
-           idCol = "D$id",
-           entryCol = "D$starttime", 
-           exitCol = "D$endtime", 
-           statusCol = "D$cens",
-           expandDF = "EM.new2",
-           datasources = connections)
-           
-  #Confirm expanded dataframe created
-  ds.ls(datasources = connections) 
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see Wiki
+  # Connecting to the Opal servers
+  
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+  
+  # Example 1: Fitting GLM for survival analysis
+  # For this analysis we need to load survival data from the server 
+  
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Example 1: Create the expanded data frame. 
+  #The survival time intervals are to be 0<t<=2.5; 2.5<t<=5.0, 5.0<t<=7.5, 
+  #up to the final interval of duration 2.5
+  #that includes the maximum survival time. 
+
+  ds.lexis(data = "D", 
+           intervalWidth = 2.5,
+           idCol = "D$id",
+           entryCol = "D$starttime",
+           exitCol = "D$endtime",
+           statusCol = "D$cens",
+           expandDF = "EM.new",
+           datasources = connections)
+           
+  #Confirm that the expanded data frame has been ceated
+  ds.ls(datasources = connections) 
+  #Example 2: Create the expanded data frame. 
+  #The survival time intervals are to be 0<t<=1; 1<t<=2.0, 2.0<t<=5.0, 5.0<t<=11.0,
+  
+  ds.lexis(data = "D",
+           intervalWidth = c(1,1,3,6), 
+           idCol = "D$id",
+           entryCol = "D$starttime", 
+           exitCol = "D$endtime", 
+           statusCol = "D$cens",
+           expandDF = "EM.new2",
+           datasources = connections)
+           
+  #Confirm expanded dataframe created
+  ds.ls(datasources = connections) 
+} # }
+
 
@@ -298,19 +298,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.list.html b/docs/reference/ds.list.html index a1cd9ae69..b6acfeb3c 100644 --- a/docs/reference/ds.list.html +++ b/docs/reference/ds.list.html @@ -1,9 +1,9 @@ -Constructs a list of objects in the server-side — ds.list • dsBaseClientConstructs a list of objects in the server-side — ds.list • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,31 +44,31 @@

Constructs a list of objects in the server-side

-
ds.list(x = NULL, newobj = NULL, datasources = NULL)
+
ds.list(x = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the names of the objects to coerce into a list.

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default list.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.list returns a list of objects for each study that is stored on the server-side.

+

ds.list returns a list of objects for each study that is stored on the server-side.

Details

@@ -83,42 +83,42 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
- # combine the 'LAB_TSC' and 'LAB_HDL' variables into a list
- myobjects <- c('D$LAB_TSC', 'D$LAB_HDL')
- ds.list(x = myobjects,
-         newobj = "new.list",
-         datasources = connections)
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+ # combine the 'LAB_TSC' and 'LAB_HDL' variables into a list
+ myobjects <- c('D$LAB_TSC', 'D$LAB_HDL')
+ ds.list(x = myobjects,
+         newobj = "new.list",
+         datasources = connections)
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
+
 
@@ -129,19 +129,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.listClientsideFunctions.html b/docs/reference/ds.listClientsideFunctions.html index 4c7fc9dde..46871aa7d 100644 --- a/docs/reference/ds.listClientsideFunctions.html +++ b/docs/reference/ds.listClientsideFunctions.html @@ -1,9 +1,9 @@ -Lists client-side functions — ds.listClientsideFunctions • dsBaseClientLists client-side functions — ds.listClientsideFunctions • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,14 +44,12 @@

Lists client-side functions

-
ds.listClientsideFunctions()
+
ds.listClientsideFunctions()

Value

- - -

ds.listClientsideFunctions returns a list containing +

ds.listClientsideFunctions returns a list containing all server-side functions.

@@ -68,16 +66,16 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  #Library with all DataSHIELD functions
-  require('dsBaseClient')
-  
-  #Visualise all functions
-  ds.listClientsideFunctions()
-  
-}   
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  #Library with all DataSHIELD functions
+  require('dsBaseClient')
+  
+  #Visualise all functions
+  ds.listClientsideFunctions()
+  
+} # }   
 
@@ -88,19 +86,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.listDisclosureSettings.html b/docs/reference/ds.listDisclosureSettings.html index 6b9f4364c..205f07cfa 100644 --- a/docs/reference/ds.listDisclosureSettings.html +++ b/docs/reference/ds.listDisclosureSettings.html @@ -1,9 +1,9 @@ -Lists disclosure settings — ds.listDisclosureSettings • dsBaseClientLists disclosure settings — ds.listDisclosureSettings • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,22 +44,22 @@

Lists disclosure settings

-
ds.listDisclosureSettings(datasources = NULL)
+
ds.listDisclosureSettings(datasources = NULL)

Arguments

-
datasources
-

a list of DSConnection-class objects obtained after login. + + +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.listDisclosureSettings returns a list containing the current settings of the +

ds.listDisclosureSettings returns a list containing the current settings of the nfilters in each study specified.

@@ -113,45 +113,45 @@

Author

Examples

-
if (FALSE) {
- 
-  ## Version 6, for version 5 see Wiki
-  # Connecting to the Opal servers
-  
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
-  
-  # Call to list current disclosure settings in all data repository servers 
-  
-  ds.listDisclosureSettings(datasources = connections)
-  
-  # Restrict call to list disclosure settings only to the first, or second DS connection (study)
-  
-  ds.listDisclosureSettings(datasources = connections[1]) 
-  ds.listDisclosureSettings(datasources = connections[2])
-            
-   # Clear the Datashield R sessions and logout  
-   datashield.logout(connections) 
-}
-
+    
if (FALSE) { # \dontrun{
+ 
+  ## Version 6, for version 5 see Wiki
+  # Connecting to the Opal servers
+  
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
+  
+  # Call to list current disclosure settings in all data repository servers 
+  
+  ds.listDisclosureSettings(datasources = connections)
+  
+  # Restrict call to list disclosure settings only to the first, or second DS connection (study)
+  
+  ds.listDisclosureSettings(datasources = connections[1]) 
+  ds.listDisclosureSettings(datasources = connections[2])
+            
+   # Clear the Datashield R sessions and logout  
+   datashield.logout(connections) 
+} # }
+
 
@@ -162,19 +162,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.listOpals.html b/docs/reference/ds.listOpals.html index c8fc2ecb2..977c9afc3 100644 --- a/docs/reference/ds.listOpals.html +++ b/docs/reference/ds.listOpals.html @@ -1,10 +1,10 @@ -Lists all Opal objects in the analytic environment — ds.listOpals • dsBaseClientLists all Opal objects in the analytic environment — ds.listOpals • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,14 +46,12 @@

Lists all Opal objects in the analytic environment

-
ds.listOpals()
+
ds.listOpals()

Value

- - -

Lists all of the sets of Opals currently found in the analytic environment and advises +

Lists all of the sets of Opals currently found in the analytic environment and advises the user how best to respond depending whether there are zero, one or multiple Opals detected.

@@ -82,19 +80,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.listServersideFunctions.html b/docs/reference/ds.listServersideFunctions.html index 206d7c6d6..9dd4dc556 100644 --- a/docs/reference/ds.listServersideFunctions.html +++ b/docs/reference/ds.listServersideFunctions.html @@ -1,9 +1,9 @@ -Lists server-side functions — ds.listServersideFunctions • dsBaseClientLists server-side functions — ds.listServersideFunctions • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,30 +44,30 @@

Lists server-side functions

-
ds.listServersideFunctions(datasources = NULL)
+
ds.listServersideFunctions(datasources = NULL)

Arguments

-
datasources
-

a list of DSConnection-class objects obtained after login. + + +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.listServersideFunctions returns to the client-side -a list containing all server-side functions separately for each study. +

ds.listServersideFunctions returns to the client-side +a list containing all server-side functions separately for each study. Firstly lists assign and then aggregate functions.

Details

Uses datashield.methods function from DSI package to list all assign and aggregate functions on the available data repository servers. -The only choice of arguments is in datasources; i.e. which studies to interrogate. +The only choice of arguments is in datasources; i.e. which studies to interrogate. Once the studies have been selected ds.listServersideFunctions lists all assign functions for all of these studies and then all aggregate functions for all of them.

@@ -80,40 +80,40 @@

Author

Examples

-
if (FALSE) {
- 
-  ## Version 6, for version 5 see Wiki
-  # Connecting to the Opal servers
-  
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
-  
-  # List server-side functions
-  
-  ds.listServersideFunctions(datasources = connections)
-            
-  # Clear the Datashield R sessions and logout  
-  datashield.logout(connections) 
-}
+    
if (FALSE) { # \dontrun{
+ 
+  ## Version 6, for version 5 see Wiki
+  # Connecting to the Opal servers
+  
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
+  
+  # List server-side functions
+  
+  ds.listServersideFunctions(datasources = connections)
+            
+  # Clear the Datashield R sessions and logout  
+  datashield.logout(connections) 
+} # }
 
@@ -124,19 +124,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.lmerSLMA.html b/docs/reference/ds.lmerSLMA.html index c1467e253..acc1a8257 100644 --- a/docs/reference/ds.lmerSLMA.html +++ b/docs/reference/ds.lmerSLMA.html @@ -1,11 +1,11 @@ -Fits Linear Mixed-Effect model via Study-Level Meta-Analysis — ds.lmerSLMA • dsBaseClientFits Linear Mixed-Effect model via Study-Level Meta-Analysis — ds.lmerSLMA • dsBaseClient - +
@@ -31,13 +31,13 @@
- +
@@ -48,196 +48,169 @@

Fits Linear Mixed-Effect model via Study-Level Meta-Analysis

-
ds.lmerSLMA(
-  formula = NULL,
-  offset = NULL,
-  weights = NULL,
-  combine.with.metafor = TRUE,
-  dataName = NULL,
-  checks = FALSE,
-  datasources = NULL,
-  REML = TRUE,
-  control_type = NULL,
-  control_value = NULL,
-  optimizer = NULL,
-  verbose = 0,
-  notify.of.progress = FALSE,
-  assign = FALSE,
-  newobj = NULL
-)
+
ds.lmerSLMA(
+  formula = NULL,
+  offset = NULL,
+  weights = NULL,
+  combine.with.metafor = TRUE,
+  dataName = NULL,
+  checks = FALSE,
+  datasources = NULL,
+  REML = TRUE,
+  control_type = NULL,
+  control_value = NULL,
+  optimizer = NULL,
+  verbose = 0,
+  notify.of.progress = FALSE,
+  assign = FALSE,
+  newobj = NULL
+)

Arguments

-
formula
-

an object of class formula describing the model to be fitted. + + +

formula
+

an object of class formula describing the model to be fitted. For more information see Details.

-
offset
+
offset

a character string specifying the name of a variable to be used as an offset.

-
weights
+
weights

a character string specifying the name of a variable containing prior regression weights for the fitting process.

-
combine.with.metafor
+
combine.with.metafor

logical. If TRUE the estimates and standard errors for each regression coefficient are pooled across studies using random-effects meta-analysis under maximum likelihood (ML), restricted maximum likelihood (REML) or fixed-effects meta-analysis (FE). Default TRUE.

-
dataName
+
dataName

a character string specifying the name of an (optional) data frame -that contains all of the variables in the LME formula. +that contains all of the variables in the LME formula. For more information see Details.

-
checks
-

logical. If TRUE ds.lmerSLMA checks the structural integrity +

checks
+

logical. If TRUE ds.lmerSLMA checks the structural integrity of the model. Default FALSE. For more information see Details.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

-
REML
+
REML

logical. If TRUE the REstricted Maximum Likelihood (REML) -is used for parameter optimization. -If FALSE the parameters are optimized using standard ML (maximum likelihood). Default TRUE. +is used for parameter optimization. +If FALSE the parameters are optimized using standard ML (maximum likelihood). Default TRUE. For more information see Details.

-
control_type
+
control_type

an optional character string vector specifying the nature of a parameter (or parameters) to be modified in the convergence control options which can be viewed or -modified via the lmerControl function of the package lme4. +modified via the lmerControl function of the package lme4. For more information see Details.

-
control_value
+
control_value

numeric representing the new value which you want to allocate the -control parameter corresponding to the control-type. +control parameter corresponding to the control-type. For more information see Details.

-
optimizer
+
optimizer

specifies the parameter optimizer that lmer should use. For more information see Details.

-
verbose
+
verbose

an integer value. If \(verbose > 0\) the output is generated during the optimization of -the parameter estimates. If \(verbose > 1\) the output is generated during the individual penalized -iteratively reweighted least squares (PIRLS) steps. Default verbose +the parameter estimates. If \(verbose > 1\) the output is generated during the individual penalized +iteratively reweighted least squares (PIRLS) steps. Default verbose value is 0 which means no additional output.

-
notify.of.progress
+
notify.of.progress

specifies if console output should be produced to indicate progress. Default FALSE.

-
assign
+
assign

a logical, indicates whether the function will call a second server-side function (an assign) in order to save the regression outcomes (i.e. a lmerMod object) on each server. Default FALSE.

-
newobj
+
newobj

a character string specifying the name of the object to which the lmerMod object -representing the model fit on the serverside in each study is to be written. This argument is +representing the model fit on the serverside in each study is to be written. This argument is used only when the argument assign is set to TRUE. If no <newobj> argument is specified, the output object defaults to "new.lmer.obj".

Value

- - -

Many of the elements of the output list returned by ds.lmerSLMA are +

Many of the elements of the output list returned by ds.lmerSLMA are equivalent to those returned by the lmer() function in native R. However, potentially disclosive elements -such as individual-level residuals and linear predictor values are blocked. +such as individual-level residuals and linear predictor values are blocked. In this case, only non-disclosive elements are returned from each study separately.

- -

The list of elements returned by ds.lmerSLMA is mentioned below:

- - -

ds.lmerSLMA returns a list of elements mentioned +

ds.lmerSLMA returns a list of elements mentioned below separately for each study.

+

coefficients: a matrix with 5 columns:

First
+

: the names of all of the regression parameters (coefficients) in the model

+
second
+

: the estimated values

-

coefficients: a matrix with 5 columns:

-

-
  • First: the names of all of the regression parameters (coefficients) in the model

  • -
  • second: the estimated values

  • -
  • third: corresponding standard errors of the estimated values

  • -
  • fourth: the ratio of estimate/standard error

  • -
  • fifth: the p-value treating that as a standardised normal deviate

  • -

CorrMatrix: the correlation matrix of parameter estimates.

+
third
+

: corresponding standard errors of the estimated values

+
fourth
+

: the ratio of estimate/standard error

-

VarCovMatrix: the variance-covariance matrix of parameter estimates.

+
fifth
+

: the p-value treating that as a standardised normal deviate

+

CorrMatrix: the correlation matrix of parameter estimates.

+

VarCovMatrix: the variance-covariance matrix of parameter estimates.

weights: the vector (if any) holding regression weights.

- -

offset: the vector (if any) holding an offset.

- -

cov.scaled: equivalent to VarCovMatrix.

- -

Nmissing: the number of missing observations in the given study.

- -

Nvalid: the number of valid (non-missing) observations in the given study.

- - -

Ntotal: the total number of observations +

Ntotal: the total number of observations in the given study (Nvalid + Nmissing).

- -

data: equivalent to input parameter dataName (above).

- -

call: summary of key elements of the call to fit the model.

- -

There are a small number of more esoteric items of the information returned by ds.lmerSLMA. Additional information about these can be found in the help file for the lmer() function in the lme4 package.

- -

Once the study-specific output has been returned, the function returns several elements relating to the pooling of estimates across studies via study-level meta-analysis. These are as follows:

- -

input.beta.matrix.for.SLMA: a matrix containing the vector of coefficient estimates from each study.

- -

input.se.matrix.for.SLMA: a matrix containing the vector of standard error estimates for coefficients from each study.

- -

SLMA.pooled.estimates: a matrix containing pooled estimates for each regression coefficient across all studies with pooling under SLMA via random-effects meta-analysis under maximum likelihood (ML), restricted maximum likelihood (REML) or via fixed-effects meta-analysis (FE).

- -

convergence.error.message: reports for each study whether the model converged. If it did not some information about the reason for this is reported.

@@ -249,34 +222,34 @@

Details

When there are multiple data sources, the LME is fitted to convergence in each data source independently. The estimates and standard errors returned to the client-side which enable cross-study pooling -using Study-Level Meta-Analysis (SLMA). The SLMA used by default metafor package - but as the SLMA occurs on the client-side (a standard R environment), the user can choose - any approach to meta-analysis. Additional information about fitting -LMEs using the lmer function can be +using Study-Level Meta-Analysis (SLMA). The SLMA used by default metafor package + but as the SLMA occurs on the client-side (a standard R environment), the user can choose + any approach to meta-analysis. Additional information about fitting +LMEs using the lmer function can be obtained using R help for lmer and the lme4 package.

In formula most shortcut notation allowed by lmer() function is also allowed by ds.lmerSLMA. Many LMEs can be fitted very simply using a formula like: -$$y~a+b+(1|c)$$ +\(y ~ a + b + (1 | c)\) which simply means fit an LME with y as the outcome variable with a and b as fixed effects, and c as a random effect or grouping factor.

-

It is also possible to fit models with random slopes by specifying a model such as -$$y~a+b+(1+b|c)$$ +

It is also possible to fit models with random slopes by specifying a model such as +\(y ~ a + b + (1 + b | c)\) where the effect of b can vary randomly between groups defined by c. -Implicit nesting can be specified with formulae such as \(y~a+b+(1|c/d)\) -or \(y~a+b+(1|c)+(1|c:d)\).

+Implicit nesting can be specified with formulae such as \(y ~ a + b + (1 | c / d)\) +or \(y ~ a + b + (1 | c) + (1 | c : d)\).

The dataName argument avoids you having to specify the name of the -data frame in front of each covariate in the formula. -For example, if the data frame is called DataFrame you avoid having to write: -\(DataFrame$y~DataFrame$a+DataFrame$b+(1|DataFrame$c)\).

-

The checks argument verifies that the variables in the model are all defined (exist) +data frame in front of each covariate in the formula. +For example, if the data frame is called DataFrame you avoid having to write: +\(DataFrame\$y ~ DataFrame\$a + DataFrame\$b + (1 | DataFrame\$c)\).

+

The checks argument verifies that the variables in the model are all defined (exist) on the server-site at every study -and that they have the correct characteristics required to fit the model. +and that they have the correct characteristics required to fit the model. It is suggested to make checks argument TRUE if an unexplained problem in the model fit is encountered because the running process takes several minutes.

REML can help to mitigate bias associated with the fixed-effects. See help on the lmer() function for more details.

In control_type at present only one such parameter can be modified, -namely the tolerance of the convergence criterion to the gradient of the log-likelihood +namely the tolerance of the convergence criterion to the gradient of the log-likelihood at the maximum likelihood achieved. We have enabled this because our practical experience suggests that in situations where the model looks to have converged with sensible parameter values but formal convergence is not being declared if we allow the model to be more @@ -306,45 +279,45 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see Wiki
-  # Connecting to the Opal servers
-  
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-  
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CLUSTER.CLUSTER_SLO1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CLUSTER.CLUSTER_SLO2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CLUSTER.CLUSTER_SLO3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-   #Log onto the remote Opal training servers
-   connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
-               
-  # Select all rows without missing values
-  ds.completeCases(x1 = "D", newobj = "D.comp", datasources = connections)
- 
-  # Fit the lmer
-  
-  ds.lmerSLMA(formula = "BMI ~  incid_rate + diabetes + (1 | Male)",
-               dataName = "D.comp",
-               datasources = connections)
-  
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-  }
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see Wiki
+  # Connecting to the Opal servers
+  
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+  
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CLUSTER.CLUSTER_SLO1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CLUSTER.CLUSTER_SLO2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CLUSTER.CLUSTER_SLO3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+   #Log onto the remote Opal training servers
+   connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
+               
+  # Select all rows without missing values
+  ds.completeCases(x1 = "D", newobj = "D.comp", datasources = connections)
+ 
+  # Fit the lmer
+  
+  ds.lmerSLMA(formula = "BMI ~ incid_rate + diabetes + (1 | Male)",
+               dataName = "D.comp",
+               datasources = connections)
+  
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+  } # }
 
@@ -355,19 +328,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.log.html b/docs/reference/ds.log.html index 3156dd336..2380a59fe 100644 --- a/docs/reference/ds.log.html +++ b/docs/reference/ds.log.html @@ -1,10 +1,10 @@ -Computes logarithms in the server-side — ds.log • dsBaseClientComputes logarithms in the server-side — ds.log • dsBaseClient - +
@@ -30,52 +30,52 @@
- +
-

Computes the logarithms for a specified numeric vector. +

Computes the logarithms for a specified numeric vector. This function is similar to the R log function. by default natural logarithms.

-
ds.log(x = NULL, base = exp(1), newobj = NULL, datasources = NULL)
+
ds.log(x = NULL, base = exp(1), newobj = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string providing the name of a numerical vector.

-
base
+
base

a positive number, the base for which logarithms are computed. Default exp(1).

-
newobj
+
newobj

a character string that provides the name for the output variable that is stored on the server-side. Default log.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.log returns a vector for each study of the transformed values for the numeric vector +

ds.log returns a vector for each study of the transformed values for the numeric vector specified in the argument x. The created vectors are stored in the server-side.

@@ -89,45 +89,45 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki 
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-                 
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Calculating the log value of the 'PM_BMI_CONTINUOUS' variable
-  
-  ds.log(x = "D$PM_BMI_CONTINUOUS",
-         base = exp(2),
-         newobj = "log.PM_BMI_CONTINUOUS",
-         datasources = connections[1]) #only the first Opal server is used (study1)
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-}
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki 
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+                 
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Calculating the log value of the 'PM_BMI_CONTINUOUS' variable
+  
+  ds.log(x = "D$PM_BMI_CONTINUOUS",
+         base = exp(2),
+         newobj = "log.PM_BMI_CONTINUOUS",
+         datasources = connections[1]) #only the first Opal server is used (study1)
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+} # }
+
 
@@ -138,19 +138,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.look.html b/docs/reference/ds.look.html index 2b6f0ce01..398e30953 100644 --- a/docs/reference/ds.look.html +++ b/docs/reference/ds.look.html @@ -1,10 +1,10 @@ -Performs direct call to a server-side aggregate function — ds.look • dsBaseClientPerforms direct call to a server-side aggregate function — ds.look • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,32 +46,32 @@

Performs direct call to a server-side aggregate function

-
ds.look(toAggregate = NULL, checks = FALSE, datasources = NULL)
+
ds.look(toAggregate = NULL, checks = FALSE, datasources = NULL)

Arguments

-
toAggregate
+ + +
toAggregate

a character string specifying the function call to be made. For more information see Details.

-
checks
-

logical. If TRUE the optional checks are undertaken. +

checks
+

logical. If TRUE the optional checks are undertaken. Default FALSE to save time.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

the output from the specified server-side aggregate function to the client-side.

+

the output from the specified server-side aggregate function to the client-side.

Details

@@ -95,48 +95,48 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
-  
-  #Calculate the length of a variable using the server-side function
-  
-  ds.look(toAggregate = "lengthDS(D$age.60)", 
-          checks = FALSE,
-          datasources = connections) 
-          
-  #Calculate the column names of "D" object using the server-side function
-          
-  ds.look(toAggregate = "colnames(D)",
-          checks = FALSE, 
-          datasources = connections)        
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
+  
+  #Calculate the length of a variable using the server-side function
+  
+  ds.look(toAggregate = "lengthDS(D$age.60)", 
+          checks = FALSE,
+          datasources = connections) 
+          
+  #Calculate the column names of "D" object using the server-side function
+          
+  ds.look(toAggregate = "colnames(D)",
+          checks = FALSE, 
+          datasources = connections)        
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
 
@@ -147,19 +147,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.ls.html b/docs/reference/ds.ls.html index 0a013314a..6fd23b812 100644 --- a/docs/reference/ds.ls.html +++ b/docs/reference/ds.ls.html @@ -1,10 +1,10 @@ -lists all objects on a server-side environment — ds.ls • dsBaseClientlists all objects on a server-side environment — ds.ls • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,74 +46,68 @@

lists all objects on a server-side environment

-
ds.ls(
-  search.filter = NULL,
-  env.to.search = 1L,
-  search.GlobalEnv = TRUE,
-  datasources = NULL
-)
+
ds.ls(
+  search.filter = NULL,
+  env.to.search = 1L,
+  search.GlobalEnv = TRUE,
+  datasources = NULL
+)

Arguments

-
search.filter
-

character string (potentially including * symbol) specifying the filter -for the object name that you want to find in the enviroment. For more information see Details.

-
env.to.search
+
search.filter
+

character string (potentially including * symbol) specifying the filter +for the object name that you want to find in the environment. For more information see Details.

+ + +

an integer (e.g. in 2 or 2L format) specifying the position in the search path of the environment to be explored. 1L is the current active analytic environment on the server-side and is the default value of env.to.search. For more information see Details.

-
search.GlobalEnv
+
search.GlobalEnv

Logical. If TRUE, ds.ls will list all objects in the .GlobalEnv R environment on the server-side. If FALSE and if env.to.search is also set as a valid integer, ds.ls will list all objects in the server-side R environment -identified by env.to.search in the search path. +identified by env.to.search in the search path. For more information see Details.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.ls returns to the client-side a list containing:

- - -

(1) the name/details of the server-side R environment which ds.ls has searched;

- - -

(2) a vector of character strings giving the names of +

ds.ls returns to the client-side a list containing:
+(1) the name/details of the server-side R environment which ds.ls has searched;
+(2) a vector of character strings giving the names of all objects meeting the naming criteria specified by the argument search.filter in this -specified R server-side environment;

- - -

(3) the nature of the search filter string as it was applied.

+specified R server-side environment;
+(3) the nature of the search filter string as it was applied.

Details

-

When running analyses one may want to know the objects already generated. This +

When running analyses one may want to know the objects already generated. This request is not disclosive as it only returns the names of the objects and not their contents.

By default, objects in DataSHIELD's Active Serverside Analytic Environment (.GlobalEnv) -will be listed. This is the environment that contains all of the objects that server-side DataSHIELD +will be listed. This is the environment that contains all of the objects that server-side DataSHIELD is using for the main analysis or has written out to the server-side during the process -of managing or undertaking the analysis (variables, scalars, matrices, data frames, etc).

+of managing or undertaking the analysis (variables, scalars, matrices, data frames, etc).

The environment to explore is specified by the argument env.to.search (i.e. environment -to search) to an integer value. The default environment +to search) to an integer value. The default environment which R names as .GlobalEnv is set by specifying env.to.search = 1 or 1L (1L is just an explicit way of writing the integer 1).

-

If the search.GlobalEnv argument is set to TRUE the env.to.search parameter +

If the search.GlobalEnv argument is set to TRUE the env.to.search parameter is set to 1L regardless of what value it is set in the call -or if it is set to NULL. +or if it is set to NULL. So, if search.GlobalEnv is set to TRUE, ds.ls will automatically search the .GlobalEnv R environment on the server-side which contains all of the variables, data frames and other objects read in at the start of the analysis, @@ -125,12 +119,12 @@

Details

env.to.search argument but you can specify search filters including * wildcards using the search.filter argument.

In search.filter you can use the symbol * to find all the object that contains -the specified characters. For example, search.filter = "Sd2*" +the specified characters. For example, search.filter = "Sd2*" will list the names of all objects in the specified -environment with names beginning capital S, lower case d and number 2. +environment with names beginning capital S, lower case d and number 2. Similarly, search.filter="*.ID" will return all objects with names ending with .ID, -for example Study.ID. -If a value is not specified for the search.filter argument or it is set as NULL, the names of +for example Study.ID. +If a value is not specified for the search.filter argument or it is set as NULL, the names of all objects in the specified environment will be returned.

Server function called: lsDS.

@@ -141,59 +135,59 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Example 1: Obtain the list of  all objects on a server-side environment
-  
-  ds.ls(datasources = connections)
-  
-  #Example 2: Obtain the list of all objects that contain "var" character in the name
-  #Create in the server-side variables with "var" character in the name
-  
-  ds.assign(toAssign = "D$LAB_TSC",
-            newobj = "var.LAB_TSC",
-            datasources = connections)
-  ds.assign(toAssign = "D$LAB_TRIG",
-            newobj = "var.LAB_TRIG",
-            datasources = connections)
-  ds.assign(toAssign = "D$LAB_HDL",
-            newobj = "var.LAB_HDL",
-            datasources = connections)
-  
-  ds.ls(search.filter = "var*",
-        env.to.search = 1L,
-        search.GlobalEnv = TRUE,
-        datasources = connections)
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Example 1: Obtain the list of  all objects on a server-side environment
+  
+  ds.ls(datasources = connections)
+  
+  #Example 2: Obtain the list of all objects that contain "var" character in the name
+  #Create in the server-side variables with "var" character in the name
+  
+  ds.assign(toAssign = "D$LAB_TSC",
+            newobj = "var.LAB_TSC",
+            datasources = connections)
+  ds.assign(toAssign = "D$LAB_TRIG",
+            newobj = "var.LAB_TRIG",
+            datasources = connections)
+  ds.assign(toAssign = "D$LAB_HDL",
+            newobj = "var.LAB_HDL",
+            datasources = connections)
+  
+  ds.ls(search.filter = "var*",
+        env.to.search = 1L,
+        search.GlobalEnv = TRUE,
+        datasources = connections)
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
+
 
@@ -204,19 +198,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.lspline.html b/docs/reference/ds.lspline.html index e0bd187ee..4b78e6ebb 100644 --- a/docs/reference/ds.lspline.html +++ b/docs/reference/ds.lspline.html @@ -1,12 +1,12 @@ -Basis for a piecewise linear spline with meaningful coefficients — ds.lspline • dsBaseClientBasis for a piecewise linear spline with meaningful coefficients — ds.lspline • dsBaseClient - +
@@ -32,13 +32,13 @@
- +
@@ -50,50 +50,50 @@

Basis for a piecewise linear spline with meaningful coefficients

-
ds.lspline(
-  x,
-  knots = NULL,
-  marginal = FALSE,
-  names = NULL,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.lspline(
+  x,
+  knots = NULL,
+  marginal = FALSE,
+  names = NULL,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

the name of the input numeric variable

-
knots
+
knots

numeric vector of knot positions

-
marginal
-

logical, how to parametrize the spline, see Details

+
marginal
+

logical, how to parametrise the spline, see Details

-
names
+
names

character, vector of names for constructed variables

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default lspline.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

an object of class "lspline" and "matrix", which its name is specified by the +

an object of class "lspline" and "matrix", which its name is specified by the newobj argument (or its default name "lspline.newobj"), is assigned on the serverside.

@@ -116,19 +116,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.make.html b/docs/reference/ds.make.html index 2c027a9ee..fc0980558 100644 --- a/docs/reference/ds.make.html +++ b/docs/reference/ds.make.html @@ -1,11 +1,11 @@ -Calculates a new object in the server-side — ds.make • dsBaseClientCalculates a new object in the server-side — ds.make • dsBaseClient - +
@@ -31,13 +31,13 @@
- +
@@ -48,31 +48,31 @@

Calculates a new object in the server-side

-
ds.make(toAssign = NULL, newobj = NULL, datasources = NULL)
+
ds.make(toAssign = NULL, newobj = NULL, datasources = NULL)

Arguments

-
toAssign
+ + +
toAssign

a character string specifying the function or the arithmetic expression.

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default make.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.make returns the new object which is written to the +

ds.make returns the new object which is written to the server-side. Also a validity message is returned to the client-side indicating whether the new object has been correctly created at each source.

@@ -85,16 +85,16 @@

Details

details of the call to ds.make and the variables/objects which it invokes.

TROUBLESHOOTING: please note we have recently identified an error that makes ds.make fail and DataSHIELD crash.

-

The error arises from a call such as -ds.make(toAssign = '5.3 + beta*xvar', newobj = 'predvals'). +

The error arises from a call such as +ds.make(toAssign = '5.3 + beta*xvar', newobj = 'predvals'). This is a typical call you may make to get the predicted values from a simple linear regression model where -a y variable is regressed against an x variable (xvar) -where the estimated regression intercept is 5.3 +a y variable is regressed against an x variable (xvar) +where the estimated regression intercept is 5.3 and beta is the estimated regression slope.

This call appears to fail because in interpreting the arithmetic function which is its first argument -it first encounters the (length 1) scalar 5.3 +it first encounters the (length 1) scalar 5.3 and when it then encounters the xvar vector which has more than one element it fails - apparently because it does not recognise that you need to replicate the 5.3 value the appropriate number of times @@ -112,7 +112,7 @@

Details

vector of length equal to xvar with all elements 5.3. Finally, you can now issue a modified call to reflect what was originally needed: ds.make(toAssign = 'vect5.3+beta*xvar', 'predvals').

-

(2) Alternatively, if you simply swap the original call around: +

(2) Alternatively, if you simply swap the original call around: ds.make(toAssign = '(beta*xvar)+5.3', newobj = 'predvals') the error seems also to be circumvented. This is presumably because the first element of the arithmetic function is of length equal to xvar and it then knows to @@ -120,7 +120,7 @@

Details

The second work-around is easier, but it is worth knowing about the first trick because creating a vector of ones of equal length to another vector can be useful in other settings. Equally the call: -ds.make(toAssign = 'indID-indID',newobj = 'ZEROS') +ds.make(toAssign = 'indID-indID',newobj = 'ZEROS') to create a vector of zeros of that same length may also be useful.

Server function : messageDS

The ds.make function is a wrapper for the DSI package function datashield.assign

@@ -132,65 +132,65 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-##Example 1: arithmetic operators 
-
-ds.make(toAssign = "D$age.60 + D$bmi.26", 
-        newobj = "exprs1", 
-        datasources = connections)
-        
-ds.make(toAssign = "D$noise.56 + D$pm10.16",
-        newobj = "exprs2", 
-        datasources = connections)
-        
-ds.make(toAssign = "(exprs1*exprs2)/3.2",
-        newobj = "result.example1", 
-        datasources = connections)
-
-##Example 2: miscellaneous operators within functions
-
-ds.make(toAssign = "(D$female)^2",
-        newobj = "female2",
-       datasources = connections)
-       
-ds.make(toAssign = "(2*D$female)+(D$log.surv)-(female2*2)",
-        newobj = "output.test.1",
-        datasources = connections)
-        
-ds.make(toAssign = "exp(output.test.1)",
-        newobj = "output.test",
-       datasources = connections)
-       
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+##Example 1: arithmetic operators 
+
+ds.make(toAssign = "D$age.60 + D$bmi.26", 
+        newobj = "exprs1", 
+        datasources = connections)
+        
+ds.make(toAssign = "D$noise.56 + D$pm10.16",
+        newobj = "exprs2", 
+        datasources = connections)
+        
+ds.make(toAssign = "(exprs1*exprs2)/3.2",
+        newobj = "result.example1", 
+        datasources = connections)
+
+##Example 2: miscellaneous operators within functions
+
+ds.make(toAssign = "(D$female)^2",
+        newobj = "female2",
+       datasources = connections)
+       
+ds.make(toAssign = "(2*D$female)+(D$log.surv)-(female2*2)",
+        newobj = "output.test.1",
+        datasources = connections)
+        
+ds.make(toAssign = "exp(output.test.1)",
+        newobj = "output.test",
+       datasources = connections)
+       
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
+
 
@@ -201,19 +201,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.matrix.html b/docs/reference/ds.matrix.html index 72a1b4199..6301c538f 100644 --- a/docs/reference/ds.matrix.html +++ b/docs/reference/ds.matrix.html @@ -1,12 +1,12 @@ -Creates a matrix on the server-side — ds.matrix • dsBaseClientCreates a matrix on the server-side — ds.matrix • dsBaseClient - +
@@ -32,90 +32,90 @@
- +
-

Creates a matrix on the server-side -with dimensions specified by nrows.scalar -and ncols.scalar arguments and assigns the +

Creates a matrix on the server-side +with dimensions specified by nrows.scalar +and ncols.scalar arguments and assigns the values of all its elements based on the mdata argument.

-
ds.matrix(
-  mdata = NA,
-  from = "clientside.scalar",
-  nrows.scalar = NULL,
-  ncols.scalar = NULL,
-  byrow = FALSE,
-  dimnames = NULL,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.matrix(
+  mdata = NA,
+  from = "clientside.scalar",
+  nrows.scalar = NULL,
+  ncols.scalar = NULL,
+  byrow = FALSE,
+  dimnames = NULL,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
mdata
+ + +
mdata

a character string specifying the name of a server-side scalar or vector. Also, a numeric value representing a -scalar specified from the client-side can be speficied. +scalar specified from the client-side can be specified Zeros, negative values and NAs are all allowed. For more information see Details.

-
from
+
from

a character string specifying the source and nature of mdata. This can be set as "serverside.vector", "serverside.scalar" or "clientside.scalar". Default "clientside.scalar".

-
nrows.scalar
-

an integer or a character string that specifies the number +

nrows.scalar
+

an integer or a character string that specifies the number of rows in the matrix to be created. For more information see Details.

-
ncols.scalar
-

an integer or a character string that specifies +

ncols.scalar
+

an integer or a character string that specifies the number of columns in the matrix to be created.

-
byrow
+
byrow

logical. If TRUE and mdata is a vector the matrix -created should be filled row by row. If FALSE the matrix created should +created should be filled row by row. If FALSE the matrix created should be filled column by column. Default = FALSE.

-
dimnames
+
dimnames

a list of length 2 giving the row and column names respectively.

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default matrix.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.matrix returns the created matrix which is written on the server-side. +

ds.matrix returns the created matrix which is written on the server-side. In addition, two validity messages are returned indicating whether the new matrix has been created in each data source and if so whether it is in a valid form.

@@ -123,20 +123,20 @@

Value

Details

This function is similar to the R native function matrix().

-

If in the mdata argument a vector is specified this should have +

If in the mdata argument a vector is specified this should have the same length as the total number of elements in the matrix. If this is not TRUE the values in mdata will be used repeatedly until all elements in the matrix are full. If mdata argument is a scalar, all elements in the matrix will take that value.

-

In the nrows.scalar argument can be a character string specifying -the name of a server-side scalar. For example, +

In the nrows.scalar argument can be a character string specifying +the name of a server-side scalar. For example, if a server-side scalar named ss.scalar exists and holds the value 23, then by specifying nrows.scalar = "ss.scalar", the matrix created will -have 23 rows. Also this argument can be +have 23 rows. Also this argument can be a numeric value from the -client-side. The same rules are applied to ncols.scalar argument but in this -case the column numbers are specified. +client-side. The same rules are applied to ncols.scalar argument but in this +case the column numbers are specified. In both arguments a zero, negative, NULL or missing value is not permitted.

Server function called: matrixDS

@@ -148,94 +148,94 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Example 1: create a matrix with -13 value in all elements
-  
-  ds.matrix(mdata = -13,
-            from = "clientside.scalar",
-            nrows.scalar = 3,
-            ncols.scalar = 8,
-            newobj = "cs.block",
-            datasources = connections)
-            
-  #Example 2: create a matrix of missing values 
-
-  ds.matrix(NA,
-            from = "clientside.scalar",
-            nrows.scalar = 4,
-            ncols.scalar = 5,
-            newobj = "cs.block.NA",
-            datasources = connections)
-
-  #Example 3: create a matrix using a server-side vector
-  #create a vector in the server-side
-  
-  ds.rUnif(samp.size = 45,
-           min = -10.5,
-           max = 10.5,
-           newobj = "ss.vector",
-           seed.as.integer = 8321,
-           force.output.to.k.decimal.places = 0,
-           datasources = connections)
-           
-  ds.matrix(mdata = "ss.vector",
-            from = "serverside.vector",
-            nrows.scalar = 5,
-            ncols.scalar = 9,
-            newobj = "sv.block",
-            datasources = connections)
-            
-  #Example 4: create a matrix using a server-side vector and specifying
-  #the row a column names
-
-  ds.rUnif(samp.size = 9,
-           min = -10.5,
-           max = 10.5,
-           newobj = "ss.vector.9",
-           seed.as.integer = 5575,
-           force.output.to.k.decimal.places = 0,
-           datasources = connections)
-           
-  ds.matrix(mdata = "ss.vector.9",
-            from = "serverside.vector",
-            nrows.scalar = 5,
-            ncols.scalar = 9,
-            byrow = TRUE,
-            dimnames = list(c("a","b","c","d","e")),
-            newobj = "sv.block.9.dimnames1",
-            datasources = connections)
-
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Example 1: create a matrix with -13 value in all elements
+  
+  ds.matrix(mdata = -13,
+            from = "clientside.scalar",
+            nrows.scalar = 3,
+            ncols.scalar = 8,
+            newobj = "cs.block",
+            datasources = connections)
+            
+  #Example 2: create a matrix of missing values 
+
+  ds.matrix(NA,
+            from = "clientside.scalar",
+            nrows.scalar = 4,
+            ncols.scalar = 5,
+            newobj = "cs.block.NA",
+            datasources = connections)
+
+  #Example 3: create a matrix using a server-side vector
+  #create a vector in the server-side
+  
+  ds.rUnif(samp.size = 45,
+           min = -10.5,
+           max = 10.5,
+           newobj = "ss.vector",
+           seed.as.integer = 8321,
+           force.output.to.k.decimal.places = 0,
+           datasources = connections)
+           
+  ds.matrix(mdata = "ss.vector",
+            from = "serverside.vector",
+            nrows.scalar = 5,
+            ncols.scalar = 9,
+            newobj = "sv.block",
+            datasources = connections)
+            
+  #Example 4: create a matrix using a server-side vector and specifying
+  #the row a column names
+
+  ds.rUnif(samp.size = 9,
+           min = -10.5,
+           max = 10.5,
+           newobj = "ss.vector.9",
+           seed.as.integer = 5575,
+           force.output.to.k.decimal.places = 0,
+           datasources = connections)
+           
+  ds.matrix(mdata = "ss.vector.9",
+            from = "serverside.vector",
+            nrows.scalar = 5,
+            ncols.scalar = 9,
+            byrow = TRUE,
+            dimnames = list(c("a","b","c","d","e")),
+            newobj = "sv.block.9.dimnames1",
+            datasources = connections)
+
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
+
 
@@ -246,19 +246,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.matrixDet.html b/docs/reference/ds.matrixDet.html index b97ee3f7b..09e6bab9a 100644 --- a/docs/reference/ds.matrixDet.html +++ b/docs/reference/ds.matrixDet.html @@ -1,12 +1,12 @@ -Calculates de determinant of a matrix in the server-side — ds.matrixDet • dsBaseClientCalculates de determinant of a matrix in the server-side — ds.matrixDet • dsBaseClient - +
@@ -32,55 +32,55 @@
- +
-

Calculates the determinant of a square matrix that is -written on the server-side. -This operation is only +

Calculates the determinant of a square matrix that is +written on the server-side. +This operation is only possible if the number of columns and rows of the matrix are the same.

-
ds.matrixDet(M1 = NULL, newobj = NULL, logarithm = FALSE, datasources = NULL)
+
ds.matrixDet(M1 = NULL, newobj = NULL, logarithm = FALSE, datasources = NULL)

Arguments

-
M1
+ + +
M1

a character string specifying the name of the matrix.

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default matrixdet.newobj.

-
logarithm
+
logarithm

logical. If TRUE the logarithm of the modulus of the determinant is calculated. Default FALSE.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.matrixDet returns the determinant of an existing matrix on the server-side. -The created new object is stored on the server-side. +

ds.matrixDet returns the determinant of an existing matrix on the server-side. +The created new object is stored on the server-side. Also, two validity messages are returned indicating whether the matrix has been created in each data source and if so whether it is in a valid form.

@@ -98,62 +98,62 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Create the matrix in the server-side
-  
-  ds.rUnif(samp.size = 9,
-           min = -10.5,
-           max = 10.5,
-           newobj = "ss.vector.9",
-           seed.as.integer = 5575,
-           force.output.to.k.decimal.places = 0,
-           datasources = connections)
-           
-  ds.matrix(mdata = "ss.vector.9",
-            from = "serverside.vector",
-            nrows.scalar = 9,ncols.scalar = 9,
-            byrow = TRUE,
-            newobj = "matrix",
-            datasources = connections)
-            
-  #Calculate the determinant of the matrix
-  
-  ds.matrixDet(M1 = "matrix", 
-               newobj = "matrixDet", 
-               logarithm = FALSE, 
-               datasources = connections)
-  
-
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Create the matrix in the server-side
+  
+  ds.rUnif(samp.size = 9,
+           min = -10.5,
+           max = 10.5,
+           newobj = "ss.vector.9",
+           seed.as.integer = 5575,
+           force.output.to.k.decimal.places = 0,
+           datasources = connections)
+           
+  ds.matrix(mdata = "ss.vector.9",
+            from = "serverside.vector",
+            nrows.scalar = 9,ncols.scalar = 9,
+            byrow = TRUE,
+            newobj = "matrix",
+            datasources = connections)
+            
+  #Calculate the determinant of the matrix
+  
+  ds.matrixDet(M1 = "matrix", 
+               newobj = "matrixDet", 
+               logarithm = FALSE, 
+               datasources = connections)
+  
+
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
 
@@ -164,19 +164,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.matrixDet.report.html b/docs/reference/ds.matrixDet.report.html index 4707d25ce..4fdbd6b1f 100644 --- a/docs/reference/ds.matrixDet.report.html +++ b/docs/reference/ds.matrixDet.report.html @@ -1,10 +1,10 @@ -Returns matrix determinant to the client-side — ds.matrixDet.report • dsBaseClientReturns matrix determinant to the client-side — ds.matrixDet.report • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,38 +46,38 @@

Returns matrix determinant to the client-side

-
ds.matrixDet.report(M1 = NULL, logarithm = FALSE, datasources = NULL)
+
ds.matrixDet.report(M1 = NULL, logarithm = FALSE, datasources = NULL)

Arguments

-
M1
+ + +
M1

a character string specifying the name of the matrix.

-
logarithm
+
logarithm

logical. If TRUE the logarithm of the modulus of the determinant is calculated. Default FALSE.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.matrixDet.report returns to the client-side +

ds.matrixDet.report returns to the client-side the determinant of a matrix that is stored on the server-side.

Details

Calculates and returns to the client-side -the determinant of a square matrix on the server-side. -This function is similar to the native R determinant function. +the determinant of a square matrix on the server-side. +This function is similar to the native R determinant function. This operation is only possible if the number of columns and rows of the matrix are the same.

Server function called: matrixDetDS1

@@ -89,59 +89,59 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Create the matrix in the server-side
-  
-  ds.rUnif(samp.size = 9,
-           min = -10.5,
-           max = 10.5,
-           newobj = "ss.vector.9",
-           seed.as.integer = 5575,
-           force.output.to.k.decimal.places = 0,
-           datasources = connections)
-           
-  ds.matrix(mdata = "ss.vector.9",
-            from = "serverside.vector",
-            nrows.scalar = 9,ncols.scalar = 9,
-            byrow = TRUE,
-            newobj = "matrix",
-            datasources = connections)
-            
-  #Calculate the determinant of the matrix
-  
-  ds.matrixDet.report(M1 = "matrix", 
-                      logarithm = FALSE, 
-                      datasources = connections)
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Create the matrix in the server-side
+  
+  ds.rUnif(samp.size = 9,
+           min = -10.5,
+           max = 10.5,
+           newobj = "ss.vector.9",
+           seed.as.integer = 5575,
+           force.output.to.k.decimal.places = 0,
+           datasources = connections)
+           
+  ds.matrix(mdata = "ss.vector.9",
+            from = "serverside.vector",
+            nrows.scalar = 9,ncols.scalar = 9,
+            byrow = TRUE,
+            newobj = "matrix",
+            datasources = connections)
+            
+  #Calculate the determinant of the matrix
+  
+  ds.matrixDet.report(M1 = "matrix", 
+                      logarithm = FALSE, 
+                      datasources = connections)
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
 
@@ -152,19 +152,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.matrixDiag.html b/docs/reference/ds.matrixDiag.html index 478243908..84200fc85 100644 --- a/docs/reference/ds.matrixDiag.html +++ b/docs/reference/ds.matrixDiag.html @@ -1,11 +1,11 @@ -Calculates matrix diagonals in the server-side — ds.matrixDiag • dsBaseClientCalculates matrix diagonals in the server-side — ds.matrixDiag • dsBaseClient - +
@@ -31,82 +31,82 @@
- +

Extracts the diagonal vector from a square matrix or -creates a diagonal matrix based on a vector or a scalar value +creates a diagonal matrix based on a vector or a scalar value on the server-side.

-
ds.matrixDiag(
-  x1 = NULL,
-  aim = NULL,
-  nrows.scalar = NULL,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.matrixDiag(
+  x1 = NULL,
+  aim = NULL,
+  nrows.scalar = NULL,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
x1
+ + +
x1

a character string specifying the name of a server-side scalar or vector. Also, a numeric value or vector -specified from the client-side can be speficied. This argument depends +specified from the client-side can be specified. This argument depends on the value specified in aim. For more information see Details.

-
aim
+
aim

a character string specifying the behaviour of the function. -This can be set as: +This can be set as: "serverside.vector.2.matrix", "serverside.scalar.2.matrix", "serverside.matrix.2.vector", "clientside.vector.2.matrix" or "clientside.scalar.2.matrix". For more information see Details.

-
nrows.scalar
-

an integer specifying the dimensions of the matrix -note that the matrix is square (same number of rows and columns). -If this argument is not specified the matrix dimensions are -defined by the length of the vector. +

nrows.scalar
+

an integer specifying the dimensions of the matrix +note that the matrix is square (same number of rows and columns). +If this argument is not specified the matrix dimensions are +defined by the length of the vector. For more information see Details.

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default matrixdiag.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.matrixDiag returns to the server-side the square matrix diagonal. +

ds.matrixDiag returns to the server-side the square matrix diagonal. Also, two validity messages are returned indicating whether the new object has been created in each data source and if so whether it is in a valid form.

Details

-

The function behaviour is different depending on the +

The function behaviour is different depending on the value specified in the aim argument:
(1) If aim = "serverside.vector.2.matrix" the function takes a server-side vector and writes out a square matrix with @@ -131,7 +131,7 @@

Details

and writes out a square matrix with all diagonal values equal to the value of the scalar. The dimensions of the square matrix are determined by the value of the nrows.scalar argument.

-

If x1 is a vector and the nrows.scalar +

If x1 is a vector and the nrows.scalar is set as k, the vector will be used repeatedly to fill up the diagonal. For example, the vector is of length 7 and nrows.scalar = 18, a square diagonal matrix with @@ -145,7 +145,7 @@

Author

Examples

- +
- - + + diff --git a/docs/reference/ds.matrixDimnames.html b/docs/reference/ds.matrixDimnames.html index 08bc5e630..8cc27186a 100644 --- a/docs/reference/ds.matrixDimnames.html +++ b/docs/reference/ds.matrixDimnames.html @@ -1,10 +1,10 @@ -Specifies the dimnames of the server-side matrix — ds.matrixDimnames • dsBaseClientSpecifies the dimnames of the server-side matrix — ds.matrixDimnames • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,45 +46,45 @@

Specifies the dimnames of the server-side matrix

-
ds.matrixDimnames(
-  M1 = NULL,
-  dimnames = NULL,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.matrixDimnames(
+  M1 = NULL,
+  dimnames = NULL,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
M1
+ + +
M1

a character string specifying the name of a server-side matrix.

-
dimnames
+
dimnames

a list of length 2 giving -the row and column names respectively. +the row and column names respectively. An empty list is treated as NULL.

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default matrixdimnames.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.matrixDimnames returns to the server-side -the matrix with specified row and column names. -Also, two validity messages are returned to the client-side +

ds.matrixDimnames returns to the server-side +the matrix with specified row and column names. +Also, two validity messages are returned to the client-side indicating the new object that has been created in each data source and if so whether it is in a valid form.

@@ -100,66 +100,66 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-            
-  #Example 1: Set the row and column names of a server-side matrix
-  
-  #Create the server-side vector 
-  
-  ds.rUnif(samp.size = 9,
-           min = -10.5,
-           max = 10.5,
-           newobj = "ss.vector.9",
-           seed.as.integer = 5575,
-           force.output.to.k.decimal.places = 0,
-           datasources = connections)
-           
-  #Create the server-side matrix
-           
-  ds.matrix(mdata = "ss.vector.9",
-            from = "serverside.vector",
-            nrows.scalar = 3,
-            ncols.scalar = 4,
-            byrow = TRUE,
-            newobj = "matrix",
-            datasources = connections)
-   
-  #Specify the column and row names of the matrix
-  
-  ds.matrixDimnames(M1 = "matrix",
-                    dimnames = list(c("a","b","c"),c("a","b","c","d")),
-                    newobj = "matrix.dimnames",
-                    datasources = connections)
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+            
+  #Example 1: Set the row and column names of a server-side matrix
+  
+  #Create the server-side vector 
+  
+  ds.rUnif(samp.size = 9,
+           min = -10.5,
+           max = 10.5,
+           newobj = "ss.vector.9",
+           seed.as.integer = 5575,
+           force.output.to.k.decimal.places = 0,
+           datasources = connections)
+           
+  #Create the server-side matrix
+           
+  ds.matrix(mdata = "ss.vector.9",
+            from = "serverside.vector",
+            nrows.scalar = 3,
+            ncols.scalar = 4,
+            byrow = TRUE,
+            newobj = "matrix",
+            datasources = connections)
+   
+  #Specify the column and row names of the matrix
+  
+  ds.matrixDimnames(M1 = "matrix",
+                    dimnames = list(c("a","b","c"),c("a","b","c","d")),
+                    newobj = "matrix.dimnames",
+                    datasources = connections)
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
 
@@ -170,19 +170,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.matrixInvert.html b/docs/reference/ds.matrixInvert.html index 972d572a6..0937e4bd2 100644 --- a/docs/reference/ds.matrixInvert.html +++ b/docs/reference/ds.matrixInvert.html @@ -1,9 +1,9 @@ -Inverts a server-side square matrix — ds.matrixInvert • dsBaseClientInverts a server-side square matrix — ds.matrixInvert • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,33 +44,33 @@

Inverts a server-side square matrix

-
ds.matrixInvert(M1 = NULL, newobj = NULL, datasources = NULL)
+
ds.matrixInvert(M1 = NULL, newobj = NULL, datasources = NULL)

Arguments

-
M1
+ + +
M1

A character string specifying the name of the matrix to be inverted.

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default matrixinvert.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.matrixInvert returns to the server-side the inverts square matrix. -Also, two validity messages are returned to the client-side +

ds.matrixInvert returns to the server-side the inverts square matrix. +Also, two validity messages are returned to the client-side indicating whether the new object has been created in each data source and if so whether it is in a valid form.

@@ -88,65 +88,65 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-            
-  #Example 1: Invert the server-side matrix
-  
-  #Create the server-side vector 
-  
-  ds.rUnif(samp.size = 9,
-           min = -10.5,
-           max = 10.5,
-           newobj = "ss.vector.9",
-           seed.as.integer = 5575,
-           force.output.to.k.decimal.places = 0,
-           datasources = connections)
-           
-  #Create the server-side matrix
-           
-  ds.matrix(mdata = "ss.vector.9",
-            from = "serverside.vector",
-            nrows.scalar = 3,
-            ncols.scalar = 4,
-            byrow = TRUE,
-            newobj = "matrix",
-            datasources = connections)
-   
-  #Invert the matrix
-  
-  ds.matrixInvert(M1 = "matrix",
-                  newobj = "matrix.invert",
-                  datasources = connections)
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+            
+  #Example 1: Invert the server-side matrix
+  
+  #Create the server-side vector 
+  
+  ds.rUnif(samp.size = 9,
+           min = -10.5,
+           max = 10.5,
+           newobj = "ss.vector.9",
+           seed.as.integer = 5575,
+           force.output.to.k.decimal.places = 0,
+           datasources = connections)
+           
+  #Create the server-side matrix
+           
+  ds.matrix(mdata = "ss.vector.9",
+            from = "serverside.vector",
+            nrows.scalar = 3,
+            ncols.scalar = 4,
+            byrow = TRUE,
+            newobj = "matrix",
+            datasources = connections)
+   
+  #Invert the matrix
+  
+  ds.matrixInvert(M1 = "matrix",
+                  newobj = "matrix.invert",
+                  datasources = connections)
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
 
@@ -157,19 +157,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.matrixMult.html b/docs/reference/ds.matrixMult.html index f1b530d6b..737b4cca5 100644 --- a/docs/reference/ds.matrixMult.html +++ b/docs/reference/ds.matrixMult.html @@ -1,10 +1,10 @@ -Calculates tow matrix multiplication in the server-side — ds.matrixMult • dsBaseClientCalculates tow matrix multiplication in the server-side — ds.matrixMult • dsBaseClient - +
@@ -30,62 +30,62 @@
- +
-

Calculates the matrix product of two matrices and +

Calculates the matrix product of two matrices and writes output to the server-side.

-
ds.matrixMult(M1 = NULL, M2 = NULL, newobj = NULL, datasources = NULL)
+
ds.matrixMult(M1 = NULL, M2 = NULL, newobj = NULL, datasources = NULL)

Arguments

-
M1
+ + +
M1

a character string specifying the name of the first matrix.

-
M2
+
M2

a character string specifying the name of the second matrix.

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default matrixmult.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.matrixMult returns to the server-side +

ds.matrixMult returns to the server-side the result of the two matrix multiplication. -Also, two validity messages are returned to the client-side +Also, two validity messages are returned to the client-side indicating whether the new object has been created in each data source and if so whether it is in a valid form.

Details

-

Undertakes standard matrix multiplication wherewith input matrices -A and B with dimensions A: m x n and B: n x p +

Undertakes standard matrix multiplication wherewith input matrices +A and B with dimensions A: m x n and B: n x p the output matrix C has dimensions m x p. This calculation -is only valid if the number of columns of A +is only valid if the number of columns of A is the same as the number of rows of B.

Server function called: matrixMultDS

@@ -96,74 +96,74 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-            
-  #Example 1: Multiplicate two server-side matrix
-  
-  #Create the server-side vector 
-  
-  ds.rUnif(samp.size = 9,
-           min = -10.5,
-           max = 10.5,
-           newobj = "ss.vector.9",
-           seed.as.integer = 5575,
-           force.output.to.k.decimal.places = 0,
-           datasources = connections)
-           
-  #Create the server-side matrixes
-           
-  ds.matrix(mdata = "ss.vector.9",#using the created vector
-            from = "serverside.vector",
-            nrows.scalar = 5,
-            ncols.scalar = 4,
-            byrow = TRUE,
-            newobj = "matrix1",
-            datasources = connections)
-            
-   ds.matrix(mdata = 10,
-             from = "clientside.scalar",
-             nrows.scalar = 4,
-             ncols.scalar = 6,
-             byrow = TRUE,
-             newobj = "matrix2",
-             datasources = connections)
-
-  #Multiplicate the matrixes
-  
-  ds.matrixMult(M1 = "matrix1", 
-                M2 = "matrix2", 
-                newobj = "matrix.mult", 
-                datasources = connections)
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+            
+  #Example 1: Multiplicate two server-side matrix
+  
+  #Create the server-side vector 
+  
+  ds.rUnif(samp.size = 9,
+           min = -10.5,
+           max = 10.5,
+           newobj = "ss.vector.9",
+           seed.as.integer = 5575,
+           force.output.to.k.decimal.places = 0,
+           datasources = connections)
+           
+  #Create the server-side matrixes
+           
+  ds.matrix(mdata = "ss.vector.9",#using the created vector
+            from = "serverside.vector",
+            nrows.scalar = 5,
+            ncols.scalar = 4,
+            byrow = TRUE,
+            newobj = "matrix1",
+            datasources = connections)
+            
+   ds.matrix(mdata = 10,
+             from = "clientside.scalar",
+             nrows.scalar = 4,
+             ncols.scalar = 6,
+             byrow = TRUE,
+             newobj = "matrix2",
+             datasources = connections)
+
+  #Multiplicate the matrixes
+  
+  ds.matrixMult(M1 = "matrix1", 
+                M2 = "matrix2", 
+                newobj = "matrix.mult", 
+                datasources = connections)
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
 
@@ -174,19 +174,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.matrixTranspose.html b/docs/reference/ds.matrixTranspose.html index e300f35f0..8e3dabe42 100644 --- a/docs/reference/ds.matrixTranspose.html +++ b/docs/reference/ds.matrixTranspose.html @@ -1,9 +1,9 @@ -Transposes a server-side matrix — ds.matrixTranspose • dsBaseClientTransposes a server-side matrix — ds.matrixTranspose • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,42 +44,42 @@

Transposes a server-side matrix

-
ds.matrixTranspose(M1 = NULL, newobj = NULL, datasources = NULL)
+
ds.matrixTranspose(M1 = NULL, newobj = NULL, datasources = NULL)

Arguments

-
M1
+ + +
M1

a character string specifying the name of the matrix.

-
newobj
-

a character string that provides the name for the output -variable that is stored on the data servers. +

newobj
+

a character string that provides the name for the output +variable that is stored on the data servers. Default matrixtranspose.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.matrixTranspose returns to the server-side the transpose matrix. -Also, two validity messages are returned to the client-side +

ds.matrixTranspose returns to the server-side the transpose matrix. +Also, two validity messages are returned to the client-side indicating whether the new object has been created in each data source and if so whether it is in a valid form.

Details

This operation converts matrix -A to matrix C where element C[i,j] of +A to matrix C where element C[i,j] of matrix C equals element A[j,i] of matrix -A. Matrix A, therefore, has the same number +A. Matrix A, therefore, has the same number of rows as matrix C has columns and vice versa.

Server function called: matrixTransposeDS

@@ -90,65 +90,65 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-            
-  #Example 1: Transpose the server-side matrix
-  
-  #Create the server-side vector 
-  
-  ds.rUnif(samp.size = 9,
-           min = -10.5,
-           max = 10.5,
-           newobj = "ss.vector.9",
-           seed.as.integer = 5575,
-           force.output.to.k.decimal.places = 0,
-           datasources = connections)
-           
-  #Create the server-side matrix
-           
-  ds.matrix(mdata = "ss.vector.9",
-            from = "serverside.vector",
-            nrows.scalar = 3,
-            ncols.scalar = 4,
-            byrow = TRUE,
-            newobj = "matrix",
-            datasources = connections)
-   
-  #Transpose the matrix
-  
-  ds.matrixTranspose(M1 = "matrix",
-                     newobj = "matrix.transpose",
-                     datasources = connections)
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+            
+  #Example 1: Transpose the server-side matrix
+  
+  #Create the server-side vector 
+  
+  ds.rUnif(samp.size = 9,
+           min = -10.5,
+           max = 10.5,
+           newobj = "ss.vector.9",
+           seed.as.integer = 5575,
+           force.output.to.k.decimal.places = 0,
+           datasources = connections)
+           
+  #Create the server-side matrix
+           
+  ds.matrix(mdata = "ss.vector.9",
+            from = "serverside.vector",
+            nrows.scalar = 3,
+            ncols.scalar = 4,
+            byrow = TRUE,
+            newobj = "matrix",
+            datasources = connections)
+   
+  #Transpose the matrix
+  
+  ds.matrixTranspose(M1 = "matrix",
+                     newobj = "matrix.transpose",
+                     datasources = connections)
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
 
@@ -159,19 +159,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.mean.html b/docs/reference/ds.mean.html index 43c045359..5bac04db2 100644 --- a/docs/reference/ds.mean.html +++ b/docs/reference/ds.mean.html @@ -1,10 +1,10 @@ -Computes server-side vector statistical mean — ds.mean • dsBaseClientComputes server-side vector statistical mean — ds.mean • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,79 +46,60 @@

Computes server-side vector statistical mean

-
ds.mean(
-  x = NULL,
-  type = "split",
-  checks = FALSE,
-  save.mean.Nvalid = FALSE,
-  datasources = NULL
-)
+
ds.mean(
+  x = NULL,
+  type = "split",
+  checks = FALSE,
+  save.mean.Nvalid = FALSE,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character specifying the name of a numerical vector.

-
type
+
type

a character string that represents the type of analysis to carry out. This can be set as 'combine', 'combined', 'combines', 'split', 'splits', 's', -'both' or 'b'. +'both' or 'b'. For more information see Details.

-
checks
+
checks

logical. If TRUE optional checks of model -components will be undertaken. Default is FALSE to save time. +components will be undertaken. Default is FALSE to save time. It is suggested that checks should only be undertaken once the function call has failed.

-
save.mean.Nvalid
-

logical. If TRUE generated values of the mean and -the number of valid (non-missing) observations will be saved on the data servers. -Default FALSE. +

save.mean.Nvalid
+

logical. If TRUE generated values of the mean and +the number of valid (non-missing) observations will be saved on the data servers. +Default FALSE. For more information see Details.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.mean returns to the client-side a list including:

- - - -

Mean.by.Study: estimated mean, Nmissing

- - -

(number of missing observations), Nvalid (number of valid observations) and -Ntotal (sum of missing and valid observations) -separately for each study (if type = split or type = both).

- - -

Global.Mean: estimated mean, Nmissing, Nvalid and Ntotal

- - -

across all studies combined (if type = combine or type = both).

- - -

Nstudies: number of studies being analysed.

- - -

ValidityMessage: indicates if the analysis was possible.

- - - -

If save.mean.Nvalid is set as TRUE, the objects +

ds.mean returns to the client-side a list including:

+

Mean.by.Study: estimated mean, Nmissing +(number of missing observations), Nvalid (number of valid observations) and +Ntotal (sum of missing and valid observations) +separately for each study (if type = split or type = both).
Global.Mean: estimated mean, Nmissing, Nvalid and Ntotal +across all studies combined (if type = combine or type = both).
Nstudies: number of studies being analysed.
ValidityMessage: indicates if the analysis was possible.

+

If save.mean.Nvalid is set as TRUE, the objects Nvalid.all.studies, Nvalid.study.specific, mean.all.studies and mean.study.specific are written to the server-side.

@@ -127,16 +108,16 @@

Details

This function is similar to the R function mean.

The function can carry out 3 types of analysis depending on the argument type:
-(1) If type is set to 'combine', 'combined', +(1) If type is set to 'combine', 'combined', 'combines' or 'c', a global mean is calculated.
(2) If type is set to 'split', 'splits' or 's', the mean is calculated separately for each study.
-(3) If type is set to 'both' or 'b', +(3) If type is set to 'both' or 'b', both sets of outputs are produced.

-

If the argument save.mean.Nvalid is set to TRUE +

If the argument save.mean.Nvalid is set to TRUE study-specific means and Nvalids -as well as the global equivalents across all studies combined -are saved in the server-side. +as well as the global equivalents across all studies combined +are saved in the server-side. Once the estimated means and Nvalids are written into the server-side R environments, they can be used directly to centralize the variable of interest around its global mean or its study-specific means. Finally, @@ -155,45 +136,45 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Calculate the mean of a vector in the server-side
-  
-  ds.mean(x = "D$LAB_TSC",
-          type = "split",
-          checks = FALSE,
-          save.mean.Nvalid = FALSE,
-          datasources = connections)
-             
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Calculate the mean of a vector in the server-side
+  
+  ds.mean(x = "D$LAB_TSC",
+          type = "split",
+          checks = FALSE,
+          save.mean.Nvalid = FALSE,
+          datasources = connections)
+             
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
+
 
@@ -204,19 +185,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.meanByClass.html b/docs/reference/ds.meanByClass.html index 6b0f0a74f..411ed7332 100644 --- a/docs/reference/ds.meanByClass.html +++ b/docs/reference/ds.meanByClass.html @@ -1,10 +1,10 @@ -Computes the mean and standard deviation across categories — ds.meanByClass • dsBaseClientComputes the mean and standard deviation across categories — ds.meanByClass • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,47 +46,47 @@

Computes the mean and standard deviation across categories

-
ds.meanByClass(
-  x = NULL,
-  outvar = NULL,
-  covar = NULL,
-  type = "combine",
-  datasources = NULL
-)
+
ds.meanByClass(
+  x = NULL,
+  outvar = NULL,
+  covar = NULL,
+  type = "combine",
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character string specifying the name of the dataset or a text formula.

-
outvar
+
outvar

a character vector specifying the names of the continuous variables.

-
covar
+
covar

a character vector specifying the names of up to 3 categorical variables

-
type
+
type

a character string that represents the type of analysis to carry out. -type can be set as: 'combine' or 'split'. -Default 'combine'. +type can be set as: 'combine' or 'split'. +Default 'combine'. For more information see Details.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.meanByClass returns to the client-side a table or a list of tables that +

ds.meanByClass returns to the client-side a table or a list of tables that hold the length of the numeric variable(s) and their mean and standard deviation in each subgroup (subset).

@@ -115,50 +115,50 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Calculate mean by class
-  
-  ds.meanByClass(x = "D",
-                 outvar = c('LAB_HDL','LAB_TSC'),
-                 covar = c('PM_BMI_CATEGORICAL'),
-                 type = "combine",
-                 datasources = connections)
-                 
-  ds.meanByClass(x = "D$LAB_HDL~D$PM_BMI_CATEGORICAL",
-                 type = "combine",
-                 datasources = connections[1])#Only the frist server is used ("study1")  
-             
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Calculate mean by class
+  
+  ds.meanByClass(x = "D",
+                 outvar = c('LAB_HDL','LAB_TSC'),
+                 covar = c('PM_BMI_CATEGORICAL'),
+                 type = "combine",
+                 datasources = connections)
+                 
+  ds.meanByClass(x = "D$LAB_HDL~D$PM_BMI_CATEGORICAL",
+                 type = "combine",
+                 datasources = connections[1])#Only the frist server is used ("study1")  
+             
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
+
 
@@ -169,19 +169,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.meanSdGp.html b/docs/reference/ds.meanSdGp.html index a0a739248..ffc0c2bfb 100644 --- a/docs/reference/ds.meanSdGp.html +++ b/docs/reference/ds.meanSdGp.html @@ -1,10 +1,10 @@ -Computes the mean and standard deviation across groups defined by one factor — ds.meanSdGp • dsBaseClientComputes the mean and standard deviation across groups defined by one factor — ds.meanSdGp • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,52 +46,52 @@

Computes the mean and standard deviation across groups defined by one factor

-
ds.meanSdGp(
-  x = NULL,
-  y = NULL,
-  type = "both",
-  do.checks = FALSE,
-  datasources = NULL
-)
+
ds.meanSdGp(
+  x = NULL,
+  y = NULL,
+  type = "both",
+  do.checks = FALSE,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character string specifying the name of a numeric continuous variable.

-
y
+
y

a character string specifying the name of a categorical variable of class factor.

-
type
+
type

a character string that represents the type of analysis to carry out. -This can be set as: "combine", "split" or "both". -Default "both". +This can be set as: "combine", "split" or "both". +Default "both". For more information see Details.

-
do.checks
+
do.checks

logical. If TRUE the administrative checks are undertaken to ensure that the input objects are defined in all studies and that the -variables are of equivalent class in each study. +variables are of equivalent class in each study. Default is FALSE to save time.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.meanSdGp returns to the client-side the mean, SD, Nvalid and SEM combined +

ds.meanSdGp returns to the client-side the mean, SD, Nvalid and SEM combined across studies and/or separately for each study, depending on the argument type.

@@ -99,7 +99,7 @@

Details

This function calculates the mean, standard deviation (SD), N (number of observations) and the standard error of the mean (SEM) of a continuous variable broken down into subgroups defined by a single factor.

-

There are important differences between ds.meanSdGp function compared to +

There are important differences between ds.meanSdGp function compared to the function ds.meanByClass:

(A) ds.meanSdGp does not actually subset the data it simply calculates the required statistics and reports them. This means you cannot use this function if you wish to physically break the @@ -117,13 +117,13 @@

Details

(4) sex = 1 and BMI = 1 -> newfactor = 4
(5) sex = 1 and BMI = 2 -> newfactor = 5
(6) sex = 1 and BMI = 3 -> newfactor = 6

-

(C) At present, ds.meanByClass calculates the sample size in each group to mean the +

(C) At present, ds.meanByClass calculates the sample size in each group to mean the total sample size (i.e. it includes all observations in each group regardless of whether or not they include missing values for the continuous variable or the factor). The calculation of sample size in each group by ds.meanSdGp always reports the number of observations that are non-missing both for the continuous variable and the factor. This makes sense - in the case of ds.meanByClass, -the total size of the physical subsets was important, +the total size of the physical subsets was important, but when it comes down only to ds.meanSdGp which undertakes analysis without physical subsetting, it is only the observations with non-missing values in both variables that contribute to the calculation of means and SDs within each group @@ -151,59 +151,59 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-
-  #Example 1: Calculate the mean, SD, Nvalid and SEM of the continuous variable age.60 (age in
-  #years centralised at 60), broken down by time.id (a six level factor relating to survival time)
-  #and report the pooled results combined across studies.
- 
-  ds.meanSdGp(x = "D$age.60",
-              y = "D$time.id",
-              type = "combine",
-              do.checks = FALSE,
-              datasources = connections)
-              
-  #Example 2: Calculate the mean, SD, Nvalid and SEM of the continuous variable age.60 (age in
-  #years centralised at 60), broken down by time.id (a six level factor relating to survival time)
-  #and report both study-specific results and the pooled results combined across studies.
-  #Save the returned output to msg.b.
-  
-  ds.meanSdGp(x = "D$age.60",
-              y = "D$time.id",
-              type = "both",
-              do.checks = FALSE,
-              datasources = connections)  
-             
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+
+  #Example 1: Calculate the mean, SD, Nvalid and SEM of the continuous variable age.60 (age in
+  #years centralised at 60), broken down by time.id (a six level factor relating to survival time)
+  #and report the pooled results combined across studies.
+ 
+  ds.meanSdGp(x = "D$age.60",
+              y = "D$time.id",
+              type = "combine",
+              do.checks = FALSE,
+              datasources = connections)
+              
+  #Example 2: Calculate the mean, SD, Nvalid and SEM of the continuous variable age.60 (age in
+  #years centralised at 60), broken down by time.id (a six level factor relating to survival time)
+  #and report both study-specific results and the pooled results combined across studies.
+  #Save the returned output to msg.b.
+  
+  ds.meanSdGp(x = "D$age.60",
+              y = "D$time.id",
+              type = "both",
+              do.checks = FALSE,
+              datasources = connections)  
+             
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
+
 
@@ -214,19 +214,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.merge.html b/docs/reference/ds.merge.html index ca407b167..1ad185efc 100644 --- a/docs/reference/ds.merge.html +++ b/docs/reference/ds.merge.html @@ -1,10 +1,10 @@ -Merges two data frames in the server-side — ds.merge • dsBaseClientMerges two data frames in the server-side — ds.merge • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,102 +46,102 @@

Merges two data frames in the server-side

-
ds.merge(
-  x.name = NULL,
-  y.name = NULL,
-  by.x.names = NULL,
-  by.y.names = NULL,
-  all.x = FALSE,
-  all.y = FALSE,
-  sort = TRUE,
-  suffixes = c(".x", ".y"),
-  no.dups = TRUE,
-  incomparables = NULL,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.merge(
+  x.name = NULL,
+  y.name = NULL,
+  by.x.names = NULL,
+  by.y.names = NULL,
+  all.x = FALSE,
+  all.y = FALSE,
+  sort = TRUE,
+  suffixes = c(".x", ".y"),
+  no.dups = TRUE,
+  incomparables = NULL,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
x.name
+ + +
x.name

a character string specifying the name of the -first data frame to be merged. The length of the string should be less than the -specified threshold for the nfilter.stringShort which is one of the disclosure +first data frame to be merged. The length of the string should be less than the +specified threshold for the nfilter.stringShort which is one of the disclosure prevention checks in DataSHIELD.

-
y.name
+
y.name

a character string specifying the name of the -second data frame to be merged. The length of the string should be less than the -specified threshold for the nfilter.stringShort which is one of the disclosure +second data frame to be merged. The length of the string should be less than the +specified threshold for the nfilter.stringShort which is one of the disclosure prevention checks in DataSHIELD.

-
by.x.names
-

a character string or a vector of names specifying +

by.x.names
+

a character string or a vector of names specifying of the column(s) in data frame x.name for merging.

-
by.y.names
-

a character string or a vector of names specifying +

by.y.names
+

a character string or a vector of names specifying of the column(s) in data frame y.name for merging.

-
all.x
+
all.x

logical. If TRUE then extra rows will be added to the output, -one for each row in x.name that has no matching row in y.name. +one for each row in x.name that has no matching row in y.name. If FALSE the rows with data from both data frames are included in the output. Default FALSE.

-
all.y
+
all.y

logical. If TRUE then extra rows will be added to the output, -one for each row in y.name that has no matching row in x.name. +one for each row in y.name that has no matching row in x.name. If FALSE the rows with data from both data frames are included in the output. Default FALSE.

-
sort
+
sort

logical. If TRUE the merged result is sorted on elements in the by.x.names and by.y.names columns. Default TRUE.

-
suffixes
+
suffixes

a character vector of length 2 specifying the suffixes to be used for making unique common column names in the two input data frames when they both appear in the merged data frame.

-
no.dups
+
no.dups

logical. Suffixes are appended in more cases to avoid duplicated column names in the merged data frame. Default TRUE (FALSE before R version 3.5.0).

-
incomparables
-

values that cannot be matched. This is intended to +

incomparables
+

values that cannot be matched. This is intended to be used for merging on one column, so these are incomparable values of that column. For more information see match in native R merge function.

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default merge.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.merge returns the merged data frame that is written on the server-side. +

ds.merge returns the merged data frame that is written on the server-side. Also, two validity messages are returned to the client-side indicating whether the new object has been created in each data source and if so whether it is in a valid form.

@@ -149,7 +149,7 @@

Value

Details

This function is similar to the native R function merge. -There are some changes compared with the native R function +There are some changes compared with the native R function in choosing which variables to use to merge the data frames, the function merge is very flexible. For example, you can choose to merge using all vectors that appear in both data frames. However, for ds.merge @@ -165,63 +165,63 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Create two data frames with a common column
-  
-  ds.dataFrame(x = c("D$LAB_TSC","D$LAB_TRIG","D$LAB_HDL","D$LAB_GLUC_ADJUSTED"),
-               completeCases = TRUE,
-               newobj = "df.x",
-               datasources = connections)
-               
-  ds.dataFrame(x = c("D$LAB_TSC","D$GENDER","D$PM_BMI_CATEGORICAL","D$PM_BMI_CONTINUOUS"),
-               completeCases = TRUE,
-               newobj = "df.y",
-               datasources = connections) 
-               
-  # Merge data frames using the common variable "LAB_TSC"
-               
-  ds.merge(x.name = "df.x",
-           y.name = "df.y",
-           by.x.names = "df.x$LAB_TSC",
-           by.y.names = "df.y$LAB_TSC",
-           all.x = TRUE,
-           all.y = TRUE,
-           sort = TRUE,
-           suffixes = c(".x", ".y"),
-           no.dups = TRUE,
-           newobj = "df.merge",
-           datasources = connections)              
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Create two data frames with a common column
+  
+  ds.dataFrame(x = c("D$LAB_TSC","D$LAB_TRIG","D$LAB_HDL","D$LAB_GLUC_ADJUSTED"),
+               completeCases = TRUE,
+               newobj = "df.x",
+               datasources = connections)
+               
+  ds.dataFrame(x = c("D$LAB_TSC","D$GENDER","D$PM_BMI_CATEGORICAL","D$PM_BMI_CONTINUOUS"),
+               completeCases = TRUE,
+               newobj = "df.y",
+               datasources = connections) 
+               
+  # Merge data frames using the common variable "LAB_TSC"
+               
+  ds.merge(x.name = "df.x",
+           y.name = "df.y",
+           by.x.names = "df.x$LAB_TSC",
+           by.y.names = "df.y$LAB_TSC",
+           all.x = TRUE,
+           all.y = TRUE,
+           sort = TRUE,
+           suffixes = c(".x", ".y"),
+           no.dups = TRUE,
+           newobj = "df.merge",
+           datasources = connections)              
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
+
 
@@ -232,19 +232,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.message.html b/docs/reference/ds.message.html index f6ae5cf64..bbbb5cd76 100644 --- a/docs/reference/ds.message.html +++ b/docs/reference/ds.message.html @@ -1,10 +1,10 @@ -Returns server-side messages to the client-side — ds.message • dsBaseClientReturns server-side messages to the client-side — ds.message • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,27 +46,27 @@

Returns server-side messages to the client-side

-
ds.message(message.obj.name = NULL, datasources = NULL)
+
ds.message(message.obj.name = NULL, datasources = NULL)

Arguments

-
message.obj.name
-

is a character string specifying the name of the list that + + +

message.obj.name
+

is a character string specifying the name of the list that contains the message.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.message returns a list object from each study, +

ds.message returns a list object from each study, containing the message that has been written by DataSHIELD into $studysideMessage.

@@ -82,7 +82,7 @@

Details

available are designed to be able to write the designated error message to the $serversideMessage object into the list that is saved on the server-side as the primary output of that function. So only valid server-side functions of -DataSHIELD can write a $studysideMessage. The error message is a string that +DataSHIELD can write a $studysideMessage. The error message is a string that cannot exceed a length of nfilter.string a default of 80 characters.

Server function called: messageDS

@@ -93,47 +93,47 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Use a ds.asCharacter assign function to create the message in the server-side
-  
-  ds.asCharacter(x.name = "D$LAB_TRIG", 
-                 newobj = "vector1",
-                 datasources = connections)
-                 
-  #Return the message to the client-side
-  
-  ds.message(message.obj.name = "vector1",
-             datasources = connections)
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Use a ds.asCharacter assign function to create the message in the server-side
+  
+  ds.asCharacter(x.name = "D$LAB_TRIG", 
+                 newobj = "vector1",
+                 datasources = connections)
+                 
+  #Return the message to the client-side
+  
+  ds.message(message.obj.name = "vector1",
+             datasources = connections)
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
 
@@ -144,19 +144,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.metadata.html b/docs/reference/ds.metadata.html index 851aa7492..64330a092 100644 --- a/docs/reference/ds.metadata.html +++ b/docs/reference/ds.metadata.html @@ -1,10 +1,10 @@ -Gets the metadata associated with a variable held on the server — ds.metadata • dsBaseClientGets the metadata associated with a variable held on the server — ds.metadata • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,26 +46,26 @@

Gets the metadata associated with a variable held on the server

-
ds.metadata(x = NULL, datasources = NULL)
+
ds.metadata(x = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the name of the object.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.metadata returns to the client-side the metadata of associated to an object +

ds.metadata returns to the client-side the metadata associated with an object held at the server.

@@ -80,38 +80,38 @@

Author

Examples

-
if (FALSE) {
-
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Example 1: Get the metadata associated with variable 'D'
-  ds.metadata(x = 'D$LAB_TSC', datasources = connections)
-
-  # clear the Datashield R sessions and logout
-  DSI::datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Example 1: Get the metadata associated with variable 'D'
+  ds.metadata(x = 'D$LAB_TSC', datasources = connections)
+
+  # clear the Datashield R sessions and logout
+  DSI::datashield.logout(connections)
+} # }
+
 
@@ -122,19 +122,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.mice.html b/docs/reference/ds.mice.html index c01a6c9d9..2bc5ccc73 100644 --- a/docs/reference/ds.mice.html +++ b/docs/reference/ds.mice.html @@ -1,17 +1,17 @@ -Multivariate Imputation by Chained Equations — ds.mice • dsBaseClientMultivariate Imputation by Chained Equations — ds.mice • dsBaseClient - +
@@ -37,110 +37,110 @@
- +
-

This function calls the miceDS that is a wrapper function of the mice +

This function calls the miceDS that is a wrapper function of the mice from the mice R package. The function creates multiple imputations (replacement values) -for multivariate missing data. The method is based on Fully Conditional Specification, +for multivariate missing data. The method is based on Fully Conditional Specification, where each incomplete variable is imputed by a separate model. The MICE algorithm can impute mixes of continuous, binary, unordered categorical and ordered categorical data. In addition, MICE can impute continuous two-level data, and maintain consistency between imputations by means of passive imputation. It is recommended that the imputation is -done in each datasource separately. Otherwise the user should make sure that the input +done in each datasource separately. Otherwise the user should make sure that the input data have the same columns in all datasources and in the same order.

-
ds.mice(
-  data = NULL,
-  m = 5,
-  maxit = 5,
-  method = NULL,
-  predictorMatrix = NULL,
-  post = NULL,
-  seed = NA,
-  newobj_mids = NULL,
-  newobj_df = NULL,
-  datasources = NULL
-)
+
ds.mice(
+  data = NULL,
+  m = 5,
+  maxit = 5,
+  method = NULL,
+  predictorMatrix = NULL,
+  post = NULL,
+  seed = NA,
+  newobj_mids = NULL,
+  newobj_df = NULL,
+  datasources = NULL
+)

Arguments

-
data
+ + +
data

a data frame or a matrix containing the incomplete data.

-
m
+
m

Number of multiple imputations. The default is m=5.

-
maxit
+
maxit

A scalar giving the number of iterations. The default is 5.

-
method
-

Can be either a single string, or a vector of strings with length -ncol(data), specifying the imputation method to be used for each column in data. If -specified as a single string, the same method will be used for all blocks. The default -imputation method (when no argument is specified) depends on the measurement level of -the target column, as regulated by the defaultMethod argument in native R mice function. +

method
+

Can be either a single string, or a vector of strings with length +ncol(data), specifying the imputation method to be used for each column in data. If +specified as a single string, the same method will be used for all blocks. The default +imputation method (when no argument is specified) depends on the measurement level of +the target column, as regulated by the defaultMethod argument in native R mice function. Columns that need not be imputed have the empty method "".

-
predictorMatrix
-

A numeric matrix of ncol(data) rows and ncol(data) columns, +

predictorMatrix
+

A numeric matrix of ncol(data) rows and ncol(data) columns, containing 0/1 data specifying the set of predictors to be used for each target column. -Each row corresponds to a variable to be imputed. A value of 1 means that the column +Each row corresponds to a variable to be imputed. A value of 1 means that the column variable is used as a predictor for the target variables (in the rows). By default, the -predictorMatrix is a square matrix of ncol(data) rows and columns with all 1's, except +predictorMatrix is a square matrix of ncol(data) rows and columns with all 1's, except for the diagonal.

-
post
-

A vector of strings with length ncol(data) specifying expressions as strings. -Each string is parsed and executed within the sampler() function to post-process imputed -values during the iterations. The default is a vector of empty strings, indicating no +

post
+

A vector of strings with length ncol(data) specifying expressions as strings. +Each string is parsed and executed within the sampler() function to post-process imputed +values during the iterations. The default is a vector of empty strings, indicating no post-processing. Multivariate (block) imputation methods ignore the post parameter.

-
seed
+
seed

either NA (default) or "fixed". If seed is set to "fixed" then a fixed seed random number generator which is study-specific is used.

-
newobj_mids
+
newobj_mids

a character string that provides the name for the output mids object that is stored on the data servers. Default mids_object.

-
newobj_df
-

a character string that provides the name for the output dataframes -that are stored on the data servers. Default imputationSet. For example, if m=5, and +

newobj_df
+

a character string that provides the name for the output dataframes +that are stored on the data servers. Default imputationSet. For example, if m=5, and newobj_df="imputationSet", then five imputed dataframes are saved on the servers with names imputationSet.1, imputationSet.2, imputationSet.3, imputationSet.4, imputationSet.5.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

a list with three elements: the method, the predictorMatrix and the post.

+

a list with three elements: the method, the predictorMatrix and the post.

Details

@@ -160,19 +160,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.names.html b/docs/reference/ds.names.html index 83ceb4e69..aa35230ec 100644 --- a/docs/reference/ds.names.html +++ b/docs/reference/ds.names.html @@ -1,9 +1,9 @@ -Return the names of a list object — ds.names • dsBaseClientReturn the names of a list object — ds.names • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,16 +44,18 @@

Return the names of a list object

-
ds.names(xname = NULL, datasources = NULL)
+
ds.names(xname = NULL, datasources = NULL)

Arguments

-
xname
+ + +
xname

a character string specifying the name of the list.

-
datasources
+
datasources

a list of DSConnection-class objects obtained after login that represent the particular data sources (studies) to be addressed by the function call. If the datasources @@ -63,9 +65,7 @@

Arguments

Value

- - -

ds.names returns to the client-side the names +

ds.names returns to the client-side the names of a list object stored on the server-side.

@@ -75,7 +75,7 @@

Details

for example, it only works to extract names that already exist, not to create new names for objects. The function is restricted to objects of type list, but this includes objects that have a primary class other than list but which -return TRUE to the native R function is.list. As an example this includes +return TRUE to the native R function is.list. As an example this includes the multi-component object created by fitting a generalized linear model using ds.glmSLMA. The resultant object saved on each server separately is formally of class "glm" and "ls" but responds TRUE to is.list(),

@@ -88,50 +88,50 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
- 
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1",
-                 url = "http://192.168.56.100:8080/",
-                 user = "administrator", password = "datashield_test&",
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2",
-                 url = "http://192.168.56.100:8080/",
-                 user = "administrator", password = "datashield_test&",
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/",
-                 user = "administrator", password = "datashield_test&",
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
- 
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
- 
-  #Create a list in the server-side
- 
-  ds.asList(x.name = "D",
-            newobj = "D.list",
-            datasources = connections)
-           
-  #Get the names of the list
- 
-  ds.names(xname = "D.list",
-           datasources = connections)
- 
- 
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+ 
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1",
+                 url = "http://192.168.56.100:8080/",
+                 user = "administrator", password = "datashield_test&",
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2",
+                 url = "http://192.168.56.100:8080/",
+                 user = "administrator", password = "datashield_test&",
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/",
+                 user = "administrator", password = "datashield_test&",
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+ 
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
+ 
+  #Create a list in the server-side
+ 
+  ds.asList(x.name = "D",
+            newobj = "D.list",
+            datasources = connections)
+           
+  #Get the names of the list
+ 
+  ds.names(xname = "D.list",
+           datasources = connections)
+ 
+ 
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+
+} # }
+
 
@@ -142,19 +142,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.ns.html b/docs/reference/ds.ns.html index 5b8587067..1a30398fe 100644 --- a/docs/reference/ds.ns.html +++ b/docs/reference/ds.ns.html @@ -1,11 +1,11 @@ -Generate a Basis Matrix for Natural Cubic Splines — ds.ns • dsBaseClientGenerate a Basis Matrix for Natural Cubic Splines — ds.ns • dsBaseClient - +
@@ -31,13 +31,13 @@
- +
@@ -48,61 +48,61 @@

Generate a Basis Matrix for Natural Cubic Splines

-
ds.ns(
-  x,
-  df = NULL,
-  knots = NULL,
-  intercept = FALSE,
-  Boundary.knots = NULL,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.ns(
+  x,
+  df = NULL,
+  knots = NULL,
+  intercept = FALSE,
+  Boundary.knots = NULL,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

the predictor variable. Missing values are allowed.

-
df
-

degrees of freedom. One can supply df rather than knots; ns() then chooses +

df
+

degrees of freedom. One can supply df rather than knots; ns() then chooses df - 1 - intercept knots at suitably chosen quantiles of x (which will ignore missing values). The default, df = NULL, sets the number of inner knots as length(knots).

-
knots
+
knots

breakpoints that define the spline. The default is no knots; together with the natural boundary conditions this results in a basis for linear regression on x. Typical values are the mean or median for one knot, quantiles for more knots. See also Boundary.knots.

-
intercept
+
intercept

if TRUE, an intercept is included in the basis; default is FALSE.

-
Boundary.knots
+
Boundary.knots

boundary points at which to impose the natural boundary conditions and anchor the B-spline basis (default the range of the data). If both knots and Boundary.knots are supplied, the basis parameters do not depend on x. Data can extend beyond Boundary.knots.

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default ns.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

A matrix of dimension length(x) * df where either df was supplied or if knots were +

A matrix of dimension length(x) * df where either df was supplied or if knots were supplied, df = length(knots) + 1 + intercept. Attributes are returned that correspond to the arguments to ns, and explicitly give the knots, Boundary.knots etc for use by predict.ns(). The object is assigned at each serverside.

@@ -114,7 +114,7 @@

Details

sequence of interior knots, and the natural boundary conditions. These enforce the constraint that the function is linear beyond the boundary knots, which can either be supplied or default to the extremes of the data. -A primary use is in modeling formula to directly specify a natural spline term in a model.

+A primary use is in modelling formula to directly specify a natural spline term in a model.

Author

@@ -129,19 +129,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.numNA.html b/docs/reference/ds.numNA.html index 5cf5381a9..7cfea2cac 100644 --- a/docs/reference/ds.numNA.html +++ b/docs/reference/ds.numNA.html @@ -1,10 +1,10 @@ -Gets the number of missing values in a server-side vector — ds.numNA • dsBaseClientGets the number of missing values in a server-side vector — ds.numNA • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,26 +46,26 @@

Gets the number of missing values in a server-side vector

-
ds.numNA(x = NULL, datasources = NULL)
+
ds.numNA(x = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the name of the vector.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.numNA returns to the client-side the number of missing values +

ds.numNA returns to the client-side the number of missing values in a server-side vector.

@@ -80,43 +80,43 @@

Author

Examples

-
if (FALSE) {
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Get the number of missing values on a server-side vector 
-  
-  ds.numNA(x = "D$LAB_TSC",
-           datasources = connections)
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-
-
-}
-
+    
if (FALSE) { # \dontrun{
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Get the number of missing values on a server-side vector 
+  
+  ds.numNA(x = "D$LAB_TSC",
+           datasources = connections)
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+
+
+} # }
+
 
@@ -127,19 +127,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.qlspline.html b/docs/reference/ds.qlspline.html index b34a86551..b5039387a 100644 --- a/docs/reference/ds.qlspline.html +++ b/docs/reference/ds.qlspline.html @@ -1,12 +1,12 @@ -Basis for a piecewise linear spline with meaningful coefficients — ds.qlspline • dsBaseClientBasis for a piecewise linear spline with meaningful coefficients — ds.qlspline • dsBaseClient - +
@@ -32,13 +32,13 @@
- +
@@ -50,57 +50,57 @@

Basis for a piecewise linear spline with meaningful coefficients

-
ds.qlspline(
-  x,
-  q,
-  na.rm = TRUE,
-  marginal = FALSE,
-  names = NULL,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.qlspline(
+  x,
+  q,
+  na.rm = TRUE,
+  marginal = FALSE,
+  names = NULL,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

the name of the input numeric variable

-
q
+
q

numeric, a single scalar greater or equal to 2 for a number of equal-frequency intervals along x or a vector of numbers in (0; 1) specifying the quantiles explicitly.

-
na.rm
+
na.rm

logical, whether NA should be removed when calculating quantiles, passed to na.rm of quantile. Default set to TRUE

-
marginal
-

logical, how to parametrize the spline, see Details

+
marginal
+

logical, how to parametrise the spline, see Details

-
names
+
names

character, vector of names for constructed variables

-
newobj
-

a character string that provides the name for the output +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default qlspline.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

an object of class "lspline" and "matrix", which its name is specified by the +

an object of class "lspline" and "matrix", whose name is specified by the newobj argument (or its default name "qlspline.newobj"), is assigned on the serverside.

@@ -110,7 +110,7 @@

Details

the slope of the first segment. The consecutive coefficients correspond to the change in slope as compared to the previous segment. Function qlspline wraps lspline and calculates the knot positions to be at quantiles -of x. If q is a numerical scalar greater or equal to 2, the quantiles are computed at +of x. If q is a numerical scalar greater or equal to 2, the quantiles are computed at seq(0, 1, length.out = q + 1)[-c(1, q+1)], i.e. knots are at q-tiles of the distribution of x. Alternatively, q can be a vector of values in [0; 1] specifying the quantile probabilities directly (the vector is passed to argument probs of quantile).

@@ -128,19 +128,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.quantileMean.html b/docs/reference/ds.quantileMean.html index e2edef25c..7f935afb7 100644 --- a/docs/reference/ds.quantileMean.html +++ b/docs/reference/ds.quantileMean.html @@ -1,10 +1,10 @@ -Computes the quantiles of a server-side variable — ds.quantileMean • dsBaseClientComputes the quantiles of a server-side variable — ds.quantileMean • dsBaseClient - +
@@ -30,48 +30,48 @@
- +
-

This function calculates the mean and quantile values of a +

This function calculates the mean and quantile values of a server-side quantitative variable.

-
ds.quantileMean(x = NULL, type = "combine", datasources = NULL)
+
ds.quantileMean(x = NULL, type = "combine", datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the name of the numeric vector.

-
type
+
type

a character that represents the type of graph to display. This can be set as 'combine' or 'split'. For more information see Details.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.quantileMean returns to the client-side the quantiles and statistical mean +

ds.quantileMean returns to the client-side the quantiles and statistical mean of a server-side numeric vector.

@@ -96,46 +96,46 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Get the quantiles and mean of a server-side variable
-  
-  ds.quantileMean(x = "D$LAB_TRIG",
-                  type = "combine",
-                  datasources = connections)
-  
-  
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Get the quantiles and mean of a server-side variable
+  
+  ds.quantileMean(x = "D$LAB_TRIG",
+                  type = "combine",
+                  datasources = connections)
+  
+  
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+
+
+} # }
+
 
@@ -146,19 +146,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.rBinom.html b/docs/reference/ds.rBinom.html index f27111f91..ea94d67c0 100644 --- a/docs/reference/ds.rBinom.html +++ b/docs/reference/ds.rBinom.html @@ -1,10 +1,10 @@ -Generates Binomial distribution in the server-side — ds.rBinom • dsBaseClientGenerates Binomial distribution in the server-side — ds.rBinom • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,90 +46,90 @@

Generates Binomial distribution in the server-side

-
ds.rBinom(
-  samp.size = 1,
-  size = 0,
-  prob = 1,
-  newobj = NULL,
-  seed.as.integer = NULL,
-  return.full.seed.as.set = FALSE,
-  datasources = NULL
-)
+
ds.rBinom(
+  samp.size = 1,
+  size = 0,
+  prob = 1,
+  newobj = NULL,
+  seed.as.integer = NULL,
+  return.full.seed.as.set = FALSE,
+  datasources = NULL
+)

Arguments

-
samp.size
-

an integer value or an integer vector that defines the length of + + +

samp.size
+

an integer value or an integer vector that defines the length of the random numeric vector to be created in each source.

-
size
+
size

a positive integer that specifies the number of Bernoulli trials.

-
prob
+
prob

a numeric scalar value or vector in range 0 > prob > 1 which specifies the probability of a positive response (i.e. 1 rather than 0).

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default rbinom.newobj.

-
seed.as.integer
-

an integer or a NULL value which provides the +

seed.as.integer
+

an integer or a NULL value which provides the random seed in each data source.

-
return.full.seed.as.set
-

logical, if TRUE will return the full random number seed -in each data source (a numeric vector of length 626). If FALSE it will only return the +

return.full.seed.as.set
+

logical, if TRUE will return the full random number seed +in each data source (a numeric vector of length 626). If FALSE it will only return the trigger seed value you have provided. Default is FALSE.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.rBinom returns random number vectors -with a Binomial distribution for each study, +

ds.rBinom returns random number vectors +with a Binomial distribution for each study, taking into account the values specified in each parameter of the function. -The output vector is written to the server-side. -If requested, it also returned to the client-side the full 626 lengths -random seed vector generated in each source +The output vector is written to the server-side. +If requested, it also returned to the client-side the full 626 lengths +random seed vector generated in each source (see info for the argument return.full.seed.as.set).

Details

-

Creates a vector of random or pseudorandom non-negative integer values -distributed with a Binomial distribution. The ds.rBinom function's arguments specify -the number of trials, the success probability, the length and the seed of the output +

Creates a vector of random or pseudorandom non-negative integer values +distributed with a Binomial distribution. The ds.rBinom function's arguments specify +the number of trials, the success probability, the length and the seed of the output vector in each source.

-

To specify a different size in each source, you can use a character vector -(..., size="vector.of.sizes"...) +

To specify a different size in each source, you can use a character vector +(..., size="vector.of.sizes"...) or the datasources parameter to create the random vector for one source at a time, changing size as required. The default value for size = 1 which simulates binary outcomes (all observations 0 or 1).

-

To specify different prob in each source, you can use an integer or character vector -(..., prob="vector.of.probs"...) or the datasources parameter to create the random +

To specify different prob in each source, you can use an integer or character vector +(..., prob="vector.of.probs"...) or the datasources parameter to create the random vector for one source at a time, changing prob as required.

-

If seed.as.integer is an integer -e.g. 5 and there is more than one source (N) the seed is set as 5*N. -For example, in the first study the seed is set as 938*1, -in the second as 938*2 +

If seed.as.integer is an integer +e.g. 5 and there is more than one source (N) the seed is set as 5*N. +For example, in the first study the seed is set as 938*1, +in the second as 938*2 up to 938*N in the Nth study.

If seed.as.integer is set as 0 all sources will start with the seed value -0 and all the random number generators will, therefore, start from the same position. +0 and all the random number generators will, therefore, start from the same position. Besides, to use the same starting seed in all studies but do not wish it to -be 0, you can use datasources argument to generate the random number +be 0, you can use datasources argument to generate the random number vectors one source at a time.

Server functions called: rBinomDS and setSeedDS.

@@ -140,54 +140,54 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  #Generating the vectors in the Opal servers
-  ds.rBinom(samp.size=c(13,20,25), #the length of the vector created in each source is different
-  size=as.character(c(10,23,5)),   #Bernoulli trials change in each source 
-  prob=c(0.6,0.1,0.5), #Probability  changes in each source 
-  newobj="Binom.dist", 
-  seed.as.integer=45, 
-  return.full.seed.as.set=FALSE,
-  datasources=connections)   #all the Opal servers are used, in this case 3 
-                             #(see above the connection to the servers) 
-
-  ds.rBinom(samp.size=15,    
-            size=4,          
-            prob=0.7, 
-            newobj="Binom.dist", 
-            seed.as.integer=324, 
-            return.full.seed.as.set=FALSE, 
-            datasources=connections[2]) #only the second  Opal server is used ("study2")
-            
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-}
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  #Generating the vectors in the Opal servers
+  ds.rBinom(samp.size=c(13,20,25), #the length of the vector created in each source is different
+  size=as.character(c(10,23,5)),   #Bernoulli trials change in each source 
+  prob=c(0.6,0.1,0.5), #Probability  changes in each source 
+  newobj="Binom.dist", 
+  seed.as.integer=45, 
+  return.full.seed.as.set=FALSE,
+  datasources=connections)   #all the Opal servers are used, in this case 3 
+                             #(see above the connection to the servers) 
+
+  ds.rBinom(samp.size=15,    
+            size=4,          
+            prob=0.7, 
+            newobj="Binom.dist", 
+            seed.as.integer=324, 
+            return.full.seed.as.set=FALSE, 
+            datasources=connections[2]) #only the second  Opal server is used ("study2")
+            
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+} # }
 
@@ -198,19 +198,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.rNorm.html b/docs/reference/ds.rNorm.html index 9a9707e23..bdddcf80f 100644 --- a/docs/reference/ds.rNorm.html +++ b/docs/reference/ds.rNorm.html @@ -1,10 +1,10 @@ -Generates Normal distribution in the server-side — ds.rNorm • dsBaseClientGenerates Normal distribution in the server-side — ds.rNorm • dsBaseClient - +
@@ -30,117 +30,117 @@
- +
-

Generates normally distributed random (pseudorandom) scalar numbers. +

Generates normally distributed random (pseudorandom) scalar numbers. Besides, ds.rNorm allows creating different vector lengths in each server.

-
ds.rNorm(
-  samp.size = 1,
-  mean = 0,
-  sd = 1,
-  newobj = "newObject",
-  seed.as.integer = NULL,
-  return.full.seed.as.set = FALSE,
-  force.output.to.k.decimal.places = 9,
-  datasources = NULL
-)
+
ds.rNorm(
+  samp.size = 1,
+  mean = 0,
+  sd = 1,
+  newobj = "newObject",
+  seed.as.integer = NULL,
+  return.full.seed.as.set = FALSE,
+  force.output.to.k.decimal.places = 9,
+  datasources = NULL
+)

Arguments

-
samp.size
+ + +
samp.size

an integer value or an integer vector that defines the length of the random numeric vector to be created in each source.

-
mean
+
mean

the mean value or vector of the Normal distribution to be created.

-
sd
+
sd

the standard deviation of the Normal distribution to be created.

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default newObject.

-
seed.as.integer
-

an integer +

seed.as.integer
+

an integer or a NULL value which provides the random seed in each data source.

-
return.full.seed.as.set
-

logical, if TRUE will returns the full random number -seed in each data source (a numeric vector of length 626). -If FALSE it will only return the trigger seed value you have provided. +

return.full.seed.as.set
+

logical, if TRUE will returns the full random number +seed in each data source (a numeric vector of length 626). +If FALSE it will only return the trigger seed value you have provided. Default is FALSE.

-
force.output.to.k.decimal.places
-

an integer vector that +

force.output.to.k.decimal.places
+

an integer vector that forces the output random numbers vector to have k decimals.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.rNorm returns random number vectors with a normal distribution for each -study, taking into account the values specified in each parameter of the function. +

ds.rNorm returns random number vectors with a normal distribution for each +study, taking into account the values specified in each parameter of the function. The output vector is written to the server-side. If requested, it also returned to the client-side the full 626 lengths random seed vector generated in each source (see info for the argument return.full.seed.as.set).

Details

-

Creates a vector of pseudorandom numbers distributed -with a Normal distribution in each data source. -The ds.rNorm function's arguments specify the mean and the standard deviation -(sd) of the normal distribution and -the length and the seed of the output vector in each source.

+

Creates a vector of pseudorandom numbers distributed +with a Normal distribution in each data source. +The ds.rNorm function's arguments specify the mean and the standard deviation +(sd) of the normal distribution and +the length and the seed of the output vector in each source.

To specify a different mean value in each source, you can use a character vector (..., mean="vector.of.means"...) -or the datasources parameter to create the random vector for one source at a time, +or the datasources parameter to create the random vector for one source at a time, changing the mean as required. -Default value for mean = 0.

-

To specify different sd value in each source, +Default value for mean = 0.

+

To specify different sd value in each source, you can use a character vector (..., sd="vector.of.sds"... -or the datasources parameter to create the random vector for one source at a time, +or the datasources parameter to create the random vector for one source at a time, changing the <mean> as required. -Default value for sd = 0.

-

If seed.as.integer is an integer -e.g. 5 and there is more than one source (N) the seed is set as 5*N. -For example, in the first study the seed is set as 938*1, -in the second as 938*2 +Default value for sd = 0.

+

If seed.as.integer is an integer +e.g. 5 and there is more than one source (N) the seed is set as 5*N. +For example, in the first study the seed is set as 938*1, +in the second as 938*2 up to 938*N in the Nth study.

If seed.as.integer is set as 0 all sources will start with the seed value -0 and all the random number generators will, therefore, start from the same position. +0 and all the random number generators will, therefore, start from the same position. Also, to use the same starting seed in all studies but do not wish it to -be 0, you can use datasources argument to generate the random number +be 0, you can use datasources argument to generate the random number vectors one source at a time.

-

In force.output.to.k.decimal.places the range of k is 1-8 decimals. -If k = 0 the output random numbers are forced to integer. -If k = 9, no rounding of output numbers occurs. +

In force.output.to.k.decimal.places the range of k is 1-8 decimals. +If k = 0 the output random numbers are forced to integer. +If k = 9, no rounding of output numbers occurs. The default value of force.output.to.k.decimal.places = 9.

Server functions called: rNormDS and setSeedDS.

@@ -151,58 +151,58 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  # Generating the vectors in the Opal servers
-
-  ds.rNorm(samp.size=c(10,20,45), #the length of the vector created in each source is different 
-           mean=c(1,6,4),         #the mean of the Normal distribution changes in each server
-           sd=as.character(c(1,4,3)), #the sd of the Normal distribution changes in each server
-           newobj="Norm.dist",
-           seed.as.integer=2345, 
-           return.full.seed.as.set=FALSE,
-           force.output.to.k.decimal.places=c(4,5,6), #output random numbers have different 
-                                                      #decimal quantity in each source 
-           datasources=connections) #all the Opal servers are used, in this case 3 
-                                    #(see above the connection to the servers) 
-  
-  ds.rNorm(samp.size=10,
-           mean=1.4,
-           sd=0.2, 
-           newobj="Norm.dist", 
-           seed.as.integer=2345,
-           return.full.seed.as.set=FALSE,
-           force.output.to.k.decimal.places=1,
-           datasources=connections[2]) #only the second  Opal server is used ("study2")
-           
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-}
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  # Generating the vectors in the Opal servers
+
+  ds.rNorm(samp.size=c(10,20,45), #the length of the vector created in each source is different 
+           mean=c(1,6,4),         #the mean of the Normal distribution changes in each server
+           sd=as.character(c(1,4,3)), #the sd of the Normal distribution changes in each server
+           newobj="Norm.dist",
+           seed.as.integer=2345, 
+           return.full.seed.as.set=FALSE,
+           force.output.to.k.decimal.places=c(4,5,6), #output random numbers have different 
+                                                      #decimal quantity in each source 
+           datasources=connections) #all the Opal servers are used, in this case 3 
+                                    #(see above the connection to the servers) 
+  
+  ds.rNorm(samp.size=10,
+           mean=1.4,
+           sd=0.2, 
+           newobj="Norm.dist", 
+           seed.as.integer=2345,
+           return.full.seed.as.set=FALSE,
+           force.output.to.k.decimal.places=1,
+           datasources=connections[2]) #only the second  Opal server is used ("study2")
+           
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+} # }
 
@@ -213,19 +213,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.rPois.html b/docs/reference/ds.rPois.html index dc2160f4e..cd46f0a0f 100644 --- a/docs/reference/ds.rPois.html +++ b/docs/reference/ds.rPois.html @@ -1,11 +1,11 @@ -Generates Poisson distribution in the server-side — ds.rPois • dsBaseClientGenerates Poisson distribution in the server-side — ds.rPois • dsBaseClient - +
@@ -31,98 +31,98 @@
- +

Generates random (pseudorandom) non-negative integers -with a Poisson distribution. +with a Poisson distribution. Besides, ds.rPois allows creating different vector lengths in each server.

-
ds.rPois(
-  samp.size = 1,
-  lambda = 1,
-  newobj = "newObject",
-  seed.as.integer = NULL,
-  return.full.seed.as.set = FALSE,
-  datasources = NULL
-)
+
ds.rPois(
+  samp.size = 1,
+  lambda = 1,
+  newobj = "newObject",
+  seed.as.integer = NULL,
+  return.full.seed.as.set = FALSE,
+  datasources = NULL
+)

Arguments

-
samp.size
+ + +
samp.size

an integer value or an integer vector that defines the length of the random numeric vector to be created in each source.

-
lambda
+
lambda

the number of events mean per interval.

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default newObject.

-
seed.as.integer
+
seed.as.integer

an integer or a NULL value which provides the random seed in each data source.

-
return.full.seed.as.set
+
return.full.seed.as.set

logical, if TRUE will return the full random number seed in each data source (a numeric vector of length 626). If -FALSE it will only return the trigger seed value you have provided. +FALSE it will only return the trigger seed value you have provided. Default is FALSE.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.rPois returns random number vectors with a Poisson distribution for each study, -taking into account the values specified in each parameter of the function. -The created vectors are stored in the server-side. +

ds.rPois returns random number vectors with a Poisson distribution for each study, +taking into account the values specified in each parameter of the function. +The created vectors are stored in the server-side. If requested, it also returned to the client-side the full -626 lengths random seed vector generated in each source +626 lengths random seed vector generated in each source (see info for the argument return.full.seed.as.set).

Details

-

Creates a vector of random or pseudorandom non-negative integer values -distributed with a Poisson distribution in each data source. -The ds.rPois function's arguments specify lambda, +

Creates a vector of random or pseudorandom non-negative integer values +distributed with a Poisson distribution in each data source. +The ds.rPois function's arguments specify lambda, the length and the seed of the output vector in each source.

-

To specify different lambda value in each source, you can use a character vector +

To specify different lambda value in each source, you can use a character vector (..., lambda = "vector.of.lambdas"...) or the datasources -parameter to create the random vector for one source at a time, +parameter to create the random vector for one source at a time, changing lambda as required. Default value for lambda> = 1.

-

If seed.as.integer is an integer -e.g. 5 and there is more than one source (N) the seed is set as 5*N. -For example, in the first study the seed is set as 938*1, -in the second as 938*2 +

If seed.as.integer is an integer +e.g. 5 and there is more than one source (N) the seed is set as 5*N. +For example, in the first study the seed is set as 938*1, +in the second as 938*2 up to 938*N in the Nth study.

If seed.as.integer is set as 0 all sources will start with the seed value -0 and all the random number generators will, therefore, start from the same position. +0 and all the random number generators will, therefore, start from the same position. Also, to use the same starting seed in all studies but do not wish it to -be 0, you can use datasources argument to generate the random number +be 0, you can use datasources argument to generate the random number vectors one source at a time.

Server functions called: rPoisDS and setSeedDS.

@@ -133,51 +133,51 @@

Author

Examples

-

-if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki 
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  # Generating the vectors in the Opal servers
-  ds.rPois(samp.size=c(13,20,25), #the length of the vector created in each source is different
-          lambda=as.character(c(2,3,4)), #different mean per interval (2,3,4) in each source
-          newobj="Pois.dist",                   
-          seed.as.integer=1234,         
-          return.full.seed.as.set=FALSE, 
-          datasources=connections)  #all the Opal servers are used, in this case 3 
-                                    #(see above the connection to the servers) 
-  ds.rPois(samp.size=13,                
-          lambda=5,
-          newobj="Pois.dist", 
-          seed.as.integer=1234, 
-          return.full.seed.as.set=FALSE, 
-          datasources=connections[1])  #only the first Opal server is used ("study1")
-        
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-}
+    

+if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki 
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  # Generating the vectors in the Opal servers
+  ds.rPois(samp.size=c(13,20,25), #the length of the vector created in each source is different
+          lambda=as.character(c(2,3,4)), #different mean per interval (2,3,4) in each source
+          newobj="Pois.dist",                   
+          seed.as.integer=1234,         
+          return.full.seed.as.set=FALSE, 
+          datasources=connections)  #all the Opal servers are used, in this case 3 
+                                    #(see above the connection to the servers) 
+  ds.rPois(samp.size=13,                
+          lambda=5,
+          newobj="Pois.dist", 
+          seed.as.integer=1234, 
+          return.full.seed.as.set=FALSE, 
+          datasources=connections[1])  #only the first Opal server is used ("study1")
+        
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+} # }
 
@@ -188,19 +188,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.rUnif.html b/docs/reference/ds.rUnif.html index e56d93198..5823f42ff 100644 --- a/docs/reference/ds.rUnif.html +++ b/docs/reference/ds.rUnif.html @@ -1,10 +1,10 @@ -Generates Uniform distribution in the server-side — ds.rUnif • dsBaseClientGenerates Uniform distribution in the server-side — ds.rUnif • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,106 +46,106 @@

Generates Uniform distribution in the server-side

-
ds.rUnif(
-  samp.size = 1,
-  min = 0,
-  max = 1,
-  newobj = "newObject",
-  seed.as.integer = NULL,
-  return.full.seed.as.set = FALSE,
-  force.output.to.k.decimal.places = 9,
-  datasources = NULL
-)
+
ds.rUnif(
+  samp.size = 1,
+  min = 0,
+  max = 1,
+  newobj = "newObject",
+  seed.as.integer = NULL,
+  return.full.seed.as.set = FALSE,
+  force.output.to.k.decimal.places = 9,
+  datasources = NULL
+)

Arguments

-
samp.size
-

an integer value or an integer vector that defines the + + +

samp.size
+

an integer value or an integer vector that defines the length of the random numeric vector to be created in each source.

-
min
-

a numeric scalar that specifies the minimum value of the +

min
+

a numeric scalar that specifies the minimum value of the random numbers in the distribution.

-
max
-

a numeric scalar that specifies the maximum value of the +

max
+

a numeric scalar that specifies the maximum value of the random numbers in the distribution.

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default newObject.

-
seed.as.integer
-

an integer or a NULL value which provides the random +

seed.as.integer
+

an integer or a NULL value which provides the random seed in each data source.

-
return.full.seed.as.set
-

logical, if TRUE will return the full random number -seed in each data source (a numeric vector of length 626). If FALSE it will only +

return.full.seed.as.set
+

logical, if TRUE will return the full random number +seed in each data source (a numeric vector of length 626). If FALSE it will only return the trigger seed value you have provided. Default is FALSE.

-
force.output.to.k.decimal.places
-

an integer or -an integer vector that forces the output random +

force.output.to.k.decimal.places
+

an integer or +an integer vector that forces the output random numbers vector to have k decimals.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.Unif returns random number vectors with a uniform distribution for each study, +

ds.Unif returns random number vectors with a uniform distribution for each study, taking into account the values specified in each parameter of the function. -The created vectors are stored in the server-side. If requested, it also returned to the +The created vectors are stored in the server-side. If requested, it also returned to the client-side the full 626 lengths random seed vector generated in each source (see info for the argument return.full.seed.as.set).

Details

-

It creates a vector of pseudorandom numbers distributed -with a uniform probability in each data source. -The ds.Unif function's arguments specify -the minimum and maximum of the uniform distribution +

It creates a vector of pseudorandom numbers distributed +with a uniform probability in each data source. +The ds.Unif function's arguments specify +the minimum and maximum of the uniform distribution and the length and the seed of the output vector in each source.

-

To specify different min values in each source, +

To specify different min values in each source, you can use a character vector (..., min="vector.of.mins"...) -or the datasources parameter to create the random vector for one source at a time, +or the datasources parameter to create the random vector for one source at a time, changing the min value as required. Default value for min = 0.

-

To specify different max values in each source, +

To specify different max values in each source, you can use a character vector (..., max="vector.of.maxs"...) -or the datasources parameter to create the random vector for one source at a time, +or the datasources parameter to create the random vector for one source at a time, changing the max value as required. Default value for max = 1.

-

If seed.as.integer is an integer -e.g. 5 and there is more than one source (N) the seed is set as 5*N. -For example, in the first study the seed is set as 938*1, -in the second as 938*2 +

If seed.as.integer is an integer +e.g. 5 and there is more than one source (N) the seed is set as 5*N. +For example, in the first study the seed is set as 938*1, +in the second as 938*2 up to 938*N in the Nth study.

If seed.as.integer is set as 0 all sources will start with the seed value -0 and all the random number generators will, therefore, start from the same position. +0 and all the random number generators will, therefore, start from the same position. Also, to use the same starting seed in all studies but do not wish it to -be 0, you can use datasources argument to generate +be 0, you can use datasources argument to generate the random number vectors one source at a time.

-

In force.output.to.k.decimal.places the range of k is 1-8 decimals. -If k = 0 the output random numbers are forced to an integer. -If k = 9, no rounding of output numbers occurs. +

In force.output.to.k.decimal.places the range of k is 1-8 decimals. +If k = 0 the output random numbers are forced to an integer. +If k = 9, no rounding of output numbers occurs. The default value of force.output.to.k.decimal.places = 9. If you wish to generate integers with equal probabilities in the range 1-10 -you should specify min = 0.5 and max = 10.5. +you should specify min = 0.5 and max = 10.5. Default value for k = 9.

Server functions called: rUnifDS and setSeedDS.

@@ -156,59 +156,59 @@

Author

Examples

-

-if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  # Generating the vectors in the Opal servers
-
-  ds.rUnif(samp.size = c(12,20,4), #the length of the vector created in each source is different 
-           min = as.character(c(0,2,5)), #different minumum value of the function in each source
-           max = as.character(c(2,5,9)), #different maximum value of the function in each source
-           newobj = "Unif.dist",
-           seed.as.integer = 234,
-           return.full.seed.as.set = FALSE,
-           force.output.to.k.decimal.places = c(1,2,3),
-           datasources = connections)   #all the Opal servers are used, in this case 3 
-                                        #(see above the connection to the servers) 
-
-  ds.rUnif(samp.size = 12,
-           min = 0,
-           max = 2,
-           newobj = "Unif.dist",
-           seed.as.integer = 12345,
-           return.full.seed.as.set = FALSE,
-           force.output.to.k.decimal.places = 2,
-           datasources = connections[2]) #only the second  Opal server is used ("study2")
-           
-  # Clear the Datashield R sessions and logout           
-  datashield.logout(connections)
-}
- 
+    

+if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  # Generating the vectors in the Opal servers
+
+  ds.rUnif(samp.size = c(12,20,4), #the length of the vector created in each source is different 
+           min = as.character(c(0,2,5)), #different minimum value of the function in each source
+           max = as.character(c(2,5,9)), #different maximum value of the function in each source
+           newobj = "Unif.dist",
+           seed.as.integer = 234,
+           return.full.seed.as.set = FALSE,
+           force.output.to.k.decimal.places = c(1,2,3),
+           datasources = connections)   #all the Opal servers are used, in this case 3 
+                                        #(see above the connection to the servers) 
+
+  ds.rUnif(samp.size = 12,
+           min = 0,
+           max = 2,
+           newobj = "Unif.dist",
+           seed.as.integer = 12345,
+           return.full.seed.as.set = FALSE,
+           force.output.to.k.decimal.places = 2,
+           datasources = connections[2]) #only the second  Opal server is used ("study2")
+           
+  # Clear the Datashield R sessions and logout           
+  datashield.logout(connections)
+} # }
+ 
 
@@ -219,19 +219,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.ranksSecure.html b/docs/reference/ds.ranksSecure.html index bed6feb43..963c254e4 100644 --- a/docs/reference/ds.ranksSecure.html +++ b/docs/reference/ds.ranksSecure.html @@ -1,10 +1,10 @@ -Secure ranking of a vector across all sources — ds.ranksSecure • dsBaseClientSecure ranking of a vector across all sources — ds.ranksSecure • dsBaseClient - +
@@ -30,59 +30,61 @@
- +

Securely generate the ranks of a numeric vector and estimate -true qlobal quantiles across all data sources simultaneously

+true global quantiles across all data sources simultaneously

-
ds.ranksSecure(
-  input.var.name = NULL,
-  quantiles.for.estimation = "0.05-0.95",
-  generate.quantiles = TRUE,
-  output.ranks.df = NULL,
-  summary.output.ranks.df = NULL,
-  ranks.sort.by = "ID.orig",
-  shared.seed.value = 10,
-  synth.real.ratio = 2,
-  NA.manage = "NA.delete",
-  rm.residual.objects = TRUE,
-  monitor.progress = FALSE,
-  datasources = NULL
-)
+
ds.ranksSecure(
+  input.var.name = NULL,
+  quantiles.for.estimation = "0.05-0.95",
+  generate.quantiles = TRUE,
+  output.ranks.df = NULL,
+  summary.output.ranks.df = NULL,
+  ranks.sort.by = "ID.orig",
+  shared.seed.value = 10,
+  synth.real.ratio = 2,
+  NA.manage = "NA.delete",
+  rm.residual.objects = TRUE,
+  monitor.progress = FALSE,
+  datasources = NULL
+)

Arguments

-
input.var.name
+ + +
input.var.name

a character string in a format that can pass through the DataSHIELD R parser which specifies the name of the vector to be ranked. Needs to have same name in each data source.

-
quantiles.for.estimation
+
quantiles.for.estimation

one of a restricted set of character strings. To mitigate disclosure risk only the following set of quantiles can be generated: c(0.025,0.05,0.10,0.20,0.25,0.30,0.3333,0.40,0.50,0.60,0.6667, 0.70,0.75,0.80,0.90,0.95,0.975). The allowable formats for the argument are of the general form: "0.025-0.975" where the first number is the lowest -quantile to be estimated and the second number is the equivalent highest +quantile to be estimated and the second number is the equivalent highest quantile to estimate. These two quantiles are then estimated along with all allowable quantiles in between. The allowable argument values are then: "0.025-0.975", "0.05-0.95", "0.10-0.90", "0.20-0.80". Two alternative values are "quartiles" i.e. c(0.25,0.50,0.75), and "median" i.e. c(0.50). The default value is "0.05-0.95". If the sample size is so small that an extreme -quartile could be disclosive the function will be terminated and an error +quartile could be disclosive the function will be terminated and an error message returned telling you that you might try using an argument with a narrower set of quantiles. This disclosure trap will be triggered if the total number of subjects across all studies divided by the total number @@ -90,7 +92,7 @@

Arguments

(the minimum cell size in a contingency table).

-
generate.quantiles
+
generate.quantiles

a logical value indicating whether the ds.ranksSecure function should carry on to estimate the key quantile values specified by argument <quantiles.for.estimation> or should stop @@ -100,7 +102,7 @@

Arguments

of the clusters of values that are being ranked such that some values are treated as being missing and the processing stops, then setting generate.quantiles to FALSE allows the generation of ranks to complete so -they can then be used for non-parameteric analysis, even if the key values +they can then be used for non-parametric analysis, even if the key values cannot be estimated. A real example of an unusual configuration was in a reasonably large dataset of survival times, where a substantial proportion of survival profiles were censored at precisely 10 years. This meant that @@ -114,7 +116,7 @@

Arguments

from "0.025-0.975" to "0.05-0.95".

-
output.ranks.df
+
output.ranks.df

a character string in a format that can pass through the DataSHIELD R parser which specifies an optional name for the data.frame written to the serverside on each data source that contains @@ -126,7 +128,7 @@

Arguments

information and cannot therefore be passed to the clientside.

-
summary.output.ranks.df
+
summary.output.ranks.df

a character string in a format that can pass through the DataSHIELD R parser which specifies an optional name for the summary data.frame written to the serverside on each data source that contains @@ -138,24 +140,24 @@

Arguments

information and cannot therefore be passed to the clientside.

-
ranks.sort.by
+
ranks.sort.by

a character string taking two possible values. These are "ID.orig" and "vals.orig". These define the order in which the output.ranks.df and summary.output.ranks.df data frames are presented. If the argument is set as "ID.orig" the order of rows in the output data frames are precisely the same as the order of original input vector that is being ranked (i.e. V2BR). This means the ranks can simply be cbinded to the -matrix, data frame or tibble that originally included V2BR so it also +matrix, data frame or tibble that originally included V2BR so it also includes the corresponding ranks. If it is set as "vals.orig" the output data frames are in order of increasing magnitude of the original values of V2BR. Default value is "ID.orig".

-
shared.seed.value
+
shared.seed.value

an integer value which is used to set the random seed generator in each study. Initially, the seed is set to be the same in all studies, so the order and parameters of the repeated -encryption procedures are precisely the same in each study. Then a +encryption procedures are precisely the same in each study. Then a study-specific modification of the seed in each study ensures that the procedures initially generating the masking pseudodata (which are then subject to the same encryption procedures as the real data) are different @@ -164,17 +166,17 @@

Arguments

header document.

-
synth.real.ratio
+
synth.real.ratio

an integer value specifying the ratio between the number of masking pseudodata values generated in each study compared to the number of real data values in V2BR.

-
NA.manage
+
NA.manage

character string taking three possible values: "NA.delete", "NA.low","NA.hi". This argument determines how missing values are managed before ranking. "NA.delete" results in all missing values being removed -prior to ranking. This means that the vector of ranks in each study is +prior to ranking. This means that the vector of ranks in each study is shorter than the original vector of V2BR values by an amount corresponding to the number of missing values in V2BR in that study. Any rows containing missing values in V2BR are simply removed before the ranking procedure is @@ -185,7 +187,7 @@

Arguments

for example, that if there are a total of M values of V2BR that are missing across all studies, there will be a total of M observations that are ranked lowest each with a rank of (M+1)/2. So if 7 are missing the lowest 7 ranks -will be 4,4,4,4,4,4,4 and if 4 are missing the first 4 ranks will be +will be 4,4,4,4,4,4,4 and if 4 are missing the first 4 ranks will be 2.5,2.5,2.5,2.5. "NA.hi" indicates that all missing values should be converted to a new value that has a meaningful magnitude that is higher(less negative or more positive)than the highest non-missing value of V2BR in any @@ -199,7 +201,7 @@

Arguments

2.5,2.5,2.5,2.5. The default value of the "NA.manage" argument is "NA.delete"

-
rm.residual.objects
+
rm.residual.objects

logical value. Default = TRUE: at the beginning and end of each run of ds.ranksSecure delete all extraneous objects that are otherwise left behind. These are not usually needed, but could be of value @@ -207,12 +209,12 @@

Arguments

the residual objects

-
monitor.progress
+
monitor.progress

logical value. Default = FALSE. If TRUE, function outputs information about its progress.

-
datasources
+
datasources

specifies the particular opal object(s) to use. If the <datasources> argument is not specified (NULL) the default set of opals will be used. If <datasources> is specified, it should be set without @@ -225,9 +227,7 @@

Arguments

Value

- - -

the data frame objects specified by the arguments output.ranks.df +

the data frame objects specified by the arguments output.ranks.df and summary.output.ranks.df. These are written to the serverside in each study. Provided the sort order is consistent these data frames can be cbinded to any other data frame, matrix or tibble object containing V2BR or to the @@ -262,7 +262,7 @@

Details

global quantiles are, in general, different to the mean or median of the equivalent quantiles calculated independently in each data source separately. For more details about the cluster of functions that collectively -enable secure global ranking and estimation of global quantiles see the +enable secure global ranking and estimation of global quantiles see the associated document entitled "secure.global.ranking.docx".

@@ -278,19 +278,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.rbind.html b/docs/reference/ds.rbind.html index e3f873db7..6a117c6da 100644 --- a/docs/reference/ds.rbind.html +++ b/docs/reference/ds.rbind.html @@ -1,10 +1,10 @@ -Combines R objects by rows in the server-side — ds.rbind • dsBaseClientCombines R objects by rows in the server-side — ds.rbind • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,69 +46,67 @@

Combines R objects by rows in the server-side

-
ds.rbind(
-  x = NULL,
-  DataSHIELD.checks = FALSE,
-  force.colnames = NULL,
-  newobj = NULL,
-  datasources = NULL,
-  notify.of.progress = FALSE
-)
+
ds.rbind(
+  x = NULL,
+  DataSHIELD.checks = FALSE,
+  force.colnames = NULL,
+  newobj = NULL,
+  datasources = NULL,
+  notify.of.progress = FALSE
+)

Arguments

-
x
+ + +
x

a character vector with the name of the objects to be combined.

-
DataSHIELD.checks
+
DataSHIELD.checks

logical, if TRUE checks that all input objects exist and are of an appropriate class.

-
force.colnames
-

can be NULL or a vector of characters that +

force.colnames
+

can be NULL or a vector of characters that specifies column names of the output object.

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Defaults rbind.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

-
notify.of.progress
+
notify.of.progress

specifies if console output should be produced to indicate progress. Default FALSE.

Value

- - -

ds.rbind returns a matrix combining the rows of the +

ds.rbind returns a matrix combining the rows of the R objects specified in the function -which is written to the server-side. -It also returns two messages to the client-side with the name of newobj

- - -

that has been created in each data source and DataSHIELD.checks result.

+which is written to the server-side. +It also returns two messages to the client-side with the name of newobj +that has been created in each data source and DataSHIELD.checks result.

Details

A sequence of vector, matrix or data-frame arguments is combined by rows to produce a matrix on the server-side.

-

In DataSHIELD.checks the checks are relatively slow. +

In DataSHIELD.checks the checks are relatively slow. Default DataSHIELD.checks value is FALSE.

If force.colnames is NULL column names are inferred from the names or column names of the first object specified in the x argument. -The vector of column names must have the same number of elements as +The vector of column names must have the same number of elements as the columns in the output object.

Server functions called: rbindDS.

@@ -119,50 +117,50 @@

Author

Examples

-

-if (FALSE) {
-  ## Version 6, for version 5 see the Wiki 
-  
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  #Combining R objects by rows 
-   
-                   
-  ds.rbind(x = "D", #data frames in the server-side to be conbined 
-                    #(see above the connection to the Opal servers) 
-           DataSHIELD.checks = FALSE,
-           force.colnames = NULL,
-           newobj = "D.rbind", # name for the output object that is stored in the data servers
-           datasources = connections, # All Opal servers are used 
-                                      #(see above the connection to the Opal servers)
-           notify.of.progress = FALSE)
-           
-  # Clear the Datashield R sessions and logout  
-  datashield.logout(connections) 
-  }
-
+    

+if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki 
+  
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  #Combining R objects by rows 
+   
+                   
+  ds.rbind(x = "D", #data frames in the server-side to be combined
+                    #(see above the connection to the Opal servers) 
+           DataSHIELD.checks = FALSE,
+           force.colnames = NULL,
+           newobj = "D.rbind", # name for the output object that is stored in the data servers
+           datasources = connections, # All Opal servers are used 
+                                      #(see above the connection to the Opal servers)
+           notify.of.progress = FALSE)
+           
+  # Clear the Datashield R sessions and logout  
+  datashield.logout(connections) 
+  } # }
+
 
@@ -173,19 +171,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.reShape.html b/docs/reference/ds.reShape.html index 0f6fc6d31..069891e61 100644 --- a/docs/reference/ds.reShape.html +++ b/docs/reference/ds.reShape.html @@ -1,10 +1,10 @@ -Reshapes server-side grouped data — ds.reShape • dsBaseClientReshapes server-side grouped data — ds.reShape • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,58 +46,60 @@

Reshapes server-side grouped data

-
ds.reShape(
-  data.name = NULL,
-  varying = NULL,
-  v.names = NULL,
-  timevar.name = "time",
-  idvar.name = "id",
-  drop = NULL,
-  direction = NULL,
-  sep = ".",
-  newobj = "newObject",
-  datasources = NULL
-)
+
ds.reShape(
+  data.name = NULL,
+  varying = NULL,
+  v.names = NULL,
+  timevar.name = "time",
+  idvar.name = "id",
+  drop = NULL,
+  direction = NULL,
+  sep = ".",
+  newobj = "newObject",
+  datasources = NULL
+)

Arguments

-
data.name
+ + +
data.name

a character string specifying the name of the data frame to be reshaped.

-
varying
+
varying

names of sets of variables in the wide format that correspond to single variables in 'long' format.

-
v.names
+
v.names

the names of variables in the 'long' format that correspond to multiple variables in the 'wide' format.

-
timevar.name
+
timevar.name

the variable in 'long' format that differentiates multiple -records from the same group or individual. +records from the same group or individual. If more than one record matches, the first will be taken.

-
idvar.name
+
idvar.name

names of one or more variables in 'long' format that identify multiple records from the same group/individual. These variables may also be present in 'wide' format.

-
drop
+
drop

a vector of names of variables to drop before reshaping. This can simplify the resultant output.

-
direction
+
direction

a character string that partially matched to either 'wide' to reshape from 'long' to 'wide' format, or 'long' to reshape from 'wide' to 'long' format.

-
sep
+
sep

a character vector of length 1, indicating a separating character in the variable names in the 'wide' format. This is used for creating good v.names and times arguments based on the names in the varying argument. This is also used to create variable names @@ -105,24 +107,22 @@

Arguments

to 'wide' format.

-
newobj
+
newobj

a character string that provides the name for the output object -that is stored on the data servers. +that is stored on the data servers. Default reshape.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.reShape returns to the server-side a reshaped data frame -converted from 'long' to 'wide' format or from 'wide' to long' format. +

ds.reShape returns to the server-side a reshaped data frame +converted from 'long' to 'wide' format or from 'wide' to long' format. Also, two validity messages are returned to the client-side indicating whether the new object has been created in each data source and if so whether it is in a valid form.

@@ -133,7 +133,7 @@

Details

It reshapes a data frame containing longitudinal or otherwise grouped data between 'wide' format with repeated measurements in separate columns of the same record and 'long' format with the repeated -measurements in separate records. The reshaping can be in either direction. +measurements in separate records. The reshaping can be in either direction. Server function called: reShapeDS

@@ -143,47 +143,47 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see Wiki
-  # Connecting to the Opal servers
-  
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-  
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Reshape server-side grouped data
-  
-  ds.reShape(data.name = "D", 
-             v.names = "age.60", 
-             timevar.name = "time.id",
-             idvar.name = "id",
-             direction = "wide",
-             newobj = "reshape1_obj",
-             datasources = connections)
-  
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see Wiki
+  # Connecting to the Opal servers
+  
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+  
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "SURVIVAL.EXPAND_NO_MISSING3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Reshape server-side grouped data
+  
+  ds.reShape(data.name = "D", 
+             v.names = "age.60", 
+             timevar.name = "time.id",
+             idvar.name = "id",
+             direction = "wide",
+             newobj = "reshape1_obj",
+             datasources = connections)
+  
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+} # }
+
 
@@ -194,19 +194,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.recodeLevels.html b/docs/reference/ds.recodeLevels.html index 324ef0a19..1272061bb 100644 --- a/docs/reference/ds.recodeLevels.html +++ b/docs/reference/ds.recodeLevels.html @@ -1,9 +1,9 @@ -Recodes the levels of a server-side factor vector — ds.recodeLevels • dsBaseClientRecodes the levels of a server-side factor vector — ds.recodeLevels • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,41 +44,41 @@

Recodes the levels of a server-side factor vector

-
ds.recodeLevels(
-  x = NULL,
-  newCategories = NULL,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.recodeLevels(
+  x = NULL,
+  newCategories = NULL,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character string specifying the name of a factor variable.

-
newCategories
+
newCategories

a character vector specifying the new levels. Its length must be equal or greater to the current number of levels.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default recodelevels.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.recodeLevels returns to the server-side a variable of type factor +

ds.recodeLevels returns to the server-side a variable of type factor with the replaces levels.

@@ -95,45 +95,45 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Recode the levels of a factor variable
-  
-  ds.recodeLevels(x = "D$PM_BMI_CATEGORICAL",
-                  newCategories = c("1","2","3"),
-                  newobj = "BMI_CAT",
-                  datasources = connections)
-                 
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Recode the levels of a factor variable
+  
+  ds.recodeLevels(x = "D$PM_BMI_CATEGORICAL",
+                  newCategories = c("1","2","3"),
+                  newobj = "BMI_CAT",
+                  datasources = connections)
+                 
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
+
 
@@ -144,19 +144,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.recodeValues.html b/docs/reference/ds.recodeValues.html index 14df64fbe..ef260dde7 100644 --- a/docs/reference/ds.recodeValues.html +++ b/docs/reference/ds.recodeValues.html @@ -1,10 +1,10 @@ -Recodes server-side variable values — ds.recodeValues • dsBaseClientRecodes server-side variable values — ds.recodeValues • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,62 +46,62 @@

Recodes server-side variable values

-
ds.recodeValues(
-  var.name = NULL,
-  values2replace.vector = NULL,
-  new.values.vector = NULL,
-  missing = NULL,
-  newobj = NULL,
-  datasources = NULL,
-  notify.of.progress = FALSE
-)
+
ds.recodeValues(
+  var.name = NULL,
+  values2replace.vector = NULL,
+  new.values.vector = NULL,
+  missing = NULL,
+  newobj = NULL,
+  datasources = NULL,
+  notify.of.progress = FALSE
+)

Arguments

-
var.name
+ + +
var.name

a character string providing the name of the variable to be recoded.

-
values2replace.vector
+
values2replace.vector

a numeric or character vector specifying the values in the variable var.name to be replaced.

-
new.values.vector
+
new.values.vector

a numeric or character vector specifying the new values.

-
missing
-

If supplied, any missing values in var.name will be replaced by this value. -Must be of length 1. If the analyst want to recode only missing values then it should also -specify an identical vector of values in both arguments values2replace.vector and +

missing
+

If supplied, any missing values in var.name will be replaced by this value. +Must be of length 1. If the analyst want to recode only missing values then it should also +specify an identical vector of values in both arguments values2replace.vector and new.values.vector. Otherwise please look the ds.replaceNA function.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default recodevalues.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

-
notify.of.progress
+
notify.of.progress

logical. If TRUE console output should be produced to indicate progress. Default FALSE.

Value

- - -

Assigns to each server a new variable with the recoded values. -Also, two validity messages are returned to the client-side +

Assigns to each server a new variable with the recoded values. +Also, two validity messages are returned to the client-side indicating whether the new object has been created in each data source and if so whether it is in a valid form.

@@ -110,7 +110,7 @@

Details

This function recodes individual values with new individual values. This can apply to numeric and character values, factor levels and NAs. One particular use of ds.recodeValues is to convert NAs to an explicit value. This value is specified -in the argument missing. If tthe user want to recode only missing values, then it +in the argument missing. If the user want to recode only missing values, then it should also specify an identical vector of values in both arguments values2replace.vector and new.values.vector (see Example 2 below). Server function called: recodeValuesDS

@@ -122,7 +122,7 @@

Author

Examples

- +
- - + + diff --git a/docs/reference/ds.rep.html b/docs/reference/ds.rep.html index e1b6ea382..71db19a6d 100644 --- a/docs/reference/ds.rep.html +++ b/docs/reference/ds.rep.html @@ -1,10 +1,10 @@ -Creates a repetitive sequence in the server-side — ds.rep • dsBaseClientCreates a repetitive sequence in the server-side — ds.rep • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,81 +46,81 @@

Creates a repetitive sequence in the server-side

-
ds.rep(
-  x1 = NULL,
-  times = NA,
-  length.out = NA,
-  each = 1,
-  source.x1 = "clientside",
-  source.times = NULL,
-  source.length.out = NULL,
-  source.each = NULL,
-  x1.includes.characters = FALSE,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.rep(
+  x1 = NULL,
+  times = NA,
+  length.out = NA,
+  each = 1,
+  source.x1 = "clientside",
+  source.times = NULL,
+  source.length.out = NULL,
+  source.each = NULL,
+  x1.includes.characters = FALSE,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
x1
+ + +
x1

an scalar number, vector or list.

-
times
+
times

an integer from clientside or a serverside integer or vector.

-
length.out
+
length.out

a clientside integer or a serverside integer or vector.

-
each
+
each

a clientside or serverside integer.

-
source.x1
-

the source x1 argument. It can be "clientside" or "c" +

source.x1
+

the source x1 argument. It can be "clientside" or "c" and serverside or "s".

-
source.times
+
source.times

see source.x1

-
source.length.out
+
source.length.out

see source.x1

-
source.each
+
source.each

see source.x1

-
x1.includes.characters
-

Boolean parameter which specifies if +

x1.includes.characters
+

Boolean parameter which specifies if the x1 is a character.

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default seq.vect.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.rep returns in the server-side a vector with the specified repetitive sequence. +

ds.rep returns in the server-side a vector with the specified repetitive sequence. Also, two validity messages are returned to the client-side - the name of newobj that has been created + the name of newobj that has been created in each data source and if it is in a valid form.

@@ -137,65 +137,65 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-    require('DSI')
-    require('DSOpal')
-    require('dsBaseClient')
-
-    builder <- DSI::newDSLoginBuilder()
-    builder$append(server = "study1", 
-                   url = "http://192.168.56.100:8080/", 
-                   user = "administrator", password = "datashield_test&", 
-                   table = "CNSIM.CNSIM1", driver = "OpalDriver")
-    builder$append(server = "study2", 
-                   url = "http://192.168.56.100:8080/", 
-                   user = "administrator", password = "datashield_test&", 
-                   table = "CNSIM.CNSIM2", driver = "OpalDriver")
-    builder$append(server = "study3",
-                   url = "http://192.168.56.100:8080/", 
-                   user = "administrator", password = "datashield_test&", 
-                   table = "CNSIM.CNSIM3", driver = "OpalDriver")
-    logindata <- builder$build()
-
-  # Log onto the remote Opal training servers
-    connections <- DSI::datashield.login(logins = logindata, 
-                                         assign = TRUE, 
-                                         symbol = "D") 
-
-  # Creating a repetitive sequence  
-              
-     ds.rep(x1 = 4,
-            times = 6,
-            length.out = NA,
-            each = 1,
-            source.x1 = "clientside",
-            source.times = "c",
-            source.length.out = NULL,
-            source.each = "c",
-            x1.includes.characters = FALSE,
-            newobj = "rep.seq",
-            datasources = connections)
-       
-     ds.rep(x1 = "lung",
-            times = 6,
-            length.out = 7,
-            each = 1,
-            source.x1 = "clientside",
-            source.times = "c",
-            source.length.out = "c",
-            source.each = "c",
-            x1.includes.characters = TRUE,
-            newobj = "rep.seq",
-            datasources = connections)
-
-  # Clear the Datashield R sessions and logout  
-  datashield.logout(connections) 
-} 
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+    require('DSI')
+    require('DSOpal')
+    require('dsBaseClient')
+
+    builder <- DSI::newDSLoginBuilder()
+    builder$append(server = "study1", 
+                   url = "http://192.168.56.100:8080/", 
+                   user = "administrator", password = "datashield_test&", 
+                   table = "CNSIM.CNSIM1", driver = "OpalDriver")
+    builder$append(server = "study2", 
+                   url = "http://192.168.56.100:8080/", 
+                   user = "administrator", password = "datashield_test&", 
+                   table = "CNSIM.CNSIM2", driver = "OpalDriver")
+    builder$append(server = "study3",
+                   url = "http://192.168.56.100:8080/", 
+                   user = "administrator", password = "datashield_test&", 
+                   table = "CNSIM.CNSIM3", driver = "OpalDriver")
+    logindata <- builder$build()
+
+  # Log onto the remote Opal training servers
+    connections <- DSI::datashield.login(logins = logindata, 
+                                         assign = TRUE, 
+                                         symbol = "D") 
+
+  # Creating a repetitive sequence  
+              
+     ds.rep(x1 = 4,
+            times = 6,
+            length.out = NA,
+            each = 1,
+            source.x1 = "clientside",
+            source.times = "c",
+            source.length.out = NULL,
+            source.each = "c",
+            x1.includes.characters = FALSE,
+            newobj = "rep.seq",
+            datasources = connections)
+       
+     ds.rep(x1 = "lung",
+            times = 6,
+            length.out = 7,
+            each = 1,
+            source.x1 = "clientside",
+            source.times = "c",
+            source.length.out = "c",
+            source.each = "c",
+            x1.includes.characters = TRUE,
+            newobj = "rep.seq",
+            datasources = connections)
+
+  # Clear the Datashield R sessions and logout  
+  datashield.logout(connections) 
+} # } 
+
 
@@ -206,19 +206,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.replaceNA.html b/docs/reference/ds.replaceNA.html index feefcf7fd..8fe3ba9a2 100644 --- a/docs/reference/ds.replaceNA.html +++ b/docs/reference/ds.replaceNA.html @@ -1,10 +1,10 @@ -Replaces the missing values in a server-side vector — ds.replaceNA • dsBaseClientReplaces the missing values in a server-side vector — ds.replaceNA • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,36 +46,36 @@

Replaces the missing values in a server-side vector

-
ds.replaceNA(x = NULL, forNA = NULL, newobj = NULL, datasources = NULL)
+
ds.replaceNA(x = NULL, forNA = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the name of the vector.

-
forNA
-

a list or a vector that contains the replacement value(s), for each study. +

forNA
+

a list or a vector that contains the replacement value(s), for each study. The length of the list or vector must be equal to the number of servers (studies).

-
newobj
+
newobj

a character string that provides the name for the output object that is stored on the data servers. Default replacena.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.replaceNA returns to the server-side a new vector or table structure +

ds.replaceNA returns to the server-side a new vector or table structure with the missing values replaced by the specified values. The class of the vector is the same as the initial vector.

@@ -99,67 +99,67 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-    require('DSI')
-    require('DSOpal')
-    require('dsBaseClient')
-
-    builder <- DSI::newDSLoginBuilder()
-    builder$append(server = "study1", 
-                   url = "http://192.168.56.100:8080/", 
-                   user = "administrator", password = "datashield_test&", 
-                   table = "CNSIM.CNSIM1", driver = "OpalDriver")
-    builder$append(server = "study2", 
-                   url = "http://192.168.56.100:8080/", 
-                   user = "administrator", password = "datashield_test&", 
-                   table = "CNSIM.CNSIM2", driver = "OpalDriver")
-    builder$append(server = "study3",
-                   url = "http://192.168.56.100:8080/", 
-                   user = "administrator", password = "datashield_test&", 
-                   table = "CNSIM.CNSIM3", driver = "OpalDriver")
-    logindata <- builder$build()
-
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  # Example 1: Replace missing values in variable 'LAB_HDL' by the mean value 
-  # in each study
-  
-  # Get the mean value of  'LAB_HDL' for each study
-  mean <- ds.mean(x = "D$LAB_HDL",
-                  type = "split",
-                  datasources = connections)
-
-  # Replace the missing values using the mean for each study
-  ds.replaceNA(x = "D$LAB_HDL",
-               forNA = list(mean[[1]][1], mean[[1]][2], mean[[1]][3]),
-               newobj = "HDL.noNA",
-               datasources = connections)
-               
-  # Example 2: Replace missing values in categorical variable 'PM_BMI_CATEGORICAL'
-  # with 999s
- 
-  # First check how many NAs there are in 'PM_BMI_CATEGORICAL' in each study
-  ds.table(rvar = "D$PM_BMI_CATEGORICAL", 
-          useNA = "always")   
-          
-  # Replace the missing values with 999s
-  ds.replaceNA(x = "D$PM_BMI_CATEGORICAL", 
-               forNA = c(999,999,999), 
-               newobj = "bmi999")
-               
-  # Check if the NAs have been replaced correctly
-  ds.table(rvar = "bmi999", 
-          useNA = "always")   
- 
-  # Clear the Datashield R sessions and logout  
-  datashield.logout(connections) 
-} 
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+    require('DSI')
+    require('DSOpal')
+    require('dsBaseClient')
+
+    builder <- DSI::newDSLoginBuilder()
+    builder$append(server = "study1", 
+                   url = "http://192.168.56.100:8080/", 
+                   user = "administrator", password = "datashield_test&", 
+                   table = "CNSIM.CNSIM1", driver = "OpalDriver")
+    builder$append(server = "study2", 
+                   url = "http://192.168.56.100:8080/", 
+                   user = "administrator", password = "datashield_test&", 
+                   table = "CNSIM.CNSIM2", driver = "OpalDriver")
+    builder$append(server = "study3",
+                   url = "http://192.168.56.100:8080/", 
+                   user = "administrator", password = "datashield_test&", 
+                   table = "CNSIM.CNSIM3", driver = "OpalDriver")
+    logindata <- builder$build()
+
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  # Example 1: Replace missing values in variable 'LAB_HDL' by the mean value 
+  # in each study
+  
+  # Get the mean value of  'LAB_HDL' for each study
+  mean <- ds.mean(x = "D$LAB_HDL",
+                  type = "split",
+                  datasources = connections)
+
+  # Replace the missing values using the mean for each study
+  ds.replaceNA(x = "D$LAB_HDL",
+               forNA = list(mean[[1]][1], mean[[1]][2], mean[[1]][3]),
+               newobj = "HDL.noNA",
+               datasources = connections)
+               
+  # Example 2: Replace missing values in categorical variable 'PM_BMI_CATEGORICAL'
+  # with 999s
+ 
+  # First check how many NAs there are in 'PM_BMI_CATEGORICAL' in each study
+  ds.table(rvar = "D$PM_BMI_CATEGORICAL", 
+          useNA = "always")   
+          
+  # Replace the missing values with 999s
+  ds.replaceNA(x = "D$PM_BMI_CATEGORICAL", 
+               forNA = c(999,999,999), 
+               newobj = "bmi999")
+               
+  # Check if the NAs have been replaced correctly
+  ds.table(rvar = "bmi999", 
+          useNA = "always")   
+ 
+  # Clear the Datashield R sessions and logout  
+  datashield.logout(connections) 
+} # } 
+
 
@@ -170,19 +170,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.rm.html b/docs/reference/ds.rm.html index 7f6873dd1..7403ff28c 100644 --- a/docs/reference/ds.rm.html +++ b/docs/reference/ds.rm.html @@ -1,9 +1,9 @@ -Deletes server-side R objects — ds.rm • dsBaseClientDeletes server-side R objects — ds.rm • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,33 +44,33 @@

Deletes server-side R objects

-
ds.rm(x.names = NULL, datasources = NULL)
+
ds.rm(x.names = NULL, datasources = NULL)

Arguments

-
x.names
+ + +
x.names

a character string specifying the objects to be deleted.

-
datasources
-

a list of DSConnection-class objects obtained after login. -If the datasources argument is not specified +

datasources
+

a list of DSConnection-class objects obtained after login. +If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

The ds.rm function deletes from the server-side -the specified object. If this +

The ds.rm function deletes from the server-side +the specified object. If this is successful the message "Object(s) '<x.names>' was deleted." is returned to the client-side.

Details

-

This function is similar to the native R function +

This function is similar to the native R function rm().

The fact that it is an aggregate function may be surprising because it modifies an object @@ -89,47 +89,47 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Create an object in the server-side
-  
-  ds.assign(toAssign = "D$LAB_TSC",
-            newobj = "labtsc",
-            datasources = connections)
-  
-  #Delete "labtsc" object from the server-side
-  
-  ds.rm(x.names = "labtsc",
-        datasources = connections)
-             
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Create an object in the server-side
+  
+  ds.assign(toAssign = "D$LAB_TSC",
+            newobj = "labtsc",
+            datasources = connections)
+  
+  #Delete "labtsc" object from the server-side
+  
+  ds.rm(x.names = "labtsc",
+        datasources = connections)
+             
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
 
@@ -140,19 +140,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.rowColCalc.html b/docs/reference/ds.rowColCalc.html index 1f5dfc7eb..bfbd73861 100644 --- a/docs/reference/ds.rowColCalc.html +++ b/docs/reference/ds.rowColCalc.html @@ -1,10 +1,10 @@ -Computes rows and columns sums and means in the server-side — ds.rowColCalc • dsBaseClientComputes rows and columns sums and means in the server-side — ds.rowColCalc • dsBaseClient - +
@@ -30,52 +30,52 @@
- +
-

Computes sums and means of rows or columns +

Computes sums and means of rows or columns of a numeric matrix or data frame on the server-side.

-
ds.rowColCalc(x = NULL, operation = NULL, newobj = NULL, datasources = NULL)
+
ds.rowColCalc(x = NULL, operation = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the name of a matrix or a data frame.

-
operation
+
operation

a character string that indicates the operation to carry out: "rowSums", "colSums", "rowMeans" or "colMeans".

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default rowcolcalc.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.rowColCalc returns to the server-side rows and columns sums and means.

+

ds.rowColCalc returns to the server-side rows and columns sums and means.

Details

@@ -92,47 +92,47 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki 
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  myvar <- list("LAB_TSC","LAB_HDL")
-   
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, 
-  variables = myvar, symbol = "D") 
-
-  
-  #Calculate the colSums
-  
-  ds.rowColCalc(x = "D",
-                operation = "colSums", 
-                newobj = "D.rowSums", 
-                datasources = connections)
-                
-  #Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-
-}
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki 
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  myvar <- list("LAB_TSC","LAB_HDL")
+   
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, 
+  variables = myvar, symbol = "D") 
+
+  
+  #Calculate the colSums
+  
+  ds.rowColCalc(x = "D",
+                operation = "colSums", 
+                newobj = "D.rowSums", 
+                datasources = connections)
+                
+  #Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+
+} # }
 
@@ -143,19 +143,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.sample.html b/docs/reference/ds.sample.html index 31a865d4d..bd9e16792 100644 --- a/docs/reference/ds.sample.html +++ b/docs/reference/ds.sample.html @@ -1,11 +1,11 @@ -Performs random sampling and permuting of vectors, dataframes and matrices — ds.sample • dsBaseClientPerforms random sampling and permuting of vectors, dataframes and matrices — ds.sample • dsBaseClient - +
@@ -31,13 +31,13 @@
- +
@@ -48,36 +48,38 @@

Performs random sampling and permuting of vectors, dataframes and matrices
-
ds.sample(
-  x = NULL,
-  size = NULL,
-  seed.as.integer = NULL,
-  replace = FALSE,
-  prob = NULL,
-  newobj = NULL,
-  datasources = NULL,
-  notify.of.progress = FALSE
-)
+
ds.sample(
+  x = NULL,
+  size = NULL,
+  seed.as.integer = NULL,
+  replace = FALSE,
+  prob = NULL,
+  newobj = NULL,
+  datasources = NULL,
+  notify.of.progress = FALSE
+)

Arguments

-
x
+ + +
x

Either a character string providing the name for the serverside vector, matrix or data.frame to be sampled or permuted, or an integer/numeric scalar (e.g. 923) indicating that one should create a new vector on the serverside that is a randomly permuted sample of the vector 1:923, or (if [replace] = FALSE, a full random permutation of that same vector. For further details of using ds.sample with x set as an integer/numeric please see help for -the sample function in native R. But if x is set as a character string +the sample function in native R. But if x is set as a character string denoting a vector, matrix or data.frame on the serverside, please note -that although ds.sample effectively calls sample on the serverside -it behaves somewhat differently to sample - for the reasons identified -at the top of 'details' and so help for sample should be used as a guide +that although ds.sample effectively calls sample on the serverside +it behaves somewhat differently to sample - for the reasons identified +at the top of 'details' and so help for sample should be used as a guide only.

-
size
+
size

a numeric/integer scalar indicating the size of the sample to be drawn. If the [x] argument is a vector, matrix or data.frame on the serverside and if the [size] argument is set either to 0 or to the length of @@ -90,7 +92,7 @@

Arguments

this is violated an error message will be returned.

-
seed.as.integer
+
seed.as.integer

this is precisely equivalent to the [seed.as.integer] arguments for the pseudo-random number generating functions (e.g. also see help for ds.rBinom, ds.rNorm, ds.rPois and ds.rUnif). @@ -113,31 +115,31 @@

Arguments

each source

-
replace
+
replace

a Boolean indicator (TRUE or FALSE) specifying whether the sample should be drawn with or without replacement. Default is FALSE so the sample is drawn without replacement. For further details see -help for sample in native R.

+help for sample in native R.

-
prob
+
prob

a character string containing the name of a numeric vector of probability weights on the serverside that is associated with each of the elements of the vector to be sampled enabling the drawing of a sample with some elements given higher probability of being drawn than others. -For further details see help for sample in native R.

+For further details see help for sample in native R.

-
newobj
+
newobj

This a character string providing a name for the output data.frame which defaults to 'newobj.sample' if no name is specified.

-
datasources
+
datasources

specifies the particular opal object(s) to use. If the <datasources> argument is not specified the default set of opals will be used. The default opals are called default.opals and the default can be set using the function -ds.setDefaultOpals. If the <datasources> is to be specified, it should be set without +ds.setDefaultOpals. If the <datasources> is to be specified, it should be set without inverted commas: e.g. datasources=opals.em or datasources=default.opals. If you wish to apply the function solely to e.g. the second opal server in a set of three, the argument can be specified as: e.g. datasources=opals.em[2]. @@ -145,16 +147,14 @@

Arguments

e.g. datasources=opals.em[c(1,3)]

-
notify.of.progress
+
notify.of.progress

specifies if console output should be produce to indicate progress. The default value for notify.of.progress is FALSE.

Value

- - -

the object specified by the <newobj> argument (or default name +

the object specified by the <newobj> argument (or default name 'newobj.sample') which is written to the serverside. In addition, two validity messages are returned indicating whether <newobj> has been created in each data source and if so whether @@ -167,8 +167,8 @@

Value

Details

Clientside function ds.sample calls serverside -assign function sampleDS. Based on the native R function sample() but deals -slightly differently with data.frames and matrices. Specifically the sample() +assign function sampleDS. Based on the native R function sample() but deals +slightly differently with data.frames and matrices. Specifically the sample() function in R identifies the length of an object and then samples n components of that length. But length(data.frame) in native R returns the number of columns not the number of rows. So if you have a data.frame with 71 rows and 10 columns, @@ -179,7 +179,7 @@

Details

or a matrix it is first coerced to a data.frame on the serverside and so is dealt with in the same way (i.e. random selection of 10 rows). If x is an integer not expressed as a character string, it is dealt with in exactly the same way -as in native R. That is, if x = 923 and size=117, DataSHIELD will draw a +as in native R. That is, if x = 923 and size=117, DataSHIELD will draw a random sample in random order of size 117 from the vector 1:923 (i.e. 1, 2, ... ,923) with or without replacement depending whether [replace] is TRUE or FALSE. If the [x] argument is numeric (e.g. 923) and size is either undefined @@ -195,9 +195,9 @@

Details

1 whenever a row enters the sample and as a QA test, all values in that column in the output object should be 1. 'ID.seq' is a sequential numeric ID appended to the right of the object to be sampled during the running of ds.sample that runs from -1 to the length of the object and will be appended even if there is already +1 to the length of the object and will be appended even if there is already an equivalent sequential ID in the object. The output object is stored in -the same original order as it was before sampling, and so if the first +the same original order as it was before sampling, and so if the first four elements of 'ID.seq' are 3,4, 6, 15 ... then it means that rows 1 and 2 were not included in the random sample, but rows 3, 4 were. Row 5 was not included, 6 was included and rows 7-14 were not etc. The 'sampling.order' vector is @@ -208,7 +208,7 @@

Details

case with the default name 'newobj.sample) using ds.dataFrameSort with the 'sampling.order' vector as the sort key, the output object is rendered equivalent to PRWa but with the rows randomly permuted (so the column reflecting -the vector 'sample.order' now runs from 1:length of obejct, while the +the vector 'sample.order' now runs from 1:length of object, while the column reflecting 'ID.seq' denoting the original order is now randomly ordered. If you need to return to the original order you can simply us ds.dataFrameSort again using the column reflecting 'ID.seq' as the sort key: @@ -234,19 +234,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.scatterPlot.html b/docs/reference/ds.scatterPlot.html index 0a6e986c9..ae6518636 100644 --- a/docs/reference/ds.scatterPlot.html +++ b/docs/reference/ds.scatterPlot.html @@ -1,10 +1,10 @@ -Generates non-disclosive scatter plots — ds.scatterPlot • dsBaseClientGenerates non-disclosive scatter plots — ds.scatterPlot • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,72 +46,72 @@

Generates non-disclosive scatter plots

-
ds.scatterPlot(
-  x = NULL,
-  y = NULL,
-  method = "deterministic",
-  k = 3,
-  noise = 0.25,
-  type = "split",
-  return.coords = FALSE,
-  datasources = NULL
-)
+
ds.scatterPlot(
+  x = NULL,
+  y = NULL,
+  method = "deterministic",
+  k = 3,
+  noise = 0.25,
+  type = "split",
+  return.coords = FALSE,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character string specifying the name of the explanatory variable, a numeric vector.

-
y
+
y

a character string specifying the name of the response variable, a numeric vector.

-
method
-

a character string that specifies the +

method
+

a character string that specifies the method that is used to generated non-disclosive -coordinates to be displayed in a scatter plot. +coordinates to be displayed in a scatter plot. This argument can be set as 'deteministic' or 'probabilistic'. -Default 'deteministic'. +Default 'deteministic'. For more information see Details.

-
k
-

the number of the nearest neighbors for which their centroid is calculated. -Default 3. +

k
+

the number of the nearest neighbours for which their centroid is calculated. +Default 3. For more information see Details.

-
noise
+
noise

the percentage of the initial variance that is used as the variance of the embedded noise if the argument method is set to 'probabilistic'. For more information see Details.

-
type
+
type

a character that represents the type of graph to display. -This can be set as 'combine' or 'split'. -Default 'split'. +This can be set as 'combine' or 'split'. +Default 'split'. For more information see Details.

-
return.coords
-

a logical. If TRUE the coordinates of the anonymised data points are return +

return.coords
+

a logical. If TRUE the coordinates of the anonymised data points are return to the Console. Default value is FALSE.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.scatterPlot returns to the client-side one or more scatter +

ds.scatterPlot returns to the client-side one or more scatter plots depending on the argument type.

@@ -119,24 +119,24 @@

Details

As the generation of a scatter plot from original data is disclosive and is not permitted in DataSHIELD, this function allows the user to plot non-disclosive scatter plots.

If the argument method is set to 'deterministic', the server-side function searches -for the k-1 nearest neighbors of each single data point and calculates the centroid -of such k points. +for the k-1 nearest neighbours of each single data point and calculates the centroid +of such k points. The proximity is defined by the minimum Euclidean distances of z-score transformed data.

When the coordinates of all centroids are estimated the function applies scaling to expand the centroids back to the dispersion of the original data. The scaling is achieved by multiplying the centroids with a scaling factor that is equal to the ratio between the standard deviation of the original variable and the standard deviation of the calculated centroids. The coordinates of the scaled centroids are then returned to the client-side.

-

The value of k is specified by the user. +

The value of k is specified by the user. The suggested and default value is equal to 3 which is also the suggested minimum threshold that is used to prevent disclosure which is specified in the -protection filter nfilter.kNN. When the value of k increases, +protection filter nfilter.kNN. When the value of k increases, the disclosure risk decreases but the utility loss increases. The value of k is used only -if the argument method is set to 'deterministic'. +if the argument method is set to 'deterministic'. Any value of k is ignored if the argument method is set to 'probabilistic'.

-

If the argument method is set to 'probabilistic', +

If the argument method is set to 'probabilistic', the server-side function generates a random normal noise of zero mean and variance equal to 10% of the variance of each x and y variable. The noise is added to each x and y variable and the disturbed by the addition of @@ -160,58 +160,58 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki 
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  #Example 1: generate a scatter plot for each study separately
-  #Using the default deterministic method and k = 10
-  
-  ds.scatterPlot(x = "D$PM_BMI_CONTINUOUS",
-                 y = "D$LAB_GLUC_ADJUSTED",
-                 method = "deterministic",
-                 k = 10,
-                 type = "split",
-                 datasources = connections)
-
-  #Example 2: generate a combined scatter plot with the probabilistic method
-  #and noise of variance 0.5% of the variable's variance, and display the coordinates
-  # of the anonymised data points to the Console
-  
-  ds.scatterPlot(x = "D$PM_BMI_CONTINUOUS",
-                 y = "D$LAB_GLUC_ADJUSTED",
-                 method = "probabilistic",
-                 noise = 0.5,
-                 type = "combine",
-                 datasources = connections)
-                   
-  #Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki 
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  #Example 1: generate a scatter plot for each study separately
+  #Using the default deterministic method and k = 10
+  
+  ds.scatterPlot(x = "D$PM_BMI_CONTINUOUS",
+                 y = "D$LAB_GLUC_ADJUSTED",
+                 method = "deterministic",
+                 k = 10,
+                 type = "split",
+                 datasources = connections)
+
+  #Example 2: generate a combined scatter plot with the probabilistic method
+  #and noise of variance 0.5% of the variable's variance, and display the coordinates
+  # of the anonymised data points to the Console
+  
+  ds.scatterPlot(x = "D$PM_BMI_CONTINUOUS",
+                 y = "D$LAB_GLUC_ADJUSTED",
+                 method = "probabilistic",
+                 noise = 0.5,
+                 type = "combine",
+                 datasources = connections)
+                   
+  #Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+
+} # }
+
 
@@ -222,19 +222,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.seq.html b/docs/reference/ds.seq.html index 7fe5b8424..08b34ac85 100644 --- a/docs/reference/ds.seq.html +++ b/docs/reference/ds.seq.html @@ -1,10 +1,10 @@ -Generates a sequence in the server-side — ds.seq • dsBaseClientGenerates a sequence in the server-side — ds.seq • dsBaseClient - +
@@ -30,118 +30,118 @@
- +
-

This function generates a sequence for given parameters +

This function generates a sequence for given parameters on the server-side.

-
ds.seq(
-  FROM.value.char = "1",
-  BY.value.char = "1",
-  TO.value.char = NULL,
-  LENGTH.OUT.value.char = NULL,
-  ALONG.WITH.name = NULL,
-  newobj = "newObj",
-  datasources = NULL
-)
+
ds.seq(
+  FROM.value.char = "1",
+  BY.value.char = "1",
+  TO.value.char = NULL,
+  LENGTH.OUT.value.char = NULL,
+  ALONG.WITH.name = NULL,
+  newobj = "newObj",
+  datasources = NULL
+)

Arguments

-
FROM.value.char
-

an integer or a number in character from specifying -the starting value for the sequence. + + +

FROM.value.char
+

an integer or a number in character from specifying +the starting value for the sequence. Default "1".

-
BY.value.char
-

an integer or a number in character from specifying +

BY.value.char
+

an integer or a number in character from specifying the value to increment each step in the sequence. Default "1".

-
TO.value.char
-

an integer or a number in character from specifying +

TO.value.char
+

an integer or a number in character from specifying the terminal value for the sequence. -Default NULL. +Default NULL. For more information see Details.

-
LENGTH.OUT.value.char
-

an integer or a number in character from specifying +

LENGTH.OUT.value.char
+

an integer or a number in character from specifying the length of the sequence at which point its extension should be stopped. -Default NULL. +Default NULL. For more information see Details.

-
ALONG.WITH.name
-

a character string specifying the name of a standard vector -to generate a vector of the same length. +

ALONG.WITH.name
+

a character string specifying the name of a standard vector +to generate a vector of the same length. For more information see Details.

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default seq.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.seq returns to the server-side the generated sequence. -Also, two validity messages are returned to the client-side +

ds.seq returns to the server-side the generated sequence. +Also, two validity messages are returned to the client-side indicating whether the new object has been created in each data source and if so whether it is in a valid form.

Details

-

This function is similar to a native R function seq(). +

This function is similar to a native R function seq(). It creates a flexible range of sequence vectors that can then be used to help manage and analyse data.

Note: the combinations of arguments that are not allowed -for the function seq in native R are also prohibited in ds.seq.

-

To be specific, FROM.value.char argument +for the function seq in native R are also prohibited in ds.seq.

+

To be specific, FROM.value.char argument defines the start of the sequence and BY.value.char defines how the sequence is incremented (or decremented) at each step. But where the sequence stops can be defined in three different ways:
-(1) TO.value.char indicates the terminal value of the sequence. +(1) TO.value.char indicates the terminal value of the sequence. For example, ds.seq(FROM.value.char = "3", BY.value.char = "2", TO.value.char = "7") creates the sequence 3,5,7 on the server-side.
-(2) LENGTH.OUT.value.char indicates the length of the sequence. -For example, ds.seq(FROM.value.char = "3", BY.value.char = "2", -LENGTH.OUT.value.char = "7") +(2) LENGTH.OUT.value.char indicates the length of the sequence. +For example, ds.seq(FROM.value.char = "3", BY.value.char = "2", +LENGTH.OUT.value.char = "7") creates the sequence 3,5,7,9,11,13,15 on the server-side.
(3) ALONG.WITH.name specifies the name of a variable on the server-side, such that the sequence in each study will be equal in length to that variable. -For example, ds.seq(FROM.value.char = "3", BY.value.char = "2", +For example, ds.seq(FROM.value.char = "3", BY.value.char = "2", ALONG.WITH.name = "var.x") creates a sequence such that if var.x is of length 100 in study 1 the sequence written to study 1 will be 3,5,7,...,197,199,201 and if var.x is of length 4 in study 2, the sequence written to study 2 will be 3,5,7,9.
Only one of the three arguments: TO.value.char, LENGTH.OUT.value.char and ALONG.WITH.name can be non-null in any one call.

-

In LENGTH.OUT.value.char argument if you specify a number with a decimal point but +

In LENGTH.OUT.value.char argument if you specify a number with a decimal point but in character form this result in a sequence length(integer) + 1. -For example, LENGTH.OUT.value.char = "1000.0001" +For example, LENGTH.OUT.value.char = "1000.0001" generates a sequence of length 1001.

Server function called: seqDS

@@ -152,57 +152,57 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-
-  #Create 3 different sequences
-  
-  ds.seq(FROM.value.char = "1",
-         BY.value.char = "2",
-         TO.value.char = "7",
-         newobj = "new.seq1",
-         datasources = connections)
-         
-         
-  ds.seq(FROM.value.char = "4",
-         BY.value.char = "3",
-         LENGTH.OUT.value.char = "10",
-         newobj = "new.seq2",
-         datasources = connections)  
-         
-  ds.seq(FROM.value.char = "2",
-         BY.value.char = "5",
-         ALONG.WITH.name = "D$GENDER",
-         newobj = "new.seq3",
-         datasources = connections)                            
-         
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-}
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+
+  #Create 3 different sequences
+  
+  ds.seq(FROM.value.char = "1",
+         BY.value.char = "2",
+         TO.value.char = "7",
+         newobj = "new.seq1",
+         datasources = connections)
+         
+         
+  ds.seq(FROM.value.char = "4",
+         BY.value.char = "3",
+         LENGTH.OUT.value.char = "10",
+         newobj = "new.seq2",
+         datasources = connections)  
+         
+  ds.seq(FROM.value.char = "2",
+         BY.value.char = "5",
+         ALONG.WITH.name = "D$GENDER",
+         newobj = "new.seq3",
+         datasources = connections)                            
+         
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+} # }
 
@@ -213,19 +213,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.setDefaultOpals.html b/docs/reference/ds.setDefaultOpals.html index bb711d4d2..3b34696d1 100644 --- a/docs/reference/ds.setDefaultOpals.html +++ b/docs/reference/ds.setDefaultOpals.html @@ -1,9 +1,9 @@ -Creates a default set of Opal objects called 'default.opals' — ds.setDefaultOpals • dsBaseClientCreates a default set of Opal objects called 'default.opals' — ds.setDefaultOpals • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,14 +44,12 @@

Creates a default set of Opal objects called 'default.opals'

-
ds.setDefaultOpals(opal.name)
+
ds.setDefaultOpals(opal.name)

Value

- - -

Copies a specified set of Opals (on the client-side server) +

Copies a specified set of Opals (on the client-side server) and calls the copy 'default.opals'

@@ -95,19 +93,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.setSeed.html b/docs/reference/ds.setSeed.html index ef807e37c..eac16115c 100644 --- a/docs/reference/ds.setSeed.html +++ b/docs/reference/ds.setSeed.html @@ -1,9 +1,9 @@ -Server-side random number generation — ds.setSeed • dsBaseClientServer-side random number generation — ds.setSeed • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,27 +44,27 @@

Server-side random number generation

-
ds.setSeed(seed.as.integer = NULL, datasources = NULL)
+
ds.setSeed(seed.as.integer = NULL, datasources = NULL)

Arguments

-
seed.as.integer
+ + +
seed.as.integer

a numeric value or a NULL that primes the random seed in each data source.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

Sets the values of the vector of integers of length 626 known as +

Sets the values of the vector of integers of length 626 known as .Random.seed on each data source that is the true current state of the random seed in each source. It also returns the value of the trigger integer that has primed the random seed vector (.Random.seed) in @@ -77,9 +77,9 @@

Details

the full pseudorandom number seed that is a vector of integers of length 626 called .Random.seed, this vector is written to the server-side.

This function is similar to a native R function set.seed().

-

In seed.as.integer argument +

In seed.as.integer argument the current limitation on the value of the integer that -can be specified is -2147483647 up to +2147483647 +can be specified is -2147483647 up to +2147483647 (this is +/- ([2^31]-1)).

Because you only specify one integer in the call to ds.setSeed (i.e. the value for the seed.as.integer argument) that value will be @@ -88,7 +88,7 @@

Details

the same position and if a vector of pseudorandom number values is requested based on one of DataSHIELD's pseudorandom number generating functions precisely the same random vector will be generated in each source. If you want to avoid this -you can specify a different priming value in each source by using +you can specify a different priming value in each source by using the datasources argument to generate the random number vectors one source at a time with a different integer in each case.

Furthermore, if you use any one @@ -105,47 +105,47 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki 
-  
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Generate a pseudorandom number in the server-side
-  
-  ds.setSeed(seed.as.integer = 152584,
-             datasources = connections)
-             
-  #Specify the pseudorandom number only in the first source
-  
-  ds.setSeed(seed.as.integer = 741,
-             datasources = connections[1])#only the frist study is used (study1)
-                   
-  # Clear the Datashield R sessions and logout  
-  datashield.logout(connections) 
-  }
-
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki 
+  
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Generate a pseudorandom number in the server-side
+  
+  ds.setSeed(seed.as.integer = 152584,
+             datasources = connections)
+             
+  #Specify the pseudorandom number only in the first source
+  
+  ds.setSeed(seed.as.integer = 741,
+             datasources = connections[1])#only the frist study is used (study1)
+                   
+  # Clear the Datashield R sessions and logout  
+  datashield.logout(connections) 
+  } # }
+
 
@@ -156,19 +156,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.skewness.html b/docs/reference/ds.skewness.html index c9cd5151e..ad9e9fa11 100644 --- a/docs/reference/ds.skewness.html +++ b/docs/reference/ds.skewness.html @@ -1,10 +1,10 @@ -Calculates the skewness of a server-side numeric variable — ds.skewness • dsBaseClientCalculates the skewness of a server-side numeric variable — ds.skewness • dsBaseClient - +
@@ -30,60 +30,60 @@
- +
-

This function calculates the skewness of a numeric variable +

This function calculates the skewness of a numeric variable that is stored on the server-side (Opal server).

-
ds.skewness(x = NULL, method = 1, type = "both", datasources = NULL)
+
ds.skewness(x = NULL, method = 1, type = "both", datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the name of a numeric variable.

-
method
-

an integer value between 1 and 3 selecting one of the algorithms for computing skewness. +

method
+

an integer value between 1 and 3 selecting one of the algorithms for computing skewness. For more information see Details. The default value is set to 1.

-
type
-

a character string which represents the type of analysis to carry out. +

type
+

a character string which represents the type of analysis to carry out. type can be set as: 'combine', 'split' or 'both'. For more information -see Details. +see Details. The default value is set to 'both'.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.skewness returns a matrix showing the skewness of the input numeric variable, +

ds.skewness returns a matrix showing the skewness of the input numeric variable, the number of valid observations and the validity message.

Details

This function is similar to the function skewness in R package e1071.

-

The function calculates the skewness of an input variable x +

The function calculates the skewness of an input variable x with three different methods:
(1) If method is set to 1 the following formula is used \( skewness= \frac{\sum_{i=1}^{N} (x_i - \bar(x))^3 /N}{(\sum_{i=1}^{N} ((x_i - \bar(x))^2) /N)^(3/2) }\), where \( \bar{x} \) is the mean of x and \(N\) is the number of observations.
@@ -91,9 +91,9 @@

Details

the following formula is used \( skewness= \frac{\sum_{i=1}^{N} (x_i - \bar(x))^3 /N}{(\sum_{i=1}^{N} ((x_i - \bar(x))^2) /N)^(3/2) } * \frac{\sqrt(N(N-1)}{n-2}\).
(3) If method is set to 3 the following formula is used \( skewness= \frac{\sum_{i=1}^{N} (x_i - \bar(x))^3 /N}{(\sum_{i=1}^{N} ((x_i - \bar(x))^2) /N)^(3/2) } * (\frac{N-1}{N})^(3/2)\).

The type argument can be set as follows:
-(1) If type is set to 'combine', 'combined', 'combines' or 'c', +(1) If type is set to 'combine', 'combined', 'combines' or 'c', the global skewness is returned.
-(2) If type is set to 'split', 'splits' or 's', +(2) If type is set to 'split', 'splits' or 's', the skewness is returned separately for each study.
(3) If type is set to 'both' or 'b', both sets of outputs are produced.

If x contains any missing value, the function removes those before @@ -107,43 +107,43 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Calculate the skewness of LAB_TSC numeric variable for each study separately and combined
-  
-  ds.skewness(x = "D$LAB_TSC",
-              method = 1, 
-              type = "both",
-             datasources = connections)
-  
-  # Clear the Datashield R sessions and logout                 
-  DSI::datashield.logout(connections) 
-  
-} 
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Calculate the skewness of LAB_TSC numeric variable for each study separately and combined
+  
+  ds.skewness(x = "D$LAB_TSC",
+              method = 1, 
+              type = "both",
+             datasources = connections)
+  
+  # Clear the Datashield R sessions and logout                 
+  DSI::datashield.logout(connections) 
+  
+} # } 
 
@@ -154,19 +154,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.sqrt.html b/docs/reference/ds.sqrt.html index 05719f9bf..c0f0612e9 100644 --- a/docs/reference/ds.sqrt.html +++ b/docs/reference/ds.sqrt.html @@ -1,10 +1,10 @@ -Computes the square root values of a variable — ds.sqrt • dsBaseClientComputes the square root values of a variable — ds.sqrt • dsBaseClient - +
@@ -30,55 +30,55 @@
- +
-

Computes the square root values for a specified numeric or integer vector. +

Computes the square root values for a specified numeric or integer vector. This function is similar to R function sqrt.

-
ds.sqrt(x = NULL, newobj = NULL, datasources = NULL)
+
ds.sqrt(x = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string providing the name of a numeric or an integer vector.

-
newobj
+
newobj

a character string that provides the name for the output variable that is stored on the data servers. Default name is set to sqrt.newobj.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.sqrt assigns a vector for each study that includes the square root values of +

ds.sqrt assigns a vector for each study that includes the square root values of the input numeric or integer vector specified in the argument x. The created vectors are stored in the servers.

Details

-

The function calls the server-side function sqrtDS that computes the -square root values of the elements of a numeric or integer vector and assigns a new vector -with those square root values on the server-side. The name of the new generated vector is +

The function calls the server-side function sqrtDS that computes the +square root values of the elements of a numeric or integer vector and assigns a new vector +with those square root values on the server-side. The name of the new generated vector is specified by the user through the argument newobj, otherwise is named by default to sqrt.newobj.

@@ -89,53 +89,52 @@

Author

Examples

-
if (FALSE) {
-
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-                 
-  logindata <- builder$build()
-  
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Example 1: Get the square root of LAB_HDL variable
-  ds.sqrt(x='D$LAB_HDL', newobj='LAB_HDL.sqrt', datasources=connections)
-  # compare the mean of LAB_HDL and of LAB_HDL.sqrt
-  # Note here that the number of missing values is bigger in the LAB_HDL.sqrt 
-  ds.mean(x='D$LAB_HDL', datasources=connections)
-  ds.mean(x='LAB_HDL.sqrt', datasources=connections)
-
-  # Example 2: Generate a repeated vector of the squares of integers from 1 to 10
-  # and get their square roots
-  ds.make(toAssign='rep((1:10)^2, times=10)', newobj='squares.vector', datasources=connections)
-  ds.sqrt(x='squares.vector', newobj='sqrt.vector', datasources=connections)
-  # check the behavior of that operation by comparing the tables of squares.vector and sqrt.vector
-  ds.table(rvar='squares.vector')$output.list$TABLE_rvar.by.study_counts
-  ds.table(rvar='sqrt.vector')$output.list$TABLE_rvar.by.study_counts
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections) 
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+                 
+  logindata <- builder$build()
+  
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Example 1: Get the square root of LAB_HDL variable
+  ds.sqrt(x='D$LAB_HDL', newobj='LAB_HDL.sqrt', datasources=connections)
+  # compare the mean of LAB_HDL and of LAB_HDL.sqrt
+  # Note here that the number of missing values is bigger in the LAB_HDL.sqrt 
+  ds.mean(x='D$LAB_HDL', datasources=connections)
+  ds.mean(x='LAB_HDL.sqrt', datasources=connections)
+
+  # Example 2: Generate a repeated vector of the squares of integers from 1 to 10
+  # and get their square roots
+  ds.make(toAssign='rep((1:10)^2, times=10)', newobj='squares.vector', datasources=connections)
+  ds.sqrt(x='squares.vector', newobj='sqrt.vector', datasources=connections)
+  ds.table(rvar='squares.vector')$output.list$TABLE_rvar.by.study_counts
+  ds.table(rvar='sqrt.vector')$output.list$TABLE_rvar.by.study_counts
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections) 
+
+} # }
+
 
@@ -146,19 +145,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.subset.html b/docs/reference/ds.subset.html index 1769282bf..ff9d08f2d 100644 --- a/docs/reference/ds.subset.html +++ b/docs/reference/ds.subset.html @@ -1,11 +1,11 @@ -Generates a valid subset of a table or a vector — ds.subset • dsBaseClientGenerates a valid subset of a table or a vector — ds.subset • dsBaseClient - +
@@ -31,83 +31,83 @@
- +

The function uses the R classical subsetting with squared brackets '[]' and allows also to -subset using a logical oprator and a threshold. The object to subset from must be a vector (factor, numeric -or charcater) or a table (data.frame or matrix).

+subset using a logical operator and a threshold. The object to subset from must be a vector (factor, numeric +or character) or a table (data.frame or matrix).

-
ds.subset(
-  x = NULL,
-  subset = "subsetObject",
-  completeCases = FALSE,
-  rows = NULL,
-  cols = NULL,
-  logicalOperator = NULL,
-  threshold = NULL,
-  datasources = NULL
-)
+
ds.subset(
+  x = NULL,
+  subset = "subsetObject",
+  completeCases = FALSE,
+  rows = NULL,
+  cols = NULL,
+  logicalOperator = NULL,
+  threshold = NULL,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character, the name of the dataframe or the factor vector and the range of the subset.

-
subset
+
subset

the name of the output object, a list that holds the subset object. If set to NULL the default name of this list is 'subsetObject'

-
completeCases
+
completeCases

a character that tells if only complete cases should be included or not.

-
rows
+
rows

a vector of integers, the indices of the rows to extract.

-
cols
+
cols

a vector of integers or a vector of characters; the indices of the columns to extract or their names.

-
logicalOperator
+
logicalOperator

a boolean, the logical parameter to use if the user wishes to subset a vector using a logical operator. This parameter is ignored if the input data is not a vector.

-
threshold
+
threshold

a numeric, the threshold to use in conjunction with the logical parameter. This parameter is ignored if the input data is not a vector.

-
datasources
+
datasources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

Value

- - -

no data are return to the user, the generated subset dataframe is stored on the server side.

+

no data are returned to the user; the generated subset dataframe is stored on the server side.

Details

(1) If the input data is a table the user specifies the rows and/or columns to include in the subset; the columns can be -refered to by their names. Table subsetting can also be done using the name of a variable and a threshold (see example 3). +referred to by their names. Table subsetting can also be done using the name of a variable and a threshold (see example 3). (2) If the input data is a vector and the parameters 'rows', 'logical' and 'threshold' are all provided the last two are ignored (i.e. 'rows' has precedence over the other two parameters then). IMPORTANT NOTE: If the requested subset is not valid (i.e. contains less than the allowed number of observations) all the values are @@ -125,51 +125,51 @@

Author

Examples

-
if (FALSE) {
-
-  # load the login data
-  data(logindata)
-
-  # login and assign some variables to R
-  myvar <- list("DIS_DIAB","PM_BMI_CONTINUOUS","LAB_HDL", "GENDER")
-  conns <- datashield.login(logins=logindata,assign=TRUE,variables=myvar)
-
-  # Example 1: generate a subset of the assigned dataframe (by default the table is named 'D')
-  # with complete cases only
-  ds.subset(x='D', subset='subD1', completeCases=TRUE)
-  # display the dimensions of the initial table ('D') and those of the subset table ('subD1')
-  ds.dim('D')
-  ds.dim('subD1')
-
-  # Example 2: generate a subset of the assigned table (by default the table is named 'D')
-  # with only the variables
-  # DIS_DIAB' and'PM_BMI_CONTINUOUS' specified by their name.
-  ds.subset(x='D', subset='subD2', cols=c('DIS_DIAB','PM_BMI_CONTINUOUS'))
-
-  # Example 3: generate a subset of the table D with bmi values greater than or equal to 25.
-  ds.subset(x='D', subset='subD3', logicalOperator='PM_BMI_CONTINUOUS>=', threshold=25)
-
-  # Example 4: get the variable 'PM_BMI_CONTINUOUS' from the dataframe 'D' and generate a
-  # subset bmi
-  # vector with bmi values greater than or equal to 25
-  ds.assign(toAssign='D$PM_BMI_CONTINUOUS', newobj='BMI')
-  ds.subset(x='BMI', subset='BMI25plus', logicalOperator='>=', threshold=25)
-
-  # Example 5: subsetting by rows:
-  # get the logarithmic values of the variable 'lab_hdl' and generate a subset with
-  # the first 50 observations of that new vector. If the specified number of row is
-  # greater than the total
-  # number of rows in any of the studies the process will stop.
-  ds.assign(toAssign='log(D$LAB_HDL)', newobj='logHDL')
-  ds.subset(x='logHDL', subset='subLAB_HDL', rows=c(1:50))
-  # now get a subset of the table 'D' with just the 100 first observations
-  ds.subset(x='D', subset='subD5', rows=c(1:100))
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(conns)
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  # load the login data
+  data(logindata)
+
+  # login and assign some variables to R
+  myvar <- list("DIS_DIAB","PM_BMI_CONTINUOUS","LAB_HDL", "GENDER")
+  conns <- datashield.login(logins=logindata,assign=TRUE,variables=myvar)
+
+  # Example 1: generate a subset of the assigned dataframe (by default the table is named 'D')
+  # with complete cases only
+  ds.subset(x='D', subset='subD1', completeCases=TRUE)
+  # display the dimensions of the initial table ('D') and those of the subset table ('subD1')
+  ds.dim('D')
+  ds.dim('subD1')
+
+  # Example 2: generate a subset of the assigned table (by default the table is named 'D')
+  # with only the variables
+  # DIS_DIAB' and'PM_BMI_CONTINUOUS' specified by their name.
+  ds.subset(x='D', subset='subD2', cols=c('DIS_DIAB','PM_BMI_CONTINUOUS'))
+
+  # Example 3: generate a subset of the table D with bmi values greater than or equal to 25.
+  ds.subset(x='D', subset='subD3', logicalOperator='PM_BMI_CONTINUOUS>=', threshold=25)
+
+  # Example 4: get the variable 'PM_BMI_CONTINUOUS' from the dataframe 'D' and generate a
+  # subset bmi
+  # vector with bmi values greater than or equal to 25
+  ds.assign(toAssign='D$PM_BMI_CONTINUOUS', newobj='BMI')
+  ds.subset(x='BMI', subset='BMI25plus', logicalOperator='>=', threshold=25)
+
+  # Example 5: subsetting by rows:
+  # get the logarithmic values of the variable 'lab_hdl' and generate a subset with
+  # the first 50 observations of that new vector. If the specified number of row is
+  # greater than the total
+  # number of rows in any of the studies the process will stop.
+  ds.assign(toAssign='log(D$LAB_HDL)', newobj='logHDL')
+  ds.subset(x='logHDL', subset='subLAB_HDL', rows=c(1:50))
+  # now get a subset of the table 'D' with just the 100 first observations
+  ds.subset(x='D', subset='subD5', rows=c(1:100))
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(conns)
+
+} # }
+
 
@@ -180,19 +180,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.subsetByClass.html b/docs/reference/ds.subsetByClass.html index 98fbc3cee..b401a1562 100644 --- a/docs/reference/ds.subsetByClass.html +++ b/docs/reference/ds.subsetByClass.html @@ -1,10 +1,10 @@ -Generates valid subset(s) of a data frame or a factor — ds.subsetByClass • dsBaseClientGenerates valid subset(s) of a data frame or a factor — ds.subsetByClass • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,39 +46,39 @@

Generates valid subset(s) of a data frame or a factor

-
ds.subsetByClass(
-  x = NULL,
-  subsets = "subClasses",
-  variables = NULL,
-  datasources = NULL
-)
+
ds.subsetByClass(
+  x = NULL,
+  subsets = "subClasses",
+  variables = NULL,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character, the name of the dataframe or the vector to generate subsets from.

-
subsets
+
subsets

the name of the output object, a list that holds the subset objects. If set to NULL the default name of this list is 'subClasses'.

-
variables
+
variables

a vector of string characters, the name(s) of the variables to subset by.

-
datasources
+
datasources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

Value

- - -

a no data are return to the user but messages are printed out.

+

no data are returned to the user, but messages are printed out.

Details

@@ -100,38 +100,38 @@

Author

Examples

-
if (FALSE) {
-
-  # load the login data
-  data(logindata)
-
-  # login and assign some variables to R
-  myvar <- list('DIS_DIAB','PM_BMI_CONTINUOUS','LAB_HDL', 'GENDER')
-  conns <- datashield.login(logins=logindata,assign=TRUE,variables=myvar)
-
-  # Example 1: generate all possible subsets from the table assigned above (one subset table
-  # for each class in each factor)
-  ds.subsetByClass(x='D', subsets='subclasses')
-  # display the names of the subset tables that were generated in each study
-  ds.names('subclasses')
-
-  # Example 2: subset the table initially assigned by the variable 'GENDER'
-  ds.subsetByClass(x='D', subsets='subtables', variables='GENDER')
-  # display the names of the subset tables that were generated in each study
-  ds.names('subtables')
-
-  # Example 3: generate a new variable 'gender' and split it into two vectors: males
-  # and females
-  ds.assign(toAssign='D$GENDER', newobj='gender')
-  ds.subsetByClass(x='gender', subsets='subvectors')
-  # display the names of the subset vectors that were generated in each study
-  ds.names('subvectors')
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(conns)
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  # load the login data
+  data(logindata)
+
+  # login and assign some variables to R
+  myvar <- list('DIS_DIAB','PM_BMI_CONTINUOUS','LAB_HDL', 'GENDER')
+  conns <- datashield.login(logins=logindata,assign=TRUE,variables=myvar)
+
+  # Example 1: generate all possible subsets from the table assigned above (one subset table
+  # for each class in each factor)
+  ds.subsetByClass(x='D', subsets='subclasses')
+  # display the names of the subset tables that were generated in each study
+  ds.names('subclasses')
+
+  # Example 2: subset the table initially assigned by the variable 'GENDER'
+  ds.subsetByClass(x='D', subsets='subtables', variables='GENDER')
+  # display the names of the subset tables that were generated in each study
+  ds.names('subtables')
+
+  # Example 3: generate a new variable 'gender' and split it into two vectors: males
+  # and females
+  ds.assign(toAssign='D$GENDER', newobj='gender')
+  ds.subsetByClass(x='gender', subsets='subvectors')
+  # display the names of the subset vectors that were generated in each study
+  ds.names('subvectors')
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(conns)
+
+} # }
+
 
@@ -142,19 +142,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.summary.html b/docs/reference/ds.summary.html index 2cb7a634f..b0990fab8 100644 --- a/docs/reference/ds.summary.html +++ b/docs/reference/ds.summary.html @@ -1,9 +1,9 @@ -Generates the summary of a server-side object — ds.summary • dsBaseClientGenerates the summary of a server-side object — ds.summary • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,39 +44,39 @@

Generates the summary of a server-side object

-
ds.summary(x = NULL, datasources = NULL)
+
ds.summary(x = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a character string specifying the name of a numeric or factor variable.

-
datasources
-

a list of DSConnection-class objects obtained after login. +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.summary returns to the client-side the class and -size of the server-side object. +

ds.summary returns to the client-side the class and +size of the server-side object. Also other information is returned depending on the class of the object. For example, potentially disclosive information -such as the minimum and maximum values of numeric vectors are not returned. +such as the minimum and maximum values of numeric vectors are not returned. The summary is given for each study separately.

Details

-

This function provides some insight about an object. Unlike the similar native R +

This function provides some insight about an object. Unlike the similar native R summary function only a limited class of objects can be used as input to reduce the risk of disclosure. For example, the minimum and the maximum values of a numeric vector - are not given to the client because they are potentially disclosive.

+ are not given to the client because they are potentially disclosive.

server functions called: isValidDS, dimDS and colnamesDS

@@ -86,49 +86,49 @@

Author

Examples

-
if (FALSE) {
-
-  ## Version 6, for version 5 see the Wiki 
-  
-  # Connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-
-  # Log onto the remote Opal training servers
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Calculate the summary of a numeric variable
-  
-  ds.summary(x = "D$LAB_TSC",
-             datasources = connections)
- 
-  #Calculate the summary of a factor variable
-
-  ds.summary(x = "D$PM_BMI_CATEGORICAL",
-             datasources = connections)
-                                
-  # Clear the Datashield R sessions and logout  
-  datashield.logout(connections) 
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  ## Version 6, for version 5 see the Wiki 
+  
+  # Connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+
+  # Log onto the remote Opal training servers
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Calculate the summary of a numeric variable
+  
+  ds.summary(x = "D$LAB_TSC",
+             datasources = connections)
+ 
+  #Calculate the summary of a factor variable
+
+  ds.summary(x = "D$PM_BMI_CATEGORICAL",
+             datasources = connections)
+                                
+  # Clear the Datashield R sessions and logout  
+  datashield.logout(connections) 
+
+} # }
+
 
@@ -139,19 +139,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.table.html b/docs/reference/ds.table.html index 8e1fc6599..bcd1b6127 100644 --- a/docs/reference/ds.table.html +++ b/docs/reference/ds.table.html @@ -1,12 +1,10 @@ -Generates 1-, 2-, and 3-dimensional contingency tables with option -of assigning to serverside only and producing chi-squared statistics — ds.table • dsBaseClientGenerates 1-, 2-, and 3-dimensional contingency tables with option of assigning to serverside only and producing chi-squared statistics — ds.table • dsBaseClient - +
@@ -32,14 +30,13 @@
- +
@@ -49,24 +46,26 @@

Generates 1-, 2-, and 3-dimensional contingency tables with option

-
ds.table(
-  rvar = NULL,
-  cvar = NULL,
-  stvar = NULL,
-  report.chisq.tests = FALSE,
-  exclude = NULL,
-  useNA = "always",
-  suppress.chisq.warnings = FALSE,
-  table.assign = FALSE,
-  newobj = NULL,
-  datasources = NULL,
-  force.nfilter = NULL
-)
+
ds.table(
+  rvar = NULL,
+  cvar = NULL,
+  stvar = NULL,
+  report.chisq.tests = FALSE,
+  exclude = NULL,
+  useNA = "always",
+  suppress.chisq.warnings = FALSE,
+  table.assign = FALSE,
+  newobj = NULL,
+  datasources = NULL,
+  force.nfilter = NULL
+)

Arguments

-
rvar
+ + +
rvar

is a character string (in inverted commas) specifying the name of the variable defining the rows in all of the 2 dimensional tables that form the output. Please see 'details' above for more @@ -74,33 +73,33 @@

Arguments

by <rvar> but <cvar> and <stvar> are both NULL

-
cvar
+
cvar

is a character string specifying the name of the variable defining the columns in all of the 2 dimensional tables that form the output.

-
stvar
+
stvar

is a character string specifying the name of the variable that indexes the separate two dimensional tables in the output if the call specifies a 3 dimensional table.

-
report.chisq.tests
+
report.chisq.tests

if TRUE, chi-squared tests are applied to every 2 dimensional table in the output and reported as "chisq.test_table.name". Default = FALSE.

-
exclude
-

this argument is passed through to the table function in -native R which is called by tableDS. The help for table in native R +

exclude
+

this argument is passed through to the table function in +native R which is called by tableDS. The help for table in native R indicates that 'exclude' specifies any levels that should be deleted for all factors in rvar, cvar or stvar. If the <exclude> argument does not include NA and if the <useNA> argument is not specified, -it implies <useNA> = "always" in DataSHIELD. If you read the help for table in native R +it implies <useNA> = "always" in DataSHIELD. If you read the help for table in native R including the 'details' and the 'examples' (particularly 'd.patho') you -will see that the response of table to different combinations of the +will see that the response of table to different combinations of the <exclude> and <useNA> arguments can be non-intuitive. This is particularly so if there is more than one type of missing (e.g. missing by observation as well as missing because of an NaN response to a mathematical @@ -109,7 +108,7 @@

Arguments

you cannot interpret the output that has been approached you might try: (1) making sure that the variable producing the strange results is of class factor rather than integer or numeric - although integers and -numerics are coerced to factors by ds.table they can occasionally behave less +numerics are coerced to factors by ds.table they can occasionally behave less well when the NA setting is complex; (2) specify both an <exclude> argument e.g. exclude = c("NaN","3") and a <useNA> argument e.g. useNA= "no"; (3) if you are excluding multiple levels e.g exclude = c("NA","3") @@ -117,25 +116,25 @@

Arguments

the 3s by deleting rows of data, or converting the 3s to a different value.

-
useNA
-

this argument is passed through to the table function in -native R which is called by tableDS. In DataSHIELD, this argument can take +

useNA
+

this argument is passed through to the table function in +native R which is called by tableDS. In DataSHIELD, this argument can take two values: "no" or "always" which indicate whether to include NA values in the table. For further information, please see the help for the <exclude> argument (above) -and/or the help for the table function in native R. Default value is set to "always".

+and/or the help for the table function in native R. Default value is set to "always".

-
suppress.chisq.warnings
+
suppress.chisq.warnings

if set to TRUE, the default warnings are -suppressed that would otherwise be produced by the table function in +suppressed that would otherwise be produced by the table function in native R whenever an expected cell count in one or more cells is less than 5. Default is FALSE. Further details can be found under 'details' and the help provided for the <report.chisq.tests> argument (above).

-
table.assign
+
table.assign

is a Boolean argument set by default to FALSE. If it is -FALSE the ds.table function acts as a standard aggregate function - +FALSE the ds.table function acts as a standard aggregate function - it returns the table that is specified in its call to the clientside where it can be visualised and worked with by the analyst. But if <table.assign> is TRUE, the same table object is also written to @@ -146,7 +145,7 @@

Arguments

to the clientside because it fails disclosure rules.

-
newobj
+
newobj

this a character string providing a name for the output table object to be written to the serverside if <table.assign> is TRUE. If no explicit name for the table object is specified, but <table.assign> @@ -154,7 +153,7 @@

Arguments

to table.newobj.

-
datasources
+
datasources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default. If the <datasources> is to be specified, it should be set without @@ -165,12 +164,12 @@

Arguments

e.g. datasources=connections.em[c(1,3)].

-
force.nfilter
+
force.nfilter

if <force.nfilter> is non-NULL it must be specified as a positive integer represented as a character string: e.g. "173". This the has the effect of the standard value of 'nfilter.tab' (often 1, 3, 5 or 10 depending what value the data custodian has selected for this particular -data set), to this new value (here, 173). CRUCIALLY, the ds.table function +data set), to this new value (here, 173). CRUCIALLY, the ds.table function only allows the standard value to be INCREASED. So if the standard value has been set as 5 (as one of the R options set in the serverside connection), "6" and "4981" would be allowable values for the <force.nfilter> argument but "4" or @@ -181,21 +180,15 @@

Arguments

Value

- - -

Having created the requested table based on serverside data +

Having created the requested table based on serverside data it is returned to the clientside for the analyst to visualise (unless it is blocked because it fails the disclosure control criteria or there is an error for some other reason).

- -

The clientside output from -ds.table includes error messages that identify when the creation of a +ds.table includes error messages that identify when the creation of a table from a particular study has failed and why. If table.assign=TRUE, -ds.table also writes the requested table as an object named by +ds.table also writes the requested table as an object named by the <newobj> argument or set to 'newObj' by default.

- -

Further information about the visible material passed to the clientside, and the optional table object written to the serverside can be seen under 'details' (above).

@@ -226,14 +219,14 @@

Details

The true counts in the studyside vector are replaced by a sequential set of cell-IDs running from 1:n (where n is the total number of cells in the table) in the empty -representation of the structure of the potentially disclosive table +representation of the structure of the potentially disclosive table that is returned to the clientside. These cell-IDs reflect the order of the counts in the true counts vector on the serverside. In consequence, if the number 13 appears in a cell of the empty table returned to the clientside, it means that the true count in that same cell is held as the 13th element of the true count vector saved on the serverside. This means that a data analyst -can still make use of the counts from a call to the ds.table +can still make use of the counts from a call to the ds.table function to drive their ongoing analysis even when one or more non-zero cell counts fall below the specified threshold for potential disclosure risk.

@@ -248,7 +241,7 @@

Details

tables produced in the output.

In creating a 3-dimensional table the <stvar> ('separate tables') argument identifies the variable that -indexes the set of two dimensional tables in the output ds.table.

+indexes the set of two dimensional tables in the output ds.table.

As a minor technicality, it should be noted that if a 1-dimensional table is required, one only need specify a value for the <rvar> argument and any one dimensional table in the output @@ -258,7 +251,7 @@

Details

dimensional tables and key components of the output for one dimensional tables are actually two dimensional: with rows defined by <rvar> and with one column for each of the studies.

-

The output list generated by ds.table contains tables based on counts +

The output list generated by ds.table contains tables based on counts named "table.name_counts" and other tables reporting corresponding column proportions ("table.name_col.props") or row proportions ("table.name_row.props"). In one dimensional tables in the output the @@ -291,19 +284,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.table1D.html b/docs/reference/ds.table1D.html index b1141e4cc..354a9ef6d 100644 --- a/docs/reference/ds.table1D.html +++ b/docs/reference/ds.table1D.html @@ -1,11 +1,11 @@ -Generates 1-dimensional contingency tables — ds.table1D • dsBaseClientGenerates 1-dimensional contingency tables — ds.table1D • dsBaseClient - +
@@ -31,13 +31,13 @@
- +
@@ -48,42 +48,42 @@

Generates 1-dimensional contingency tables

-
ds.table1D(
-  x = NULL,
-  type = "combine",
-  warningMessage = TRUE,
-  datasources = NULL
-)
+
ds.table1D(
+  x = NULL,
+  type = "combine",
+  warningMessage = TRUE,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character, the name of a numerical vector with discrete values - usually a factor.

-
type
-

a character which represent the type of table to ouput: pooled table or one table for each +

type
+

a character which represent the type of table to output: pooled table or one table for each data source. If type is set to 'combine', a pooled 1-dimensional table is returned; if If type is set to 'split' a 1-dimensional table is returned for each data source.

-
warningMessage
-

a boolean, if set to TRUE (deafult) a warning is displayed if any returned table is invalid. Warning +

warningMessage
+

a boolean, if set to TRUE (default) a warning is displayed if any returned table is invalid. Warning messages are suppressed if this parameter is set to FALSE. However the analyst can still view 'validity' information which are stored in the output object 'validity' - see the list of output objects.

-
datasources
+
datasources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

Value

- - -

A list object containing the following items:

+

A list object containing the following items:

counts

table(s) that hold counts for each level/category. If some cells counts are invalid (see 'Details' section) only the total (outer) cell counts are displayed in the returned individual study tables or in the pooled @@ -118,38 +118,38 @@

Author

Examples

-
if (FALSE) {
-
-  # load the file that contains the login details
-  data(logindata)
-
-  # login and assign all the stored variables to R
-  conns <- datashield.login(logins=logindata,assign=TRUE)
-
-  # Example 1: generate a one dimensional table, outputting combined (pooled) contingency tables
-  output <- ds.table1D(x='D$GENDER')
-  output$counts
-  output$percentages
-  output$validity
-
-  # Example 2: generate a one dimensional table, outputting study specific contingency tables
-  output <- ds.table1D(x='D$GENDER', type='split')
-  output$counts
-  output$percentages
-  output$validity
-
-  # Example 3: generate a one dimensional table, outputting study specific and combined
-  # contingency tables - see what happens if the reruened table is 'invalid'.
-  output <- ds.table1D(x='D$DIS_CVA')
-  output$counts
-  output$percentages
-  output$validity
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(conns)
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  # load the file that contains the login details
+  data(logindata)
+
+  # login and assign all the stored variables to R
+  conns <- datashield.login(logins=logindata,assign=TRUE)
+
+  # Example 1: generate a one dimensional table, outputting combined (pooled) contingency tables
+  output <- ds.table1D(x='D$GENDER')
+  output$counts
+  output$percentages
+  output$validity
+
+  # Example 2: generate a one dimensional table, outputting study specific contingency tables
+  output <- ds.table1D(x='D$GENDER', type='split')
+  output$counts
+  output$percentages
+  output$validity
+
+  # Example 3: generate a one dimensional table, outputting study specific and combined
+  # contingency tables - see what happens if the returned table is 'invalid'.
+  output <- ds.table1D(x='D$DIS_CVA')
+  output$counts
+  output$percentages
+  output$validity
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(conns)
+
+} # }
+
 
@@ -160,19 +160,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.table2D.html b/docs/reference/ds.table2D.html index 27e06d3b7..88dfcfd52 100644 --- a/docs/reference/ds.table2D.html +++ b/docs/reference/ds.table2D.html @@ -1,10 +1,10 @@ -Generates 2-dimensional contingency tables — ds.table2D • dsBaseClientGenerates 2-dimensional contingency tables — ds.table2D • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,48 +46,48 @@

Generates 2-dimensional contingency tables

-
ds.table2D(
-  x = NULL,
-  y = NULL,
-  type = "both",
-  warningMessage = TRUE,
-  datasources = NULL
-)
+
ds.table2D(
+  x = NULL,
+  y = NULL,
+  type = "both",
+  warningMessage = TRUE,
+  datasources = NULL
+)

Arguments

-
x
+ + +
x

a character, the name of a numerical vector with discrete values - usually a factor.

-
y
+
y

a character, the name of a numerical vector with discrete values - usually a factor.

-
type
-

a character which represent the type of table to ouput: pooled table or one table for each +

type
+

a character which represent the type of table to output: pooled table or one table for each data source or both. If type is set to 'combine', a pooled 2-dimensional table is returned; If type is set to 'split' a 2-dimensional table is returned for each data source. If type is set to 'both' (default) a pooled 2-dimensional table plus a 2-dimensional table for each data source are returned.

-
warningMessage
-

a boolean, if set to TRUE (deafult) a warning is displayed if any returned table is invalid. Warning +

warningMessage
+

a boolean, if set to TRUE (default) a warning is displayed if any returned table is invalid. Warning messages are suppressed if this parameter is set to FALSE. However the analyst can still view 'validity' information which are stored in the output object 'validity' - see the list of output objects.

-
datasources
+
datasources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

Value

- - -

A list object containing the following items:

+

A list object containing the following items:

colPercent

table(s) that hold column percentages for each level/category. Inner cells are reported as missing if one or more cells are 'invalid'.

@@ -129,7 +129,7 @@

Author

Examples

- +
- - + + diff --git a/docs/reference/ds.tapply.assign.html b/docs/reference/ds.tapply.assign.html index ec8ea665e..158d62457 100644 --- a/docs/reference/ds.tapply.assign.html +++ b/docs/reference/ds.tapply.assign.html @@ -1,11 +1,11 @@ -Applies a Function Over a Ragged Array on the server-side — ds.tapply.assign • dsBaseClientApplies a Function Over a Ragged Array on the server-side — ds.tapply.assign • dsBaseClient - +
@@ -31,13 +31,13 @@
- +
@@ -48,58 +48,58 @@

Applies a Function Over a Ragged Array on the server-side

-
ds.tapply.assign(
-  X.name = NULL,
-  INDEX.names = NULL,
-  FUN.name = NULL,
-  newobj = NULL,
-  datasources = NULL
-)
+
ds.tapply.assign(
+  X.name = NULL,
+  INDEX.names = NULL,
+  FUN.name = NULL,
+  newobj = NULL,
+  datasources = NULL
+)

Arguments

-
X.name
+ + +
X.name

a character string specifying the name of the variable to be summarized.

-
INDEX.names
-

a character string specifying the name of a single factor -or a vector of names of up to two factors to index the variable to be summarized. +

INDEX.names
+

a character string specifying the name of a single factor +or a vector of names of up to two factors to index the variable to be summarized. For more information see Details.

-
FUN.name
-

a character string specifying the name of one of the allowable -summarizing functions. This can be set as: +

FUN.name
+

a character string specifying the name of one of the allowable +summarizing functions. This can be set as: "N" (or "length"), "mean","sd", "sum", -or "quantile". +or "quantile". For more information see Details.

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default tapply.assign.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.tapply.assign returns an array of the summarized values. +

ds.tapply.assign returns an array of the summarized values. The array is written to the server-side. It has the same number of dimensions as INDEX.

Details

This function applies one of -a selected range of functions to each cell of a ragged array, +a selected range of functions to each cell of a ragged array, that is to each (non-empty) group of values given by each unique combination of a series of indexing factors.

The range of allowable summarizing functions for DataSHIELD ds.tapply function @@ -111,7 +111,7 @@

Details

team could work on them if requested.

To protect against disclosure the number of observations in each summarizing group in each source is calculated -and if any of these falls below the value of nfilter.tab +and if any of these falls below the value of nfilter.tab (the minimum allowable non-zero count in a contingency table) the tapply analysis of that source will return only an error message. The value of nfilter.tab is can be set and modified only by the data custodian. If an @@ -123,14 +123,14 @@

Details

which individuals have got at least one positive value for a binary outcome variable, then that flagging does not have to be overtly returned to the client-side. Rather, it can be written as a vector to the server-side at each source (which, like any other server-side -object, cannot then be seen, abstracted or copied). This can be done using -ds.tapply.assign which writes the results as a newobj to the server-side -and does not test the number of observations in each group against nfilter.tab. +object, cannot then be seen, abstracted or copied). This can be done using +ds.tapply.assign which writes the results as a newobj to the server-side +and does not test the number of observations in each group against nfilter.tab. For more information see the help option of ds.tapply.assign function.

The native R tapply function has optional arguments such as na.rm = TRUE for FUN = mean -which will exclude any NAs from the outcome variable to be summarized. -However, in order to keep DataSHIELD's ds.tapply and ds.tapply.assign +which will exclude any NAs from the outcome variable to be summarized. +However, in order to keep DataSHIELD's ds.tapply and ds.tapply.assign functions straightforward, the server-side functions tapplyDS and tapplyDS.assign both starts by stripping any observations which have missing (NA) values in either the outcome variable or in @@ -138,7 +138,7 @@

Details

on complete cases.

In INDEX.names argument the native R tapply function can coerce non-factor vectors into factors. However, this does not always work when -using the DataSHIELD ds.tapply or ds.tapply.assign +using the DataSHIELD ds.tapply or ds.tapply.assign functions so if you are concerned that an indexing vector is not being treated correctly as a factor, please first declare it explicitly as a factor using ds.asFactor.

@@ -155,53 +155,53 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Apply a Function Over a Server-Side Ragged Array. 
-  # Write the resultant object on the server-side
-  
-  
-  ds.assign(toAssign = "D$LAB_TSC",
-            newobj = "LAB_TSC",
-            datasources = connections)
-            
-  ds.assign(toAssign = "D$GENDER",
-            newobj =  "GENDER",
-            datasources = connections)
-            
-  ds.tapply.assign(X.name = "LAB_TSC",
-                   INDEX.names = c("GENDER"),
-                   FUN.name = "mean",
-                   newobj="fun_mean.newobj",
-                   datasources = connections)
-                 
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  }
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Apply a Function Over a Server-Side Ragged Array. 
+  # Write the resultant object on the server-side
+  
+  
+  ds.assign(toAssign = "D$LAB_TSC",
+            newobj = "LAB_TSC",
+            datasources = connections)
+            
+  ds.assign(toAssign = "D$GENDER",
+            newobj =  "GENDER",
+            datasources = connections)
+            
+  ds.tapply.assign(X.name = "LAB_TSC",
+                   INDEX.names = c("GENDER"),
+                   FUN.name = "mean",
+                   newobj="fun_mean.newobj",
+                   datasources = connections)
+                 
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  } # }
 
@@ -212,19 +212,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.tapply.html b/docs/reference/ds.tapply.html index d7a724cdf..99f9338ea 100644 --- a/docs/reference/ds.tapply.html +++ b/docs/reference/ds.tapply.html @@ -1,11 +1,11 @@ -Applies a Function Over a Server-Side Ragged Array — ds.tapply • dsBaseClientApplies a Function Over a Server-Side Ragged Array — ds.tapply • dsBaseClient - +
@@ -31,68 +31,68 @@
- +

Apply one of a selected range of functions to summarize an -outcome variable over one or more indexing factors. +outcome variable over one or more indexing factors. The resultant summary is written to the client-side.

-
ds.tapply(
-  X.name = NULL,
-  INDEX.names = NULL,
-  FUN.name = NULL,
-  datasources = NULL
-)
+
ds.tapply(
+  X.name = NULL,
+  INDEX.names = NULL,
+  FUN.name = NULL,
+  datasources = NULL
+)

Arguments

-
X.name
+ + +
X.name

a character string specifying the name of the variable to be summarized.

-
INDEX.names
-

a character string specifying the name of a single factor -or a list or vector of names of up to two factors to index the variable to be summarized. +

INDEX.names
+

a character string specifying the name of a single factor +or a list or vector of names of up to two factors to index the variable to be summarized. For more information see Details.

-
FUN.name
-

a character string specifying the name of one of the allowable -summarizing functions. This can be set as: +

FUN.name
+

a character string specifying the name of one of the allowable +summarizing functions. This can be set as: "N" (or "length"), "mean","sd", "sum", -or "quantile". +or "quantile". For more information see Details.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.tapply returns to the client-side an array of the summarized values. +

ds.tapply returns to the client-side an array of the summarized values. It has the same number of dimensions as INDEX.

Details

This function is similar to a native R function tapply(). -It applies one of a selected range of functions to each cell +It applies one of a selected range of functions to each cell of a ragged array, that is to each (non-empty) group of values given by each unique combination of a series of indexing factors.

The range of allowable summarizing functions for DataSHIELD ds.tapply function @@ -104,7 +104,7 @@

Details

team could work on them if requested.

To protect against disclosure the number of observations in each summarizing group in each source is calculated -and if any of these falls below the value of nfilter.tab +and if any of these falls below the value of nfilter.tab (the minimum allowable non-zero count in a contingency table) the tapply analysis of that source will return only an error message. The value of nfilter.tab is can be set and modified only by the data custodian. If an @@ -116,14 +116,14 @@

Details

which individuals have got at least one positive value for a binary outcome variable, then that flagging does not have to be overtly returned to the client-side. Rather, it can be written as a vector to the server-side at each source (which, like any other server-side -object, cannot then be seen, abstracted or copied). This can be done using -ds.tapply.assign which writes the results as a newobj to the server-side -and does not test the number of observations in each group against nfilter.tab. +object, cannot then be seen, abstracted or copied). This can be done using +ds.tapply.assign which writes the results as a newobj to the server-side +and does not test the number of observations in each group against nfilter.tab. For more information see the help option of ds.tapply.assign function.

The native R tapply function has optional arguments such as na.rm = TRUE for FUN = mean -which will exclude any NAs from the outcome variable to be summarized. -However, in order to keep DataSHIELD's ds.tapply and ds.tapply.assign +which will exclude any NAs from the outcome variable to be summarized. +However, in order to keep DataSHIELD's ds.tapply and ds.tapply.assign functions straightforward, the server-side functions tapplyDS and tapplyDS.assign both starts by stripping any observations which have missing (NA) values in either the outcome variable or in @@ -131,7 +131,7 @@

Details

on complete cases.

In INDEX.names argument the native R tapply function can coerce non-factor vectors into factors. However, this does not always work when -using the DataSHIELD ds.tapply or ds.tapply.assign +using the DataSHIELD ds.tapply or ds.tapply.assign functions so if you are concerned that an indexing vector is not being treated correctly as a factor, please first declare it explicitly as a factor using ds.asFactor.

@@ -148,51 +148,51 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  # Apply a Function Over a Server-Side Ragged Array
-  
-  ds.assign(toAssign = "D$LAB_TSC",
-            newobj = "LAB_TSC",
-            datasources = connections)
-            
-  ds.assign(toAssign = "D$GENDER",
-            newobj =  "GENDER",
-            datasources = connections)
-            
-  ds.tapply(X.name = "LAB_TSC",
-            INDEX.names = c("GENDER"),
-            FUN.name = "mean",
-            datasources = connections)
-                 
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-} 
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  # Apply a Function Over a Server-Side Ragged Array
+  
+  ds.assign(toAssign = "D$LAB_TSC",
+            newobj = "LAB_TSC",
+            datasources = connections)
+            
+  ds.assign(toAssign = "D$GENDER",
+            newobj =  "GENDER",
+            datasources = connections)
+            
+  ds.tapply(X.name = "LAB_TSC",
+            INDEX.names = c("GENDER"),
+            FUN.name = "mean",
+            datasources = connections)
+                 
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # } 
 
@@ -203,19 +203,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.testObjExists.html b/docs/reference/ds.testObjExists.html index e0c8b015f..241fcd881 100644 --- a/docs/reference/ds.testObjExists.html +++ b/docs/reference/ds.testObjExists.html @@ -1,10 +1,10 @@ -Checks if an R object exists on the server-side — ds.testObjExists • dsBaseClientChecks if an R object exists on the server-side — ds.testObjExists • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,28 +46,28 @@

Checks if an R object exists on the server-side

-
ds.testObjExists(test.obj.name = NULL, datasources = NULL)
+
ds.testObjExists(test.obj.name = NULL, datasources = NULL)

Arguments

-
test.obj.name
+ + +
test.obj.name

a character string specifying the name of the object to search.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.testObjExists returns a list of messages specifying that the object exists +

ds.testObjExists returns a list of messages specifying that the object exists on the server-side. -If the specified object does not exist in at least one +If the specified object does not exist in at least one of the specified data sources or it exists but is of class NULL, the function returns an error message specifying that the object does not exist in all data sources.

@@ -86,41 +86,41 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Check if D object exists on the server-side
-  
-  ds.testObjExists(test.obj.name = "D",
-                   datasources = connections)
- 
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Check if D object exists on the server-side
+  
+  ds.testObjExists(test.obj.name = "D",
+                   datasources = connections)
+ 
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
 
@@ -131,19 +131,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.unList.html b/docs/reference/ds.unList.html index 9123347ae..c31ca868e 100644 --- a/docs/reference/ds.unList.html +++ b/docs/reference/ds.unList.html @@ -1,10 +1,10 @@ -Flattens Server-Side Lists — ds.unList • dsBaseClientFlattens Server-Side Lists — ds.unList • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,32 +46,32 @@

Flattens Server-Side Lists

-
ds.unList(x.name = NULL, newobj = NULL, datasources = NULL)
+
ds.unList(x.name = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x.name
+ + +
x.name

a character string specifying the name of the input object to be unlisted.

-
newobj
-

a character string that provides the name for the output variable +

newobj
+

a character string that provides the name for the output variable that is stored on the data servers. Default unlist.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.unList returns to the server-side the unlist object. -Also, two validity messages are returned to the client-side +

ds.unList returns to the server-side the unlist object. +Also, two validity messages are returned to the client-side indicating whether the new object has been created in each data source and if so whether it is in a valid form.

@@ -97,48 +97,48 @@

Author

Examples

-
if (FALSE) {
-  ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Create a list on the server-side
-  
-  ds.asList(x.name = "D", 
-            newobj = "list.D",
-            datasources = connections)
-  
-  #Flatten a server-side lists
-  
-  ds.unList(x.name = "list.D",
-            newobj = "un.list.D",
-           datasources = connections)
- 
-  # Clear the Datashield R sessions and logout                 
-  datashield.logout(connections) 
-  
-}   
+    
if (FALSE) { # \dontrun{
+  ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Create a list on the server-side
+  
+  ds.asList(x.name = "D", 
+            newobj = "list.D",
+            datasources = connections)
+  
+  #Flatten a server-side lists
+  
+  ds.unList(x.name = "list.D",
+            newobj = "un.list.D",
+           datasources = connections)
+ 
+  # Clear the Datashield R sessions and logout                 
+  datashield.logout(connections) 
+  
+} # }   
 
@@ -149,19 +149,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.unique.html b/docs/reference/ds.unique.html index d439ecee6..a887836e2 100644 --- a/docs/reference/ds.unique.html +++ b/docs/reference/ds.unique.html @@ -1,9 +1,9 @@ -Perform 'unique' on a variable on the server-side — ds.unique • dsBaseClientPerform 'unique' on a variable on the server-side — ds.unique • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,31 +44,31 @@

Perform 'unique' on a variable on the server-side

-
ds.unique(x.name = NULL, newobj = NULL, datasources = NULL)
+
ds.unique(x.name = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x.name
-

a character string providing the name of the varable, in the server, to perform unique upon

-
newobj
+
x.name
+

a character string providing the name of the variable, in the server, to perform unique upon

+ + +
newobj

a character string that provides the name for the output object that is stored on the data servers. Default unique.newobj.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.unique returns the vector of unique R objects which are written to the server-side.

+

ds.unique returns the vector of unique R objects which are written to the server-side.

Details

@@ -82,36 +82,36 @@

Author

Examples

-
if (FALSE) {
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1",
-                 url = "http://192.168.56.100:8080/",
-                 user = "administrator", password = "datashield_test&",
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2",
-                 url = "http://192.168.56.100:8080/",
-                 user = "administrator", password = "datashield_test&",
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/",
-                 user = "administrator", password = "datashield_test&",
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
-
-  # Create a vector with combined objects
-  ds.unique(x.name = "D$LAB_TSC", newobj = "new.vect", datasources = connections)
-
-  # Clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
+    
if (FALSE) { # \dontrun{
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1",
+                 url = "http://192.168.56.100:8080/",
+                 user = "administrator", password = "datashield_test&",
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2",
+                 url = "http://192.168.56.100:8080/",
+                 user = "administrator", password = "datashield_test&",
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/",
+                 user = "administrator", password = "datashield_test&",
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
+
+  # Create a vector with combined objects
+  ds.unique(x.name = "D$LAB_TSC", newobj = "new.vect", datasources = connections)
+
+  # Clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
 
@@ -122,19 +122,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.var.html b/docs/reference/ds.var.html index d500a48ea..109a1e78e 100644 --- a/docs/reference/ds.var.html +++ b/docs/reference/ds.var.html @@ -1,9 +1,9 @@ -Computes server-side vector variance — ds.var • dsBaseClientComputes server-side vector variance — ds.var • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,75 +44,57 @@

Computes server-side vector variance

-
ds.var(x = NULL, type = "split", checks = FALSE, datasources = NULL)
+
ds.var(x = NULL, type = "split", checks = FALSE, datasources = NULL)

Arguments

-
x
+ + +
x

a character specifying the name of a numerical vector.

-
type
+
type

a character string that represents the type of analysis to carry out. This can be set as 'combine', 'combined', 'combines', 'split', 'splits', 's', -'both' or 'b'. +'both' or 'b'. For more information see Details.

-
checks
+
checks

logical. If TRUE optional checks of model -components will be undertaken. Default is FALSE to save time. +components will be undertaken. Default is FALSE to save time. It is suggested that checks should only be undertaken once the function call has failed.

-
datasources
-

a list of DSConnection-class +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

Value

- - -

ds.var returns to the client-side a list including:

- - -

-

Variance.by.Study: estimated variance, Nmissing

- - -

(number of missing observations), Nvalid (number of valid observations) and -Ntotal (sum of missing and valid observations) -separately for each study (if type = split or type = both).

- - -

Global.Variance: estimated variance, Nmissing, Nvalid and Ntotal

- - -

across all studies combined (if type = combine or type = both).

- - -

Nstudies: number of studies being analysed.

- - -

ValidityMessage: indicates if the analysis was possible.

- - +

ds.var returns to the client-side a list including:

+

Variance.by.Study: estimated variance, Nmissing +(number of missing observations), Nvalid (number of valid observations) and +Ntotal (sum of missing and valid observations) +separately for each study (if type = split or type = both).
Global.Variance: estimated variance, Nmissing, Nvalid and Ntotal +across all studies combined (if type = combine or type = both).
Nstudies: number of studies being analysed.
ValidityMessage: indicates if the analysis was possible.

Details

This function is similar to the R function var.

The function can carry out 3 types of analysis depending on the argument type:
-(1) If type is set to 'combine', 'combined', +(1) If type is set to 'combine', 'combined', 'combines' or 'c', a global variance is calculated.
(2) If type is set to 'split', 'splits' or 's', the variance is calculated separately for each study.
-(3) If type is set to 'both' or 'b', +(3) If type is set to 'both' or 'b', both sets of outputs are produced.

Server function called: varDS

@@ -123,44 +105,44 @@

Author

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
-  
-  #Calculate the variance of a vector in the server-side
-  
-  ds.var(x = "D$LAB_TSC",
-          type = "split",
-          checks = FALSE,
-          datasources = connections)
-             
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D") 
+  
+  #Calculate the variance of a vector in the server-side
+  
+  ds.var(x = "D$LAB_TSC",
+          type = "split",
+          checks = FALSE,
+          datasources = connections)
+             
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
+
 
@@ -171,19 +153,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/ds.vectorCalc.html b/docs/reference/ds.vectorCalc.html index a1dacae0b..385c3eca9 100644 --- a/docs/reference/ds.vectorCalc.html +++ b/docs/reference/ds.vectorCalc.html @@ -1,10 +1,10 @@ -Performs a mathematical operation on two or more vectors — ds.vectorCalc • dsBaseClientPerforms a mathematical operation on two or more vectors — ds.vectorCalc • dsBaseClient - +
@@ -30,13 +30,13 @@
- +
@@ -46,42 +46,42 @@

Performs a mathematical operation on two or more vectors

-
ds.vectorCalc(x = NULL, calc = NULL, newobj = NULL, datasources = NULL)
+
ds.vectorCalc(x = NULL, calc = NULL, newobj = NULL, datasources = NULL)

Arguments

-
x
+ + +
x

a vector of characters, the names of the vectors to include in the operation.

-
calc
+
calc

a character, a symbol that indicates the mathematical operation to carry out: '+' for addition, '/' for division, *' for multiplication and '-' for subtraction.

-
newobj
+
newobj

the name of the output object. By default the name is 'vectorcalc.newobj'.

-
datasources
+
datasources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

Value

- - -

no data are returned to user, the output vector is stored on the server side.

+

no data are returned to user, the output vector is stored on the server side.

Details

In DataSHIELD it is possible to perform an operation on vectors by just using the relevant -R symbols (e.g. '+' for addtion, '*' for multiplication, '-' for substraction and '/' for division). +R symbols (e.g. '+' for addition, '*' for multiplication, '-' for subtraction and '/' for division). This might however be inconvenient if the number of vectors to include in the operation is large. This function takes the names of two or more vectors and performs the desired operation which could be -an addition, a multiplication, a substraction or a division. If one or more vectors have a missing value +an addition, a multiplication, a subtraction or a division. If one or more vectors have a missing value at any one entry (i.e. observation), the operation returns a missing value ('NA') for that entry; the output vectors has, hence the same length as the input vectors.

@@ -92,24 +92,24 @@

Author

Examples

-
if (FALSE) {
-
-  # load the file that contains the login details
-  data(logindata)
-
-  # login and assign the required variables to R
-  myvar <- list('LAB_TSC','LAB_HDL')
-  conns <- datashield.login(logins=logindata,assign=TRUE,variables=myvar)
-
-  # performs an addtion of 'LAB_TSC' and 'LAB_HDL'
-  myvectors <- c('D$LAB_TSC', 'D$LAB_HDL')
-  ds.vectorCalc(x=myvectors, calc='+')
-
-  # clear the Datashield R sessions and logout
-  datashield.logout(conns)
-
-}
-
+    
if (FALSE) { # \dontrun{
+
+  # load the file that contains the login details
+  data(logindata)
+
+  # login and assign the required variables to R
+  myvar <- list('LAB_TSC','LAB_HDL')
+  conns <- datashield.login(logins=logindata,assign=TRUE,variables=myvar)
+
+  # performs an addition of 'LAB_TSC' and 'LAB_HDL'
+  myvectors <- c('D$LAB_TSC', 'D$LAB_HDL')
+  ds.vectorCalc(x=myvectors, calc='+')
+
+  # clear the Datashield R sessions and logout
+  datashield.logout(conns)
+
+} # }
+
 
@@ -120,19 +120,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/extract.html b/docs/reference/extract.html index 432d8ef3c..ee10ef5d0 100644 --- a/docs/reference/extract.html +++ b/docs/reference/extract.html @@ -1,9 +1,9 @@ -Splits character by '$' and returns the single characters — extract • dsBaseClientSplits character by '$' and returns the single characters — extract • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,20 +44,20 @@

Splits character by '$' and returns the single characters

-
extract(input)
+
extract(input)

Arguments

-
input
+ + +
input

a vector or a list of characters

Value

- - -

a vector of characters

+

a vector of characters

Details

@@ -72,19 +72,19 @@

Details

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/getPooledMean.html b/docs/reference/getPooledMean.html index d0f3e42de..5f25fe1c9 100644 --- a/docs/reference/getPooledMean.html +++ b/docs/reference/getPooledMean.html @@ -1,9 +1,9 @@ -Gets a pooled statistical mean — getPooledMean • dsBaseClientGets a pooled statistical mean — getPooledMean • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,25 +44,25 @@

Gets a pooled statistical mean

-
getPooledMean(dtsources, x)
+
getPooledMean(dtsources, x)

Arguments

-
dtsources
+ + +
dtsources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

-
x
+
x

a character, the name of a numeric vector

Value

- - -

a pooled mean

+

a pooled mean

Details

@@ -79,19 +79,19 @@

Details

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/getPooledVar.html b/docs/reference/getPooledVar.html index fde54485f..b1136e230 100644 --- a/docs/reference/getPooledVar.html +++ b/docs/reference/getPooledVar.html @@ -1,9 +1,9 @@ -Gets a pooled variance — getPooledVar • dsBaseClientGets a pooled variance — getPooledVar • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,25 +44,25 @@

Gets a pooled variance

-
getPooledVar(dtsources, x)
+
getPooledVar(dtsources, x)

Arguments

-
dtsources
+ + +
dtsources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

-
x
+
x

a character, the name of a numeric vector

Value

- - -

a pooled variance

+

a pooled variance

Details

@@ -79,19 +79,19 @@

Details

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/glmChecks.html b/docs/reference/glmChecks.html index 0d4b8bcc9..f2ea44350 100644 --- a/docs/reference/glmChecks.html +++ b/docs/reference/glmChecks.html @@ -1,10 +1,10 @@ -Checks if the elements in the glm model have the right characteristics — glmChecks • dsBaseClientChecks if the elements in the glm model have the right characteristics — glmChecks • dsBaseClient - +
@@ -30,61 +30,61 @@
- +

This is an internal function required by the client function ds.glm -to verify all the variables and ensure the process does not halt inadvertanly.

+to verify all the variables and ensure the process does not halt inadvertently.</p>

-
glmChecks(formula, data, offset, weights, datasources)
+
glmChecks(formula, data, offset, weights, datasources)

Arguments

-
formula
+ + +
formula

a character, a regression formula given as a string character

-
data
+
data

a character, the name of an optional data frame containing the variables in in the formula.

-
offset
-

null or a numreric vector that can be used to specify an a priori known component to be +

offset
+

null or a numeric vector that can be used to specify an a priori known component to be included in the linear predictor during fitting.

-
weights
+
weights

a character, the name of an optional vector of 'prior weights' to be used in the fitting process. Should be NULL or a numeric vector.

-
datasources
+
datasources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

Value

- - -

an integer 0 if check was passed and 1 if failed

+

an integer 0 if check was passed and 1 if failed

Details

the variables are checked to ensure they are defined, not empty (i.e. are not missing -at complete) and evantually (if 'offset' or 'weights') are of 'numeric' with non negative value +at complete) and eventually (if 'offset' or 'weights') are of 'numeric' with non negative value (if 'weights').

@@ -100,19 +100,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/index.html b/docs/reference/index.html index ff0d512f8..66ffc0598 100644 --- a/docs/reference/index.html +++ b/docs/reference/index.html @@ -1,9 +1,9 @@ -Function reference • dsBaseClientPackage index • dsBaseClient - +
@@ -29,7 +29,7 @@
- +
@@ -42,6 +42,10 @@

All functions

+

computeWeightedMeans()

+ +

Compute Weighted Mean by Group

+

ds.Boole()

Converts a server-side R object into Boolean indicators

@@ -80,7 +84,7 @@

All functions

ds.asMatrix()

-

Converts a server-side R object into a matrix

+

Converts a server-side R object into a matrix

ds.asNumeric()

@@ -204,8 +208,7 @@

All functions

ds.extractQuantiles()

-

Secure ranking of a vector across all sources and use of these ranks -to estimate global quantiles across all studies

+

Secure ranking of a vector across all sources and use of these ranks to estimate global quantiles across all studies

ds.forestplot()

@@ -413,7 +416,7 @@

All functions

ds.rUnif()

-

Generates Uniform distribution in the server-side

+

Generates Uniform distribution in the server-side

ds.ranksSecure()

@@ -489,8 +492,7 @@

All functions

ds.table()

-

Generates 1-, 2-, and 3-dimensional contingency tables with option -of assigning to serverside only and producing chi-squared statistics

+

Generates 1-, 2-, and 3-dimensional contingency tables with option of assigning to serverside only and producing chi-squared statistics

ds.table1D()

@@ -536,19 +538,19 @@

All functions
-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/isAssigned.html b/docs/reference/isAssigned.html index 69c75a792..8044eb8b6 100644 --- a/docs/reference/isAssigned.html +++ b/docs/reference/isAssigned.html @@ -1,9 +1,9 @@ -Checks an object has been generated on the server side — isAssigned • dsBaseClientChecks an object has been generated on the server side — isAssigned • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,25 +44,25 @@

Checks an object has been generated on the server side

-
isAssigned(datasources = NULL, newobj = NULL)
+
isAssigned(datasources = NULL, newobj = NULL)

Arguments

-
datasources
+ + +
datasources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

-
newobj
+
newobj

a character, the name the object to look for.

Value

- - -

nothing is return but the process is stopped if +

nothing is return but the process is stopped if the object was not generated in any one server.

@@ -80,19 +80,19 @@

Details

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/isDefined.html b/docs/reference/isDefined.html index fc7c9e372..f2cbb94bb 100644 --- a/docs/reference/isDefined.html +++ b/docs/reference/isDefined.html @@ -1,9 +1,9 @@ -Checks if the objects are defined in all studies — isDefined • dsBaseClientChecks if the objects are defined in all studies — isDefined • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,22 +44,24 @@

Checks if the objects are defined in all studies

-
isDefined(datasources = NULL, obj = NULL, error.message = TRUE)
+
isDefined(datasources = NULL, obj = NULL, error.message = TRUE)

Arguments

-
datasources
-

a list of DSConnection-class objects obtained after login. + + +

datasources
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified, the default set of connections will be used: see datashield.connections_default.

-
obj
+
obj

a character vector, the name of the object(s) to look for.

-
error.message
+
error.message

a Boolean which specifies if the function should stop and return an error message when the input object is not defined in one or more studies or to return a list of TRUE/FALSE indicating in which studies the object is defined

@@ -67,9 +69,7 @@

Arguments

Value

- - -

returns an error message if error.message argument is set to TRUE (default) +

returns an error message if error.message argument is set to TRUE (default) and if the input object is not defined in one or more studies, or a Boolean value if error.message argument is set to FALSE.

@@ -91,19 +91,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/logical2int.html b/docs/reference/logical2int.html index 3e0ba4d9f..34f4853fb 100644 --- a/docs/reference/logical2int.html +++ b/docs/reference/logical2int.html @@ -1,9 +1,9 @@ -Turns a logical operator into an integer — logical2int • dsBaseClientTurns a logical operator into an integer — logical2int • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,25 +44,25 @@

Turns a logical operator into an integer

-
logical2int(obj = NULL)
+
logical2int(obj = NULL)

Arguments

-
obj
+ + +
obj

a character, the logical parameter to turn into an integer

Value

- - -

an integer

+

an integer

Details

-

This function is called to turn a logical oprator given as a -character into an integer: '>' is turned into 1, '>=' into 2, '<' into 3, +

This function is called to turn a logical operator given as a +character into an integer: '>' is turned into 1, '>=' into 2, '<' into 3, '<=' into 4, '==' into 5 and '!=' into 6.

@@ -74,19 +74,19 @@

Details

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/meanByClassHelper0a.html b/docs/reference/meanByClassHelper0a.html index 3f2adc586..98dd1e0e7 100644 --- a/docs/reference/meanByClassHelper0a.html +++ b/docs/reference/meanByClassHelper0a.html @@ -1,9 +1,9 @@ -Computes the mean values of a numeric vector across a factor vector — meanByClassHelper0a • dsBaseClientComputes the mean values of a numeric vector across a factor vector — meanByClassHelper0a • dsBaseClient - +
@@ -29,13 +29,13 @@

- +
@@ -44,35 +44,35 @@

Computes the mean values of a numeric vector across a factor vector

-
meanByClassHelper0a(a, b, type, datasources)
+
meanByClassHelper0a(a, b, type, datasources)

Arguments

-
a
+ + +
a

a character, the name of a numeric vector.

-
b
+
b

a character, the name of a factor vector.

-
type
+
type

a character which represents the type of analysis to carry out. If type is set to 'combine', a pooled table of results is generated. If type is set to 'split', a table of results -is genrated for each study.

+is generated for each study.

-
datasources
+
datasources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

Value

- - -

a table or a list of tables that hold the length of the numeric variable and its mean +

a table or a list of tables that hold the length of the numeric variable and its mean and standard deviation in each subgroup (subset).

@@ -93,19 +93,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/meanByClassHelper0b.html b/docs/reference/meanByClassHelper0b.html index 59438f7a3..4e4fb247f 100644 --- a/docs/reference/meanByClassHelper0b.html +++ b/docs/reference/meanByClassHelper0b.html @@ -1,9 +1,9 @@ -Runs the computation if variables are within a table structure — meanByClassHelper0b • dsBaseClientRuns the computation if variables are within a table structure — meanByClassHelper0b • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,45 +44,45 @@

Runs the computation if variables are within a table structure

-
meanByClassHelper0b(x, outvar, covar, type, datasources)
+
meanByClassHelper0b(x, outvar, covar, type, datasources)

Arguments

-
x
+ + +
x

a character, the name of the dataset to get the subsets from.

-
outvar
+
outvar

a character vector, the names of the continuous variables

-
covar
+
covar

a character vector, the names of up to 3 categorical variables

-
type
+
type

a character which represents the type of analysis to carry out. If type is set to 'combine', a pooled table of results is generated. If type is set to 'split', a table of results -is genrated for each study.

+is generated for each study.

-
datasources
+
datasources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

Value

- - -

a table or a list of tables that hold the length of the numeric variable(s) and their mean +

a table or a list of tables that hold the length of the numeric variable(s) and their mean and standard deviation in each subgroup (subset).

Details

This function is called by the function 'ds.meanByClass' to produce the final tables -if the user soecify a table structure.

+if the user specifies a table structure.

Author

@@ -97,19 +97,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/meanByClassHelper1.html b/docs/reference/meanByClassHelper1.html index d1e2a5aa5..9df03a529 100644 --- a/docs/reference/meanByClassHelper1.html +++ b/docs/reference/meanByClassHelper1.html @@ -1,9 +1,9 @@ -Generates subset tables — meanByClassHelper1 • dsBaseClientGenerates subset tables — meanByClassHelper1 • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,33 +44,33 @@

Generates subset tables

-
meanByClassHelper1(dtsource, tables, variable, categories)
+
meanByClassHelper1(dtsource, tables, variable, categories)

Arguments

-
dtsource
+ + +
dtsource

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

-
tables
+
tables

a character vector, the tables to breakdown

-
variable
+
variable

a character, the variable to subset on

-
categories
+
categories

a character vector, the classes in the variables to subset on

Value

- - -

a character the names of the new subset tables.

+

a character, the names of the new subset tables.

Details

@@ -90,19 +90,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/meanByClassHelper2.html b/docs/reference/meanByClassHelper2.html index f224c9e40..5466cbedf 100644 --- a/docs/reference/meanByClassHelper2.html +++ b/docs/reference/meanByClassHelper2.html @@ -1,9 +1,9 @@ -Generates a table for pooled results — meanByClassHelper2 • dsBaseClientGenerates a table for pooled results — meanByClassHelper2 • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,39 +44,39 @@

Generates a table for pooled results

-
meanByClassHelper2(dtsources, tablenames, variables, invalidrecorder)
+
meanByClassHelper2(dtsources, tablenames, variables, invalidrecorder)

Arguments

-
dtsources
+ + +
dtsources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

-
tablenames
+
tablenames

a character vector, the name of the subset tables

-
variables
+
variables

a character vector, the names of the continuous variables to computes a mean for.

-
invalidrecorder
-

a list, holds informations about invalid subsets in each study.

+
invalidrecorder
+

a list, holds information about invalid subsets in each study.

Value

- - -

a matrix, a table which contains the length, mean and standard deviation of each of the +

a matrix, a table which contains the length, mean and standard deviation of each of the specified 'variables' in each subset table.

Details

This function is called by the function 'ds.meanByClass' to produce the final table -if the user sets the parmater 'type' to combine (the default behaviour of 'ds.meanByClass').

+if the user sets the parameter 'type' to combine (the default behaviour of 'ds.meanByClass').

Author

@@ -91,19 +91,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/meanByClassHelper3.html b/docs/reference/meanByClassHelper3.html index 26b8c8987..d3bcf8f14 100644 --- a/docs/reference/meanByClassHelper3.html +++ b/docs/reference/meanByClassHelper3.html @@ -1,9 +1,9 @@ -Generates results tables for each study separately — meanByClassHelper3 • dsBaseClientGenerates results tables for each study separately — meanByClassHelper3 • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,38 +44,38 @@

Generates results tables for each study separately

-
meanByClassHelper3(dtsources, tablenames, variables, invalidrecorder)
+
meanByClassHelper3(dtsources, tablenames, variables, invalidrecorder)

Arguments

-
dtsources
+ + +
dtsources

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

-
tablenames
+
tablenames

a character vector, the name of the subset tables

-
variables
+
variables

a character vector, the names of the continuous variables to computes a mean for.

-
invalidrecorder
-

a list, holds informations about invalid subsets in each study

+
invalidrecorder
+

a list, holds information about invalid subsets in each study

Value

- - -

a list which one results table for each study.

+

a list with one results table for each study.

Details

This function is called by the function 'ds.meanByClass' to produce the final tables -if the user sets the parmater 'type' to 'split'.

+if the user sets the parameter 'type' to 'split'.

Author

@@ -90,19 +90,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/meanByClassHelper4.html b/docs/reference/meanByClassHelper4.html index bd00fc5ab..2b8f40b7d 100644 --- a/docs/reference/meanByClassHelper4.html +++ b/docs/reference/meanByClassHelper4.html @@ -1,9 +1,9 @@ -Gets the subset tables out of the list (i.e. unlist) — meanByClassHelper4 • dsBaseClientGets the subset tables out of the list (i.e. unlist) — meanByClassHelper4 • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,43 +44,43 @@

Gets the subset tables out of the list (i.e. unlist)

-
meanByClassHelper4(
-  dtsource,
-  alist,
-  initialtable,
-  variable = NA,
-  categories = NA
-)
+
meanByClassHelper4(
+  dtsource,
+  alist,
+  initialtable,
+  variable = NA,
+  categories = NA
+)

Arguments

-
dtsource
+ + +
dtsource

a list of DSConnection-class objects obtained after login. If the <datasources> the default set of connections will be used: see datashield.connections_default.

-
alist
+
alist

the name of the list that holds the final subset tables

-
initialtable
+
initialtable

a character the name of the table that the subset were generated from

-
variable
+
variable

a character, the variable to subset on

-
categories
+
categories

a character vector, the classes in the variables to subset on

Value

- - -

the 'loose' subset tables are stored on the server side

+

the 'loose' subset tables are stored on the server side

Details

@@ -100,19 +100,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/rowPercent.html b/docs/reference/rowPercent.html index 5e0ad9c23..df9685296 100644 --- a/docs/reference/rowPercent.html +++ b/docs/reference/rowPercent.html @@ -1,9 +1,9 @@ -Produces row percentages — rowPercent • dsBaseClientProduces row percentages — rowPercent • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,20 +44,20 @@

Produces row percentages

-
rowPercent(dataframe)
+
rowPercent(dataframe)

Arguments

-
dataframe
+ + +
dataframe

a data frame

Value

- - -

a data frame

+

a data frame

Details

@@ -76,19 +76,19 @@

Author

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/reference/subsetHelper.html b/docs/reference/subsetHelper.html index 8f24831d5..0a94aa1b3 100644 --- a/docs/reference/subsetHelper.html +++ b/docs/reference/subsetHelper.html @@ -1,9 +1,9 @@ -Ensures that the requested subset is not larger than the original object — subsetHelper • dsBaseClientEnsures that the requested subset is not larger than the original object — subsetHelper • dsBaseClient - +
@@ -29,13 +29,13 @@
- +
@@ -44,35 +44,35 @@

Ensures that the requested subset is not larger than the original object

-
subsetHelper(dts, data, rs = NULL, cs = NULL)
+
subsetHelper(dts, data, rs = NULL, cs = NULL)

Arguments

-
dts
-

a list of DSConnection-class + + +

dts
+

a list of DSConnection-class objects obtained after login. If the datasources argument is not specified the default set of connections will be used: see datashield.connections_default.

-
data
+
data

a character string specifying the name of the data frame or the factor vector and the range of the subset.

-
rs
+
rs

a vector of two integers specifying the indices of the rows de extract.

-
cs
+
cs

a vector of two integers or one or more characters.

Value

- - -

subsetHelper returns a message or the class of the object if the +

subsetHelper returns a message or the class of the object if the object has the same class in all studies.

@@ -85,42 +85,42 @@

Details

Examples

-
if (FALSE) {
-
- ## Version 6, for version 5 see the Wiki
-  
-  # connecting to the Opal servers
-
-  require('DSI')
-  require('DSOpal')
-  require('dsBaseClient')
-
-  builder <- DSI::newDSLoginBuilder()
-  builder$append(server = "study1", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
-  builder$append(server = "study2", 
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
-  builder$append(server = "study3",
-                 url = "http://192.168.56.100:8080/", 
-                 user = "administrator", password = "datashield_test&", 
-                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
-  logindata <- builder$build()
-  
-  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
-  
-  subsetHelper(dts = connections, 
-               data = "D", 
-               rs = 1:10, 
-               cs = c("D$LAB_TSC","D$LAB_TRIG"))  
-                      
-  # clear the Datashield R sessions and logout
-  datashield.logout(connections)
-}
-
+    
if (FALSE) { # \dontrun{
+
+ ## Version 6, for version 5 see the Wiki
+  
+  # connecting to the Opal servers
+
+  require('DSI')
+  require('DSOpal')
+  require('dsBaseClient')
+
+  builder <- DSI::newDSLoginBuilder()
+  builder$append(server = "study1", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM1", driver = "OpalDriver")
+  builder$append(server = "study2", 
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM2", driver = "OpalDriver")
+  builder$append(server = "study3",
+                 url = "http://192.168.56.100:8080/", 
+                 user = "administrator", password = "datashield_test&", 
+                 table = "CNSIM.CNSIM3", driver = "OpalDriver")
+  logindata <- builder$build()
+  
+  connections <- DSI::datashield.login(logins = logindata, assign = TRUE, symbol = "D")
+  
+  subsetHelper(dts = connections, 
+               data = "D", 
+               rs = 1:10, 
+               cs = c("D$LAB_TSC","D$LAB_TRIG"))  
+                      
+  # clear the Datashield R sessions and logout
+  datashield.logout(connections)
+} # }
+
 
@@ -131,19 +131,19 @@

Examples

-

Site built with pkgdown 2.0.7.

+

Site built with pkgdown 2.1.3.

- - + + diff --git a/docs/sitemap.xml b/docs/sitemap.xml index 99412924f..39df9eefa 100644 --- a/docs/sitemap.xml +++ b/docs/sitemap.xml @@ -1,438 +1,149 @@ - - - - /404.html - - - /LICENSE.html - - - /authors.html - - - /index.html - - - /reference/checkClass.html - - - /reference/colPercent.html - - - /reference/ds.Boole.html - - - /reference/ds.abs.html - - - /reference/ds.asCharacter.html - - - /reference/ds.asDataMatrix.html - - - /reference/ds.asFactor.html - - - /reference/ds.asFactorSimple.html - - - /reference/ds.asInteger.html - - - /reference/ds.asList.html - - - /reference/ds.asLogical.html - - - /reference/ds.asMatrix.html - - - /reference/ds.asNumeric.html - - - /reference/ds.assign.html - - - /reference/ds.auc.html - - - /reference/ds.boxPlot.html - - - /reference/ds.boxPlotGG.html - - - /reference/ds.boxPlotGG_data_Treatment.html - - - /reference/ds.boxPlotGG_data_Treatment_numeric.html - - - /reference/ds.boxPlotGG_numeric.html - - - /reference/ds.boxPlotGG_table.html - - - /reference/ds.bp_standards.html - - - /reference/ds.c.html - - - /reference/ds.cbind.html - - - /reference/ds.changeRefGroup.html - - - /reference/ds.class.html - - - /reference/ds.colnames.html - - - /reference/ds.completeCases.html - - - /reference/ds.contourPlot.html - - - /reference/ds.cor.html - - - /reference/ds.corTest.html - - - /reference/ds.cov.html - - - /reference/ds.dataFrame.html - - - /reference/ds.dataFrameFill.html - - - /reference/ds.dataFrameSort.html - - - /reference/ds.dataFrameSubset.html - - - /reference/ds.densityGrid.html - - - /reference/ds.dim.html - - - /reference/ds.dmtC2S.html - - - /reference/ds.elspline.html - - - /reference/ds.exists.html - - - /reference/ds.exp.html - - - /reference/ds.extractQuantiles.html - - - /reference/ds.forestplot.html - - - /reference/ds.gamlss.html - - - /reference/ds.getWGSR.html - - - /reference/ds.glm.html - - - /reference/ds.glmPredict.html - - - /reference/ds.glmSLMA.html - - - /reference/ds.glmSummary.html - - - 
/reference/ds.glmerSLMA.html - - - /reference/ds.heatmapPlot.html - - - /reference/ds.hetcor.html - - - /reference/ds.histogram.html - - - /reference/ds.igb_standards.html - - - /reference/ds.isNA.html - - - /reference/ds.isValid.html - - - /reference/ds.kurtosis.html - - - /reference/ds.length.html - - - /reference/ds.levels.html - - - /reference/ds.lexis.html - - - /reference/ds.list.html - - - /reference/ds.listClientsideFunctions.html - - - /reference/ds.listDisclosureSettings.html - - - /reference/ds.listOpals.html - - - /reference/ds.listServersideFunctions.html - - - /reference/ds.lmerSLMA.html - - - /reference/ds.log.html - - - /reference/ds.look.html - - - /reference/ds.ls.html - - - /reference/ds.lspline.html - - - /reference/ds.make.html - - - /reference/ds.matrix.html - - - /reference/ds.matrixDet.html - - - /reference/ds.matrixDet.report.html - - - /reference/ds.matrixDiag.html - - - /reference/ds.matrixDimnames.html - - - /reference/ds.matrixInvert.html - - - /reference/ds.matrixMult.html - - - /reference/ds.matrixTranspose.html - - - /reference/ds.mean.html - - - /reference/ds.meanByClass.html - - - /reference/ds.meanSdGp.html - - - /reference/ds.merge.html - - - /reference/ds.message.html - - - /reference/ds.metadata.html - - - /reference/ds.mice.html - - - /reference/ds.names.html - - - /reference/ds.ns.html - - - /reference/ds.numNA.html - - - /reference/ds.qlspline.html - - - /reference/ds.quantileMean.html - - - /reference/ds.rBinom.html - - - /reference/ds.rNorm.html - - - /reference/ds.rPois.html - - - /reference/ds.rUnif.html - - - /reference/ds.ranksSecure.html - - - /reference/ds.rbind.html - - - /reference/ds.reShape.html - - - /reference/ds.recodeLevels.html - - - /reference/ds.recodeValues.html - - - /reference/ds.rep.html - - - /reference/ds.replaceNA.html - - - /reference/ds.rm.html - - - /reference/ds.rowColCalc.html - - - /reference/ds.sample.html - - - /reference/ds.scatterPlot.html - - - /reference/ds.seq.html - - - 
/reference/ds.setDefaultOpals.html - - - /reference/ds.setSeed.html - - - /reference/ds.skewness.html - - - /reference/ds.sqrt.html - - - /reference/ds.subset.html - - - /reference/ds.subsetByClass.html - - - /reference/ds.summary.html - - - /reference/ds.table.html - - - /reference/ds.table1D.html - - - /reference/ds.table2D.html - - - /reference/ds.tapply.assign.html - - - /reference/ds.tapply.html - - - /reference/ds.testObjExists.html - - - /reference/ds.unList.html - - - /reference/ds.unique.html - - - /reference/ds.var.html - - - /reference/ds.vectorCalc.html - - - /reference/extract.html - - - /reference/getPooledMean.html - - - /reference/getPooledVar.html - - - /reference/glmChecks.html - - - /reference/index.html - - - /reference/isAssigned.html - - - /reference/isDefined.html - - - /reference/logical2int.html - - - /reference/meanByClassHelper0a.html - - - /reference/meanByClassHelper0b.html - - - /reference/meanByClassHelper1.html - - - /reference/meanByClassHelper2.html - - - /reference/meanByClassHelper3.html - - - /reference/meanByClassHelper4.html - - - /reference/rowPercent.html - - - /reference/subsetHelper.html - + +/404.html +/LICENSE.html +/authors.html +/index.html +/reference/checkClass.html +/reference/colPercent.html +/reference/computeWeightedMeans.html +/reference/ds.Boole.html +/reference/ds.abs.html +/reference/ds.asCharacter.html +/reference/ds.asDataMatrix.html +/reference/ds.asFactor.html +/reference/ds.asFactorSimple.html +/reference/ds.asInteger.html +/reference/ds.asList.html +/reference/ds.asLogical.html +/reference/ds.asMatrix.html +/reference/ds.asNumeric.html +/reference/ds.assign.html +/reference/ds.auc.html +/reference/ds.boxPlot.html +/reference/ds.boxPlotGG.html +/reference/ds.boxPlotGG_data_Treatment.html +/reference/ds.boxPlotGG_data_Treatment_numeric.html +/reference/ds.boxPlotGG_numeric.html +/reference/ds.boxPlotGG_table.html +/reference/ds.bp_standards.html +/reference/ds.c.html +/reference/ds.cbind.html 
+/reference/ds.changeRefGroup.html +/reference/ds.class.html +/reference/ds.colnames.html +/reference/ds.completeCases.html +/reference/ds.contourPlot.html +/reference/ds.cor.html +/reference/ds.corTest.html +/reference/ds.cov.html +/reference/ds.dataFrame.html +/reference/ds.dataFrameFill.html +/reference/ds.dataFrameSort.html +/reference/ds.dataFrameSubset.html +/reference/ds.densityGrid.html +/reference/ds.dim.html +/reference/ds.dmtC2S.html +/reference/ds.elspline.html +/reference/ds.exists.html +/reference/ds.exp.html +/reference/ds.extractQuantiles.html +/reference/ds.forestplot.html +/reference/ds.gamlss.html +/reference/ds.getWGSR.html +/reference/ds.glm.html +/reference/ds.glmPredict.html +/reference/ds.glmSLMA.html +/reference/ds.glmSummary.html +/reference/ds.glmerSLMA.html +/reference/ds.heatmapPlot.html +/reference/ds.hetcor.html +/reference/ds.histogram.html +/reference/ds.igb_standards.html +/reference/ds.isNA.html +/reference/ds.isValid.html +/reference/ds.kurtosis.html +/reference/ds.length.html +/reference/ds.levels.html +/reference/ds.lexis.html +/reference/ds.list.html +/reference/ds.listClientsideFunctions.html +/reference/ds.listDisclosureSettings.html +/reference/ds.listOpals.html +/reference/ds.listServersideFunctions.html +/reference/ds.lmerSLMA.html +/reference/ds.log.html +/reference/ds.look.html +/reference/ds.ls.html +/reference/ds.lspline.html +/reference/ds.make.html +/reference/ds.matrix.html +/reference/ds.matrixDet.html +/reference/ds.matrixDet.report.html +/reference/ds.matrixDiag.html +/reference/ds.matrixDimnames.html +/reference/ds.matrixInvert.html +/reference/ds.matrixMult.html +/reference/ds.matrixTranspose.html +/reference/ds.mean.html +/reference/ds.meanByClass.html +/reference/ds.meanSdGp.html +/reference/ds.merge.html +/reference/ds.message.html +/reference/ds.metadata.html +/reference/ds.mice.html +/reference/ds.names.html +/reference/ds.ns.html +/reference/ds.numNA.html +/reference/ds.qlspline.html 
+/reference/ds.quantileMean.html +/reference/ds.rBinom.html +/reference/ds.rNorm.html +/reference/ds.rPois.html +/reference/ds.rUnif.html +/reference/ds.ranksSecure.html +/reference/ds.rbind.html +/reference/ds.reShape.html +/reference/ds.recodeLevels.html +/reference/ds.recodeValues.html +/reference/ds.rep.html +/reference/ds.replaceNA.html +/reference/ds.rm.html +/reference/ds.rowColCalc.html +/reference/ds.sample.html +/reference/ds.scatterPlot.html +/reference/ds.seq.html +/reference/ds.setDefaultOpals.html +/reference/ds.setSeed.html +/reference/ds.skewness.html +/reference/ds.sqrt.html +/reference/ds.subset.html +/reference/ds.subsetByClass.html +/reference/ds.summary.html +/reference/ds.table.html +/reference/ds.table1D.html +/reference/ds.table2D.html +/reference/ds.tapply.assign.html +/reference/ds.tapply.html +/reference/ds.testObjExists.html +/reference/ds.unList.html +/reference/ds.unique.html +/reference/ds.var.html +/reference/ds.vectorCalc.html +/reference/extract.html +/reference/getPooledMean.html +/reference/getPooledVar.html +/reference/glmChecks.html +/reference/index.html +/reference/isAssigned.html +/reference/isDefined.html +/reference/logical2int.html +/reference/meanByClassHelper0a.html +/reference/meanByClassHelper0b.html +/reference/meanByClassHelper1.html +/reference/meanByClassHelper2.html +/reference/meanByClassHelper3.html +/reference/meanByClassHelper4.html +/reference/rowPercent.html +/reference/subsetHelper.html + diff --git a/dsBase_6.3.0-permissive.tar.gz b/dsBase_6.3.0-permissive.tar.gz deleted file mode 100644 index b3dfd5c36..000000000 Binary files a/dsBase_6.3.0-permissive.tar.gz and /dev/null differ diff --git a/dsBase_6.3.0.tar.gz b/dsBase_6.3.0.tar.gz deleted file mode 100644 index 2b90bc3cf..000000000 Binary files a/dsBase_6.3.0.tar.gz and /dev/null differ diff --git a/dsBase_6.3.4-permissive.tar.gz b/dsBase_6.3.4-permissive.tar.gz new file mode 100644 index 000000000..c7c2f2b24 Binary files /dev/null and 
b/dsBase_6.3.4-permissive.tar.gz differ diff --git a/dsBase_6.3.4.tar.gz b/dsBase_6.3.4.tar.gz new file mode 100644 index 000000000..c7725adf9 Binary files /dev/null and b/dsBase_6.3.4.tar.gz differ diff --git a/dsDanger_6.3.0.tar.gz b/dsDanger_6.3.0.tar.gz deleted file mode 100644 index 19e3ed60f..000000000 Binary files a/dsDanger_6.3.0.tar.gz and /dev/null differ diff --git a/dsDanger_6.3.4.tar.gz b/dsDanger_6.3.4.tar.gz new file mode 100644 index 000000000..25ca59feb Binary files /dev/null and b/dsDanger_6.3.4.tar.gz differ diff --git a/inst/CITATION b/inst/CITATION new file mode 100644 index 000000000..cf856ce67 --- /dev/null +++ b/inst/CITATION @@ -0,0 +1,134 @@ +bibentry("Manual", + other = unlist(citation(auto = meta), recursive = FALSE)) + +bibentry( + bibtype = "Article", + title = "{DataSHIELD: taking the analysis to the data, not the data to the analysis}", + author = c( + person("Amadou", "Gaye"), + person("Yannick", "Marcon"), + person("Julia", "Isaeva"), + person("Philippe", "{LaFlamme}"), + person("Andrew", "Turner"), + person("Elinor M", "Jones"), + person("Joel", "Minion"), + person("Andrew W", "Boyd"), + person("Christopher J", "Newby"), + person("Marja-Liisa", "Nuotio"), + person("Rebecca", "Wilson"), + person("Oliver", "Butters"), + person("Barnaby", "Murtagh"), + person("Ipek", "Demir"), + person("Dany", "Doiron"), + person("Lisette", "Giepmans"), + person("Susan E", "Wallace"), + person("Isabelle", "Budin-Lj{\\o}sne"), + person("Carsten O.", "Schmidt"), + person("Paolo", "Boffetta"), + person("Mathieu", "Boniol"), + person("Maria", "Bota"), + person("Kim W", "Carter"), + person("Nick", "{deKlerk}"), + person("Chris", "Dibben"), + person("Richard W", "Francis"), + person("Tero", "Hiekkalinna"), + person("Kristian", "Hveem"), + person("Kirsti", "Kval{\\o}y"), + person("Sean", "Millar"), + person("Ivan J", "Perry"), + person("Annette", "Peters"), + person("Catherine M", "Phillips"), + person("Frank", "Popham"), + person("Gillian", "Raab"), + 
person("Eva", "Reischl"), + person("Nuala", "Sheehan"), + person("Melanie", "Waldenberger"), + person("Markus", "Perola"), + person("Edwin", "{van den Heuvel}"), + person("John", "Macleod"), + person("Bartha M", "Knoppers"), + person("Ronald P", "Stolk"), + person("Isabel", "Fortier"), + person("Jennifer R", "Harris"), + person("Bruce H R", "Woffenbuttel"), + person("Madeleine J", "Murtagh"), + person("Vincent", "Ferretti"), + person("Paul R", "Burton") + ), + journal = "International Journal of Epidemiology", + year = "2014", + volume = "43", + number = "6", + pages = "1929--1944", + doi = "10.1093/ije/dyu188", +) + +bibentry( + bibtype = "Article", + title = "{DataSHIELD – New Directions and Dimensions}", + author = c( + person("Rebecca C.", "Wilson"), + person("Oliver W.", "Butters"), + person("Demetris", "Avraam"), + person("James", "Baker"), + person("Jonathan A.", "Tedds"), + person("Andrew", "Turner"), + person("Madeleine", "Murtagh"), + person("Paul R.", "Burton") + ), + journal = "Data Science Journal", + year = "2017", + volume = "16", + number = "21", + pages = "1--21", + doi = "10.5334/dsj-2017-021" +) + +bibentry( + bibtype = "Article", + title = "{DataSHIELD: mitigating disclosure risk in a multi-site federated analysis platform}", + author = c( + person("Demetris", "Avraam"), + person("Rebecca C", "Wilson"), + person("Noemi", "{Aguirre Chan}"), + person("Soumya", "Banerjee"), + person("Tom R P", "Bishop"), + person("Olly", "Butters"), + person("Tim", "Cadman"), + person("Luise", "Cederkvist"), + person("Liesbeth", "Duijts"), + person("Xavier", "{Escrib{\\a`a} Montagut}"), + person("Hugh", "Garner"), + person("Gon{\\c c}alo", "Gon{\\c c}alves"), + person("Juan R", "Gonz{\\a'a}lez"), + person("Sido", "Haakma"), + person("Mette", "Hartlev"), + person("Jan", "Hasenauer"), + person("Manuel", "Huth"), + person("Eleanor", "Hyde"), + person("Vincent W V", "Jaddoe"), + person("Yannick", "Marcon"), + person("Michaela Th", "Mayrhofer"), + person("Fruzsina", 
"Molnar-Gabor"), + person("Andrei Scott", "Morgan"), + person("Madeleine", "Murtagh"), + person("Marc", "Nestor"), + person("Anne-Marie", "{Nybo Andersen}"), + person("Simon", "Parker"), + person("Angela", "{Pinot de Moira}"), + person("Florian", "Schwarz"), + person("Katrine", "Strandberg-Larsen"), + person("Morris A", "Swertz"), + person("Marieke", "Welten"), + person("Stuart", "Wheater"), + person("Paul R", "Burton") + ), + journal = "Bioinformatics Advances", + year = "2024", + volume = "5", + number = "1", + pages = "1--21", + doi = "10.1093/bioadv/vbaf046", + editor = person("Thomas", "Lengauer"), + publisher = "Oxford University Press (OUP)" +) diff --git a/inst/WORDLIST b/inst/WORDLIST new file mode 100644 index 000000000..b51917533 --- /dev/null +++ b/inst/WORDLIST @@ -0,0 +1,307 @@ +Amongst +BMI +Banerjee +Bannert +Barros +Bertino +Bhutta +Bioinformatics +Boffetta +Boniol +Bota +Bucur +Budin +Cadman +Carvalho +Cederkvist +Chumlea +Clientside +Codecov +DMT +DSI +DV +DataSHIELD +DataSHIELD's +Demir +Diaz +Dibben +Doiron +Duijts +Escribà +Fenton +Ferretti +Forestplot +Fortier +GAMLSS +GLME +GLMEs +Gabor +Giepmans +González +Gonçalves +Gravett +HC +Haakma +Hartlev +Hasenauer +Heuvel +Hiekkalinna +Huth +Hveem +IDSEQ +INTERGROWTH +IRLS +Isaeva +Jaddoe +Jaffer +Knoppers +Kvaløy +LME +LMEs +LaFlamme +Ljøsne +MUAC +Macleod +Mayrhofer +Millar +Montagut +Murtagh +Newby +Nuotio +Nvalid +Nybo +Ohuma +PIRLS +PRWa +Papageorghiou +Perola +Pinot +Polychoric +Polyserial +Popham +Purwar +REstricted +Raab +Raphson +Reischl +Rigby +Roption +Ryser +SDS +SDs +SLMA +SSF +SURVTIME +Schwarz +Serverside +Sheehan +Stasinopoulos +Stolk +Strandberg +Swertz +TSF +Tedds +UID +VarCovMatrix +Victora +Villar +Villegas +Waldenberger +Welch +Welten +Woffenbuttel +Zbp +ag +aic +analyse +analysed +analysing +anonymised +anthropometric +asFactor +asFactorDS +asFactorSimple +asFactorSimpleDS +behaviour +bfa +bp +byrow +casewise +categorising +cbind +cbinded +centile +centiles +centre 
+characterised +chisq +clientside +cov +crit +cvar +cyc +dataFrameSort +dataframefill +datashield +datasource +datasources +de +deKlerk +defaultMethod +df +disclosive +discriminative +dmt +dmtC +docx +doi +ds +dsBase +eg +elspline +exponentials +extractQuantile +extractQuantiles +extractQuantilesDS +forestplot +gamlss +gamlssDS +gd +getOpals +ggplot +glim +glm +glmPredict +glmPredict's +glmPredictDS +glmSLMA +glmSLMADS +glmSummary +glmSummaryDS +glmer +glmerMod +glms +hbp +hca +hcircm +hetcor +heterogenous +hfa +https +ie +igb +imputationSet +intergrowth +iter +kendall +labelled +lencm +lfa +listOpals +listOpalsIreminds +lme +lmer +lmerMod +loess +lspline +maximises +meanByClass +metafor +mfa +miceDS +mids +modelled +modelling +na +namesDS +ncol +ncols +nearPD +neighbour +neighbours +newObj +newdata +newdataname +newobj +nfilter +nhlbi +nih +normalised +nrows +ns +obiba +optimisation +organised +panelaggregation +parametrise +patho +pb +pearson +ped +penalised +perc +poisson +polychoric +polyserial +pre +predictorMatrix +probs +pseudodata +pseudorandom +qlspline +quasipoisson +rBinom +rNorm +rPois +rUnif +ra +ranksSecure +ranksSecureDS +rbinded +recodeValues +recognise +recognised +releasers +resid +rvar +sampleDS +se +serverside +setDefaultOpals +setSeedDS +skinfold +skinfolds +sortkey +spearman +ssa +standardised +stringShort +studyside +studysideMessages +stvar +subClasses +subsetByClass +subsetObject +summarised +summarises +tapply +th +tibble +tol +tsa +unlist +useNA +vals +vectorcalc +visualise +visualised +wfa +wfh +wfl +wlr +wtkg +www +xlevels +xyz +zscore +zscores diff --git a/man/checkClass.Rd b/man/checkClass.Rd index 85931e592..38dc642c1 100644 --- a/man/checkClass.Rd +++ b/man/checkClass.Rd @@ -7,8 +7,8 @@ checkClass(datasources = NULL, obj = NULL) } \arguments{ -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. 
If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} \item{obj}{a string character, the name of the object to check for.} } diff --git a/man/computeWeightedMeans.Rd b/man/computeWeightedMeans.Rd new file mode 100644 index 000000000..0b8b0879f --- /dev/null +++ b/man/computeWeightedMeans.Rd @@ -0,0 +1,25 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/computeWeightedMeans.R +\name{computeWeightedMeans} +\alias{computeWeightedMeans} +\title{Compute Weighted Mean by Group} +\usage{ +computeWeightedMeans(data_table, variables, weight, by) +} +\arguments{ +\item{data_table}{a data.table} + +\item{variables}{character name of the variable(s) to focus on. The variables must be in the data.table} + +\item{weight}{character name of the data.table column that contains a weight.} + +\item{by}{character vector of the columns to group by} +} +\description{ +This function is originally from the panelaggregation package. +It has been ported here in order to bypass the package being +kicked off CRAN. +} +\author{ +Matthias Bannert, Gabriel Bucur +} diff --git a/man/ds.Boole.Rd b/man/ds.Boole.Rd index e102c872b..46d27e4f4 100644 --- a/man/ds.Boole.Rd +++ b/man/ds.Boole.Rd @@ -33,9 +33,9 @@ Default \code{'NA'}. For more information see details.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{boole.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.Boole} returns the object specified by the \code{newobj} argument diff --git a/man/ds.abs.Rd b/man/ds.abs.Rd index b408cacd1..639ebd3e9 100644 --- a/man/ds.abs.Rd +++ b/man/ds.abs.Rd @@ -12,9 +12,9 @@ ds.abs(x = NULL, newobj = NULL, datasources = NULL) \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default name is set to \code{abs.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified the default set of connections will be -used: see \code{\link{datashield.connections_default}}.} +used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.abs} assigns a vector for each study that includes the absolute values of diff --git a/man/ds.asCharacter.Rd b/man/ds.asCharacter.Rd index b11fc4f25..447d9cf9e 100644 --- a/man/ds.asCharacter.Rd +++ b/man/ds.asCharacter.Rd @@ -13,9 +13,9 @@ character.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{ascharacter.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.asCharacter} returns the object converted into a class character diff --git a/man/ds.asDataMatrix.Rd b/man/ds.asDataMatrix.Rd index 4f286eddf..e6ea9eb9c 100644 --- a/man/ds.asDataMatrix.Rd +++ b/man/ds.asDataMatrix.Rd @@ -13,9 +13,9 @@ a matrix.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{asdatamatrix.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.asDataMatrix} returns the object converted into a matrix diff --git a/man/ds.asFactor.Rd b/man/ds.asFactor.Rd index bdd9a79e5..c412df383 100644 --- a/man/ds.asFactor.Rd +++ b/man/ds.asFactor.Rd @@ -33,9 +33,9 @@ to be used in the creation of the matrix with dummy variables. If the \code{fixed.dummy.vars} is set to FALSE then any value of the baseline level is not taken into account.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.asFactor} returns the unique levels of the converted diff --git a/man/ds.asFactorSimple.Rd b/man/ds.asFactorSimple.Rd index 07746f8bc..d54776a18 100644 --- a/man/ds.asFactorSimple.Rd +++ b/man/ds.asFactorSimple.Rd @@ -17,9 +17,9 @@ the name of the variable to be converted to a factor.} \item{newobj.name}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{asfactor.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ an output vector of class factor to the serverside. In addition, returns a validity diff --git a/man/ds.asInteger.Rd b/man/ds.asInteger.Rd index 49d8ca563..d2f0455be 100644 --- a/man/ds.asInteger.Rd +++ b/man/ds.asInteger.Rd @@ -13,9 +13,9 @@ an integer.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{asinteger.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.asInteger} returns the R object converted into an integer diff --git a/man/ds.asList.Rd b/man/ds.asList.Rd index dbb8439b1..1e2e3c733 100644 --- a/man/ds.asList.Rd +++ b/man/ds.asList.Rd @@ -13,9 +13,9 @@ a list.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{aslist.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.asList} returns the R object converted into a list diff --git a/man/ds.asLogical.Rd b/man/ds.asLogical.Rd index 479ca72b0..c42d2e6aa 100644 --- a/man/ds.asLogical.Rd +++ b/man/ds.asLogical.Rd @@ -13,9 +13,9 @@ input object to be coerced to a logical.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{aslogical.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.asLogical} returns the R object converted into a logical diff --git a/man/ds.asMatrix.Rd b/man/ds.asMatrix.Rd index 5c2e991a8..709480148 100644 --- a/man/ds.asMatrix.Rd +++ b/man/ds.asMatrix.Rd @@ -13,9 +13,9 @@ a matrix.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{asmatrix.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.asMatrix} returns the object converted into a matrix diff --git a/man/ds.asNumeric.Rd b/man/ds.asNumeric.Rd index 90f181015..9928942a5 100644 --- a/man/ds.asNumeric.Rd +++ b/man/ds.asNumeric.Rd @@ -13,9 +13,9 @@ a numeric.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{asnumeric.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.asNumeric} returns the R object converted into a numeric class @@ -32,7 +32,7 @@ This function is based on the native R function \code{as.numeric}. This function is based on the native R function \code{as.numeric}. However, it behaves differently with some specific classes of variables. For example, if the input object is of class factor, it first converts its values into characters and then convert those to -numerics. This behavior is important for the case where the input object is of class factor having +numerics. This behaviour is important for the case where the input object is of class factor having numbers as levels. In that case, the native R \code{as.numeric} function returns the underlying level codes and not the values as numbers. For example \code{as.numeric} in R converts the factor vector: \cr diff --git a/man/ds.assign.Rd b/man/ds.assign.Rd index 30cf5d12c..e09d426f8 100644 --- a/man/ds.assign.Rd +++ b/man/ds.assign.Rd @@ -12,9 +12,9 @@ ds.assign(toAssign = NULL, newobj = NULL, datasources = NULL) \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{assign.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.assign} returns the R object assigned to a name diff --git a/man/ds.auc.Rd b/man/ds.auc.Rd index 1bdc075f3..0ca1a5d37 100644 --- a/man/ds.auc.Rd +++ b/man/ds.auc.Rd @@ -12,9 +12,9 @@ ds.auc(pred = NULL, y = NULL, datasources = NULL) \item{y}{the name of the outcome variable. Note that this variable should include the complete cases that are used in the regression model.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ returns the AUC and its standard error diff --git a/man/ds.boxPlot.Rd b/man/ds.boxPlot.Rd index 2831c4f90..0e8e7a336 100644 --- a/man/ds.boxPlot.Rd +++ b/man/ds.boxPlot.Rd @@ -32,7 +32,7 @@ holds the information to be plotted} \item{type}{\code{character} Return a pooled plot (\code{"pooled"}) or a split plot (one for each study server \code{"split"})} -\item{datasources}{a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login} } \value{ \code{ggplot} object @@ -79,7 +79,7 @@ of grouping using categorical variables on the dataset (only for data frames) ds.boxPlot("D", "LAB_HDL", type= "split", datasources = connections) ## Create a boxplot of two variables variable - ds.boxPlot("D", c("LAB_HDL", "LAB_TRIG", type="combine", + ds.boxPlot("D", c("LAB_HDL", "LAB_TRIG", 
type="pooled", datasources = connections) # only one plot is created (of the aggregated results of all servers) diff --git a/man/ds.boxPlotGG.Rd b/man/ds.boxPlotGG.Rd index 93f798b73..146271382 100644 --- a/man/ds.boxPlotGG.Rd +++ b/man/ds.boxPlotGG.Rd @@ -34,7 +34,7 @@ of this object must be: \cr \item{type}{\code{character} Return a pooled plot (\code{"pooled"}) or a split plot (one for each study server \code{"split"})} -\item{datasources}{a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login} } \value{ \code{ggplot} object diff --git a/man/ds.boxPlotGG_data_Treatment.Rd b/man/ds.boxPlotGG_data_Treatment.Rd index 8a264aed4..9788f0c79 100644 --- a/man/ds.boxPlotGG_data_Treatment.Rd +++ b/man/ds.boxPlotGG_data_Treatment.Rd @@ -21,7 +21,7 @@ ds.boxPlotGG_data_Treatment( \item{group2}{\code{character} (default \code{NULL}) Name of the second grouping variable.} -\item{datasources}{a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login} } \value{ Does not return nothing, it creates the table \code{"boxPlotRawData"} on the server arranged to be passed to the diff --git a/man/ds.boxPlotGG_data_Treatment_numeric.Rd b/man/ds.boxPlotGG_data_Treatment_numeric.Rd index 860e223e2..56ead361e 100644 --- a/man/ds.boxPlotGG_data_Treatment_numeric.Rd +++ b/man/ds.boxPlotGG_data_Treatment_numeric.Rd @@ -9,7 +9,7 @@ ds.boxPlotGG_data_Treatment_numeric(vector, datasources = NULL) \arguments{ \item{vector}{\code{character} Name of the table on the server side that holds the information to be plotted later} -\item{datasources}{a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login} +\item{datasources}{a list of 
\code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login} } \value{ Does not return nothing, it creates the table \code{"boxPlotRawDataNumeric"} on the server arranged to be passed to the diff --git a/man/ds.boxPlotGG_numeric.Rd b/man/ds.boxPlotGG_numeric.Rd index cb98d9add..a71a0e0aa 100644 --- a/man/ds.boxPlotGG_numeric.Rd +++ b/man/ds.boxPlotGG_numeric.Rd @@ -22,7 +22,7 @@ ds.boxPlotGG_numeric( \item{type}{\code{character} Return a pooled plot (\code{"pooled"}) or a split plot (one for each study server \code{"split"})} -\item{datasources}{a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login} } \value{ \code{ggplot} object diff --git a/man/ds.boxPlotGG_table.Rd b/man/ds.boxPlotGG_table.Rd index a0f12f7c5..4318ffb86 100644 --- a/man/ds.boxPlotGG_table.Rd +++ b/man/ds.boxPlotGG_table.Rd @@ -31,7 +31,7 @@ ds.boxPlotGG_table( \item{type}{\code{character} Return a pooled plot (\code{"pooled"}) or a split plot (one for each study server \code{"split"})} -\item{datasources}{a list of \code{\link{DSConnection-class}} (default \code{NULL}) objects obtained after login} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} (default \code{NULL}) objects obtained after login} } \value{ \code{ggplot} object diff --git a/man/ds.bp_standards.Rd b/man/ds.bp_standards.Rd index 09ecfa01b..edbdd06dd 100644 --- a/man/ds.bp_standards.Rd +++ b/man/ds.bp_standards.Rd @@ -33,9 +33,9 @@ blood pressure.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default name is set to \code{bp.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified the default set of connections will be -used: see \code{\link{datashield.connections_default}}.} +used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ assigns a new object on the server-side. The assigned object is a list diff --git a/man/ds.c.Rd b/man/ds.c.Rd index ddd4db495..bc899891b 100644 --- a/man/ds.c.Rd +++ b/man/ds.c.Rd @@ -12,9 +12,9 @@ ds.c(x = NULL, newobj = NULL, datasources = NULL) \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{c.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.c} returns the vector of concatenating R diff --git a/man/ds.cbind.Rd b/man/ds.cbind.Rd index 4f3dc8ee4..ca20b9e88 100644 --- a/man/ds.cbind.Rd +++ b/man/ds.cbind.Rd @@ -30,9 +30,9 @@ For more information see \strong{Details}.} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Defaults \code{cbind.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} \item{notify.of.progress}{specifies if console output should be produced to indicate progress. 
Default FALSE.} diff --git a/man/ds.changeRefGroup.Rd b/man/ds.changeRefGroup.Rd index 124cd903e..1730a62f6 100644 --- a/man/ds.changeRefGroup.Rd +++ b/man/ds.changeRefGroup.Rd @@ -24,9 +24,9 @@ that is stored on the server-side. Default \code{changerefgroup.newobj}.} should be ordered by the reference group (i.e. putting the reference group first). The default is to not re-order (see the reasons in the details).} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.changeRefGroup} returns a new vector with the specified level as a reference diff --git a/man/ds.class.Rd b/man/ds.class.Rd index 59f1efa2c..b2fc0f07c 100644 --- a/man/ds.class.Rd +++ b/man/ds.class.Rd @@ -9,9 +9,9 @@ ds.class(x = NULL, datasources = NULL) \arguments{ \item{x}{a character string providing the name of the input R object.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.class} returns the type of the R object. 
diff --git a/man/ds.colnames.Rd b/man/ds.colnames.Rd index 3f44ee306..e73910812 100644 --- a/man/ds.colnames.Rd +++ b/man/ds.colnames.Rd @@ -9,9 +9,9 @@ ds.colnames(x = NULL, datasources = NULL) \arguments{ \item{x}{a character string providing the name of the input data frame or matrix.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.colnames} returns the column names of diff --git a/man/ds.completeCases.Rd b/man/ds.completeCases.Rd index a5b0b8a01..f5df76586 100644 --- a/man/ds.completeCases.Rd +++ b/man/ds.completeCases.Rd @@ -15,9 +15,9 @@ that is stored on the data servers. If the user does not specify a name, then th generates a name for the generated object that is the name of the input object with the suffix "_complete.cases"} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified, the default set of connections will be -used: see \code{\link{datashield.connections_default}}.} +used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.completeCases} generates a modified data frame, matrix or vector from which diff --git a/man/ds.contourPlot.Rd b/man/ds.contourPlot.Rd index e0df915f8..f9335d34c 100644 --- a/man/ds.contourPlot.Rd +++ b/man/ds.contourPlot.Rd @@ -44,9 +44,9 @@ see details.} \item{noise}{the percentage of the initial variance that is used as the variance of the embedded noise if the argument \code{method} is set to \code{'probabilistic'}. For more information see details.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.contourPlot} returns a contour plot to the client-side. diff --git a/man/ds.cor.Rd b/man/ds.cor.Rd index d01c9b3b6..030defaed 100644 --- a/man/ds.cor.Rd +++ b/man/ds.cor.Rd @@ -15,9 +15,9 @@ Default NULL.} \item{type}{a character string that represents the type of analysis to carry out. This must be set to \code{'split'} or \code{'combine'}. Default \code{'split'}. For more information see details.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.cor} returns a list containing the number of missing values in each variable, diff --git a/man/ds.corTest.Rd b/man/ds.corTest.Rd index bb2384d4e..6277df16a 100644 --- a/man/ds.corTest.Rd +++ b/man/ds.corTest.Rd @@ -24,7 +24,7 @@ used for the test. One of "pearson", "kendall", or "spearman", can be abbreviate Default is set to "pearson".} \item{exact}{a logical indicating whether an exact p-value should be computed. Used for -Kendall's tau and Spearman's rho. See ‘Details’ of R stats function \code{cor.test} for +Kendall's tau and Spearman's rho. See \emph{Details} of R stats function \code{cor.test} for the meaning of NULL (the default).} \item{conf.level}{confidence level for the returned confidence interval. Currently @@ -36,9 +36,9 @@ This must be set to \code{'split'} or \code{'combine'}. Default is set to \code{ \code{type} is set to "combine" then an approximated pooled correlation is estimated based on Fisher's z transformation.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.corTest} returns to the client-side the results of the correlation test. diff --git a/man/ds.cov.Rd b/man/ds.cov.Rd index 1d8b5455b..3fe9ec8f8 100644 --- a/man/ds.cov.Rd +++ b/man/ds.cov.Rd @@ -25,9 +25,9 @@ presence of missing values. 
This must be set to \code{'casewise.complete'} or \item{type}{a character string that represents the type of analysis to carry out. This must be set to \code{'split'} or \code{'combine'}. Default \code{'split'}. For more information see details.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.cov} returns a list containing the number of missing values in each variable, the number of missing values diff --git a/man/ds.dataFrame.Rd b/man/ds.dataFrame.Rd index 4a3e80dde..5c7d82d7c 100644 --- a/man/ds.dataFrame.Rd +++ b/man/ds.dataFrame.Rd @@ -48,9 +48,9 @@ are the same} \item{newobj}{a character string that provides the name for the output data frame that is stored on the data servers. Default \code{dataframe.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} \item{notify.of.progress}{specifies if console output should be produced to indicate progress. 
Default is FALSE.} diff --git a/man/ds.dataFrameFill.Rd b/man/ds.dataFrameFill.Rd index 3183d61cd..44eef9e55 100644 --- a/man/ds.dataFrameFill.Rd +++ b/man/ds.dataFrameFill.Rd @@ -13,9 +13,9 @@ filled with extra columns of missing values.} \item{newobj}{a character string that provides the name for the output data frame that is stored on the data servers. Default value is "dataframefill.newobj".} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.dataFrameFill} returns the object specified by the \code{newobj} argument which diff --git a/man/ds.dataFrameSort.Rd b/man/ds.dataFrameSort.Rd index 7b9e843ab..252227e5f 100644 --- a/man/ds.dataFrameSort.Rd +++ b/man/ds.dataFrameSort.Rd @@ -30,9 +30,9 @@ to sort the data frame. This can be set as that is stored on the data servers. Default \code{dataframesort.newobj}. where \code{df.name} is the first argument of \code{ds.dataFrameSort()}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.dataFrameSort} returns the sorted data frame is written to the server-side. 
diff --git a/man/ds.dataFrameSubset.Rd b/man/ds.dataFrameSubset.Rd index 2f6068d3d..4d2afc18d 100644 --- a/man/ds.dataFrameSubset.Rd +++ b/man/ds.dataFrameSubset.Rd @@ -18,7 +18,7 @@ ds.dataFrameSubset( ) } \arguments{ -\item{df.name}{a character string providing the name of the data frame to be subseted.} +\item{df.name}{a character string providing the name of the data frame to be subset.} \item{V1.name}{A character string specifying the name of the vector to which the Boolean operator is to be applied to define the subset. @@ -42,9 +42,9 @@ If FALSE or NULL all rows with at least one missing values are removed from the \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{dataframesubset.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} \item{notify.of.progress}{specifies if console output should be produced to indicate progress. Default FALSE.} diff --git a/man/ds.densityGrid.Rd b/man/ds.densityGrid.Rd index 15e299775..89cc4348d 100644 --- a/man/ds.densityGrid.Rd +++ b/man/ds.densityGrid.Rd @@ -26,9 +26,9 @@ If \code{type} is set to instead if \code{type} is set to \code{'split'} one grid density matrix is generated. Default \code{'combine'}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.densityGrid} returns a grid density matrix. diff --git a/man/ds.dim.Rd b/man/ds.dim.Rd index b6a86b579..ea3aaa6d1 100644 --- a/man/ds.dim.Rd +++ b/man/ds.dim.Rd @@ -20,9 +20,9 @@ Default \code{'both'}.} \item{checks}{logical. If TRUE undertakes all DataSHIELD checks (time-consuming). Default FALSE.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.dim} retrieves to the client-side the dimension of the object diff --git a/man/ds.dmtC2S.Rd b/man/ds.dmtC2S.Rd index 4453cf73a..51ef6a964 100644 --- a/man/ds.dmtC2S.Rd +++ b/man/ds.dmtC2S.Rd @@ -23,7 +23,7 @@ that will be the dataset used by default if no other dataset is specified. If yo wish to change the connections you wish to use by default the call datashield.connections_default('opals.a') will set 'default.connections' to be 'opals.a' and so in the absence of specific instructions to the contrary -(e.g. by specifiying a particular dataset to be used via the +(e.g. by specifying a particular dataset to be used via the argument) all subsequent function calls will be to the datasets held in opals.a. If the argument is specified, it should be set without inverted commas: e.g. datasources=opals.a or datasources=default.connections. 
diff --git a/man/ds.elspline.Rd b/man/ds.elspline.Rd index 26c5f6ccf..d7f20819d 100644 --- a/man/ds.elspline.Rd +++ b/man/ds.elspline.Rd @@ -19,16 +19,16 @@ ds.elspline( \item{n}{integer greater than 2, knots are computed such that they cut n equally-spaced intervals along the range of x} -\item{marginal}{logical, how to parametrize the spline, see Details} +\item{marginal}{logical, how to parametrise the spline, see Details} \item{names}{character, vector of names for constructed variables} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{elspline.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ an object of class "lspline" and "matrix", which its name is specified by the diff --git a/man/ds.exists.Rd b/man/ds.exists.Rd index 029c66b9e..2352a2941 100644 --- a/man/ds.exists.Rd +++ b/man/ds.exists.Rd @@ -9,9 +9,9 @@ ds.exists(x = NULL, datasources = NULL) \arguments{ \item{x}{a character string providing the name of the object to look for.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.exists} returns a logical object. 
diff --git a/man/ds.exp.Rd b/man/ds.exp.Rd index 07e212c9b..875dbe00e 100644 --- a/man/ds.exp.Rd +++ b/man/ds.exp.Rd @@ -12,9 +12,9 @@ ds.exp(x = NULL, newobj = NULL, datasources = NULL) \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{exp.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.exp} returns a vector for each study of the exponential values for the numeric vector diff --git a/man/ds.forestplot.Rd b/man/ds.forestplot.Rd index 6ce3d54c6..408dc2d30 100644 --- a/man/ds.forestplot.Rd +++ b/man/ds.forestplot.Rd @@ -7,10 +7,10 @@ ds.forestplot(mod, variable = NULL, method = "ML", layout = "JAMA") } \arguments{ -\item{mod}{\code{list} List outputed by any of the SLMA models of DataSHIELD (\code{ds.glmerSLMA}, +\item{mod}{\code{list} List outputted by any of the SLMA models of DataSHIELD (\code{ds.glmerSLMA}, \code{ds.glmSLMA}, \code{ds.lmerSLMA})} -\item{variable}{\code{character} (default \code{NULL}) Variable to meta-analyze and visualize, by setting this +\item{variable}{\code{character} (default \code{NULL}) Variable to meta-analyse and visualise, by setting this argument to \code{NULL} (default) the first independent variable will be used.} \item{method}{\code{character} (Default \code{"ML"}) Method to estimate the between study variance. 
@@ -20,7 +20,7 @@ See details from \code{?meta::metagen} for the different options.} See details from \code{?meta::metagen} for the different options.} } \description{ -Draws a foresplot of the coefficients for Study-Level Meta-Analysis performed with +Draws a forestplot of the coefficients for Study-Level Meta-Analysis performed with DataSHIELD } \examples{ diff --git a/man/ds.gamlss.Rd b/man/ds.gamlss.Rd index 60bd6d173..4c7bc1692 100644 --- a/man/ds.gamlss.Rd +++ b/man/ds.gamlss.Rd @@ -96,9 +96,9 @@ only.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{gamlss_res}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ a gamlss object with all components as in the native R gamlss function. diff --git a/man/ds.getWGSR.Rd b/man/ds.getWGSR.Rd index 1e3b86721..f48b9a28a 100644 --- a/man/ds.getWGSR.Rd +++ b/man/ds.getWGSR.Rd @@ -66,9 +66,9 @@ by the formula $age_days=age_months*(365.25/12)$.} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Defaults \code{getWGSR.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified the default set of connections will be -used: see \code{\link{datashield.connections_default}}.} +used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.getWGSR} assigns a vector for each study that includes the z-scores for the diff --git a/man/ds.glm.Rd b/man/ds.glm.Rd index 735682765..67728e009 100644 --- a/man/ds.glm.Rd +++ b/man/ds.glm.Rd @@ -58,9 +58,9 @@ of parameter estimates is returned. Default FALSE.} \item{viewCor}{logical. If TRUE the correlation matrix of parameter estimates is returned. Default FALSE.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ Many of the elements of the output list returned by \code{ds.glm} are @@ -95,7 +95,7 @@ The list of elements returned by \code{ds.glm} is mentioned below: \code{formula}: model formula, see description of formula as an input parameter (above). \code{coefficients}: a matrix with 5 columns: - \itemize{ + \describe{ \item{First}{: the names of all of the regression parameters (coefficients) in the model} \item{second}{: the estimated values} \item{third}{: corresponding standard errors of the estimated values} @@ -138,7 +138,7 @@ function is also allowed by \code{ds.glm}. Many GLMs can be fitted very simply using a formula such as: -\deqn{y~a+b+c+d} +\eqn{y~a+b+c+d} which simply means fit a GLM with \code{y} as the outcome variable and \code{a}, \code{b}, \code{c} and \code{d} as covariates. @@ -147,7 +147,7 @@ By default all such models also include an intercept (regression constant) term. 
Instead, if you need to fit a more complex model, for example: - \deqn{EVENT~1+TID+SEXF*AGE.60} + \eqn{EVENT~1+TID+SEXF*AGE.60} In the above model the outcome variable is \code{EVENT} and the covariates @@ -166,7 +166,7 @@ means fit all possible main effects and interactions for and between those two c In the \code{family} argument can be specified three types of models to fit: - \itemize{ + \describe{ \item{\code{"gaussian"}}{: conventional linear model with normally distributed errors} \item{\code{"binomial"}}{: conventional unconditional logistic regression model} \item{\code{"poisson"}}{: Poisson regression model which is the most used in survival analysis. @@ -183,7 +183,7 @@ an \code{identity} link function, the binomial family with a The \code{data} argument avoids you having to specify the name of the data frame in front of each covariate in the formula. For example, if the data frame is called \code{DataFrame} you -avoid having to write: \eqn{DataFrame$y~DataFrame$a+DataFrame$b+DataFrame$c+DataFrame$d} +avoid having to write: \eqn{DataFrame\$y ~ DataFrame\$a + DataFrame\$b + DataFrame\$c + DataFrame\$d} The \code{checks} argument verifies that the variables in the model are all defined (exist) on the server-side at every study diff --git a/man/ds.glmPredict.Rd b/man/ds.glmPredict.Rd index e156e6c97..32e5c9f5d 100644 --- a/man/ds.glmPredict.Rd +++ b/man/ds.glmPredict.Rd @@ -87,7 +87,7 @@ that will be the dataset used by default if no other dataset is specified. If yo wish to change the connections you wish to use by default the call datashield.connections_default('opals.a') will set 'default.connections' to be 'opals.a' and so in the absence of specific instructions to the contrary -(e.g. by specifiying a particular dataset to be used via the +(e.g. by specifying a particular dataset to be used via the argument) all subsequent function calls will be to the datasets held in opals.a. If the argument is specified, it should be set without inverted commas: e.g. 
datasources=opals.a or datasources=default.connections. @@ -114,7 +114,7 @@ the output always includes: the name of the serverside glm object being predicte the name - if one was specified - of the dataframe being used as the basis for predictions, the output.type specified ('link', 'response' or 'terms'), the value of the dispersion parameter if one had been specified and the residual scale parameter (which is -multipled by sqrt(dispersion parameter) if one has been set). If output.type = 'terms', +multiplied by sqrt(dispersion parameter) if one has been set). If output.type = 'terms', the summary statistics for the fit and se.fit vectors are replaced by equivalent summary statistics for each column in fit and se.fit matrices which each have k columns if k terms are being summarised. diff --git a/man/ds.glmSLMA.Rd b/man/ds.glmSLMA.Rd index ad1d4314b..d4acff4cf 100644 --- a/man/ds.glmSLMA.Rd +++ b/man/ds.glmSLMA.Rd @@ -58,9 +58,9 @@ For more information see \strong{Details}.} \item{notify.of.progress}{specifies if console output should be produced to indicate progress. Default FALSE.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ The serverside aggregate functions \code{glmSLMADS1} and \code{glmSLMADS2} return @@ -80,7 +80,7 @@ a series of other list objects that represent inferences aggregated across studi the study specific items include: \code{coefficients}: a matrix with 5 columns: - \itemize{ + \describe{ \item{First}{: the names of all of the regression parameters (coefficients) in the model} \item{second}{: the estimated values} \item{third}{: corresponding standard errors of the estimated values} @@ -172,7 +172,7 @@ them into their meta-analysis package of choice. \code{is.object.created} and \code{validity.check} are standard items returned by an assign function when the designated newobj appears to have -been successfuly created on the serverside at each study. This output is +been successfully created on the serverside at each study. This output is produced specifically by the assign function \code{glmSLMADS.assign} that writes out the glm object on the serverside } @@ -241,7 +241,7 @@ function is also allowed by \code{ds.glmSLMA}. Many glms can be fitted very simply using a formula such as: -\deqn{y~a+b+c+d} +\eqn{y~a+b+c+d} which simply means fit a glm with \code{y} as the outcome variable and \code{a}, \code{b}, \code{c} and \code{d} as covariates. @@ -250,7 +250,7 @@ By default all such models also include an intercept (regression constant) term. Instead, if you need to fit a more complex model, for example: - \deqn{EVENT~1+TID+SEXF*AGE.60} +\eqn{EVENT~1+TID+SEXF*AGE.60} In the above model the outcome variable is \code{EVENT} and the covariates @@ -271,7 +271,7 @@ been extended to include a number of model types that are non-standard but are u relatively widely. 
The standard models include: - \itemize{ + \describe{ \item{\code{"gaussian"}}{: conventional linear model with normally distributed errors} \item{\code{"binomial"}}{: conventional unconditional logistic regression model} \item{\code{"poisson"}}{: Poisson regression model which is often used in epidemiological @@ -312,7 +312,7 @@ maximises the information extraction by the model. The gaussian family uses the The \code{dataName} argument avoids you having to specify the name of the data frame in front of each covariate in the formula. For example, if the data frame is called \code{DataFrame} you -avoid having to write: \eqn{DataFrame$y~DataFrame$a+DataFrame$b+DataFrame$c+DataFrame$d} +avoid having to write: \eqn{DataFrame\$y ~ DataFrame\$a + DataFrame\$b + DataFrame\$c + DataFrame\$d} The \code{checks} argument verifies that the variables in the model are all defined (exist) on the server-site at every study diff --git a/man/ds.glmSummary.Rd b/man/ds.glmSummary.Rd index d7906d311..897dbae90 100644 --- a/man/ds.glmSummary.Rd +++ b/man/ds.glmSummary.Rd @@ -24,7 +24,7 @@ that will be the dataset used by default if no other dataset is specified. If yo wish to change the connections you wish to use by default the call datashield.connections_default('opals.a') will set 'default.connections' to be 'opals.a' and so in the absence of specific instructions to the contrary -(e.g. by specifiying a particular dataset to be used via the +(e.g. by specifying a particular dataset to be used via the argument) all subsequent function calls will be to the datasets held in opals.a. If the argument is specified, it should be set without inverted commas: e.g. datasources=opals.a or datasources=default.connections. diff --git a/man/ds.glmerSLMA.Rd b/man/ds.glmerSLMA.Rd index 0ce88508d..8ad49c3ae 100644 --- a/man/ds.glmerSLMA.Rd +++ b/man/ds.glmerSLMA.Rd @@ -45,9 +45,9 @@ that contains all of the variables in the GLME formula. For more information see \item{checks}{logical. 
If TRUE \code{ds.glmerSLMA} checks the structural integrity of the model. Default FALSE. For more information see \strong{Details}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} \item{family}{a character string specifying the distribution of the observed value of the outcome variable around the predictions generated by the linear predictor. @@ -100,7 +100,7 @@ In this case, only non-disclosive elements are returned from each study separate The list of elements returned by \code{ds.glmerSLMA} is mentioned below: \code{coefficients}: a matrix with 5 columns: - \itemize{ + \describe{ \item{First}{: the names of all of the regression parameters (coefficients) in the model} \item{second}{: the estimated values} \item{third}{: corresponding standard errors of the estimated values} @@ -169,23 +169,23 @@ If it did not some information about the reason for this is reported. In \code{formula} most shortcut notation allowed by \code{glmer()} function is also allowed by \code{ds.glmerSLMA}. Many GLMEs can be fitted very simply using a formula like: -\deqn{y~a+b+(1|c)} +\eqn{y~a+b+(1|c)} which simply means fit an GLME with \code{y} as the outcome variable (e.g. a binary case-control using a logistic regression model or a count or a survival time using a Poisson regression model), \code{a} and \code{b} as fixed effects, and \code{c} as a random effect or grouping factor. It is also possible to fit models with random slopes by specifying a model such as -\deqn{y~a+b+(1+b|c)} +\eqn{y~a+b+(1+b|c)} where the effect of \code{b} can vary randomly between groups defined by \code{c}. 
Implicit nesting can be specified with formulas such as: \eqn{y~a+b+(1|c/d)} -or \eqn{y~a+b+(1|c)+(1|c:d)}. +or \eqn{y~a+b+(1|c)+(1|c:d)}. The \code{dataName} argument avoids you having to specify the name of the data frame in front of each covariate in the formula. For example, if the data frame is called \code{DataFrame} you avoid having to write: -\eqn{DataFrame$y~DataFrame$a+DataFrame$b+(1|DataFrame$c)}. +\eqn{DataFrame\$y ~ DataFrame\$a + DataFrame\$b + (1 | DataFrame\$c)}. The \code{checks} argument verifies that the variables in the model are all defined (exist) on the server-site at every study @@ -195,7 +195,7 @@ problem in the model fit is encountered because the running process takes severa In the \code{family} argument can be specified two types of models to fit: - \itemize{ + \describe{ \item{\code{"binomial"}}{: logistic regression models} \item{\code{"poisson"}}{: poisson regression models} } diff --git a/man/ds.heatmapPlot.Rd b/man/ds.heatmapPlot.Rd index b8d4d08c1..3ab17da98 100644 --- a/man/ds.heatmapPlot.Rd +++ b/man/ds.heatmapPlot.Rd @@ -49,9 +49,9 @@ noise if the argument \code{method} is set to \code{'probabilistic'}. Default \code{noise} value is \code{0.25}. For more information see \strong{Details}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.heatmapPlot} returns to the client-side a heat map plot and a message specifying @@ -71,19 +71,19 @@ are not the exact minimum and maximum values but rather close approximates of th minimum and maximum value. This was done to reduce the risk of potential disclosure. 
In the argument \code{type} can be specified two types of graphics to display: - \itemize{ + \describe{ \item{\code{'combine'}}{: a combined heat map plot is displayed} \item{\code{'split'}}{: each heat map is plotted separately} } In the argument \code{show} can be specified two options: - \itemize{ + \describe{ \item{\code{'all'}}{: the ranges of the variables are used as plot limits} \item{\code{'zoomed'}}{: the plot is zoomed to the region where the actual data are} } In the argument \code{method} can be specified 3 different heat map to be created: - \itemize{ + \describe{ \item{\code{'smallCellsRule'}}{: the heat map of the actual variables is created but grids with low counts are replaced with grids with zero counts} \item{\code{'deterministic'}}{: the heat map of the scaled centroids of each diff --git a/man/ds.hetcor.Rd b/man/ds.hetcor.Rd index aa7d07133..e236a0e9a 100644 --- a/man/ds.hetcor.Rd +++ b/man/ds.hetcor.Rd @@ -34,9 +34,9 @@ nearPD directly.} \item{use}{if "complete.obs", remove observations with any missing data; if "pairwise.complete.obs", compute each correlation using all observations with valid data for that pair of variables.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified the default set of connections will be -used: see \code{\link{datashield.connections_default}}.} +used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ Returns an object of class "hetcor" from each study, with the following components: the diff --git a/man/ds.histogram.Rd b/man/ds.histogram.Rd index 0fedbd101..17de73da6 100644 --- a/man/ds.histogram.Rd +++ b/man/ds.histogram.Rd @@ -46,9 +46,9 @@ plot. The \code{vertical.axis} argument can be set as \code{'Frequency'} or \cod Default \code{'Frequency'}. 
For more information see \strong{Details}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ one or more histogram objects and plots depending on the argument \code{type} @@ -62,13 +62,13 @@ distinct histograms (one for each study) or a combined histogram that merges the single plots. In the argument \code{type} can be specified two types of graphics to display: - \itemize{ + \describe{ \item{\code{'combine'}}{: a histogram that merges the single plot is displayed.} \item{\code{'split'}}{: each histogram is plotted separately.} } In the argument \code{method} can be specified 3 different histograms to be created: - \itemize{ + \describe{ \item{\code{'smallCellsRule'}}{: the histogram of the actual variable is created but bins with low counts are removed.} \item{\code{'deterministic'}}{: the histogram of the scaled centroids of each @@ -104,7 +104,7 @@ than the pre-specified threshold \code{'nfilter.noise'}. By default the value of noise is set to be equal to 0.25. In the argument \code{vertical.axis} can be specified two types of histograms: -\itemize{ +\describe{ \item{\code{'Frequency'}}{: the histogram of the frequencies is returned.} \item{\code{'Density'}}{: the histogram of the densities diff --git a/man/ds.igb_standards.Rd b/man/ds.igb_standards.Rd index 6ea70b2fc..e9ab20f51 100644 --- a/man/ds.igb_standards.Rd +++ b/man/ds.igb_standards.Rd @@ -39,9 +39,9 @@ recode the categories to Male/Female before the use of ds.igb_standards.} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. 
Default name is set to \code{igb.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified the default set of connections will be -used: see \code{\link{datashield.connections_default}}.} +used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ assigns the converted measurement as a new object on the server-side @@ -54,11 +54,19 @@ For gestational ages between 24 and 33 weeks, the INTERGROWTH very early preterm standard is used. } \references{ -International standards for newborn weight, length, and head circumference by -gestational age and sex: the Newborn Cross-Sectional Study of the INTERGROWTH-21st Project -Villar, José et al. The Lancet, Volume 384, Issue 9946, 857-868 -INTERGROWTH-21st very preterm size at birth reference charts. Lancet 2016 -doi.org/10.1016/S0140-6736(16) 00384-6. Villar, José et al. +\itemize{ + \item Villar, J., Ismail, L.C., Victora, C.G., Ohuma, E.O., Bertino, E., + Altman, D.G., Lambert, A., Papageorghiou, A.T., Carvalho, M., Jaffer, Y.A., + Gravett, M.G., Purwar, M., Frederick, I.O., Noble, A.J., Pang, R., Barros, + F.C., Chumlea, C., Bhutta, Z.A., Kennedy, S.H., 2014. International + standards for newborn weight, length, and head circumference by gestational + age and sex: the Newborn Cross-Sectional Study of the INTERGROWTH-21st + Project. The Lancet 384, 857--868. https://doi.org/10.1016/S0140-6736(14)60932-6 + \item Villar, J., Giuliani, F., Fenton, T.R., Ohuma, E.O., Ismail, L.C., + Kennedy, S.H., 2016. INTERGROWTH-21st very preterm size at birth reference + charts. The Lancet 387, 844--845. 
https://doi.org/10.1016/S0140-6736(16)00384-6 + +} } \author{ Demetris Avraam for DataSHIELD Development Team diff --git a/man/ds.isNA.Rd b/man/ds.isNA.Rd index 0c94b9ebe..ec6b2f6fe 100644 --- a/man/ds.isNA.Rd +++ b/man/ds.isNA.Rd @@ -9,9 +9,9 @@ ds.isNA(x = NULL, datasources = NULL) \arguments{ \item{x}{a character string specifying the name of the vector to check.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.isNA} returns a boolean. If it is TRUE the vector is empty diff --git a/man/ds.isValid.Rd b/man/ds.isValid.Rd index 2bda85571..2f927ae23 100644 --- a/man/ds.isValid.Rd +++ b/man/ds.isValid.Rd @@ -9,9 +9,9 @@ ds.isValid(x = NULL, datasources = NULL) \arguments{ \item{x}{a character string specifying the name of a vector, dataframe or matrix.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.isValid} returns a boolean. If it is TRUE input object is valid, FALSE otherwise. diff --git a/man/ds.kurtosis.Rd b/man/ds.kurtosis.Rd index 4a9b2a32d..4b698adff 100644 --- a/man/ds.kurtosis.Rd +++ b/man/ds.kurtosis.Rd @@ -18,9 +18,9 @@ if \code{type} is set to 'split', 'splits' or 's', the kurtosis is returned sepa if \code{type} is set to 'both' or 'b', both sets of outputs are produced. 
The default value is set to 'both'.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ a matrix showing the kurtosis of the input numeric variable, the number of valid observations and diff --git a/man/ds.length.Rd b/man/ds.length.Rd index 08b513a19..27e105bc4 100644 --- a/man/ds.length.Rd +++ b/man/ds.length.Rd @@ -22,9 +22,9 @@ Default \code{'both'}.} Default FALSE to save time. It is suggested that checks should only be undertaken once the function call has failed.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.length} returns to the client-side the pooled length of a vector or a list, diff --git a/man/ds.levels.Rd b/man/ds.levels.Rd index 0a372660c..fbdab0c46 100644 --- a/man/ds.levels.Rd +++ b/man/ds.levels.Rd @@ -9,9 +9,9 @@ ds.levels(x = NULL, datasources = NULL) \arguments{ \item{x}{a character string specifying the name of a factor variable.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.levels} returns to the client-side the levels of a factor diff --git a/man/ds.lexis.Rd b/man/ds.lexis.Rd index 0c75e1f01..a57cd28c5 100644 --- a/man/ds.lexis.Rd +++ b/man/ds.lexis.Rd @@ -41,9 +41,9 @@ variables to include in the final expanded table. For more information see \stro \item{expandDF}{a character string denoting the name of the new data frame containing the expanded data set. Default \code{lexis.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.lexis} returns to the server-side a data frame for each study with diff --git a/man/ds.list.Rd b/man/ds.list.Rd index de5b69e77..d6ebd00e1 100644 --- a/man/ds.list.Rd +++ b/man/ds.list.Rd @@ -12,9 +12,9 @@ ds.list(x = NULL, newobj = NULL, datasources = NULL) \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{list.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.list} returns a list of objects for each study that is stored on the server-side. diff --git a/man/ds.listDisclosureSettings.Rd b/man/ds.listDisclosureSettings.Rd index d04448b8f..23ab83fd7 100644 --- a/man/ds.listDisclosureSettings.Rd +++ b/man/ds.listDisclosureSettings.Rd @@ -7,9 +7,9 @@ ds.listDisclosureSettings(datasources = NULL) } \arguments{ -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.listDisclosureSettings} returns a list containing the current settings of the diff --git a/man/ds.listServersideFunctions.Rd b/man/ds.listServersideFunctions.Rd index 30931c629..70d9ba5d1 100644 --- a/man/ds.listServersideFunctions.Rd +++ b/man/ds.listServersideFunctions.Rd @@ -7,9 +7,9 @@ ds.listServersideFunctions(datasources = NULL) } \arguments{ -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.listServersideFunctions} returns to the client-side @@ -20,7 +20,7 @@ Firstly lists assign and then aggregate functions. Lists all current server-side functions } \details{ -Uses \code{\link{datashield.methods}} function from \code{DSI} package to list all +Uses \code{\link[DSI]{datashield.methods}} function from \code{DSI} package to list all assign and aggregate functions on the available data repository servers. The only choice of arguments is in \code{datasources}; i.e. which studies to interrogate. Once the studies have diff --git a/man/ds.lmerSLMA.Rd b/man/ds.lmerSLMA.Rd index b0f6908aa..dbd9b7fbe 100644 --- a/man/ds.lmerSLMA.Rd +++ b/man/ds.lmerSLMA.Rd @@ -44,9 +44,9 @@ For more information see \strong{Details}.} \item{checks}{logical. If TRUE \code{ds.lmerSLMA} checks the structural integrity of the model. Default FALSE. For more information see \strong{Details}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} \item{REML}{logical. If TRUE the REstricted Maximum Likelihood (REML) is used for parameter optimization. @@ -95,7 +95,7 @@ The list of elements returned by \code{ds.lmerSLMA} is mentioned below: below separately for each study. 
\code{coefficients}: a matrix with 5 columns: - \itemize{ + \describe{ \item{First}{: the names of all of the regression parameters (coefficients) in the model} \item{second}{: the estimated values} \item{third}{: corresponding standard errors of the estimated values} @@ -168,20 +168,20 @@ obtained using R help for \code{lmer} and the \code{lme4} package. In \code{formula} most shortcut notation allowed by \code{lmer()} function is also allowed by \code{ds.lmerSLMA}. Many LMEs can be fitted very simply using a formula like: -\deqn{y~a+b+(1|c)} +\eqn{y ~ a + b + (1 | c)} which simply means fit an LME with \code{y} as the outcome variable with \code{a} and \code{b} as fixed effects, and \code{c} as a random effect or grouping factor. It is also possible to fit models with random slopes by specifying a model such as -\deqn{y~a+b+(1+b|c)} +\eqn{y ~ a + b + (1 + b | c)} where the effect of \code{b} can vary randomly between groups defined by \code{c}. -Implicit nesting can be specified with formulae such as \eqn{y~a+b+(1|c/d)} -or \eqn{y~a+b+(1|c)+(1|c:d)}. +Implicit nesting can be specified with formulae such as \eqn{y ~ a + b + (1 | c / d)} +or \eqn{y ~ a + b + (1 | c) + (1 | c : d)}. The \code{dataName} argument avoids you having to specify the name of the data frame in front of each covariate in the formula. For example, if the data frame is called \code{DataFrame} you avoid having to write: -\eqn{DataFrame$y~DataFrame$a+DataFrame$b+(1|DataFrame$c)}. +\eqn{DataFrame\$y ~ DataFrame\$a + DataFrame\$b + (1 | DataFrame\$c)}. 
The \code{checks} argument verifies that the variables in the model are all defined (exist) on the server-site at every study @@ -252,7 +252,7 @@ Server function called: \code{lmerSLMADS2} # Fit the lmer - ds.lmerSLMA(formula = "BMI ~ incid_rate + diabetes + (1 | Male)", + ds.lmerSLMA(formula = "BMI ~ incid_rate + diabetes + (1 | Male)", dataName = "D.comp", datasources = connections) diff --git a/man/ds.log.Rd b/man/ds.log.Rd index b988fb84b..6ab8fee72 100644 --- a/man/ds.log.Rd +++ b/man/ds.log.Rd @@ -15,9 +15,9 @@ Default \code{exp(1)}.} \item{newobj}{a character string that provides the name for the output variable that is stored on the server-side. Default \code{log.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.log} returns a vector for each study of the transformed values for the numeric vector diff --git a/man/ds.look.Rd b/man/ds.look.Rd index 4a59c166e..3cecb4627 100644 --- a/man/ds.look.Rd +++ b/man/ds.look.Rd @@ -13,9 +13,9 @@ For more information see \strong{Details}.} \item{checks}{logical. If TRUE the optional checks are undertaken. Default FALSE to save time.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ the output from the specified server-side aggregate function to the client-side. diff --git a/man/ds.ls.Rd b/man/ds.ls.Rd index e05feb5ea..207af8548 100644 --- a/man/ds.ls.Rd +++ b/man/ds.ls.Rd @@ -13,7 +13,7 @@ ds.ls( } \arguments{ \item{search.filter}{character string (potentially including \code{*} symbol) specifying the filter -for the object name that you want to find in the enviroment. For more information see \strong{Details}.} +for the object name that you want to find in the environment. For more information see \strong{Details}.} \item{env.to.search}{an integer (e.g. in \code{2} or \code{2L} format) specifying the position in the search path of the environment to be explored. \code{1L} is the current active analytic @@ -26,9 +26,9 @@ set as a valid integer, \code{ds.ls} will list all objects in the server-side R identified by \code{env.to.search} in the search path. For more information see \strong{Details}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.ls} returns to the client-side a list containing: \cr diff --git a/man/ds.lspline.Rd b/man/ds.lspline.Rd index 550e23974..c0189ce40 100644 --- a/man/ds.lspline.Rd +++ b/man/ds.lspline.Rd @@ -18,16 +18,16 @@ ds.lspline( \item{knots}{numeric vector of knot positions} -\item{marginal}{logical, how to parametrize the spline, see Details} +\item{marginal}{logical, how to parametrise the spline, see Details} \item{names}{character, vector of names for constructed variables} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{lspline.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ an object of class "lspline" and "matrix", which its name is specified by the diff --git a/man/ds.make.Rd b/man/ds.make.Rd index d87cc537f..197518bc1 100644 --- a/man/ds.make.Rd +++ b/man/ds.make.Rd @@ -12,9 +12,9 @@ ds.make(toAssign = NULL, newobj = NULL, datasources = NULL) \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{make.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.make} returns the new object which is written to the diff --git a/man/ds.matrix.Rd b/man/ds.matrix.Rd index 27b3e9d87..d60a91832 100644 --- a/man/ds.matrix.Rd +++ b/man/ds.matrix.Rd @@ -18,7 +18,7 @@ ds.matrix( \arguments{ \item{mdata}{a character string specifying the name of a server-side scalar or vector. Also, a numeric value representing a -scalar specified from the client-side can be speficied. +scalar specified from the client-side can be specified. Zeros, negative values and NAs are all allowed. For more information see \strong{Details}.} @@ -43,9 +43,9 @@ the row and column names respectively.} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{matrix.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.matrix} returns the created matrix which is written on the server-side. diff --git a/man/ds.matrixDet.Rd b/man/ds.matrixDet.Rd index 1c419f4dd..e827e8158 100644 --- a/man/ds.matrixDet.Rd +++ b/man/ds.matrixDet.Rd @@ -15,9 +15,9 @@ variable that is stored on the data servers. Default \code{matrixdet.newobj}.} \item{logarithm}{logical. If TRUE the logarithm of the modulus of the determinant is calculated. 
Default FALSE.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.matrixDet} returns the determinant of an existing matrix on the server-side. diff --git a/man/ds.matrixDet.report.Rd b/man/ds.matrixDet.report.Rd index e4fc3569b..0a5c021d1 100644 --- a/man/ds.matrixDet.report.Rd +++ b/man/ds.matrixDet.report.Rd @@ -12,9 +12,9 @@ ds.matrixDet.report(M1 = NULL, logarithm = FALSE, datasources = NULL) \item{logarithm}{logical. If TRUE the logarithm of the modulus of the determinant is calculated. Default FALSE.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.matrixDet.report} returns to the client-side diff --git a/man/ds.matrixDiag.Rd b/man/ds.matrixDiag.Rd index a23ab1330..268e5148c 100644 --- a/man/ds.matrixDiag.Rd +++ b/man/ds.matrixDiag.Rd @@ -15,7 +15,7 @@ ds.matrixDiag( \arguments{ \item{x1}{a character string specifying the name of a server-side scalar or vector. Also, a numeric value or vector -specified from the client-side can be speficied. This argument depends +specified from the client-side can be specified. This argument depends on the value specified in \code{aim}. 
For more information see \strong{Details}.} @@ -35,9 +35,9 @@ For more information see \strong{Details}.} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{matrixdiag.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.matrixDiag} returns to the server-side the square matrix diagonal. diff --git a/man/ds.matrixDimnames.Rd b/man/ds.matrixDimnames.Rd index bc5e5813c..af56f76a6 100644 --- a/man/ds.matrixDimnames.Rd +++ b/man/ds.matrixDimnames.Rd @@ -22,9 +22,9 @@ An empty list is treated as NULL.} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{matrixdimnames.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.matrixDimnames} returns to the server-side diff --git a/man/ds.matrixInvert.Rd b/man/ds.matrixInvert.Rd index 0a6ff331b..842baee46 100644 --- a/man/ds.matrixInvert.Rd +++ b/man/ds.matrixInvert.Rd @@ -13,9 +13,9 @@ ds.matrixInvert(M1 = NULL, newobj = NULL, datasources = NULL) variable that is stored on the data servers. 
Default \code{matrixinvert.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.matrixInvert} returns to the server-side the inverts square matrix. diff --git a/man/ds.matrixMult.Rd b/man/ds.matrixMult.Rd index 22a1dbc64..136db26d9 100644 --- a/man/ds.matrixMult.Rd +++ b/man/ds.matrixMult.Rd @@ -14,9 +14,9 @@ ds.matrixMult(M1 = NULL, M2 = NULL, newobj = NULL, datasources = NULL) \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{matrixmult.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.matrixMult} returns to the server-side diff --git a/man/ds.matrixTranspose.Rd b/man/ds.matrixTranspose.Rd index 26be7098d..67e06867e 100644 --- a/man/ds.matrixTranspose.Rd +++ b/man/ds.matrixTranspose.Rd @@ -13,9 +13,9 @@ ds.matrixTranspose(M1 = NULL, newobj = NULL, datasources = NULL) variable that is stored on the data servers. Default \code{matrixtranspose.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.matrixTranspose} returns to the server-side the transpose matrix. diff --git a/man/ds.mean.Rd b/man/ds.mean.Rd index e6d0fd70d..d662a3895 100644 --- a/man/ds.mean.Rd +++ b/man/ds.mean.Rd @@ -31,9 +31,9 @@ the number of valid (non-missing) observations will be saved on the data server Default FALSE. For more information see \strong{Details}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.mean} returns to the client-side a list including: \cr diff --git a/man/ds.meanByClass.Rd b/man/ds.meanByClass.Rd index 340b07dba..f60ba7a45 100644 --- a/man/ds.meanByClass.Rd +++ b/man/ds.meanByClass.Rd @@ -24,9 +24,9 @@ ds.meanByClass( Default \code{'combine'}. For more information see \strong{Details}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.meanByClass} returns to the client-side a table or a list of tables that diff --git a/man/ds.meanSdGp.Rd b/man/ds.meanSdGp.Rd index 8660b9129..964a5dae7 100644 --- a/man/ds.meanSdGp.Rd +++ b/man/ds.meanSdGp.Rd @@ -29,9 +29,9 @@ are undertaken to ensure that the input objects are defined in all studies and t variables are of equivalent class in each study. Default is FALSE to save time.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.meanSdGp} returns to the client-side the mean, SD, Nvalid and SEM combined diff --git a/man/ds.merge.Rd b/man/ds.merge.Rd index 24f8e5e33..d6e18558e 100644 --- a/man/ds.merge.Rd +++ b/man/ds.merge.Rd @@ -65,9 +65,9 @@ For more information see \code{match} in native R \code{merge} function.} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{merge.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.merge} returns the merged data frame that is written on the server-side. diff --git a/man/ds.message.Rd b/man/ds.message.Rd index 6b54e513b..ab1dff8d6 100644 --- a/man/ds.message.Rd +++ b/man/ds.message.Rd @@ -10,9 +10,9 @@ ds.message(message.obj.name = NULL, datasources = NULL) \item{message.obj.name}{is a character string specifying the name of the list that contains the message.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.message} returns a list object from each study, diff --git a/man/ds.metadata.Rd b/man/ds.metadata.Rd index cc43a67d7..128ca9ec7 100644 --- a/man/ds.metadata.Rd +++ b/man/ds.metadata.Rd @@ -9,9 +9,9 @@ ds.metadata(x = NULL, datasources = NULL) \arguments{ \item{x}{a character string specifying the name of the object.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.metadata} returns to the client-side the metadata of associated to an object diff --git a/man/ds.mice.Rd b/man/ds.mice.Rd index 59f6f471b..8bedcd93c 100644 --- a/man/ds.mice.Rd +++ b/man/ds.mice.Rd @@ -54,9 +54,9 @@ that are stored on the data servers. Default \code{imputationSet}. For example, newobj_df="imputationSet", then five imputed dataframes are saved on the servers with names imputationSet.1, imputationSet.2, imputationSet.3, imputationSet.4, imputationSet.5.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ a list with three elements: the method, the predictorMatrix and the post. diff --git a/man/ds.names.Rd b/man/ds.names.Rd index a2e48bf2c..199b20d97 100644 --- a/man/ds.names.Rd +++ b/man/ds.names.Rd @@ -9,11 +9,11 @@ ds.names(xname = NULL, datasources = NULL) \arguments{ \item{xname}{a character string specifying the name of the list.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login that represent the particular data sources (studies) to be addressed by the function call. 
If the \code{datasources} argument is not specified the default set of connections will be used: -see \code{\link{datashield.connections_default}}.} +see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.names} returns to the client-side the names @@ -28,7 +28,7 @@ the native R function \code{names} but it does not subsume all functionality, for example, it only works to extract names that already exist, not to create new names for objects. The function is restricted to objects of type list, but this includes objects that have a primary class other than list but which -return TRUE to the native R function {is.list}. As an example this includes +return TRUE to the native R function \code{is.list}. As an example this includes the multi-component object created by fitting a generalized linear model using ds.glmSLMA. The resultant object saved on each server separately is formally of class "glm" and "ls" but responds TRUE to is.list(), diff --git a/man/ds.ns.Rd b/man/ds.ns.Rd index 51e3166b1..28ffaaa04 100644 --- a/man/ds.ns.Rd +++ b/man/ds.ns.Rd @@ -34,9 +34,9 @@ are supplied, the basis parameters do not depend on x. Data can extend beyond Bo \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{ns.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ A matrix of dimension length(x) * df where either df was supplied or if knots were @@ -55,7 +55,7 @@ a basis matrix for representing the family of piecewise-cubic splines with the s sequence of interior knots, and the natural boundary conditions. 
These enforce the constraint that the function is linear beyond the boundary knots, which can either be supplied or default to the extremes of the data. -A primary use is in modeling formula to directly specify a natural spline term in a model. +A primary use is in modelling formula to directly specify a natural spline term in a model. } \author{ Demetris Avraam for DataSHIELD Development Team diff --git a/man/ds.numNA.Rd b/man/ds.numNA.Rd index 668e23edd..896c76ee1 100644 --- a/man/ds.numNA.Rd +++ b/man/ds.numNA.Rd @@ -9,9 +9,9 @@ ds.numNA(x = NULL, datasources = NULL) \arguments{ \item{x}{a character string specifying the name of the vector.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.numNA} returns to the client-side the number of missing values diff --git a/man/ds.qlspline.Rd b/man/ds.qlspline.Rd index fc9285ae8..003f6b3ad 100644 --- a/man/ds.qlspline.Rd +++ b/man/ds.qlspline.Rd @@ -23,16 +23,16 @@ intervals along x or a vector of numbers in (0; 1) specifying the quantiles expl \item{na.rm}{logical, whether NA should be removed when calculating quantiles, passed to na.rm of quantile. Default set to TRUE} -\item{marginal}{logical, how to parametrize the spline, see Details} +\item{marginal}{logical, how to parametrise the spline, see Details} \item{names}{character, vector of names for constructed variables} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. 
Default \code{qlspline.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ an object of class "lspline" and "matrix", which its name is specified by the diff --git a/man/ds.quantileMean.Rd b/man/ds.quantileMean.Rd index 560194289..03b469a18 100644 --- a/man/ds.quantileMean.Rd +++ b/man/ds.quantileMean.Rd @@ -13,9 +13,9 @@ ds.quantileMean(x = NULL, type = "combine", datasources = NULL) This can be set as \code{'combine'} or \code{'split'}. For more information see \strong{Details}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.quantileMean} returns to the client-side the quantiles and statistical mean diff --git a/man/ds.rBinom.Rd b/man/ds.rBinom.Rd index e3ee5d16f..993083e18 100644 --- a/man/ds.rBinom.Rd +++ b/man/ds.rBinom.Rd @@ -33,9 +33,9 @@ random seed in each data source.} in each data source (a numeric vector of length 626). If FALSE it will only return the trigger seed value you have provided. Default is FALSE.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.rBinom} returns random number vectors diff --git a/man/ds.rNorm.Rd b/man/ds.rNorm.Rd index 1149593fc..f60071fbc 100644 --- a/man/ds.rNorm.Rd +++ b/man/ds.rNorm.Rd @@ -37,9 +37,9 @@ Default is FALSE.} \item{force.output.to.k.decimal.places}{an integer vector that forces the output random numbers vector to have k decimals.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.rNorm} returns random number vectors with a normal distribution for each diff --git a/man/ds.rPois.Rd b/man/ds.rPois.Rd index 4fbe5e4fa..7ce19a470 100644 --- a/man/ds.rPois.Rd +++ b/man/ds.rPois.Rd @@ -30,9 +30,9 @@ random number seed in each data source (a numeric vector of length 626). If FALSE it will only return the trigger seed value you have provided. Default is FALSE.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.rPois} returns random number vectors with a Poisson distribution for each study, diff --git a/man/ds.rUnif.Rd b/man/ds.rUnif.Rd index d526fca2c..0ffd62aaa 100644 --- a/man/ds.rUnif.Rd +++ b/man/ds.rUnif.Rd @@ -39,9 +39,9 @@ return the trigger seed value you have provided. Default is FALSE.} an integer vector that forces the output random numbers vector to have k decimals.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.Unif} returns random number vectors with a uniform distribution for each study, diff --git a/man/ds.ranksSecure.Rd b/man/ds.ranksSecure.Rd index 3fd8ed8eb..294a754f7 100644 --- a/man/ds.ranksSecure.Rd +++ b/man/ds.ranksSecure.Rd @@ -51,7 +51,7 @@ is usually the setting to use. But, if there is some abnormal configuration of the clusters of values that are being ranked such that some values are treated as being missing and the processing stops, then setting generate.quantiles to FALSE allows the generation of ranks to complete so -they can then be used for non-parameteric analysis, even if the key values +they can then be used for non-parametric analysis, even if the key values cannot be estimated. A real example of an unusual configuration was in a reasonably large dataset of survival times, where a substantial proportion of survival profiles were censored at precisely 10 years. 
This meant that @@ -181,7 +181,7 @@ occurred. } \description{ Securely generate the ranks of a numeric vector and estimate -true qlobal quantiles across all data sources simultaneously +true global quantiles across all data sources simultaneously } \details{ ds.ranksSecure is a clientside function which calls a series of diff --git a/man/ds.rbind.Rd b/man/ds.rbind.Rd index d294f4744..75c2a03f9 100644 --- a/man/ds.rbind.Rd +++ b/man/ds.rbind.Rd @@ -25,9 +25,9 @@ specifies column names of the output object.} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Defaults \code{rbind.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} \item{notify.of.progress}{specifies if console output should be produced to indicate progress. Default FALSE.} diff --git a/man/ds.reShape.Rd b/man/ds.reShape.Rd index 9c54b3fcd..8acdd161a 100644 --- a/man/ds.reShape.Rd +++ b/man/ds.reShape.Rd @@ -49,9 +49,9 @@ to 'wide' format.} that is stored on the data servers. Default \code{reshape.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.reShape} returns to the server-side a reshaped data frame diff --git a/man/ds.recodeLevels.Rd b/man/ds.recodeLevels.Rd index 958b4881f..144509277 100644 --- a/man/ds.recodeLevels.Rd +++ b/man/ds.recodeLevels.Rd @@ -20,9 +20,9 @@ to the current number of levels.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{recodelevels.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.recodeLevels} returns to the server-side a variable of type factor diff --git a/man/ds.recodeValues.Rd b/man/ds.recodeValues.Rd index a6af2722f..6b775bc9d 100644 --- a/man/ds.recodeValues.Rd +++ b/man/ds.recodeValues.Rd @@ -31,9 +31,9 @@ specify an identical vector of values in both arguments \code{values2replace.vec that is stored on the data servers. Default \code{recodevalues.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} \item{notify.of.progress}{logical. 
If TRUE console output should be produced to indicate progress. Default FALSE.} @@ -52,7 +52,7 @@ them to a matched set of alternative specified values. This function recodes individual values with new individual values. This can apply to numeric and character values, factor levels and NAs. One particular use of \code{ds.recodeValues} is to convert NAs to an explicit value. This value is specified -in the argument \code{missing}. If tthe user want to recode only missing values, then it +in the argument \code{missing}. If the user wants to recode only missing values, then it should also specify an identical vector of values in both arguments \code{values2replace.vector} and \code{new.values.vector} (see Example 2 below). Server function called: \code{recodeValuesDS} diff --git a/man/ds.rep.Rd b/man/ds.rep.Rd index 23a352501..b552ff437 100644 --- a/man/ds.rep.Rd +++ b/man/ds.rep.Rd @@ -44,9 +44,9 @@ the \code{x1} is a character.} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{seq.vect}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.rep} returns in the server-side a vector with the specified repetitive sequence. diff --git a/man/ds.replaceNA.Rd b/man/ds.replaceNA.Rd index f84805910..3b8a4ec01 100644 --- a/man/ds.replaceNA.Rd +++ b/man/ds.replaceNA.Rd @@ -15,9 +15,9 @@ The length of the list or vector must be equal to the number of servers (studies \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. 
Default \code{replacena.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.replaceNA} returns to the server-side a new vector or table structure diff --git a/man/ds.rm.Rd b/man/ds.rm.Rd index 9a6e88afa..a63786217 100644 --- a/man/ds.rm.Rd +++ b/man/ds.rm.Rd @@ -9,9 +9,9 @@ ds.rm(x.names = NULL, datasources = NULL) \arguments{ \item{x.names}{a character string specifying the objects to be deleted.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ The \code{ds.rm} function deletes from the server-side diff --git a/man/ds.rowColCalc.Rd b/man/ds.rowColCalc.Rd index dc0992f2a..dc4cfbd93 100644 --- a/man/ds.rowColCalc.Rd +++ b/man/ds.rowColCalc.Rd @@ -15,9 +15,9 @@ ds.rowColCalc(x = NULL, operation = NULL, newobj = NULL, datasources = NULL) \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{rowcolcalc.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.rowColCalc} returns to the server-side rows and columns sums and means. diff --git a/man/ds.sample.Rd b/man/ds.sample.Rd index 018ba2518..357882d5f 100644 --- a/man/ds.sample.Rd +++ b/man/ds.sample.Rd @@ -22,11 +22,11 @@ scalar (e.g. 923) indicating that one should create a new vector on the serversi that is a randomly permuted sample of the vector 1:923, or (if [replace] = FALSE, a full random permutation of that same vector. For further details of using ds.sample with x set as an integer/numeric please see help for -the {sample} function in native R. But if x is set as a character string +the \code{sample} function in native R. But if x is set as a character string denoting a vector, matrix or data.frame on the serverside, please note -that although {ds.sample} effectively calls {sample} on the serverside -it behaves somewhat differently to {sample} - for the reasons identified -at the top of 'details' and so help for {sample} should be used as a guide +that although \code{ds.sample} effectively calls \code{sample} on the serverside +it behaves somewhat differently to \code{sample} - for the reasons identified +at the top of 'details' and so help for \code{sample} should be used as a guide only.} \item{size}{a numeric/integer scalar indicating the size of the sample to @@ -64,13 +64,13 @@ each source} \item{replace}{a Boolean indicator (TRUE or FALSE) specifying whether the sample should be drawn with or without replacement. Default is FALSE so the sample is drawn without replacement. 
For further details see -help for {sample} in native R.} +help for \code{sample} in native R.} \item{prob}{a character string containing the name of a numeric vector of probability weights on the serverside that is associated with each of the elements of the vector to be sampled enabling the drawing of a sample with some elements given higher probability of being drawn than others. -For further details see help for {sample} in native R.} +For further details see help for \code{sample} in native R.} \item{newobj}{This a character string providing a name for the output data.frame which defaults to 'newobj.sample' if no name is specified.} @@ -78,7 +78,7 @@ data.frame which defaults to 'newobj.sample' if no name is specified.} \item{datasources}{specifies the particular opal object(s) to use. If the argument is not specified the default set of opals will be used. The default opals are called default.opals and the default can be set using the function -{ds.setDefaultOpals}. If the is to be specified, it should be set without +\code{ds.setDefaultOpals}. If the argument is to be specified, it should be set without inverted commas: e.g. datasources=opals.em or datasources=default.opals. If you wish to apply the function solely to e.g. the second opal server in a set of three, the argument can be specified as: e.g. datasources=opals.em[2]. @@ -106,8 +106,8 @@ or - as a special case - randomly permutes a vector, dataframe or matrix. } \details{ Clientside function ds.sample calls serverside -assign function sampleDS. Based on the native R function {sample()} but deals -slightly differently with data.frames and matrices. Specifically the {sample()} +assign function sampleDS. Based on the native R function \code{sample()} but deals +slightly differently with data.frames and matrices. Specifically the \code{sample()} function in R identifies the length of an object and then samples n components of that length. 
But length(data.frame) in native R returns the number of columns not the number of rows. So if you have a data.frame with 71 rows and 10 columns, @@ -147,7 +147,7 @@ but they join the sample in random order. By sorting the output object (in this case with the default name 'newobj.sample) using ds.dataFrameSort with the 'sampling.order' vector as the sort key, the output object is rendered equivalent to PRWa but with the rows randomly permuted (so the column reflecting -the vector 'sample.order' now runs from 1:length of obejct, while the +the vector 'sample.order' now runs from 1:length of object, while the column reflecting 'ID.seq' denoting the original order is now randomly ordered. If you need to return to the original order you can simply us ds.dataFrameSort again using the column reflecting 'ID.seq' as the sort key: diff --git a/man/ds.scatterPlot.Rd b/man/ds.scatterPlot.Rd index 773123ac4..798c08447 100644 --- a/man/ds.scatterPlot.Rd +++ b/man/ds.scatterPlot.Rd @@ -27,7 +27,7 @@ This argument can be set as \code{'deteministic'} or \code{'probabilistic'}. Default \code{'deteministic'}. For more information see \strong{Details}.} -\item{k}{the number of the nearest neighbors for which their centroid is calculated. +\item{k}{the number of the nearest neighbours for which their centroid is calculated. Default 3. For more information see \strong{Details}.} @@ -43,9 +43,9 @@ For more information see \strong{Details}.} \item{return.coords}{a logical. If TRUE the coordinates of the anonymised data points are return to the Console. Default value is FALSE.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.scatterPlot} returns to the client-side one or more scatter @@ -60,7 +60,7 @@ As the generation of a scatter plot from original data is disclosive and is not permitted in DataSHIELD, this function allows the user to plot non-disclosive scatter plots. If the argument \code{method} is set to \code{'deterministic'}, the server-side function searches -for the \code{k-1} nearest neighbors of each single data point and calculates the centroid +for the \code{k-1} nearest neighbours of each single data point and calculates the centroid of such \code{k} points. The proximity is defined by the minimum Euclidean distances of z-score transformed data. diff --git a/man/ds.seq.Rd b/man/ds.seq.Rd index 115787932..e8b7365fe 100644 --- a/man/ds.seq.Rd +++ b/man/ds.seq.Rd @@ -41,9 +41,9 @@ For more information see \strong{Details}.} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{seq.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.seq} returns to the server-side the generated sequence. 
diff --git a/man/ds.setSeed.Rd b/man/ds.setSeed.Rd index 6577b6cb8..6dacac34d 100644 --- a/man/ds.setSeed.Rd +++ b/man/ds.setSeed.Rd @@ -10,9 +10,9 @@ ds.setSeed(seed.as.integer = NULL, datasources = NULL) \item{seed.as.integer}{a numeric value or a NULL that primes the random seed in each data source.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ Sets the values of the vector of integers of length 626 known as diff --git a/man/ds.skewness.Rd b/man/ds.skewness.Rd index 9ebab0d6b..7c3bda041 100644 --- a/man/ds.skewness.Rd +++ b/man/ds.skewness.Rd @@ -17,9 +17,9 @@ For more information see \strong{Details}. The default value is set to 1.} see \strong{Details}. The default value is set to \code{'both'}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.skewness} returns a matrix showing the skewness of the input numeric variable, diff --git a/man/ds.sqrt.Rd b/man/ds.sqrt.Rd index 62abcc83e..638d26a5f 100644 --- a/man/ds.sqrt.Rd +++ b/man/ds.sqrt.Rd @@ -12,9 +12,9 @@ ds.sqrt(x = NULL, newobj = NULL, datasources = NULL) \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. 
Default name is set to \code{sqrt.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified the default set of connections will be -used: see \code{\link{datashield.connections_default}}.} +used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.sqrt} assigns a vector for each study that includes the square root values of @@ -71,7 +71,6 @@ specified by the user through the argument \code{newobj}, otherwise is named by # and get their square roots ds.make(toAssign='rep((1:10)^2, times=10)', newobj='squares.vector', datasources=connections) ds.sqrt(x='squares.vector', newobj='sqrt.vector', datasources=connections) - # check the behavior of that operation by comparing the tables of squares.vector and sqrt.vector ds.table(rvar='squares.vector')$output.list$TABLE_rvar.by.study_counts ds.table(rvar='sqrt.vector')$output.list$TABLE_rvar.by.study_counts diff --git a/man/ds.subset.Rd b/man/ds.subset.Rd index 0c2472f24..2b2d91b99 100644 --- a/man/ds.subset.Rd +++ b/man/ds.subset.Rd @@ -33,20 +33,20 @@ operator. This parameter is ignored if the input data is not a vector.} \item{threshold}{a numeric, the threshold to use in conjunction with the logical parameter. This parameter is ignored if the input data is not a vector.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} } \value{ no data are return to the user, the generated subset dataframe is stored on the server side. 
} \description{ The function uses the R classical subsetting with squared brackets '[]' and allows also to -subset using a logical oprator and a threshold. The object to subset from must be a vector (factor, numeric -or charcater) or a table (data.frame or matrix). +subset using a logical operator and a threshold. The object to subset from must be a vector (factor, numeric +or character) or a table (data.frame or matrix). } \details{ (1) If the input data is a table the user specifies the rows and/or columns to include in the subset; the columns can be -refered to by their names. Table subsetting can also be done using the name of a variable and a threshold (see example 3). +referred to by their names. Table subsetting can also be done using the name of a variable and a threshold (see example 3). (2) If the input data is a vector and the parameters 'rows', 'logical' and 'threshold' are all provided the last two are ignored (i.e. 'rows' has precedence over the other two parameters then). IMPORTANT NOTE: If the requested subset is not valid (i.e. contains less than the allowed number of observations) all the values are diff --git a/man/ds.subsetByClass.Rd b/man/ds.subsetByClass.Rd index d19b5a717..fe372adb8 100644 --- a/man/ds.subsetByClass.Rd +++ b/man/ds.subsetByClass.Rd @@ -19,8 +19,8 @@ the default name of this list is 'subClasses'.} \item{variables}{a vector of string characters, the name(s) of the variables to subset by.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} } \value{ a no data are return to the user but messages are printed out. 
diff --git a/man/ds.summary.Rd b/man/ds.summary.Rd index f171efe4f..2f52cff7e 100644 --- a/man/ds.summary.Rd +++ b/man/ds.summary.Rd @@ -9,9 +9,9 @@ ds.summary(x = NULL, datasources = NULL) \arguments{ \item{x}{a character string specifying the name of a numeric or factor variable.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.summary} returns to the client-side the class and diff --git a/man/ds.table.Rd b/man/ds.table.Rd index 6ee378d76..252232bdc 100644 --- a/man/ds.table.Rd +++ b/man/ds.table.Rd @@ -38,14 +38,14 @@ tables in the output if the call specifies a 3 dimensional table.} are applied to every 2 dimensional table in the output and reported as "chisq.test_table.name". Default = FALSE.} -\item{exclude}{this argument is passed through to the {table} function in -native R which is called by {tableDS}. The help for {table} in native R +\item{exclude}{this argument is passed through to the \code{table} function in +native R which is called by \code{tableDS}. The help for \code{table} in native R indicates that 'exclude' specifies any levels that should be deleted for all factors in rvar, cvar or stvar. If the argument does not include NA and if the argument is not specified, -it implies = "always" in DataSHIELD. If you read the help for {table} in native R +it implies = "always" in DataSHIELD. 
If you read the help for \code{table} in native R including the 'details' and the 'examples' (particularly 'd.patho') you -will see that the response of {table} to different combinations of the +will see that the response of \code{table} to different combinations of the and arguments can be non-intuitive. This is particularly so if there is more than one type of missing (e.g. missing by observation as well as missing because of an NaN response to a mathematical @@ -54,27 +54,27 @@ of these complex settings (which should not be very common) and you cannot interpret the output that has been approached you might try: (1) making sure that the variable producing the strange results is of class factor rather than integer or numeric - although integers and -numerics are coerced to factors by {ds.table} they can occasionally behave less +numerics are coerced to factors by \code{ds.table} they can occasionally behave less well when the NA setting is complex; (2) specify both an argument e.g. exclude = c("NaN","3") and a argument e.g. useNA= "no"; (3) if you are excluding multiple levels e.g exclude = c("NA","3") then you can reduce this to one e.g. exclude = c("NA") and then remove the 3s by deleting rows of data, or converting the 3s to a different value.} -\item{useNA}{this argument is passed through to the {table} function in -native R which is called by {tableDS}. In DataSHIELD, this argument can take +\item{useNA}{this argument is passed through to the \code{table} function in +native R which is called by \code{tableDS}. In DataSHIELD, this argument can take two values: "no" or "always" which indicate whether to include NA values in the table. For further information, please see the help for the argument (above) -and/or the help for the {table} function in native R. Default value is set to "always".} +and/or the help for the \code{table} function in native R. 
Default value is set to "always".} \item{suppress.chisq.warnings}{if set to TRUE, the default warnings are -suppressed that would otherwise be produced by the {table} function in +suppressed that would otherwise be produced by the \code{table} function in native R whenever an expected cell count in one or more cells is less than 5. Default is FALSE. Further details can be found under 'details' and the help provided for the argument (above).} \item{table.assign}{is a Boolean argument set by default to FALSE. If it is -FALSE the {ds.table} function acts as a standard aggregate function - +FALSE the \code{ds.table} function acts as a standard aggregate function - it returns the table that is specified in its call to the clientside where it can be visualised and worked with by the analyst. But if is TRUE, the same table object is also written to @@ -90,8 +90,8 @@ If no explicit name for the table object is specified, but is nevertheless TRUE, the name for the serverside table object defaults to \code{table.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}. If the is to be specified, it should be set without inverted commas: e.g. datasources=connections.em or datasources=default.connections. If you wish to apply the function solely to e.g. the second connection server in a set of three, @@ -103,7 +103,7 @@ e.g. datasources=connections.em[c(1,3)].} a positive integer represented as a character string: e.g. "173". This the has the effect of the standard value of 'nfilter.tab' (often 1, 3, 5 or 10 depending what value the data custodian has selected for this particular -data set), to this new value (here, 173). 
CRUCIALLY, the {ds.table} function +data set), to this new value (here, 173). CRUCIALLY, the \code{ds.table} function only allows the standard value to be INCREASED. So if the standard value has been set as 5 (as one of the R options set in the serverside connection), "6" and "4981" would be allowable values for the argument but "4" or @@ -118,9 +118,9 @@ it is blocked because it fails the disclosure control criteria or there is an error for some other reason). The clientside output from -{ds.table} includes error messages that identify when the creation of a +\code{ds.table} includes error messages that identify when the creation of a table from a particular study has failed and why. If table.assign=TRUE, -{ds.table} also writes the requested table as an object named by +\code{ds.table} also writes the requested table as an object named by the argument or set to 'newObj' by default. Further information @@ -165,7 +165,7 @@ In consequence, if the number 13 appears in a cell of the empty table returned to the clientside, it means that the true count in that same cell is held as the 13th element of the true count vector saved on the serverside. This means that a data analyst -can still make use of the counts from a call to the {ds.table} +can still make use of the counts from a call to the \code{ds.table} function to drive their ongoing analysis even when one or more non-zero cell counts fall below the specified threshold for potential disclosure risk. @@ -184,7 +184,7 @@ tables produced in the output. In creating a 3-dimensional table the ('separate tables') argument identifies the variable that -indexes the set of two dimensional tables in the output {ds.table}. +indexes the set of two dimensional tables in the output \code{ds.table}. 
As a minor technicality, it should be noted that if a 1-dimensional table is required, one only need specify a value @@ -196,7 +196,7 @@ dimensional tables and key components of the output for one dimensional tables are actually two dimensional: with rows defined by and with one column for each of the studies. -The output list generated by {ds.table} contains tables based on counts +The output list generated by \code{ds.table} contains tables based on counts named "table.name_counts" and other tables reporting corresponding column proportions ("table.name_col.props") or row proportions ("table.name_row.props"). In one dimensional tables in the output the diff --git a/man/ds.table1D.Rd b/man/ds.table1D.Rd index a4d5c9584..b79ee21ee 100644 --- a/man/ds.table1D.Rd +++ b/man/ds.table1D.Rd @@ -14,16 +14,16 @@ ds.table1D( \arguments{ \item{x}{a character, the name of a numerical vector with discrete values - usually a factor.} -\item{type}{a character which represent the type of table to ouput: pooled table or one table for each +\item{type}{a character which represent the type of table to output: pooled table or one table for each data source. If \code{type} is set to 'combine', a pooled 1-dimensional table is returned; if If \code{type} is set to 'split' a 1-dimensional table is returned for each data source.} -\item{warningMessage}{a boolean, if set to TRUE (deafult) a warning is displayed if any returned table is invalid. Warning +\item{warningMessage}{a boolean, if set to TRUE (default) a warning is displayed if any returned table is invalid. Warning messages are suppressed if this parameter is set to FALSE. However the analyst can still view 'validity' information which are stored in the output object 'validity' - see the list of output objects.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. 
If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} } \value{ A list object containing the following items: diff --git a/man/ds.table2D.Rd b/man/ds.table2D.Rd index 00a14d148..3d0e2a2c2 100644 --- a/man/ds.table2D.Rd +++ b/man/ds.table2D.Rd @@ -17,17 +17,17 @@ ds.table2D( \item{y}{a character, the name of a numerical vector with discrete values - usually a factor.} -\item{type}{a character which represent the type of table to ouput: pooled table or one table for each +\item{type}{a character which represent the type of table to output: pooled table or one table for each data source or both. If \code{type} is set to 'combine', a pooled 2-dimensional table is returned; If \code{type} is set to 'split' a 2-dimensional table is returned for each data source. If \code{type} is set to 'both' (default) a pooled 2-dimensional table plus a 2-dimensional table for each data source are returned.} -\item{warningMessage}{a boolean, if set to TRUE (deafult) a warning is displayed if any returned table is invalid. Warning +\item{warningMessage}{a boolean, if set to TRUE (default) a warning is displayed if any returned table is invalid. Warning messages are suppressed if this parameter is set to FALSE. However the analyst can still view 'validity' information which are stored in the output object 'validity' - see the list of output objects.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} } \value{ A list object containing the following items: diff --git a/man/ds.tapply.Rd b/man/ds.tapply.Rd index d5aadccf7..8d09e89c6 100644 --- a/man/ds.tapply.Rd +++ b/man/ds.tapply.Rd @@ -24,9 +24,9 @@ summarizing functions. This can be set as: or \code{"quantile"}. For more information see \strong{Details}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.tapply} returns to the client-side an array of the summarized values. diff --git a/man/ds.tapply.assign.Rd b/man/ds.tapply.assign.Rd index ef1e36d80..f58810b2e 100644 --- a/man/ds.tapply.assign.Rd +++ b/man/ds.tapply.assign.Rd @@ -28,9 +28,9 @@ For more information see \strong{Details}.} \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{tapply.assign.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.tapply.assign} returns an array of the summarized values. 
diff --git a/man/ds.testObjExists.Rd b/man/ds.testObjExists.Rd index 0e14bc6dd..ae0fb40d1 100644 --- a/man/ds.testObjExists.Rd +++ b/man/ds.testObjExists.Rd @@ -9,9 +9,9 @@ ds.testObjExists(test.obj.name = NULL, datasources = NULL) \arguments{ \item{test.obj.name}{a character string specifying the name of the object to search.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.testObjExists} returns a list of messages specifying that the object exists diff --git a/man/ds.unList.Rd b/man/ds.unList.Rd index 72128133a..cb8ef0a15 100644 --- a/man/ds.unList.Rd +++ b/man/ds.unList.Rd @@ -12,9 +12,9 @@ ds.unList(x.name = NULL, newobj = NULL, datasources = NULL) \item{newobj}{a character string that provides the name for the output variable that is stored on the data servers. Default \code{unlist.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.unList} returns to the server-side the unlist object. 
diff --git a/man/ds.unique.Rd b/man/ds.unique.Rd index f148ec635..61d6355bd 100644 --- a/man/ds.unique.Rd +++ b/man/ds.unique.Rd @@ -7,14 +7,14 @@ ds.unique(x.name = NULL, newobj = NULL, datasources = NULL) } \arguments{ -\item{x.name}{a character string providing the name of the varable, in the server, to perform \code{unique} upon} +\item{x.name}{a character string providing the name of the variable, in the server, to perform \code{unique} upon} \item{newobj}{a character string that provides the name for the output object that is stored on the data servers. Default \code{unique.newobj}.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.unique} returns the vector of unique R objects which are written to the server-side. diff --git a/man/ds.var.Rd b/man/ds.var.Rd index 48bc30bd3..5398c6e1a 100644 --- a/man/ds.var.Rd +++ b/man/ds.var.Rd @@ -20,9 +20,9 @@ components will be undertaken. Default is FALSE to save time. It is suggested that checks should only be undertaken once the function call has failed.} -\item{datasources}{a list of \code{\link{DSConnection-class}} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} } \value{ \code{ds.var} returns to the client-side a list including:\cr diff --git a/man/ds.vectorCalc.Rd b/man/ds.vectorCalc.Rd index 39890eedb..87d7a5dd7 100644 --- a/man/ds.vectorCalc.Rd +++ b/man/ds.vectorCalc.Rd @@ -14,8 +14,8 @@ ds.vectorCalc(x = NULL, calc = NULL, newobj = NULL, datasources = NULL) \item{newobj}{the name of the output object. By default the name is 'vectorcalc.newobj'.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} } \value{ no data are returned to user, the output vector is stored on the server side. @@ -26,10 +26,10 @@ server side function; it uses the R operation symbols built in DataSHIELD. } \details{ In DataSHIELD it is possible to perform an operation on vectors by just using the relevant -R symbols (e.g. '+' for addtion, '*' for multiplication, '-' for substraction and '/' for division). +R symbols (e.g. '+' for addition, '*' for multiplication, '-' for subtraction and '/' for division). This might however be inconvenient if the number of vectors to include in the operation is large. This function takes the names of two or more vectors and performs the desired operation which could be -an addition, a multiplication, a substraction or a division. If one or more vectors have a missing value +an addition, a multiplication, a subtraction or a division. If one or more vectors have a missing value at any one entry (i.e. 
observation), the operation returns a missing value ('NA') for that entry; the output vectors has, hence the same length as the input vectors. } diff --git a/man/getPooledMean.Rd b/man/getPooledMean.Rd index eae009776..e28cb3b1d 100644 --- a/man/getPooledMean.Rd +++ b/man/getPooledMean.Rd @@ -7,8 +7,8 @@ getPooledMean(dtsources, x) } \arguments{ -\item{dtsources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{dtsources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} \item{x}{a character, the name of a numeric vector} } diff --git a/man/getPooledVar.Rd b/man/getPooledVar.Rd index a94f3ab0e..a89c0544f 100644 --- a/man/getPooledVar.Rd +++ b/man/getPooledVar.Rd @@ -7,8 +7,8 @@ getPooledVar(dtsources, x) } \arguments{ -\item{dtsources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{dtsources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} \item{x}{a character, the name of a numeric vector} } diff --git a/man/glmChecks.Rd b/man/glmChecks.Rd index 8d4983a83..ec482bed6 100644 --- a/man/glmChecks.Rd +++ b/man/glmChecks.Rd @@ -12,25 +12,25 @@ glmChecks(formula, data, offset, weights, datasources) \item{data}{a character, the name of an optional data frame containing the variables in in the \code{formula}.} -\item{offset}{null or a numreric vector that can be used to specify an a priori known component to be +\item{offset}{null or a numeric vector that can be used to specify an a priori known component to be included in the linear predictor during fitting.} \item{weights}{a character, the name of an optional vector of 'prior weights' to be used in the fitting process. Should be NULL or a numeric vector.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} } \value{ an integer 0 if check was passed and 1 if failed } \description{ This is an internal function required by the client function \code{ds.glm} -to verify all the variables and ensure the process does not halt inadvertanly. +to verify all the variables and ensure the process does not halt inadvertently. } \details{ the variables are checked to ensure they are defined, not empty (i.e. are not missing -at complete) and evantually (if 'offset' or 'weights') are of 'numeric' with non negative value +at complete) and eventually (if 'offset' or 'weights') are of 'numeric' with non-negative value (if 'weights'). 
} \author{ diff --git a/man/isAssigned.Rd b/man/isAssigned.Rd index 7136df107..d46bed497 100644 --- a/man/isAssigned.Rd +++ b/man/isAssigned.Rd @@ -7,8 +7,8 @@ isAssigned(datasources = NULL, newobj = NULL) } \arguments{ -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} \item{newobj}{a character, the name the object to look for.} } diff --git a/man/isDefined.Rd b/man/isDefined.Rd index 9153016b2..6b2d0f7e9 100644 --- a/man/isDefined.Rd +++ b/man/isDefined.Rd @@ -7,9 +7,9 @@ isDefined(datasources = NULL, obj = NULL, error.message = TRUE) } \arguments{ -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified, the default set of connections will be -used: see \code{\link{datashield.connections_default}}.} +used: see \code{\link[DSI]{datashield.connections_default}}.} \item{obj}{a character vector, the name of the object(s) to look for.} diff --git a/man/logical2int.Rd b/man/logical2int.Rd index 2baf8ebe5..14517f63d 100644 --- a/man/logical2int.Rd +++ b/man/logical2int.Rd @@ -16,7 +16,7 @@ an integer This is an internal function. } \details{ -This function is called to turn a logical oprator given as a +This function is called to turn a logical operator given as a character into an integer: '>' is turned into 1, '>=' into 2, '<' into 3, '<=' into 4, '==' into 5 and '!=' into 6. 
} diff --git a/man/meanByClassHelper0a.Rd b/man/meanByClassHelper0a.Rd index 083dc524d..ec7fed67e 100644 --- a/man/meanByClassHelper0a.Rd +++ b/man/meanByClassHelper0a.Rd @@ -13,10 +13,10 @@ meanByClassHelper0a(a, b, type, datasources) \item{type}{a character which represents the type of analysis to carry out. If \code{type} is set to 'combine', a pooled table of results is generated. If \code{type} is set to 'split', a table of results -is genrated for each study.} +is generated for each study.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} } \value{ a table or a list of tables that hold the length of the numeric variable and its mean diff --git a/man/meanByClassHelper0b.Rd b/man/meanByClassHelper0b.Rd index 57220e0ee..56dd89d10 100644 --- a/man/meanByClassHelper0b.Rd +++ b/man/meanByClassHelper0b.Rd @@ -15,10 +15,10 @@ meanByClassHelper0b(x, outvar, covar, type, datasources) \item{type}{a character which represents the type of analysis to carry out. If \code{type} is set to 'combine', a pooled table of results is generated. If \code{type} is set to 'split', a table of results -is genrated for each study.} +is generated for each study.} -\item{datasources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{datasources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} } \value{ a table or a list of tables that hold the length of the numeric variable(s) and their mean @@ -29,7 +29,7 @@ This is an internal function. } \details{ This function is called by the function 'ds.meanByClass' to produce the final tables -if the user soecify a table structure. +if the user specifies a table structure. } \author{ Gaye, A. diff --git a/man/meanByClassHelper1.Rd b/man/meanByClassHelper1.Rd index 8f1c2cd8e..648eb577c 100644 --- a/man/meanByClassHelper1.Rd +++ b/man/meanByClassHelper1.Rd @@ -7,8 +7,8 @@ meanByClassHelper1(dtsource, tables, variable, categories) } \arguments{ -\item{dtsource}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{dtsource}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} \item{tables}{a character vector, the tables to breakdown} diff --git a/man/meanByClassHelper2.Rd b/man/meanByClassHelper2.Rd index b7fc1bbb9..27a763d74 100644 --- a/man/meanByClassHelper2.Rd +++ b/man/meanByClassHelper2.Rd @@ -7,14 +7,14 @@ meanByClassHelper2(dtsources, tablenames, variables, invalidrecorder) } \arguments{ -\item{dtsources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{dtsources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. 
If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} \item{tablenames}{a character vector, the name of the subset tables} \item{variables}{a character vector, the names of the continuous variables to computes a mean for.} -\item{invalidrecorder}{a list, holds informations about invalid subsets in each study.} +\item{invalidrecorder}{a list, holds information about invalid subsets in each study.} } \value{ a matrix, a table which contains the length, mean and standard deviation of each of the @@ -25,7 +25,7 @@ This is an internal function. } \details{ This function is called by the function 'ds.meanByClass' to produce the final table -if the user sets the parmater 'type' to combine (the default behaviour of 'ds.meanByClass'). +if the user sets the parameter 'type' to combine (the default behaviour of 'ds.meanByClass'). } \author{ Gaye, A. diff --git a/man/meanByClassHelper3.Rd b/man/meanByClassHelper3.Rd index cb32fef22..ee80e814f 100644 --- a/man/meanByClassHelper3.Rd +++ b/man/meanByClassHelper3.Rd @@ -7,14 +7,14 @@ meanByClassHelper3(dtsources, tablenames, variables, invalidrecorder) } \arguments{ -\item{dtsources}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{dtsources}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} \item{tablenames}{a character vector, the name of the subset tables} \item{variables}{a character vector, the names of the continuous variables to computes a mean for.} -\item{invalidrecorder}{a list, holds informations about invalid subsets in each study} +\item{invalidrecorder}{a list, holds information about invalid subsets in each study} } \value{ a list which one results table for each study. 
@@ -24,7 +24,7 @@ This is an internal function. } \details{ This function is called by the function 'ds.meanByClass' to produce the final tables -if the user sets the parmater 'type' to 'split'. +if the user sets the parameter 'type' to 'split'. } \author{ Gaye, A. diff --git a/man/meanByClassHelper4.Rd b/man/meanByClassHelper4.Rd index 19a380b82..cbf0b67c1 100644 --- a/man/meanByClassHelper4.Rd +++ b/man/meanByClassHelper4.Rd @@ -13,8 +13,8 @@ meanByClassHelper4( ) } \arguments{ -\item{dtsource}{a list of \code{\link{DSConnection-class}} objects obtained after login. If the -the default set of connections will be used: see \link{datashield.connections_default}.} +\item{dtsource}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the +the default set of connections will be used: see \link[DSI]{datashield.connections_default}.} \item{alist}{the name of the list that holds the final subset tables} diff --git a/man/subsetHelper.Rd b/man/subsetHelper.Rd index d7b82266e..6922ec94b 100644 --- a/man/subsetHelper.Rd +++ b/man/subsetHelper.Rd @@ -7,9 +7,9 @@ subsetHelper(dts, data, rs = NULL, cs = NULL) } \arguments{ -\item{dts}{a list of \code{\link{DSConnection-class}} +\item{dts}{a list of \code{\link[DSI]{DSConnection-class}} objects obtained after login. If the \code{datasources} argument is not specified -the default set of connections will be used: see \code{\link{datashield.connections_default}}.} +the default set of connections will be used: see \code{\link[DSI]{datashield.connections_default}}.} \item{data}{a character string specifying the name of the data frame or the factor vector and the range of the subset.} diff --git a/opal_azure-pipelines.yml b/opal_azure-pipelines.yml new file mode 100644 index 000000000..6c6103d4a --- /dev/null +++ b/opal_azure-pipelines.yml @@ -0,0 +1,645 @@ +######################################################################################### +# DataSHIELD Azure test suite. 
+# Starts with a vanilla Opal docker composition, installs dsBase +# and dsBaseClient (as well as dependencies - including a fully functional +# Opal server). +# Does checks and tests then saves results to testStatus repo. +# +# Inside the root directory $(Pipeline.Workspace) will be a file tree like: +# /dsBaseClient <- Checked out version of datashield/dsBaseClient +# /testStatus <- Checked out version of datashield/testStatus +# /logs <- Where results of tests and lots are collated +# +# As of May 2020 this takes ~ 70 mins to run. +# As of Nov 2020 this takes ~ 120 mins to run. +# As of Mar 2024 this takes ~ 300+ mins to run! +# As of Mar 2024 this takes ~ 300+ mins to run! +# As of Jun 2024 this takes ~ 360+ mins to run! +# +# The only things that should ever be changed are the repo branches in the resources. +# +######################################################################################### + + +##################################################################################### +# These should all be constant, except test_filter. This can be used to test subsets +# of test files in the testthat directory. Options are like: +# '*' <- Run all tests +# 'ds.asNumeric*' <- Run all ds.asNumeric tests, i.e. all the arg, smk etc tests. +# '*_smk_*' <- Run all the smoke tests for all functions. 
+variables: + datetime: $[format('{0:yyyyMMddHHmmss}', pipeline.startTime)] + repoName: $(Build.Repository.Name) + projectName: 'dsBaseClient' + branchName: $(Build.SourceBranchName) + test_filter: '*' + _r_check_system_clock_: 0 + + +######################################################################################### +# Need to define all the GH repos and their access tokens, see: +# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml +resources: + repositories: + - repository: testStatusRepo + type: github + endpoint: datashield-testing + name: datashield/testStatus + ref: master + + +######################################################################################### +# When and under what condition to run the pipeline. +schedules: + - cron: "0 0 * * 0" + displayName: Weekly build - master + branches: + include: + - master + always: true + - cron: "0 2 * * *" + displayName: Nightly build - v6.3.4-dev + branches: + include: + - v6.3.4-dev + always: true + +######################################################################################### +# Jobs + +jobs: +- job: build_and_run_tests + timeoutInMinutes: 360 + pool: + vmImage: ubuntu-24.04 + + steps: + ##################################################################################### + # Checkout the source code to a subfolder. 
+ # This may give an error in the logs like: + # [warning]Unable move and reuse existing repository to required location + # This is an Azure bug - https://github.com/microsoft/azure-pipelines-yaml/issues/403 + - checkout: self + path: 'dsBaseClient' + + - checkout: testStatusRepo + path: 'testStatus' + persistCredentials: true + condition: and(eq(variables['Build.Repository.Name'], 'datashield/dsBaseClient'), ne(variables['Build.Reason'], 'PullRequest')) + + + ##################################################################################### + # The MySQL install that comes with the VM doesn't seem compatable with our set up + # so we delete it. + # If previous steps have failed then don't run. + - bash: | + + # Work-around for tempory Bazel's apt repository issue. + curl https://bazel.build/bazel-release.pub.gpg | sudo apt-key add - + + # Purge the default mysql installed on the VM as it is incompatible with our stuff. + sudo service mysql stop + sudo apt-get update + sudo apt-get remove --purge mysql-client mysql-server mysql-common -y + sudo apt-get purge mysql-client mysql-server mysql-common -y + sudo apt-get autoremove -y + sudo apt-get autoclean -y + sudo rm -rf /var/lib/mysql/ + + displayName: 'Uninstall default MySQL' + condition: succeeded() + + + ##################################################################################### + # The Azure VMs have 2 CPUs, so configure R to use both when compile/install packages. + # If previous steps have failed then don't run. + - bash: | + + echo "options(Ncpus=4)" >> ~/.Rprofile + + displayName: 'Tweak local R env using .Rprofile' + condition: succeeded() + + + ##################################################################################### + # Install R and all the dependencies dsBaseClient requires. + # If previous steps have failed then don't run. 
+ - bash: | + sudo apt-get install --no-install-recommends software-properties-common dirmngr + wget -qO- https://cloud.r-project.org/bin/linux/ubuntu/marutter_pubkey.asc | sudo tee -a /etc/apt/trusted.gpg.d/cran_ubuntu_key.asc + sudo add-apt-repository "deb https://cloud.r-project.org/bin/linux/ubuntu $(lsb_release -cs)-cran40/" + sudo apt-get update -qq + sudo apt-get upgrade -y + + sudo apt-get install -qq libxml2-dev libcurl4-openssl-dev libssl-dev libgsl-dev libgit2-dev r-base -y + sudo apt-get install -qq libharfbuzz-dev libfribidi-dev libmagick++-dev libudunits2-dev -y + sudo R -q -e "install.packages(c('curl','httr'), dependencies=TRUE, repos='https://cloud.r-project.org')" + sudo R -q -e "install.packages(c('devtools','covr'), dependencies=TRUE, repos='https://cloud.r-project.org')" + sudo R -q -e "install.packages(c('fields','meta','metafor','ggplot2','gridExtra','data.table'), dependencies=TRUE, repos='https://cloud.r-project.org')" + sudo R -q -e "install.packages(c('DSI','DSOpal','DSLite'), dependencies=TRUE, repos='https://cloud.r-project.org')" + sudo R -q -e "install.packages(c('MolgenisAuth', 'MolgenisArmadillo', 'DSMolgenisArmadillo'), dependencies=TRUE, repos='https://cloud.r-project.org')" + sudo R -q -e "install.packages(c('DescTools','e1071'), dependencies=TRUE, repos='https://cloud.r-project.org')" + + sudo R -q -e "library('devtools'); devtools::install_github(repo='datashield/dsDangerClient', ref='6.3.4', dependencies = TRUE)" + + # XML grep for coverage report merging + sudo apt-get install -qq xml-twig-tools -y + + displayName: 'Install all dependencies for dsBaseClient' + condition: succeeded() + + + ##################################################################################### + # Check that the man files in the repo match what is in the function headers. i.e. has + # devtools::document() been run before commiting? + # If previous steps have failed then don't run. 
+ # If this step fails still mark as failed, but don't stop the rest of the steps running. + - bash: | + + # Concatenate all the files in the man dir into one long string and md5sum it. + orig_sum=$(find man -type f | sort -u | xargs cat | md5sum) + + # Rebuild the documentation. + R -e "devtools::document()" + + # Concatenate all the files in the man dir into one long string and md5sum it. + new_sum=$(find man -type f | sort -u | xargs cat | md5sum) + + if [ "$orig_sum" != "$new_sum" ]; then + echo "Your committed manual files (man/*.Rd) are out of sync with the documentation in the R files." + echo "Run devtools::document() locally then commit again." + exit 1 + else + echo "Documentation up to date." + exit 0 + fi + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + displayName: 'Check manual updated before being committed' + condition: succeeded() + continueOnError: true + + + ##################################################################################### + # Run devtools::check on the checked out source code. + # If previous steps have failed then don't run. + # If this step fails still mark as failed, but don't stop the rest of the steps running. + - bash: | + + R -q -e "library('devtools'); devtools::check(args = c('--no-examples', '--no-tests'))" | tee azure-pipelines_check.Rout + grep --quiet "^0 errors" azure-pipelines_check.Rout && grep --quiet " 0 warnings" azure-pipelines_check.Rout && grep --quiet " 0 notes" azure-pipelines_check.Rout + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + displayName: 'Devtools checks' + condition: succeeded() + continueOnError: true + + ##################################################################################### + # + # Opal phase + # + ##################################################################################### + + ##################################################################################### + # Deploy docker for Opal. + # If previous steps have failed then don't run. 
+ - task: DockerCompose@1 + inputs: + action: Run Services + dockerComposeFile: ../dsBaseClient/docker-compose_opal.yml + projectName: dsbaseclient + qualifyImageNames: true + buildImages: true + abortOnContainerExit: true + detached: true + displayName: 'Install Opal servers (opal, rserver, mongodb)' + condition: succeeded() + + + ##################################################################################### + # Install test datasets. + # If previous steps have failed then don't run. + - bash: | + sleep 60 + + R -q -f "obiba_opal-upload_testing_datasets.R" + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient/tests/testthat/data_files + displayName: 'Install test datasets to Opal' + condition: succeeded() + + + ##################################################################################### + # Install dsBase. + # If previous steps have failed then don't run. + - bash: | + R -q -e "library(opalr); opal <- opal.login(username = 'administrator', password = 'datashield_test&', url = 'https://localhost:8443', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); opal.put(opal, 'system', 'conf', 'general', '_rPackage'); opal.logout(o)" + + R -q -e "library(opalr); opal <- opal.login('administrator','datashield_test&', url='https://localhost:8443/', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); dsadmin.install_github_package(opal, 'dsBase', username = 'datashield', ref = 'v6.3.4-dev'); opal.logout(opal)" + + sleep 60 + + R -q -e "library(opalr); opal <- opal.login('administrator','datashield_test&', url='https://localhost:8443/', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); dsadmin.set_option(opal, 'default.datashield.privacyControlLevel', 'permissive'); opal.logout(opal)" + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient/tests/testthat/data_files + displayName: 'Install dsBase to Opal, as set disclosure test options' + condition: succeeded() + + + ##################################################################################### + # 
Essentially run devtools::test() on the checked out code. This is wrapped up with + # code coverage. The actual command is vary convoluted as it had to do some things + # which are not default behaviour: output the results to a JUnit xml file, not stop + # when a small number of errors have happened, run through the code coverage tool. + # TODO: Tidy up variable names - use timestamps here. + # TODO: Why is DSLite needed for this to run?! + - bash: | + + # There is an issue with the way we are using packages. The wrapped up test command + # below fails in a way that implies that it is not installed. I cannot figure out + # why this is case. As a work around we can run some of the functions below. My + # best guess is that there is an implicit build or similar that happens. Although + # I cannot replicate that directly with build etc directly. + + sudo R --verbose -e 'devtools::reload()' + + mkdir $(Pipeline.Workspace)/logs + + # run the coverage tool and output to coveragelist.csv + # testthat::testpackage uses a MultiReporter, comprised of a ProgressReporter and JunitReporter + # R output and messages are redirected by sink() to test_console_output.txt + # junit reporter output is to test_results.xml + # + # "_-|arg-|smk-|datachk-|disc-|math-|expt-|expt_smk-" + # testthat::test_package("$(projectName)", filter = "_-|datachk-|smk-|arg-|disc-|perf-|smk_expt-|expt-|math-", reporter = multi_rep, stop_on_failure = FALSE) + sudo R -q -e ' + library(covr); + dsbase.res <- covr::package_coverage( + type = c("none"), + code = c( + '"'"' + library(testthat); + output_file <- file("test_console_output_dsbase.txt"); + sink(output_file); + sink(output_file, type = "message"); + library(testthat); + junit_rep <- JunitReporter$new(file = "test_results_dsbase.xml"); + progress_rep <- ProgressReporter$new(max_failures = 999999); + multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); + options("datashield.return_errors" = FALSE); + options("default_driver" = 
"OpalDriver"); + testthat::test_package("$(projectName)", filter = "_-|datachk-|smk-|arg-|disc-|perf-|smk_expt-|expt-|math-", reporter = multi_rep, stop_on_failure = FALSE) + '"'"' + ) + ); + base::saveRDS(dsbase.res, "test_results_dsbase.rds")' + + # display the test console output + cat test_console_output_dsbase.txt + + grep --quiet " FAIL 0 " test_console_output_dsbase.txt + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + displayName: 'Code coverage and JUnit report output, with dsBase' + condition: succeeded() + + + ##################################################################################### + # Parse the JUnit file to see if there are any errors/warnings. If there are then + # echo them so finding bugs should be easier. + # This should run even if previous steps have failed. + - bash: | + + # Strip out when error and failure = 0 and count the number of times it does not. + issue_count=$(sed 's/failures="0" errors="0"//' test_results_dsbase.xml | sed 's/errors="0" failures="0"//' | grep --count errors=) + echo "Number of testsuites with issues: "$issue_count + echo "Testsuites with issues:" + sed 's/failures="0" errors="0"//' test_results_dsbase.xml | sed 's/errors="0" failures="0"//' | grep errors= > issues.log + cat issues.log + exit $issue_count + + workingDirectory: $(Pipeline.Workspace)/logs + displayName: 'Check for errors & Failures in JUnit file' + condition: succeededOrFailed() + + + ##################################################################################### + # Essentially run devtools::test() on the checked out code for discctrl reporting. + # The actual command is vary convoluted as it had to do some things + # which are not default behaviour: output the results to a JUnit xml file, not stop + # when a small number of errors have happened, run through the code coverage tool. + # TODO: Tidy up variable names - use timestamps here. 
+ #- bash: | + + # junit reporter output is to test_results_discctrl.xml + # sudo R -q -e ' + # library(testthat); + # output_file <- file("test_console_output_discctrl.txt"); + # sink(output_file); + # sink(output_file, type = "message"); + # junit_rep <- JunitReporter$new(file = "test_results_discctrl.xml"); + # progress_rep <- ProgressReporter$new(max_failures = 999999); + # multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); + # default_driver <- "OpalDriver"; + # testthat::test_package("$(projectName)", filter = "_-|discctrl-", reporter = multi_rep, stop_on_failure = FALSE)' + + # cat test_console_output_discctrl.txt + + # if [ -e test_results_discctrl.xml ]; then + # mv test_results_discctrl.xml $(Pipeline.Workspace)/logs + # else + # touch $(Pipeline.Workspace)/logs/test_results_discctrl.xml + # fi + + # workingDirectory: $(Pipeline.Workspace)/dsBaseClient + # displayName: 'discctrl report output' + # condition: succeededOrFailed() + + + ##################################################################################### + # Install dsDanger on Opal server + # If previous steps have failed then don't run + - bash: | + + R -q -e "library(opalr); opal <- opal.login(username = 'administrator', password = 'datashield_test&', url = 'https://localhost:8443', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); opal.put(opal, 'system', 'conf', 'general', '_rPackage'); opal.logout(o)" + + R -q -e "library(opalr); opal <- opal.login('administrator','datashield_test&', url='https://localhost:8443/', opts = list(ssl_verifyhost=0, ssl_verifypeer=0)); dsadmin.install_github_package(opal, 'dsDanger', username = 'datashield', ref = '6.3.4'); opal.logout(opal)" + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + displayName: 'Install dsDanger package on Opal server' + condition: succeeded() + + + ##################################################################################### + # Essentially run devtools::test() on the checked out code. 
This is wrapped up with + # code coverage. The actual command is vary convoluted as it had to do some things + # which are not default behaviour: output the results to a JUnit xml file, not stop + # when a small number of errors have happened, run through the code coverage tool. + # TODO: Tidy up variable names - use timestamps here. + - bash: | + + # See, 'Code coverage and JUnit report output' for issues with the approach and improvement needed. + sudo R --verbose -e 'devtools::reload()' + + mkdir $(Pipeline.Workspace)/logs + + # run the coverage tool and output to coveragelist.csv + # testthat::testpackage uses a MultiReporter, comprised of a ProgressReporter and JunitReporter + # R output and messages are redirected by sink() to test_console_output.txt + # junit reporter output is to test_results.xml + sudo R -q -e ' + library(covr); + dsdanger.res <- covr::package_coverage( + type = c("none"), + code = c( + '"'"' + library(testthat); + output_file <- file("test_console_output_dsdanger.txt"); + sink(output_file); + sink(output_file, type = "message"); + library(testthat); + junit_rep <- JunitReporter$new(file = "test_results_dsdanger.xml"); + progress_rep <- ProgressReporter$new(max_failures = 999999); + multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); + options("datashield.return_errors" = FALSE); + options("default_driver" = "OpalDriver"); + testthat::test_package("$(projectName)", filter = "__dgr-|datachk_dgr-|smk_dgr-|arg_dgr-|disc_dgr-|smk_expt_dgr-|expt_dgr-|math_dgr-", reporter = multi_rep, stop_on_failure = FALSE) + '"'"' + ) + ); + base::saveRDS(dsdanger.res, "test_results_dsdanger.rds")' + + # Merge coverage results + cat test_results_dsbase.txt test_results_dsdanger.txt > $(Pipeline.Workspace)/logs/test_console_output.txt + xml_grep --pretty_print indented --wrap "testsuites" --descr "" --cond "testsuite" test_results_dsbase.xml test_results_dsdanger.xml > test_results.xml + + # Create 'coveragelist.csv' + sudo R -q -e ' + 
library(covr); + dsbase.res <- base::readRDS("test_results_dsbase.rds") + write.csv( + coverage_to_list( + dsbase.res + ), + "coveragelist.csv" + )' + + # display the test console output + cat test_console_output_dsdanger.txt + + mv coveragelist.csv $(Pipeline.Workspace)/logs + mv test_results.xml $(Pipeline.Workspace)/logs + + grep --quiet " FAIL 0 " test_console_output_dsdanger.txt + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + displayName: 'Code coverage and JUnit report output, with dsBase and dsDanger' + condition: succeeded() + + + ##################################################################################### + # Parse the JUnit file to see if there are any errors/warnings. If there are then + # echo them so finding bugs should be easier. + # This should run even if previous steps have failed. + - bash: | + + # Strip out when error and failure = 0 and count the number of times it does not. + issue_count=$(sed 's/failures="0" errors="0"//' test_results.xml | sed 's/errors="0" failures="0"//' | grep --count errors=) + echo "Number of testsuites with issues: "$issue_count + echo "Testsuites with issues:" + sed 's/failures="0" errors="0"//' test_results.xml | sed 's/errors="0" failures="0"//' | grep errors= > issues.log + cat issues.log + exit $issue_count + + workingDirectory: $(Pipeline.Workspace)/logs + displayName: 'Check for errors & Failures in JUnit file' + condition: succeededOrFailed() + + + ##################################################################################### + # Essentially run devtools::test() on the checked out code for bug reporting. + # The actual command is vary convoluted as it had to do some things + # which are not default behaviour: output the results to a JUnit xml file, not stop + # when a small number of errors have happened, run through the code coverage tool. + # TODO: Tidy up variable names - use timestamps here. 
+# - bash: | + + # junit reporter output is to test_results_bug.xml + # sudo R -q -e ' + # library(testthat); + # output_file <- file("test_console_output_bug.txt"); + # sink(output_file); + # sink(output_file, type = "message"); + # junit_rep <- JunitReporter$new(file = "test_results_bug.xml"); + # progress_rep <- ProgressReporter$new(max_failures = 999999); + # multi_rep <- MultiReporter$new(reporters = list(progress_rep, junit_rep)); + # options("default_driver" = "OpalDriver"); + # testthat::test_package("$(projectName)", filter = "__bug-|datachk_bug-|smk_bug-|arg_bug-|disc_bug-|smk_expt_bug-|expt_bug-|math_bug-", reporter = multi_rep, stop_on_failure = FALSE)' + + # cat test_console_output_bug.txt + + # if [ -e test_results_bug.xml ]; then + # mv test_results_bug.xml $(Pipeline.Workspace)/logs + # else + # touch $(Pipeline.Workspace)/logs/test_results_bug.xml + # fi + +# workingDirectory: $(Pipeline.Workspace)/dsBaseClient +# displayName: 'Bug report output' +# condition: succeededOrFailed() + + + ##################################################################################### + # Parse the JUnit file to see if there are any errors/warnings. If there are then + # echo them so finding bugs should be easier. + # This should run even if previous steps have failed. + - bash: | + + # Strip out when error and failure = 0 and count the number of times it does not. 
+ issue_count=$(sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep --count errors=) + echo "Number of testsuites with issues: "$issue_count + echo "Testsuites with issues:" + sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep errors= > issues.log + cat issues.log + no_issue_count=$(sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep -v --count errors=) + echo + echo "Number of testsuites with no issues: "$no_issue_count + echo "Testsuites with issues:" + sed 's/failures="0" errors="0"//' test_results_bug.xml | sed 's/errors="0" failures="0"//' | grep " no_issues.log + cat no_issues.log + exit 0 + + workingDirectory: $(Pipeline.Workspace)/logs + displayName: 'Bug summary report output' + condition: succeededOrFailed() + + + ##################################################################################### + # 'Down' Opal Docker Composition + - bash: | + + docker compose -f docker-compose_opal.yml down -v + + workingDirectory: $(Pipeline.Workspace)/dsBaseClient + displayName: 'Down Opal Docker Composition' + condition: succeeded() + + ##################################################################################### + # Windup phase + ##################################################################################### + + ##################################################################################### + # Output some important version numbers to file. This gets added to the testStatus + # commit so it can be parsed and used on the status table. 
+ - bash: | + + echo 'branch:'$(branchName) >> $(datetime).txt + echo 'os:'$(lsb_release -ds) >> $(datetime).txt + echo 'R:'$(R --version | head -n 1) >> $(datetime).txt + echo 'opal:'$(opal system --opal localhost:8443 --user administrator --password "datashield_test&" --version) >> $(datetime).txt + + workingDirectory: $(Pipeline.Workspace)/logs + displayName: 'Write versions to file' + condition: succeededOrFailed() + + + ##################################################################################### + # Checkout the testStatus repo, add the results from here, push back to GH. + # TODO: Automatically pull in better email/name info from somewhere. + # TODO: More debug info in commit message + - bash: | + + # Git needs some config set to be able to push to a repo. + git config --global user.email "you@example.com" + git config --global user.name "Azure pipeline" + + # This repo is checked out in detatched head state, so reconnect it here. + git checkout master + + # It is possible that other commits have been made to the testStatus repo since it + # was checked out. i.e. other pipeline runs might have finished. + git pull + + # Make the directories if they dont already exist + mkdir --parents logs/$(projectName)/$(branchName) + mkdir --parents docs/$(projectName)/$(branchName)/latest + + cp $(Pipeline.Workspace)/logs/coveragelist.csv logs/$(projectName)/$(branchName)/ + cp $(Pipeline.Workspace)/logs/coveragelist.csv logs/$(projectName)/$(branchName)/$(datetime).csv + + cp $(Pipeline.Workspace)/logs/test_results.xml logs/$(projectName)/$(branchName)/ + cp $(Pipeline.Workspace)/logs/test_results.xml logs/$(projectName)/$(branchName)/$(datetime).xml + + cp $(Pipeline.Workspace)/logs/$(datetime).txt logs/$(projectName)/$(branchName)/ + + # Run the script to parse the results and build the html pages. 
+ # status.py JUnit_file.xml coverage_file.csv output_file.html local_repo_path remote_repo_name branch + source/status.py logs/$(projectName)/$(branchName)/$(datetime).xml logs/$(projectName)/$(branchName)/$(datetime).csv logs/$(projectName)/$(branchName)/$(datetime).txt status.html $(Pipeline.Workspace)/$(projectName) $(projectName) $(branchName) + + cp status.html docs/$(projectName)/$(branchName)/latest/index.html + git add logs/$(projectName)/$(branchName)/coveragelist.csv + git add logs/$(projectName)/$(branchName)/test_results.xml + git add logs/$(projectName)/$(branchName)/$(datetime).xml + git add logs/$(projectName)/$(branchName)/$(datetime).csv + git add logs/$(projectName)/$(branchName)/$(datetime).txt + git add docs/$(projectName)/$(branchName)/latest/index.html + + git commit -m "Azure auto test for $(projectName)/$(branchName) @ $(datetime)" -m "Debug info:\nProjectName:$(projectName)\nBranchName:$(branchName)\nDataTime:$(datetime)" + git push + exit 0 + + workingDirectory: $(Pipeline.Workspace)/testStatus + displayName: 'Parse test results' + condition: and(eq(variables['Build.Repository.Name'], 'datashield/dsBaseClient'), ne(variables['Build.Reason'], 'PullRequest')) + + + ##################################################################################### + # Output the environment information to the console. This is useful for debugging. + # Always do this, even if some of the above has failed or the job has been cacelled. 
+ - bash: | + + echo 'BranchName: '$(branchName) + echo 'ProjectName: '$(projectName) + echo 'RepoName: '$(repoName) + + echo -e "\n#############################" + echo -e "ls /: ######################" + ls $(Pipeline.Workspace) + + echo -e "\n#############################" + echo -e "lscpu: ######################" + lscpu + + echo -e "\n#############################" + echo -e "memory: #####################" + free -m + + echo -e "\n#############################" + echo -e "env: ########################" + env + + echo -e "\n#############################" + echo -e "Puppet version: #############" + /opt/puppetlabs/bin/puppet --version + /opt/puppetlabs/puppet/bin/r10k version + + echo -e "\n#############################" + echo -e "Rprofile: ###################" + cat $(Pipeline.Workspace)/dsBaseClient/.Rprofile + + echo -e "\n#############################" + echo -e "R installed.packages(): #####" + R -e 'installed.packages()' + + echo -e "\n#############################" + echo -e "R sessionInfo(): ############" + R -e 'sessionInfo()' + + sudo apt install tree -y + pwd + echo -e "\n#############################" + echo -e "File tree: ##################" + tree $(Pipeline.Workspace) + + displayName: 'Environment info' + condition: always() diff --git a/tests/docker/armadillo/standard/config/application.yml b/tests/docker/armadillo/standard/config/application.yml new file mode 100644 index 000000000..12b78ec82 --- /dev/null +++ b/tests/docker/armadillo/standard/config/application.yml @@ -0,0 +1,79 @@ +# NOTE these settings should be similar to application.template.yml + +armadillo: + # DO NOT CHANGE + docker-management-enabled: false + # DO NOT CHANGE + docker-run-in-container: true + + # when running the R containers from a docker-compose.yml they get prefixes based on the directory name of the + # docker-compose.yml file ie armadillo-dev-" + profileName + "-1". 
Same goes for Armadillo ie armadillo-dev-armadillo-1" + container-prefix: 'dev-' + + # uncomment this to configure an oidc user as admin user + # oidc-admin-user: user@yourdomain.org + profiles: + - name: default + image: datashield/rock-omicron-karma:devel + port: 8085 + host: default + package-whitelist: # Packages for 'permissive' + - dsBase + - dsMediation + - dsMTLBase + - dsSurvival + - dsTidyverse + - dsExposome + - dsOmics + - resourcer + function-blacklist: [ ] + options: + datashield: + # the seed can only be 9 digits + seed: 342325352 + +# required settings: +spring: + security: + user: + # please change this admin password! + password: admin + ## uncomment oauth2 settings below to enable oidcgit + # oauth2: + # client: + # provider: + # molgenis: + # issuer-uri: 'http://auth.molgenis.org' + # registration: + # molgenis: + # client-id: '...' + # client-secret: '...' + # resourceserver: + # jwt: + # issuer-uri: 'http://auth.molgenis.org' + # opaquetoken: + # client-id: '...' + + # optional settings (review spring handbook to find more): + servlet: + multipart: + ## change this if your files are bigger + max-file-size: 1000MB + max-request-size: 1000MB + +storage: + ## to change location of the data storage + root-dir: /data + +# Match with Dockerfile volume /logs +audit.log.path: '/logs/audit.log' + +stdout.log.path: '/logs/armadillo.log' + +logging: + level: + root: INFO + ## change to DEBUG to have more details, typically when developing + org.molgenis: INFO + ## Don't log upload data + org.apache.coyote.http11.Http11InputBuffer: INFO diff --git a/tests/testthat.R b/tests/testthat.R new file mode 100644 index 000000000..3e6bbe151 --- /dev/null +++ b/tests/testthat.R @@ -0,0 +1,12 @@ +# This file is part of the standard setup for testthat. +# It is recommended that you do not modify it. +# +# Where should you do additional test configuration? 
+# Learn more about the roles of various files in: +# * https://r-pkgs.org/testing-design.html#sec-tests-files-overview +# * https://testthat.r-lib.org/articles/special-files.html + +library(testthat) +library(dsBaseClient) + +test_check("dsBaseClient") diff --git a/tests/testthat/connection_to_datasets/login_details.R b/tests/testthat/connection_to_datasets/login_details.R index ba5bae060..2ce4ca2de 100644 --- a/tests/testthat/connection_to_datasets/login_details.R +++ b/tests/testthat/connection_to_datasets/login_details.R @@ -4,7 +4,7 @@ source("connection_to_datasets/init_local_settings.R") init.ip.address() -# create blank enviroment of test data +# create blank environment of test data ds.test_env <- new.env() # this option helps DSI to find the connection objects by looking in the right environment @@ -16,8 +16,8 @@ if (! is.null(getOption("default_driver"))) { ds.test_env$driver <- getOption("default_driver") } else { # switch between "DSLiteDriver" and "OpalDriver", "ArmadilloDriver" to test - # ds.test_env$driver <- "DSLiteDriver" - ds.test_env$driver <- "OpalDriver" + ds.test_env$driver <- "DSLiteDriver" + # ds.test_env$driver <- "OpalDriver" # ds.test_env$driver <- "ArmadilloDriver" } diff --git a/tests/testthat/dstest_functions/ds_expect_variables.R b/tests/testthat/dstest_functions/ds_expect_variables.R index 15c3eafeb..abb504c78 100644 --- a/tests/testthat/dstest_functions/ds_expect_variables.R +++ b/tests/testthat/dstest_functions/ds_expect_variables.R @@ -12,11 +12,10 @@ # Tests # -ds_expect_variables <- function(expected.variables) -{ - studies.current.varables <- ds.ls() +ds_expect_variables <- function(expected.variables) { + studies.current.variables <- ds.ls() - for (study.current.varables in studies.current.varables) { - expect_setequal(study.current.varables$objects.found, expected.variables) + for (study.current.variables in studies.current.variables) { + expect_setequal(study.current.variables$objects.found, expected.variables) } } diff 
--git a/tests/testthat/perf_files/armadillo_azure-pipeline.csv b/tests/testthat/perf_files/armadillo_azure-pipeline.csv new file mode 100644 index 000000000..186e1d583 --- /dev/null +++ b/tests/testthat/perf_files/armadillo_azure-pipeline.csv @@ -0,0 +1,14 @@ +"refer_name","rate","lower_tolerance","upper_tolerance" +"conndisconn::perf::simple0","0.1275","0.5","2" +"ds.abs::perf::0","4.824","0.5","2" +"ds.asInteger::perf:0","4.366","0.5","2" +"ds.asList::perf:0","9.787","0.5","2" +"ds.asNumeric::perf:0","4.316","0.5","2" +"ds.assign::perf::0","8.055","0.5","2" +"ds.class::perf::combine:0","9.847","0.5","2" +"ds.colnames::perf:0","7.574","0.5","2" +"ds.exists::perf::combine:0","19.84","0.5","2" +"ds.length::perf::combine:0","19.58","0.5","2" +"ds.mean::perf::combine:0","19.66","0.5","2" +"ds.mean::perf::split:0","19.21","0.5","2" +"void::perf::void::0","41810.0","0.5","2" diff --git a/tests/testthat/perf_files/armadillo_hp-laptop_quay.csv b/tests/testthat/perf_files/armadillo_hp-laptop_quay.csv new file mode 100644 index 000000000..6c6a3f337 --- /dev/null +++ b/tests/testthat/perf_files/armadillo_hp-laptop_quay.csv @@ -0,0 +1,14 @@ +"refer_name","rate","lower_tolerance","upper_tolerance" +"conndisconn::perf::simple0","0.06225260028207","0.5","2" +"ds.abs::perf::0","2.602332538044","0.5","2" +"ds.asInteger::perf:0","2.16574645464856","0.5","2" +"ds.asList::perf:0","5.125787987349","0.5","2" +"ds.asNumeric::perf:0","2.02140793909654","0.5","2" +"ds.assign::perf::0","4.25379741119624","0.5","2" +"ds.class::perf::combine:0","5.03264832363257","0.5","2" +"ds.colnames::perf:0","3.61308626946146","0.5","2" +"ds.exists::perf::combine:0","8.47538336211864","0.5","2" +"ds.length::perf::combine:0","9.49818979827918","0.5","2" +"ds.mean::perf::combine:0","9.66558925664494","0.5","2" +"ds.mean::perf::split:0","8.6304479562724","0.5","2" +"void::perf::void::0","19351.7615914652","0.5","2" diff --git a/tests/testthat/perf_files/default_perf_profile.csv 
b/tests/testthat/perf_files/default_perf_profile.csv new file mode 100644 index 000000000..cff242360 --- /dev/null +++ b/tests/testthat/perf_files/default_perf_profile.csv @@ -0,0 +1,14 @@ +"refer_name","rate","lower_tolerance","upper_tolerance" +"conndisconn::perf::simple0","0.2118","0.5","2" +"ds.abs::perf::0","1.718","0.5","2" +"ds.asInteger::perf:0","1.484","0.5","2" +"ds.asList::perf:0","3.050","0.5","2" +"ds.asNumeric::perf:0","1.508","0.5","2" +"ds.assign::perf::0","3.547","0.5","2" +"ds.class::perf::combine:0","3.061","0.5","2" +"ds.colnames::perf:0","2.456","0.5","2" +"ds.exists::perf::combine:0","6.904","0.5","2" +"ds.length::perf::combine:0","6.058","0.5","2" +"ds.mean::perf::combine:0","5.892","0.5","2" +"ds.mean::perf::split:0","6.881","0.5","2" +"void::perf::void::0","27070.0","0.5","2" diff --git a/tests/testthat/perf_files/dslite_hp-laptop_quay.csv b/tests/testthat/perf_files/dslite_hp-laptop_quay.csv new file mode 100644 index 000000000..5d552ae05 --- /dev/null +++ b/tests/testthat/perf_files/dslite_hp-laptop_quay.csv @@ -0,0 +1,14 @@ +"refer_name","rate","lower_tolerance","upper_tolerance" +"conndisconn::perf::simple0","1.28701068653782","0.5","2" +"ds.abs::perf::0","9.58736733800142","0.5","2" +"ds.asInteger::perf:0","8.50891366904172","0.5","2" +"ds.asList::perf:0","16.5820946915488","0.5","2" +"ds.asNumeric::perf:0","9.11018116242571","0.5","2" +"ds.assign::perf::0","20.7627800741047","0.5","2" +"ds.class::perf::combine:0","16.4968034024194","0.5","2" +"ds.colnames::perf:0","12.2919611789594","0.5","2" +"ds.exists::perf::combine:0","41.3622556042039","0.5","2" +"ds.length::perf::combine:0","41.0818690662793","0.5","2" +"ds.mean::perf::combine:0","40.9888639028757","0.5","2" +"ds.mean::perf::split:0","41.2851424288384","0.5","2" +"void::perf::void::0","24346.5613457168","0.5","2" diff --git a/tests/testthat/perf_files/opal_azure-pipeline.csv b/tests/testthat/perf_files/opal_azure-pipeline.csv new file mode 100644 index 000000000..cff242360 --- 
/dev/null +++ b/tests/testthat/perf_files/opal_azure-pipeline.csv @@ -0,0 +1,14 @@ +"refer_name","rate","lower_tolerance","upper_tolerance" +"conndisconn::perf::simple0","0.2118","0.5","2" +"ds.abs::perf::0","1.718","0.5","2" +"ds.asInteger::perf:0","1.484","0.5","2" +"ds.asList::perf:0","3.050","0.5","2" +"ds.asNumeric::perf:0","1.508","0.5","2" +"ds.assign::perf::0","3.547","0.5","2" +"ds.class::perf::combine:0","3.061","0.5","2" +"ds.colnames::perf:0","2.456","0.5","2" +"ds.exists::perf::combine:0","6.904","0.5","2" +"ds.length::perf::combine:0","6.058","0.5","2" +"ds.mean::perf::combine:0","5.892","0.5","2" +"ds.mean::perf::split:0","6.881","0.5","2" +"void::perf::void::0","27070.0","0.5","2" diff --git a/tests/testthat/perf_files/opal_hp-laptop_quay.csv b/tests/testthat/perf_files/opal_hp-laptop_quay.csv new file mode 100644 index 000000000..334cd62c1 --- /dev/null +++ b/tests/testthat/perf_files/opal_hp-laptop_quay.csv @@ -0,0 +1,14 @@ +"refer_name","rate","lower_tolerance","upper_tolerance" +"conndisconn::perf::simple0","0.147643461923159","0.5","2" +"ds.abs::perf::0","0.631818039001181","0.5","2" +"ds.asInteger::perf:0","0.675696161933654","0.5","2" +"ds.asList::perf:0","1.59078428438764","0.5","2" +"ds.asNumeric::perf:0","0.692813012683229","0.5","2" +"ds.assign::perf::0","1.89351857736982","0.5","2" +"ds.class::perf::combine:0","1.62870246867488","0.5","2" +"ds.colnames::perf:0","1.32209430785405","0.5","2" +"ds.exists::perf::combine:0","3.45004426293124","0.5","2" +"ds.length::perf::combine:0","2.78832377100152","0.5","2" +"ds.mean::perf::combine:0","2.7801284055162","0.5","2" +"ds.mean::perf::split:0","3.67443474363821","0.5","2" +"void::perf::void::0","18974.1385397392","0.5","2" diff --git a/tests/testthat/perf_files/template_perf_profile.csv b/tests/testthat/perf_files/template_perf_profile.csv new file mode 100644 index 000000000..c01b6fec1 --- /dev/null +++ b/tests/testthat/perf_files/template_perf_profile.csv @@ -0,0 +1 @@ 
+"refer_name","rate","lower_tolerance","upper_tolerance" diff --git a/tests/testthat/perf_tests/perf_rate.R b/tests/testthat/perf_tests/perf_rate.R new file mode 100644 index 000000000..0384bf637 --- /dev/null +++ b/tests/testthat/perf_tests/perf_rate.R @@ -0,0 +1,49 @@ +#------------------------------------------------------------------------------- +# Copyright (c) 2024-2025 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +#------------------------------------------------------------------------------- + +.perf.reference.filename <- 'perf_files/default_perf_profile.csv' + +.perf.reference <- NULL + +.load.pref <- function() { + .perf.reference <<- read.csv(.perf.reference.filename, header = TRUE, sep = ",") +} + +perf.reference.save <- function(perf.ref.name, rate, tolerance.lower, tolerance.upper) { + if (is.null(.perf.reference)) + .load.pref() + + .perf.reference[nrow(.perf.reference)+1,] <- c(perf.ref.name, rate, tolerance.lower, tolerance.upper) + + write.csv(.perf.reference, .perf.reference.filename, row.names = FALSE) + + .perf.reference <<- .perf.reference +} + +perf.reference.rate <- function(perf.ref.name) { + if (is.null(.perf.reference)) + .load.pref() + + return(as.numeric(.perf.reference[which(.perf.reference$refer_name == perf.ref.name),]$rate)) +} + +perf.reference.tolerance.lower <- function(perf.ref.name) { + if (is.null(.perf.reference)) + .load.pref() + + return(as.numeric(.perf.reference[which(.perf.reference$refer_name == perf.ref.name),]$lower_tolerance)) +} + +perf.reference.tolerance.upper <- function(perf.ref.name) { + if (is.null(.perf.reference)) + .load.pref() + + return(as.numeric(.perf.reference[which(.perf.reference$refer_name == perf.ref.name),]$upper_tolerance)) +} diff --git 
a/tests/testthat/setup.R b/tests/testthat/setup.R index bf5d730c1..4c55c6e74 100644 --- a/tests/testthat/setup.R +++ b/tests/testthat/setup.R @@ -26,6 +26,7 @@ library(DSMolgenisArmadillo) library(DSLite) source("dstest_functions/ds_expect_variables.R") +source("perf_tests/perf_rate.R") source("connection_to_datasets/login_details.R") source("connection_to_datasets/init_testing_datasets.R") source("connection_to_datasets/init_studies_datasets.R") diff --git a/tests/testthat/test-arg-ds.dataFrameSort.R b/tests/testthat/test-arg-ds.dataFrameSort.R index 35d9ffec0..65a1c2cfe 100644 --- a/tests/testthat/test-arg-ds.dataFrameSort.R +++ b/tests/testthat/test-arg-ds.dataFrameSort.R @@ -25,9 +25,9 @@ test_that("dataFrameSort_noargs", { res.errors <- DSI::datashield.errors() expect_length(res.errors, 3) - expect_match(res.errors$sim1, "* Error in strsplit\\(df.name, split = \"\"\\) : non-character argument") - expect_match(res.errors$sim2, "* Error in strsplit\\(df.name, split = \"\"\\) : non-character argument") - expect_match(res.errors$sim3, "* Error in strsplit\\(df.name, split = \"\"\\) : non-character argument") + expect_match(res.errors$sim1, "* Error in strsplit\\(df.name, split = \"\"\\) : non-character argument*") + expect_match(res.errors$sim2, "* Error in strsplit\\(df.name, split = \"\"\\) : non-character argument*") + expect_match(res.errors$sim3, "* Error in strsplit\\(df.name, split = \"\"\\) : non-character argument*") }) context("ds.dataFrameSort::arg::sorted dataframe, of factors") @@ -40,9 +40,9 @@ test_that("dataFrameSort_factors", { res.errors <- DSI::datashield.errors() expect_length(res.errors, 3) - expect_match(res.errors$sim1, "* Error : specified sort.key variable is of type 'factor'") - expect_match(res.errors$sim2, "* Error : specified sort.key variable is of type 'factor'") - expect_match(res.errors$sim3, "* Error : specified sort.key variable is of type 'factor'") + expect_match(res.errors$sim1, "* Error : specified sort.key variable is of 
type 'factor'*") + expect_match(res.errors$sim2, "* Error : specified sort.key variable is of type 'factor'*") + expect_match(res.errors$sim3, "* Error : specified sort.key variable is of type 'factor'*") }) # diff --git a/tests/testthat/test-arg-ds.dataFrameSubset.R b/tests/testthat/test-arg-ds.dataFrameSubset.R index 100c67a2b..4bd414e6e 100644 --- a/tests/testthat/test-arg-ds.dataFrameSubset.R +++ b/tests/testthat/test-arg-ds.dataFrameSubset.R @@ -1,3 +1,4 @@ + #------------------------------------------------------------------------------- # Copyright (c) 2018-2022 University of Newcastle upon Tyne. All rights reserved. # @@ -51,9 +52,9 @@ test_that("dataFrameSubset_erros", { res.errors <- DSI::datashield.errors() expect_length(res.errors, 3) - expect_match(res.errors$sim1, "* Error in data\\.frame\\(M\\) : object 'M' not found") - expect_match(res.errors$sim2, "* Error in data\\.frame\\(M\\) : object 'M' not found") - expect_match(res.errors$sim3, "* Error in data\\.frame\\(M\\) : object 'M' not found") + expect_match(res.errors$sim1, "* object 'M' not found*") + expect_match(res.errors$sim2, "* object 'M' not found*") + expect_match(res.errors$sim3, "* object 'M' not found*") }) context("ds.dataFrameSubset::arg::missing 'V1' value server-side") @@ -63,9 +64,9 @@ test_that("dataFrameSubset_erros", { res.errors <- DSI::datashield.errors() expect_length(res.errors, 3) - expect_match(res.errors$sim1, "* Error in eval\\(parse\\(text = V1.name\\), envir = parent.frame\\(\\)\\) : \\n object 'A' not found") - expect_match(res.errors$sim2, "* Error in eval\\(parse\\(text = V1.name\\), envir = parent.frame\\(\\)\\) : \\n object 'A' not found") - expect_match(res.errors$sim3, "* Error in eval\\(parse\\(text = V1.name\\), envir = parent.frame\\(\\)\\) : \\n object 'A' not found") + expect_match(res.errors$sim1, "* Error in eval\\(parse\\(text = V1.name\\), envir = parent.frame\\(\\)\\) : \\n object 'A' not found*") + expect_match(res.errors$sim2, "* Error in 
eval\\(parse\\(text = V1.name\\), envir = parent.frame\\(\\)\\) : \\n object 'A' not found*") + expect_match(res.errors$sim3, "* Error in eval\\(parse\\(text = V1.name\\), envir = parent.frame\\(\\)\\) : \\n object 'A' not found*") }) context("ds.dataFrameSubset::arg::missing 'V2' value server-side") @@ -75,9 +76,9 @@ test_that("dataFrameSubset_erros", { res.errors <- DSI::datashield.errors() expect_length(res.errors, 3) - expect_match(res.errors$sim1, "* Error in eval\\(parse\\(text = V2.name\\), envir = parent.frame\\(\\)\\) : \\n object 'B' not found") - expect_match(res.errors$sim2, "* Error in eval\\(parse\\(text = V2.name\\), envir = parent.frame\\(\\)\\) : \\n object 'B' not found") - expect_match(res.errors$sim3, "* Error in eval\\(parse\\(text = V2.name\\), envir = parent.frame\\(\\)\\) : \\n object 'B' not found") + expect_match(res.errors$sim1, "* Error in eval\\(parse\\(text = V2.name\\), envir = parent.frame\\(\\)\\) : \\n object 'B' not found*") + expect_match(res.errors$sim2, "* Error in eval\\(parse\\(text = V2.name\\), envir = parent.frame\\(\\)\\) : \\n object 'B' not found*") + expect_match(res.errors$sim3, "* Error in eval\\(parse\\(text = V2.name\\), envir = parent.frame\\(\\)\\) : \\n object 'B' not found*") }) context("ds.dataFrameSubset::arg::invalid 'Boolean.operator' value server-side") @@ -87,9 +88,9 @@ test_that("dataFrameSubset_erros", { res.errors <- DSI::datashield.errors() expect_length(res.errors, 3) - expect_match(res.errors$sim1, "* Error : FAILED: Boolean.operator must be: '==', '!=', '<', '<=', '>' or '>='") - expect_match(res.errors$sim2, "* Error : FAILED: Boolean.operator must be: '==', '!=', '<', '<=', '>' or '>='") - expect_match(res.errors$sim3, "* Error : FAILED: Boolean.operator must be: '==', '!=', '<', '<=', '>' or '>='") + expect_match(res.errors$sim1, "* Error : FAILED: Boolean.operator must be: '==', '!=', '<', '<=', '>' or '>='*") + expect_match(res.errors$sim2, "* Error : FAILED: Boolean.operator must be: '==', 
'!=', '<', '<=', '>' or '>='*") + expect_match(res.errors$sim3, "* Error : FAILED: Boolean.operator must be: '==', '!=', '<', '<=', '>' or '>='*") }) # diff --git a/tests/testthat/test-arg-ds.foobar.R b/tests/testthat/test-arg-ds.foobar.R index 3b5a8e7dd..05cbde701 100644 --- a/tests/testthat/test-arg-ds.foobar.R +++ b/tests/testthat/test-arg-ds.foobar.R @@ -55,13 +55,14 @@ test_that("non existent aggregate foobarDS", { expect_error(datashield.aggregate(conns=ds.test_env$connections, expr=calltext)) errs <- datashield.errors() + expect_length(errs, 3) expect_length(errs$sim1, 1) - expect_true(errs$sim1 %in% c("Command 'fooBarDS()' failed on 'sim1': No such DataSHIELD 'AGGREGATE' method with name: fooBarDS", "Bad request: ")) + expect_true(errs$sim1 %in% c("Command 'fooBarDS()' failed on 'sim1': No such DataSHIELD 'AGGREGATE' method with name: fooBarDS", "[Client error: (400) Bad Request] No such DataSHIELD 'AGGREGATE' method with name: fooBarDS", "Bad request: No such DataSHIELD 'AGGREGATE' method with name: fooBarDS")) expect_length(errs$sim2, 1) - expect_true(errs$sim2 %in% c("Command 'fooBarDS()' failed on 'sim2': No such DataSHIELD 'AGGREGATE' method with name: fooBarDS", "Bad request: ")) + expect_true(errs$sim2 %in% c("Command 'fooBarDS()' failed on 'sim2': No such DataSHIELD 'AGGREGATE' method with name: fooBarDS", "[Client error: (400) Bad Request] No such DataSHIELD 'AGGREGATE' method with name: fooBarDS", "Bad request: No such DataSHIELD 'AGGREGATE' method with name: fooBarDS")) expect_length(errs$sim3, 1) - expect_true(errs$sim3 %in% c("Command 'fooBarDS()' failed on 'sim3': No such DataSHIELD 'AGGREGATE' method with name: fooBarDS", "Bad request: ")) + expect_true(errs$sim3 %in% c("Command 'fooBarDS()' failed on 'sim3': No such DataSHIELD 'AGGREGATE' method with name: fooBarDS", "[Client error: (400) Bad Request] No such DataSHIELD 'AGGREGATE' method with name: fooBarDS", "Bad request: No such DataSHIELD 'AGGREGATE' method with name: fooBarDS")) }) 
context("ds.foobar::arg::assign") @@ -105,11 +106,11 @@ test_that("non existent assign foobarDS", { errs <- datashield.errors() expect_length(errs, 3) expect_length(errs$sim1, 1) - expect_true(errs$sim1 %in% c("Command 'fooBarDS()' failed on 'sim1': No such DataSHIELD 'ASSIGN' method with name: fooBarDS", "Bad request: ")) + expect_true(errs$sim1 %in% c("Command 'fooBarDS()' failed on 'sim1': No such DataSHIELD 'ASSIGN' method with name: fooBarDS", "[Client error: (400) Bad Request] No such DataSHIELD 'ASSIGN' method with name: fooBarDS", "Bad request: No such DataSHIELD 'ASSIGN' method with name: fooBarDS")) expect_length(errs$sim2, 1) - expect_true(errs$sim2 %in% c("Command 'fooBarDS()' failed on 'sim2': No such DataSHIELD 'ASSIGN' method with name: fooBarDS", "Bad request: ")) + expect_true(errs$sim2 %in% c("Command 'fooBarDS()' failed on 'sim2': No such DataSHIELD 'ASSIGN' method with name: fooBarDS", "[Client error: (400) Bad Request] No such DataSHIELD 'ASSIGN' method with name: fooBarDS", "Bad request: No such DataSHIELD 'ASSIGN' method with name: fooBarDS")) expect_length(errs$sim3, 1) - expect_true(errs$sim3 %in% c("Command 'fooBarDS()' failed on 'sim3': No such DataSHIELD 'ASSIGN' method with name: fooBarDS", "Bad request: ")) + expect_true(errs$sim3 %in% c("Command 'fooBarDS()' failed on 'sim3': No such DataSHIELD 'ASSIGN' method with name: fooBarDS", "[Client error: (400) Bad Request] No such DataSHIELD 'ASSIGN' method with name: fooBarDS", "Bad request: No such DataSHIELD 'ASSIGN' method with name: fooBarDS")) }) # diff --git a/tests/testthat/test-arg-ds.lmerSLMA.R b/tests/testthat/test-arg-ds.lmerSLMA.R index 8f5f78778..bd229260b 100644 --- a/tests/testthat/test-arg-ds.lmerSLMA.R +++ b/tests/testthat/test-arg-ds.lmerSLMA.R @@ -39,8 +39,8 @@ test_that("simple lmerSLMA tesing (mis)use of arguments", { res <- ds.lmerSLMA(formula = 'incid_rate ~ trtGrp + Male + (1|idDoctor)', dataName = 'D', control_type = 'xtol_rel') expect_equal(res$errorMessage, 
"ERROR: if control_type is non-null, you must specify a valid control_value eg control_value<-1.0e-7", fixed=TRUE) - res <- ds.lmerSLMA(formula = 'incid_rate ~ trtGrp + Male + (1|idDoctor)', dataName = 'D', control_type = 'xtol_rel',control_value = 'nothing') - expect_equal(res$study1$errorMessage, "REAL() can only be applied to a 'numeric', not a 'logical'", fixed=TRUE) +# res <- ds.lmerSLMA(formula = 'incid_rate ~ trtGrp + Male + (1|idDoctor)', dataName = 'D', control_type = 'xtol_rel',control_value = 'nothing') +# expect_equal(res$study1$errorMessage, "REAL() can only be applied to a 'numeric', not a 'logical'", fixed=TRUE) expect_error(ds.lmerSLMA(), " Please provide a valid regression formula!", fixed=TRUE) @@ -53,7 +53,7 @@ test_that("simple lmerSLMA tesing (mis)use of arguments", { context("ds.lmerSLMA::arg::shutdown") test_that("shutdown", { - ds_expect_variables(c("D", "offset", "weights")) + ds_expect_variables(c("D", "offset", "offset.to.use", "weights", "weights.to.use")) }) disconnect.studies.dataset.cluster.int() diff --git a/tests/testthat/test-arg-ds.ls.R b/tests/testthat/test-arg-ds.ls.R index 9509a527f..f4405ee77 100644 --- a/tests/testthat/test-arg-ds.ls.R +++ b/tests/testthat/test-arg-ds.ls.R @@ -26,7 +26,7 @@ test_that("setup", { context("ds.ls::arg") test_that("containing escape sequence", { - res1 <- ds.ls(search.filter="_:A:_Enviroment") + res1 <- ds.ls(search.filter="_:A:_Environment") expect_length(res1, 1) expect_equal(res1, "Warning: Code replacing wildcard (i.e. '*') is '_:A:_' but this appears in your original search filter string - please respecify") @@ -36,7 +36,7 @@ test_that("containing escape sequence", { expect_length(res2, 1) expect_equal(res2, "Warning: Code replacing wildcard (i.e. 
'*') is '_:A:_' but this appears in your original search filter string - please respecify") - res3 <- ds.ls(search.filter="Enviroment_:A:_") + res3 <- ds.ls(search.filter="Environment_:A:_") expect_length(res3, 1) expect_equal(res3, "Warning: Code replacing wildcard (i.e. '*') is '_:A:_' but this appears in your original search filter string - please respecify") diff --git a/tests/testthat/test-arg-ds.ranksSecure.R b/tests/testthat/test-arg-ds.ranksSecure.R index 99423af46..18778ccaf 100644 --- a/tests/testthat/test-arg-ds.ranksSecure.R +++ b/tests/testthat/test-arg-ds.ranksSecure.R @@ -31,9 +31,9 @@ test_that("missing variable", { res.errors <- DSI::datashield.errors() expect_length(res.errors, 3) - expect_match(res.errors$study1, "* Error in c\\(\"character\", \"integer\", \"logical\", \"numeric\"\\) %in% class\\(obj\\) : \n object 'LAB_MISSING' not found") - expect_match(res.errors$study2, "* Error in c\\(\"character\", \"integer\", \"logical\", \"numeric\"\\) %in% class\\(obj\\) : \n object 'LAB_MISSING' not found") - expect_match(res.errors$study3, "* Error in c\\(\"character\", \"integer\", \"logical\", \"numeric\"\\) %in% class\\(obj\\) : \n object 'LAB_MISSING' not found") + expect_match(res.errors$study1, "* object 'LAB_MISSING' not found") + expect_match(res.errors$study2, "* object 'LAB_MISSING' not found") + expect_match(res.errors$study3, "* object 'LAB_MISSING' not found") }) context("ds.ranksSecure::arg::NULL variable") diff --git a/tests/testthat/test-datachk-CNSIM.R b/tests/testthat/test-datachk-CNSIM.R index 754d42ff0..7296f51b7 100644 --- a/tests/testthat/test-datachk-CNSIM.R +++ b/tests/testthat/test-datachk-CNSIM.R @@ -74,6 +74,15 @@ test_that("Check CNSIM dataset", { expect_length(res.length.lab_tsc$`total length of D$LAB_TSC in all studies combined`, 1) expect_equal(res.length.lab_tsc$`total length of D$LAB_TSC in all studies combined`, 9379) + res.numna.lab_tsc <- ds.numNA(x='D$LAB_TSC') + expect_length(res.numna.lab_tsc, 3) + 
expect_length(res.numna.lab_tsc$sim1, 1) + expect_equal(res.numna.lab_tsc$sim1, 356) + expect_length(res.numna.lab_tsc$sim2, 1) + expect_equal(res.numna.lab_tsc$sim2, 549) + expect_length(res.numna.lab_tsc$sim3, 1) + expect_equal(res.numna.lab_tsc$sim3, 649) + res.class.lab_trig <- ds.class(x='D$LAB_TRIG') expect_length(res.class.lab_trig, 3) expect_length(res.class.lab_trig$sim1, 1) @@ -94,6 +103,15 @@ test_that("Check CNSIM dataset", { expect_length(res.length.lab_trig$`total length of D$LAB_TRIG in all studies combined`, 1) expect_equal(res.length.lab_trig$`total length of D$LAB_TRIG in all studies combined`, 9379) + res.numna.lab_trig <- ds.numNA(x='D$LAB_TRIG') + expect_length(res.numna.lab_trig, 3) + expect_length(res.numna.lab_trig$sim1, 1) + expect_equal(res.numna.lab_trig$sim1, 362) + expect_length(res.numna.lab_trig$sim2, 1) + expect_equal(res.numna.lab_trig$sim2, 562) + expect_length(res.numna.lab_trig$sim3, 1) + expect_equal(res.numna.lab_trig$sim3, 655) + res.class.lab_hdl <- ds.class(x='D$LAB_HDL') expect_length(res.class.lab_hdl, 3) expect_length(res.class.lab_hdl$sim1, 1) @@ -114,6 +132,15 @@ test_that("Check CNSIM dataset", { expect_length(res.length.lab_hdl$`total length of D$LAB_HDL in all studies combined`, 1) expect_equal(res.length.lab_hdl$`total length of D$LAB_HDL in all studies combined`, 9379) + res.numna.lab_hdl <- ds.numNA(x='D$LAB_HDL') + expect_length(res.numna.lab_hdl, 3) + expect_length(res.numna.lab_hdl$sim1, 1) + expect_equal(res.numna.lab_hdl$sim1, 360) + expect_length(res.numna.lab_hdl$sim2, 1) + expect_equal(res.numna.lab_hdl$sim2, 555) + expect_length(res.numna.lab_hdl$sim3, 1) + expect_equal(res.numna.lab_hdl$sim3, 655) + res.class.lab_gluc_adjusted <- ds.class(x='D$LAB_GLUC_ADJUSTED') expect_length(res.class.lab_gluc_adjusted, 3) expect_length(res.class.lab_gluc_adjusted$sim1, 1) @@ -134,6 +161,15 @@ test_that("Check CNSIM dataset", { expect_length(res.length.lab_gluc_adjusted$`total length of D$LAB_GLUC_ADJUSTED in all 
studies combined`, 1) expect_equal(res.length.lab_gluc_adjusted$`total length of D$LAB_GLUC_ADJUSTED in all studies combined`, 9379) + res.numna.lab_gluc_adjusted <- ds.numNA(x='D$LAB_GLUC_ADJUSTED') + expect_length(res.numna.lab_gluc_adjusted, 3) + expect_length(res.numna.lab_gluc_adjusted$sim1, 1) + expect_equal(res.numna.lab_gluc_adjusted$sim1, 341) + expect_length(res.numna.lab_gluc_adjusted$sim2, 1) + expect_equal(res.numna.lab_gluc_adjusted$sim2, 505) + expect_length(res.numna.lab_gluc_adjusted$sim3, 1) + expect_equal(res.numna.lab_gluc_adjusted$sim3, 609) + res.class.pm_bmi_continuous <- ds.class(x='D$PM_BMI_CONTINUOUS') expect_length(res.class.pm_bmi_continuous, 3) expect_length(res.class.pm_bmi_continuous$sim1, 1) @@ -154,6 +190,15 @@ test_that("Check CNSIM dataset", { expect_length(res.length.pm_bmi_continuous$`total length of D$PM_BMI_CONTINUOUS in all studies combined`, 1) expect_equal(res.length.pm_bmi_continuous$`total length of D$PM_BMI_CONTINUOUS in all studies combined`, 9379) + res.numna.pm_bmi_continuous <- ds.numNA(x='D$PM_BMI_CONTINUOUS') + expect_length(res.numna.pm_bmi_continuous, 3) + expect_length(res.numna.pm_bmi_continuous$sim1, 1) + expect_equal(res.numna.pm_bmi_continuous$sim1, 97) + expect_length(res.numna.pm_bmi_continuous$sim2, 1) + expect_equal(res.numna.pm_bmi_continuous$sim2, 150) + expect_length(res.numna.pm_bmi_continuous$sim3, 1) + expect_equal(res.numna.pm_bmi_continuous$sim3, 205) + res.class.dis_cva <- ds.class(x='D$DIS_CVA') expect_length(res.class.dis_cva, 3) expect_length(res.class.dis_cva$sim1, 1) @@ -174,6 +219,15 @@ test_that("Check CNSIM dataset", { expect_length(res.length.dis_cva$`total length of D$DIS_CVA in all studies combined`, 1) expect_equal(res.length.dis_cva$`total length of D$DIS_CVA in all studies combined`, 9379) + res.numna.dis_cva <- ds.numNA(x='D$DIS_CVA') + expect_length(res.numna.dis_cva, 3) + expect_length(res.numna.dis_cva$sim1, 1) + expect_equal(res.numna.dis_cva$sim1, 0) + 
expect_length(res.numna.dis_cva$sim2, 1) + expect_equal(res.numna.dis_cva$sim2, 0) + expect_length(res.numna.dis_cva$sim3, 1) + expect_equal(res.numna.dis_cva$sim3, 0) + res.class.medi_lpd <- ds.class(x='D$MEDI_LPD') expect_length(res.class.medi_lpd, 3) expect_length(res.class.medi_lpd$sim1, 1) @@ -194,6 +248,15 @@ test_that("Check CNSIM dataset", { expect_length(res.length.medi_lpd$`total length of D$MEDI_LPD in all studies combined`, 1) expect_equal(res.length.medi_lpd$`total length of D$MEDI_LPD in all studies combined`, 9379) + res.numna.medi_lpd <- ds.numNA(x='D$MEDI_LPD') + expect_length(res.numna.medi_lpd, 3) + expect_length(res.numna.medi_lpd$sim1, 1) + expect_equal(res.numna.medi_lpd$sim1, 0) + expect_length(res.numna.medi_lpd$sim2, 1) + expect_equal(res.numna.medi_lpd$sim2, 0) + expect_length(res.numna.medi_lpd$sim3, 1) + expect_equal(res.numna.medi_lpd$sim3, 0) + res.class.dis_diab <- ds.class(x='D$DIS_DIAB') expect_length(res.class.dis_diab, 3) expect_length(res.class.dis_diab$sim1, 1) @@ -214,6 +277,15 @@ test_that("Check CNSIM dataset", { expect_length(res.length.dis_diab$`total length of D$DIS_DIAB in all studies combined`, 1) expect_equal(res.length.dis_diab$`total length of D$DIS_DIAB in all studies combined`, 9379) + res.numna.dis_diab <- ds.numNA(x='D$DIS_DIAB') + expect_length(res.numna.dis_diab, 3) + expect_length(res.numna.dis_diab$sim1, 1) + expect_equal(res.numna.dis_diab$sim1, 0) + expect_length(res.numna.dis_diab$sim2, 1) + expect_equal(res.numna.dis_diab$sim2, 0) + expect_length(res.numna.dis_diab$sim3, 1) + expect_equal(res.numna.dis_diab$sim3, 0) + res.class.dis_ami <- ds.class(x='D$DIS_AMI') expect_length(res.class.dis_ami, 3) expect_length(res.class.dis_ami$sim1, 1) @@ -234,6 +306,15 @@ test_that("Check CNSIM dataset", { expect_length(res.length.dis_ami$`total length of D$DIS_AMI in all studies combined`, 1) expect_equal(res.length.dis_ami$`total length of D$DIS_AMI in all studies combined`, 9379) + res.numna.dis_ami <- 
ds.numNA(x='D$DIS_AMI') + expect_length(res.numna.dis_ami, 3) + expect_length(res.numna.dis_ami$sim1, 1) + expect_equal(res.numna.dis_ami$sim1, 0) + expect_length(res.numna.dis_ami$sim2, 1) + expect_equal(res.numna.dis_ami$sim2, 0) + expect_length(res.numna.dis_ami$sim3, 1) + expect_equal(res.numna.dis_ami$sim3, 0) + res.class.gender <- ds.class(x='D$GENDER') expect_length(res.class.gender, 3) expect_length(res.class.gender$sim1, 1) @@ -254,6 +335,15 @@ test_that("Check CNSIM dataset", { expect_length(res.length.gender$`total length of D$GENDER in all studies combined`, 1) expect_equal(res.length.gender$`total length of D$GENDER in all studies combined`, 9379) + res.numna.gender <- ds.numNA(x='D$GENDER') + expect_length(res.numna.gender, 3) + expect_length(res.numna.gender$sim1, 1) + expect_equal(res.numna.gender$sim1, 0) + expect_length(res.numna.gender$sim2, 1) + expect_equal(res.numna.gender$sim2, 0) + expect_length(res.numna.gender$sim3, 1) + expect_equal(res.numna.gender$sim3, 0) + res.class.pm_bmi_categorical <- ds.class(x='D$PM_BMI_CATEGORICAL') expect_length(res.class.pm_bmi_categorical, 3) expect_length(res.class.pm_bmi_categorical$sim1, 1) @@ -273,6 +363,15 @@ test_that("Check CNSIM dataset", { expect_equal(res.length.pm_bmi_categorical$`length of D$PM_BMI_CATEGORICAL in sim3`, 4128) expect_length(res.length.pm_bmi_categorical$`total length of D$PM_BMI_CATEGORICAL in all studies combined`, 1) expect_equal(res.length.pm_bmi_categorical$`total length of D$PM_BMI_CATEGORICAL in all studies combined`, 9379) + + res.numna.pm_bmi_categorical <- ds.numNA(x='D$PM_BMI_CATEGORICAL') + expect_length(res.numna.pm_bmi_categorical, 3) + expect_length(res.numna.pm_bmi_categorical$sim1, 1) + expect_equal(res.numna.pm_bmi_categorical$sim1, 97) + expect_length(res.numna.pm_bmi_categorical$sim2, 1) + expect_equal(res.numna.pm_bmi_categorical$sim2, 150) + expect_length(res.numna.pm_bmi_categorical$sim3, 1) + expect_equal(res.numna.pm_bmi_categorical$sim3, 205) }) # diff 
--git a/tests/testthat/test-perf-conndisconn.R b/tests/testthat/test-perf-conndisconn.R new file mode 100644 index 000000000..5e0fbf11d --- /dev/null +++ b/tests/testthat/test-perf-conndisconn.R @@ -0,0 +1,61 @@ +#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +#------------------------------------------------------------------------------- + +# +# Set up +# + +context("conndisconn::perf::setup") +connect.studies.dataset.cnsim(list("LAB_TSC", "LAB_TRIG")) +disconnect.studies.dataset.cnsim() + +# +# Tests +# + +context("conndisconn::perf::simple0") +test_that("simple connect - disconnect performance", { + .durationSec <- 120 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + connect.studies.dataset.cnsim(list("LAB_TSC", "LAB_TRIG")) + disconnect.studies.dataset.cnsim() + + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("conndisconn::perf::simple0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("conndisconn::perf::simple0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("conndisconn::perf::simple0", .current.rate, 0.5, 2.0) + } else { + print(paste("conndisconn::perf::simple0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("conndisconn::perf::simple0") + .reference.tolerance.lower <- 
perf.reference.tolerance.lower("conndisconn::perf::simple0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("conndisconn::perf::simple0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed rate", expected.label = "upper threshold on rate") +}) + +# +# Done +# + +context("conndisconn::perf::shutdown") +disconnect.studies.dataset.cnsim() +context("conndisconn::perf::done") diff --git a/tests/testthat/test-perf-ds.abs.R b/tests/testthat/test-perf-ds.abs.R new file mode 100644 index 000000000..e62eef6cd --- /dev/null +++ b/tests/testthat/test-perf-ds.abs.R @@ -0,0 +1,59 @@ +#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+#------------------------------------------------------------------------------- + +# +# Set up +# + +context("ds.abs::perf::setup") +connect.studies.dataset.cnsim(list("LAB_TSC", "LAB_TRIG")) + +# +# Tests +# + +context("ds.abs::perf:0") +test_that("combine - performance", { + .durationSec <- 30 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + ds.abs("D$LAB_TSC", newobj = "abs.newobj") + + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("ds.abs::perf::0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("ds.abs::perf::0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("ds.abs::perf::0", .current.rate, 0.5, 2.0) + } else { + print(paste("ds.abs::perf::0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("ds.abs::perf::0") + .reference.tolerance.lower <- perf.reference.tolerance.lower("ds.abs::perf::0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("ds.abs::perf::0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed rate", expected.label = "upper threshold on rate") +}) + +# +# Done +# + +context("ds.abs::perf::shutdown") +disconnect.studies.dataset.cnsim() +context("ds.abs::perf::done") diff --git a/tests/testthat/test-perf-ds.asInteger.R b/tests/testthat/test-perf-ds.asInteger.R new file mode 100644 index 000000000..948d5dc32 --- /dev/null +++ b/tests/testthat/test-perf-ds.asInteger.R @@ -0,0 +1,59 @@ 
+#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +#------------------------------------------------------------------------------- + +# +# Set up +# + +context("ds.asInteger::perf::setup") +connect.studies.dataset.cnsim(list("GENDER")) + +# +# Tests +# + +context("ds.asInteger::perf:0") +test_that("combine - performance", { + .durationSec <- 30 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + ds.asInteger("D$GENDER", newobj = "asInteger.newobj") + + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("ds.asInteger::perf:0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("ds.asInteger::perf:0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("ds.asInteger::perf:0", .current.rate, 0.5, 2.0) + } else { + print(paste("ds.asInteger::perf:0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("ds.asInteger::perf:0") + .reference.tolerance.lower <- perf.reference.tolerance.lower("ds.asInteger::perf:0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("ds.asInteger::perf:0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed 
rate", expected.label = "upper threshold on rate") +}) + +# +# Done +# + +context("ds.asInteger::perf::shutdown") +disconnect.studies.dataset.cnsim() +context("ds.asInteger::perf::done") diff --git a/tests/testthat/test-perf-ds.asList.R b/tests/testthat/test-perf-ds.asList.R new file mode 100644 index 000000000..bca5198a5 --- /dev/null +++ b/tests/testthat/test-perf-ds.asList.R @@ -0,0 +1,59 @@ +#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +#------------------------------------------------------------------------------- + +# +# Set up +# + +context("ds.asList::perf::setup") +connect.studies.dataset.cnsim(list("LAB_TSC", "LAB_TRIG")) + +# +# Tests +# + +context("ds.asList::perf:0") +test_that("combine - performance", { + .durationSec <- 30 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + ds.asList("D$LAB_TSC", newobj = "asList.newobj") + + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("ds.asList::perf:0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("ds.asList::perf:0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("ds.asList::perf:0", .current.rate, 0.5, 2.0) + } else { + print(paste("ds.asList::perf:0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("ds.asList::perf:0") + 
.reference.tolerance.lower <- perf.reference.tolerance.lower("ds.asList::perf:0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("ds.asList::perf:0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed rate", expected.label = "upper threshold on rate") +}) + +# +# Done +# + +context("ds.asList::perf::shutdown") +disconnect.studies.dataset.cnsim() +context("ds.asList::perf::done") diff --git a/tests/testthat/test-perf-ds.asNumeric.R b/tests/testthat/test-perf-ds.asNumeric.R new file mode 100644 index 000000000..c3526a8d4 --- /dev/null +++ b/tests/testthat/test-perf-ds.asNumeric.R @@ -0,0 +1,59 @@ +#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+#------------------------------------------------------------------------------- + +# +# Set up +# + +context("ds.asNumeric::perf::setup") +connect.studies.dataset.cnsim(list("GENDER")) + +# +# Tests +# + +context("ds.asNumeric::perf:0") +test_that("combine - performance", { + .durationSec <- 30 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + ds.asNumeric("D$GENDER", newobj = "asNumeric.newobj") + + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("ds.asNumeric::perf:0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("ds.asNumeric::perf:0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("ds.asNumeric::perf:0", .current.rate, 0.5, 2.0) + } else { + print(paste("ds.asNumeric::perf:0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("ds.asNumeric::perf:0") + .reference.tolerance.lower <- perf.reference.tolerance.lower("ds.asNumeric::perf:0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("ds.asNumeric::perf:0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed rate", expected.label = "upper threshold on rate") +}) + +# +# Done +# + +context("ds.asNumeric::perf::shutdown") +disconnect.studies.dataset.cnsim() +context("ds.asNumeric::perf::done") diff --git a/tests/testthat/test-perf-ds.assign.R b/tests/testthat/test-perf-ds.assign.R new file mode 100644 index 000000000..5f67fb60c --- /dev/null +++ b/tests/testthat/test-perf-ds.assign.R @@ -0,0 +1,59 @@ 
+#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +#------------------------------------------------------------------------------- + +# +# Set up +# + +context("ds.assign::perf::setup") +connect.studies.dataset.cnsim(list("LAB_TSC", "LAB_TRIG")) + +# +# Tests +# + +context("ds.assign::perf::0") +test_that("performance", { + .durationSec <- 30 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + ds.assign("D$LAB_TSC", newobj = "assign.newobj") + + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("ds.assign::perf::0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("ds.assign::perf::0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("ds.assign::perf::0", .current.rate, 0.5, 2.0) + } else { + print(paste("ds.assign::perf::0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("ds.assign::perf::0") + .reference.tolerance.lower <- perf.reference.tolerance.lower("ds.assign::perf::0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("ds.assign::perf::0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed rate", expected.label 
= "upper threshold on rate") +}) + +# +# Done +# + +context("ds.assign::perf::shutdown") +disconnect.studies.dataset.cnsim() +context("ds.assign::perf::done") diff --git a/tests/testthat/test-perf-ds.class.R b/tests/testthat/test-perf-ds.class.R new file mode 100644 index 000000000..bffbde64f --- /dev/null +++ b/tests/testthat/test-perf-ds.class.R @@ -0,0 +1,59 @@ +#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +#------------------------------------------------------------------------------- + +# +# Set up +# + +context("ds.class::perf::setup") +connect.studies.dataset.cnsim(list("LAB_TSC", "LAB_TRIG")) + +# +# Tests +# + +context("ds.class::perf::combine:0") +test_that("combine - performance", { + .durationSec <- 30 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + ds.class("D$LAB_TSC") + + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("ds.class::perf::combine:0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("ds.class::perf::combine:0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("ds.class::perf::combine:0", .current.rate, 0.5, 2.0) + } else { + print(paste("ds.class::perf::combine:0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("ds.class::perf::combine:0") + .reference.tolerance.lower <- 
perf.reference.tolerance.lower("ds.class::perf::combine:0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("ds.class::perf::combine:0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed rate", expected.label = "upper threshold on rate") +}) + +# +# Done +# + +context("ds.class::perf::shutdown") +disconnect.studies.dataset.cnsim() +context("ds.class::perf::done") diff --git a/tests/testthat/test-perf-ds.colnames.R b/tests/testthat/test-perf-ds.colnames.R new file mode 100644 index 000000000..c3bbbdff8 --- /dev/null +++ b/tests/testthat/test-perf-ds.colnames.R @@ -0,0 +1,59 @@ +#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+#------------------------------------------------------------------------------- + +# +# Set up +# + +context("ds.colnames::perf::setup") +connect.studies.dataset.cnsim(list("LAB_TSC", "LAB_TRIG")) + +# +# Tests +# + +context("ds.colnames::perf:0") +test_that("combine - performance", { + .durationSec <- 30 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + ds.colnames("D") + + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("ds.colnames::perf:0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("ds.colnames::perf:0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("ds.colnames::perf:0", .current.rate, 0.5, 2.0) + } else { + print(paste("ds.colnames::perf:0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("ds.colnames::perf:0") + .reference.tolerance.lower <- perf.reference.tolerance.lower("ds.colnames::perf:0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("ds.colnames::perf:0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed rate", expected.label = "upper threshold on rate") +}) + +# +# Done +# + +context("ds.colnames::perf::shutdown") +disconnect.studies.dataset.cnsim() +context("ds.colnames::perf::done") diff --git a/tests/testthat/test-perf-ds.exists.R b/tests/testthat/test-perf-ds.exists.R new file mode 100644 index 000000000..6d9cf49fb --- /dev/null +++ b/tests/testthat/test-perf-ds.exists.R @@ -0,0 +1,59 @@ 
+#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +#------------------------------------------------------------------------------- + +# +# Set up +# + +context("ds.exists::perf::setup") +connect.studies.dataset.cnsim(list("LAB_TSC", "LAB_TRIG")) + +# +# Tests +# + +context("ds.exists::perf::combine:0") +test_that("combine - performance", { + .durationSec <- 30 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + ds.exists("D$LAB_TSC") + + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("ds.exists::perf::combine:0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("ds.exists::perf::combine:0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("ds.exists::perf::combine:0", .current.rate, 0.5, 2.0) + } else { + print(paste("ds.exists::perf::combine:0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("ds.exists::perf::combine:0") + .reference.tolerance.lower <- perf.reference.tolerance.lower("ds.exists::perf::combine:0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("ds.exists::perf::combine:0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * 
.reference.tolerance.upper, label = "Observed rate", expected.label = "upper threshold on rate") +}) + +# +# Done +# + +context("ds.exists::perf::shutdown") +disconnect.studies.dataset.cnsim() +context("ds.exists::perf::done") diff --git a/tests/testthat/test-perf-ds.length.R b/tests/testthat/test-perf-ds.length.R new file mode 100644 index 000000000..194d76755 --- /dev/null +++ b/tests/testthat/test-perf-ds.length.R @@ -0,0 +1,59 @@ +#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +#------------------------------------------------------------------------------- + +# +# Set up +# + +context("ds.length::perf::setup") +connect.studies.dataset.cnsim(list("LAB_TSC", "LAB_TRIG")) + +# +# Tests +# + +context("ds.length::perf::combine:0") +test_that("combine - performance", { + .durationSec <- 30 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + ds.length("D$LAB_TSC") + + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("ds.length::perf::combine:0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("ds.length::perf::combine:0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("ds.length::perf::combine:0", .current.rate, 0.5, 2.0) + } else { + print(paste("ds.length::perf::combine:0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- 
perf.reference.rate("ds.length::perf::combine:0") + .reference.tolerance.lower <- perf.reference.tolerance.lower("ds.length::perf::combine:0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("ds.length::perf::combine:0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed rate", expected.label = "upper threshold on rate") +}) + +# +# Done +# + +context("ds.length::perf::shutdown") +disconnect.studies.dataset.cnsim() +context("ds.length::perf::done") diff --git a/tests/testthat/test-perf-ds.mean.R b/tests/testthat/test-perf-ds.mean.R new file mode 100644 index 000000000..61ae232f1 --- /dev/null +++ b/tests/testthat/test-perf-ds.mean.R @@ -0,0 +1,90 @@ +#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+#------------------------------------------------------------------------------- + +# +# Set up +# + +context("ds.mean::perf::setup") +connect.studies.dataset.cnsim(list("LAB_TSC", "LAB_TRIG")) + +# +# Tests +# + +context("ds.mean::perf::combine:0") +test_that("combine - performance", { + .durationSec <- 30 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + ds.mean("D$LAB_TSC", type='combine') + + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("ds.mean::perf::combine:0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("ds.mean::perf::combine:0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("ds.mean::perf::combine:0", .current.rate, 0.5, 2.0) + } else { + print(paste("ds.mean::perf::combine:0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("ds.mean::perf::combine:0") + .reference.tolerance.lower <- perf.reference.tolerance.lower("ds.mean::perf::combine:0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("ds.mean::perf::combine:0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed rate", expected.label = "upper threshold on rate") +}) + +context("ds.mean::perf::split:0") +test_that("split - performance", { + .durationSec <- 30 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + ds.mean("D$LAB_TSC", type='split') + + .count <- .count + 
1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("ds.mean::perf::split:0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("ds.mean::perf::split:0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("ds.mean::perf::split:0", .current.rate, 0.5, 2.0) + } else { + print(paste("ds.mean::perf::split:0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("ds.mean::perf::split:0") + .reference.tolerance.lower <- perf.reference.tolerance.lower("ds.mean::perf::split:0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("ds.mean::perf::split:0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed rate", expected.label = "upper threshold on rate") +}) + +# +# Done +# + +context("ds.mean::perf::shutdown") +disconnect.studies.dataset.cnsim() +context("ds.mean::perf::done") diff --git a/tests/testthat/test-perf-void.R b/tests/testthat/test-perf-void.R new file mode 100644 index 000000000..d8ebd661a --- /dev/null +++ b/tests/testthat/test-perf-void.R @@ -0,0 +1,56 @@ +#------------------------------------------------------------------------------- +# Copyright (c) 2024 Arjuna Technologies, Newcastle upon Tyne. All rights reserved. +# +# This program and the accompanying materials +# are made available under the terms of the GNU Public License v3.0. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+#------------------------------------------------------------------------------- + +# +# Set up +# + +context("void::perf::void::setup") + +# +# Tests +# + +context("void::perf::void::0") +test_that("simple void performance", { + .durationSec <- 60 # seconds + .count <- 0 + .start.time <- Sys.time() + .current.time <- .start.time + + while (difftime(.current.time, .start.time, units = "secs")[[1]] < .durationSec) { + .count <- .count + 1 + .current.time <- Sys.time() + } + + .current.rate <- .count / (difftime(.current.time, .start.time, units = "secs")[[1]]) + .reference.rate <- perf.reference.rate("void::perf::void::0") + if (any(length(.reference.rate) == 0) || any(is.null(.reference.rate))) { + print(paste("void::perf::void::0 ", .current.rate, 0.5, 2.0)) + perf.reference.save("void::perf::void::0", .current.rate, 0.5, 2.0) + } else { + print(paste("void::perf::void::0 ", format(.current.rate, digits = 8), ", ", format(100.0 * .current.rate / .reference.rate, digits = 4), "%", sep = '')) + } + + .reference.rate <- perf.reference.rate("void::perf::void::0") + .reference.tolerance.lower <- perf.reference.tolerance.lower("void::perf::void::0") + .reference.tolerance.upper <- perf.reference.tolerance.upper("void::perf::void::0") + + expect_gt(.current.rate, .reference.rate * .reference.tolerance.lower, label = "Observed rate", expected.label = "lower threshold on rate") + expect_lt(.current.rate, .reference.rate * .reference.tolerance.upper, label = "Observed rate", expected.label = "upper threshold on rate") +}) + +# +# Done +# + +context("void::perf::void::shutdown") + +context("void::perf::void::done") diff --git a/tests/testthat/test-smk-ds.glmerSLMA.R b/tests/testthat/test-smk-ds.glmerSLMA.R index cd14273f2..0d9720063 100644 --- a/tests/testthat/test-smk-ds.glmerSLMA.R +++ b/tests/testthat/test-smk-ds.glmerSLMA.R @@ -110,7 +110,7 @@ test_that("check slope formulae - 1", { expect_length(res$num.valid.studies, 1) expect_equal(class(res$num.valid.studies), 
"numeric") expect_length(res$betamatrix.all, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$betamatrix.all), 1) expect_true("matrix" %in% class(res$betamatrix.all)) @@ -122,7 +122,7 @@ test_that("check slope formulae - 1", { expect_true("array" %in% class(res$betamatrix.all)) } expect_length(res$sematrix.all, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$sematrix.all), 1) expect_true("matrix" %in% class(res$sematrix.all)) @@ -134,7 +134,7 @@ test_that("check slope formulae - 1", { expect_true("array" %in% class(res$sematrix.all)) } expect_length(res$betamatrix.valid, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$betamatrix.valid), 1) expect_true("matrix" %in% class(res$betamatrix.valid)) @@ -146,7 +146,7 @@ test_that("check slope formulae - 1", { expect_true("array" %in% class(res$betamatrix.valid)) } expect_length(res$sematrix.valid, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$sematrix.valid), 1) expect_true("matrix" %in% class(res$sematrix.valid)) @@ -158,7 +158,7 @@ test_that("check slope formulae - 1", { expect_true("array" %in% class(res$sematrix.valid)) } expect_length(res$SLMA.pooled.ests.matrix, 18) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$SLMA.pooled.ests.matrix), 1) expect_true("matrix" %in% class(res$SLMA.pooled.ests.matrix)) @@ -182,7 +182,7 @@ test_that("check slope formulae - 2", { expect_length(res$num.valid.studies, 1) expect_equal(class(res$num.valid.studies), "numeric") expect_length(res$betamatrix.all, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$betamatrix.all), 1) expect_true("matrix" %in% class(res$betamatrix.all)) @@ -194,7 +194,7 @@ test_that("check slope formulae - 2", { expect_true("array" %in% class(res$betamatrix.all)) } 
expect_length(res$sematrix.all, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$sematrix.all), 1) expect_true("matrix" %in% class(res$sematrix.all)) @@ -206,7 +206,7 @@ test_that("check slope formulae - 2", { expect_true("array" %in% class(res$sematrix.all)) } expect_length(res$betamatrix.valid, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$betamatrix.valid), 1) expect_true("matrix" %in% class(res$betamatrix.valid)) @@ -218,7 +218,7 @@ test_that("check slope formulae - 2", { expect_true("array" %in% class(res$betamatrix.valid)) } expect_length(res$sematrix.valid, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$sematrix.valid), 1) expect_true("matrix" %in% class(res$sematrix.valid)) @@ -230,7 +230,7 @@ test_that("check slope formulae - 2", { expect_true("array" %in% class(res$sematrix.valid)) } expect_length(res$SLMA.pooled.ests.matrix, 18) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$SLMA.pooled.ests.matrix), 1) expect_true("matrix" %in% class(res$SLMA.pooled.ests.matrix)) diff --git a/tests/testthat/test-smk-ds.lmerSLMA.R b/tests/testthat/test-smk-ds.lmerSLMA.R index 1ee34179e..c552a65ff 100644 --- a/tests/testthat/test-smk-ds.lmerSLMA.R +++ b/tests/testthat/test-smk-ds.lmerSLMA.R @@ -112,7 +112,7 @@ test_that("check slope formulae", { expect_length(res$num.valid.studies, 1) expect_equal(class(res$num.valid.studies), "numeric") expect_length(res$betamatrix.all, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$betamatrix.all), 1) expect_true("matrix" %in% class(res$betamatrix.all)) @@ -124,7 +124,7 @@ test_that("check slope formulae", { expect_true("array" %in% class(res$betamatrix.all)) } expect_length(res$sematrix.all, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { 
expect_length(class(res$sematrix.all), 1) expect_true("matrix" %in% class(res$sematrix.all)) @@ -136,7 +136,7 @@ test_that("check slope formulae", { expect_true("array" %in% class(res$sematrix.all)) } expect_length(res$betamatrix.valid, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$betamatrix.valid), 1) expect_true("matrix" %in% class(res$betamatrix.valid)) @@ -148,7 +148,7 @@ test_that("check slope formulae", { expect_true("array" %in% class(res$betamatrix.valid)) } expect_length(res$sematrix.valid, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$sematrix.valid), 1) expect_true("matrix" %in% class(res$sematrix.valid)) @@ -160,7 +160,7 @@ test_that("check slope formulae", { expect_true("array" %in% class(res$sematrix.valid)) } expect_length(res$SLMA.pooled.ests.matrix, 18) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$SLMA.pooled.ests.matrix), 1) expect_true("matrix" %in% class(res$SLMA.pooled.ests.matrix)) @@ -184,7 +184,7 @@ test_that("check slope formulae", { expect_length(res$num.valid.studies, 1) expect_equal(class(res$num.valid.studies), "numeric") expect_length(res$betamatrix.all, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$betamatrix.all), 1) expect_true("matrix" %in% class(res$betamatrix.all)) @@ -196,7 +196,7 @@ test_that("check slope formulae", { expect_true("array" %in% class(res$betamatrix.all)) } expect_length(res$sematrix.all, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$sematrix.all), 1) expect_true("matrix" %in% class(res$sematrix.all)) @@ -208,7 +208,7 @@ test_that("check slope formulae", { expect_true("array" %in% class(res$sematrix.all)) } expect_length(res$betamatrix.valid, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { 
expect_length(class(res$betamatrix.valid), 1) expect_true("matrix" %in% class(res$betamatrix.valid)) @@ -220,7 +220,7 @@ test_that("check slope formulae", { expect_true("array" %in% class(res$betamatrix.valid)) } expect_length(res$sematrix.valid, 9) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$sematrix.valid), 1) expect_true("matrix" %in% class(res$sematrix.valid)) @@ -232,7 +232,7 @@ test_that("check slope formulae", { expect_true("array" %in% class(res$sematrix.valid)) } expect_length(res$SLMA.pooled.ests.matrix, 18) - if (base::getRversion() < 4.0) + if (base::getRversion() < '4.0.0') { expect_length(class(res$SLMA.pooled.ests.matrix), 1) expect_true("matrix" %in% class(res$SLMA.pooled.ests.matrix)) diff --git a/tests/testthat/test-smk-ds.metadata.R b/tests/testthat/test-smk-ds.metadata.R index 1b60d4b78..a3f5329af 100644 --- a/tests/testthat/test-smk-ds.metadata.R +++ b/tests/testthat/test-smk-ds.metadata.R @@ -28,23 +28,43 @@ context("ds.metadata::smk") test_that("data.frame metadata", { res <- ds.metadata(x='D') - expect_true(all(class(res) %in% c('list'))) - expect_length(res, 3) - expect_length(res$sim1, 2) - expect_true(all(class(res$sim1) %in% c('list'))) - expect_true(all(names(res$sim1) %in% c('names', 'spec', 'class'))) - expect_length(res$sim1$names, 11) - expect_length(res$sim1$class, 1) - expect_length(res$sim2, 2) - expect_true(all(class(res$sim2) %in% c('list'))) - expect_true(all(names(res$sim2) %in% c('names', 'spec', 'class'))) - expect_length(res$sim2$names, 11) - expect_length(res$sim2$class, 1) - expect_length(res$sim3, 2) - expect_true(all(class(res$sim3) %in% c('list'))) - expect_true(all(names(res$sim3) %in% c('names', 'spec', 'class'))) - expect_length(res$sim3$names, 11) - expect_length(res$sim3$class, 1) + if (ds.test_env$driver == "ArmadilloDriver") { + expect_true(all(class(res) %in% c('list'))) + expect_length(res, 3) + expect_length(res$sim1, 2) + expect_true(all(class(res$sim1) 
%in% c('list'))) + expect_true(all(names(res$sim1) %in% c('names', 'spec', 'class'))) + expect_length(res$sim1$names, 11) + expect_length(res$sim1$class, 1) + expect_length(res$sim2, 2) + expect_true(all(class(res$sim2) %in% c('list'))) + expect_true(all(names(res$sim2) %in% c('names', 'spec', 'class'))) + expect_length(res$sim2$names, 11) + expect_length(res$sim2$class, 1) + expect_length(res$sim3, 2) + expect_true(all(class(res$sim3) %in% c('list'))) + expect_true(all(names(res$sim3) %in% c('names', 'spec', 'class'))) + expect_length(res$sim3$names, 11) + expect_length(res$sim3$class, 1) + } else if (ds.test_env$driver == "OpalDriver") { + expect_true(all(class(res) %in% c('list'))) + expect_length(res, 3) + expect_length(res$sim1, 2) + expect_true(all(class(res$sim1) %in% c('list'))) + expect_true(all(names(res$sim1) %in% c('names', 'spec', 'class'))) + expect_length(res$sim1$names, 11) + expect_length(res$sim1$class, 1) + expect_length(res$sim2, 2) + expect_true(all(class(res$sim2) %in% c('list'))) + expect_true(all(names(res$sim2) %in% c('names', 'spec', 'class'))) + expect_length(res$sim2$names, 11) + expect_length(res$sim2$class, 1) + expect_length(res$sim3, 2) + expect_true(all(class(res$sim3) %in% c('list'))) + expect_true(all(names(res$sim3) %in% c('names', 'spec', 'class'))) + expect_length(res$sim3$names, 11) + expect_length(res$sim3$class, 1) + } }) test_that("column metadata", { @@ -53,9 +73,9 @@ test_that("column metadata", { if (ds.test_env$driver == "ArmadilloDriver") { expect_true(all(class(res) %in% c('list'))) expect_length(res, 3) - expect_length(res$sim1, 0) - expect_length(res$sim2, 0) - expect_length(res$sim3, 0) + expect_length(res$sim1, 7) + expect_length(res$sim2, 7) + expect_length(res$sim3, 7) } else if (ds.test_env$driver == "OpalDriver") { expect_length(res$sim1, 6) expect_true(all(class(res$sim1) %in% c('list'))) diff --git a/tests/testthat/test-smk-ds.sample.R b/tests/testthat/test-smk-ds.sample.R index e37492f13..8a251e3bb 100644 
--- a/tests/testthat/test-smk-ds.sample.R +++ b/tests/testthat/test-smk-ds.sample.R @@ -122,9 +122,9 @@ test_that("simple test, error", { res.errors <- DSI::datashield.errors() expect_length(res.errors, 3) - expect_match(res.errors$survival1, "* Error : FAILED: if sampling without replacement size must be less than or equal to length\\(x\\)") - expect_match(res.errors$survival2, "* Error : FAILED: if sampling without replacement size must be less than or equal to length\\(x\\)") - expect_match(res.errors$survival3, "* Error : FAILED: if sampling without replacement size must be less than or equal to length\\(x\\)") + expect_match(res.errors$survival1, "* Error : FAILED: if sampling without replacement size must be less than or equal to length\\(x\\)*") + expect_match(res.errors$survival2, "* Error : FAILED: if sampling without replacement size must be less than or equal to length\\(x\\)*") + expect_match(res.errors$survival3, "* Error : FAILED: if sampling without replacement size must be less than or equal to length\\(x\\)*") }) # diff --git a/tests/testthat/test-smk-ds.subset.R b/tests/testthat/test-smk-ds.subset.R index cc19a71ae..4accdfe5e 100644 --- a/tests/testthat/test-smk-ds.subset.R +++ b/tests/testthat/test-smk-ds.subset.R @@ -34,7 +34,7 @@ test_that("subD_exists", { expect_true(res$sim3) }) -context("ds.subset::smk::generate a subset of the assigned table (by default the table is named 'D') with the first 50 observations and the two first columns refered to by their names") +context("ds.subset::smk::generate a subset of the assigned table (by default the table is named 'D') with the first 50 observations and the two first columns referred to by their names") ds.subset(subset='subD2', x='D', rows=c(1:50), cols = c('DIS_DIAB','PM_BMI_CONTINUOUS')) res <- ds.exists('subD2') test_that("subD2_exists", { diff --git a/tests/testthat/test-smk-ds.summary.R b/tests/testthat/test-smk-ds.summary.R index ca67f927d..fd39b021c 100644 --- 
a/tests/testthat/test-smk-ds.summary.R +++ b/tests/testthat/test-smk-ds.summary.R @@ -28,9 +28,9 @@ test_that("summary_dataframe_variable", { res <- ds.summary(x='D') expect_length(res, 3) - expect_equal(res$sim1$class, "data.frame") - expect_equal(res$sim2$class, "data.frame") - expect_equal(res$sim3$class, "data.frame") + expect_true(all(res$sim1$class %in% c("tbl_df", "tbl", "data.frame"))) + expect_true(all(res$sim2$class %in% c("tbl_df", "tbl", "data.frame"))) + expect_true(all(res$sim3$class %in% c("tbl_df", "tbl", "data.frame"))) expect_equal(res$sim1$`number of rows`, 2163) expect_equal(res$sim2$`number of rows`, 3088) expect_equal(res$sim3$`number of rows`, 4128) diff --git a/tests/testthat/test-smk-ds.table.R b/tests/testthat/test-smk-ds.table.R index 7a44dc3d2..bae8aae42 100644 --- a/tests/testthat/test-smk-ds.table.R +++ b/tests/testthat/test-smk-ds.table.R @@ -83,8 +83,8 @@ test_that("simple table 3D", { expect_length(res.errors, 2) - expect_match(res.errors$study1, "* Error : Failed: at least one cell has a non-zero count less than nfilter.tab i.e. 3") - expect_match(res.errors$study2, "* Error : Failed: at least one cell has a non-zero count less than nfilter.tab i.e. 3") + expect_match(res.errors$study1, "* Error : Failed: at least one cell has a non-zero count less than nfilter.tab i.e. 3*") + expect_match(res.errors$study2, "* Error : Failed: at least one cell has a non-zero count less than nfilter.tab i.e. 3*") }) test_that("simple table 1D, with assign", { @@ -133,8 +133,8 @@ test_that("simple table 3D, with assign", { expect_length(res.errors, 2) - expect_match(res.errors$study1, "* Error : Failed: at least one cell has a non-zero count less than nfilter.tab i.e. 3") - expect_match(res.errors$study2, "* Error : Failed: at least one cell has a non-zero count less than nfilter.tab i.e. 3") + expect_match(res.errors$study1, "* Error : Failed: at least one cell has a non-zero count less than nfilter.tab i.e. 
3*") + expect_match(res.errors$study2, "* Error : Failed: at least one cell has a non-zero count less than nfilter.tab i.e. 3*") }) # diff --git a/tests/testthat/test-smk-isDefined.R b/tests/testthat/test-smk-isDefined.R index 6abe9fafa..5f37b0aa6 100644 --- a/tests/testthat/test-smk-isDefined.R +++ b/tests/testthat/test-smk-isDefined.R @@ -52,9 +52,9 @@ test_that("default test, dataframe column E$A", { expect_length(class(err), 1) expect_true(all("list" %in% class(err))) expect_length(err, 3) - expect_match(err$discordant1, "* Error in base::exists\\(\"A\", E\\) : object 'E' not found") - expect_match(err$discordant2, "* Error in base::exists\\(\"A\", E\\) : object 'E' not found") - expect_match(err$discordant3, "* Error in base::exists\\(\"A\", E\\) : object 'E' not found") + expect_match(err$discordant1, "* : object 'E' not found") + expect_match(err$discordant2, "* : object 'E' not found") + expect_match(err$discordant3, "* : object 'E' not found") }) test_that("default test, dataframe column D$A", { @@ -123,9 +123,9 @@ test_that("error.message=FALSE test, dataframe column E$A", { expect_length(class(err), 1) expect_true(all("list" %in% class(err))) expect_length(err, 3) - expect_match(err$discordant1, "* Error in base::exists\\(\"A\", E\\) : object 'E' not found") - expect_match(err$discordant2, "* Error in base::exists\\(\"A\", E\\) : object 'E' not found") - expect_match(err$discordant3, "* Error in base::exists\\(\"A\", E\\) : object 'E' not found") + expect_match(err$discordant1, "* : object 'E' not found") + expect_match(err$discordant2, "* : object 'E' not found") + expect_match(err$discordant3, "* : object 'E' not found") }) test_that("error.message=FALSE test, dataframe column D$A", { @@ -222,9 +222,9 @@ test_that("error.message=TRUE test, dataframe column E$A", { expect_length(class(err), 1) expect_true(all("list" %in% class(err))) expect_length(err, 3) - expect_match(err$discordant1, "* Error in base::exists\\(\"A\", E\\) : object 'E' not found") - 
expect_match(err$discordant2, "* Error in base::exists\\(\"A\", E\\) : object 'E' not found") - expect_match(err$discordant3, "* Error in base::exists\\(\"A\", E\\) : object 'E' not found") + expect_match(err$discordant1, "* : object 'E' not found") + expect_match(err$discordant2, "* : object 'E' not found") + expect_match(err$discordant3, "* : object 'E' not found") }) test_that("error.message=TRUE test, dataframe column D$A", {