diff --git a/.env.sample b/.env.sample
index 2c15a1c0a..06e9ab1a4 100644
--- a/.env.sample
+++ b/.env.sample
@@ -1,4 +1,15 @@
API_URL=http://localhost:9393
ONTOLOGIES_LINKED_DATA_PATH=
GOO_PATH=
-SPARQL_CLIENT_PATH=
\ No newline at end of file
+SPARQL_CLIENT_PATH=
+
+REDIS_GOO_CACHE_HOST=redis-ut
+REDIS_HTTP_CACHE_HOST=redis-ut
+REDIS_PERSISTENT_HOST=redis-ut
+
+## An ontology that will be imported when the API server starts
+STARTER_ONTOLOGY=STY
+## API key of a remote API used to download the starter ontology
+OP_API_KEY=your-api-key-here
+## API url of the remote API used to download the starter ontology
+OP_API_URL="https://data.bioontology.org"
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index d9af054d8..0eb08341c 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -69,7 +69,7 @@ jobs:
- uses: actions/checkout@v3
- uses: ruby/setup-ruby@v1
with:
- ruby-version: 2.7.8 # Not needed with a .ruby-version file
+ ruby-version: 3.1.0 # Not needed with a .ruby-version file
bundler-cache: true # runs 'bundle install' and caches installed gems automatically
- name: get-deployment-config
uses: actions/checkout@v3
diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
index 9d47b3f9d..a414f46e1 100644
--- a/.github/workflows/docker-image.yml
+++ b/.github/workflows/docker-image.yml
@@ -1,5 +1,4 @@
name: Docker branch Images build
-
on:
push:
branches:
@@ -9,28 +8,31 @@ on:
- test
release:
types: [ published ]
+
jobs:
push_to_registry:
name: Push Docker branch image to Docker Hub
runs-on: ubuntu-latest
steps:
- name: Check out the repo
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up QEMU
- uses: docker/setup-qemu-action@v2
+ uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v2
+ uses: docker/setup-buildx-action@v3
+ with:
+ platforms: linux/amd64,linux/arm64
- name: Log in to Docker Hub
- uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Log in to the Container registry
- uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
+ uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -38,19 +40,22 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
- uses: docker/metadata-action@v4
+ uses: docker/metadata-action@v5
with:
images: |
agroportal/ontologies_api
ghcr.io/${{ github.repository }}
- name: Build and push Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm64
build-args: |
- RUBY_VERSION=2.7.8
+ RUBY_VERSION=3.1
+          BUILDPLATFORM=${{ github.job }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
+ cache-from: type=gha
+ cache-to: type=gha,mode=max
diff --git a/.github/workflows/ruby-unit-tests.yml b/.github/workflows/ruby-unit-tests.yml
index 16d8357ef..39d28a8c5 100644
--- a/.github/workflows/ruby-unit-tests.yml
+++ b/.github/workflows/ruby-unit-tests.yml
@@ -2,15 +2,14 @@ name: Ruby Unit Tests
on:
push:
- pull_request:
jobs:
test:
strategy:
fail-fast: false
matrix:
- goo-slice: [ '20', '100', '500' ]
- ruby-version: [ '2.7' ]
+ goo-slice: [ '100' ]
+ ruby-version: [ '3.2.0' ]
triplestore: [ 'fs', 'ag', 'vo', 'gb' ]
runs-on: ubuntu-latest
steps:
@@ -28,11 +27,10 @@ jobs:
ruby-version: ${{ matrix.ruby-version }}
bundler-cache: true # runs 'bundle install' and caches installed gems automatically
- name: Run unit tests
- # unit tests are run inside a container
- # http://docs.codecov.io/docs/testing-with-docker
run: |
ci_env=`bash <(curl -s https://codecov.io/env)`
- GOO_SLICES=${{ matrix.goo-slice }} bundle exec rake test:docker:${{ matrix.triplestore }} TESTOPTS="-v"
+ GOO_SLICES=${{ matrix.goo-slice }} bundle exec rake test:docker:${{ matrix.triplestore }}
+
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
with:
diff --git a/.ruby-version b/.ruby-version
new file mode 100644
index 000000000..fd2a01863
--- /dev/null
+++ b/.ruby-version
@@ -0,0 +1 @@
+3.1.0
diff --git a/Dockerfile b/Dockerfile
index a2bed4861..ca31be04d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,29 +1,57 @@
-ARG RUBY_VERSION=3.0
-ARG DISTRO_NAME=bullseye
+# syntax=docker/dockerfile:1
-FROM ruby:$RUBY_VERSION-$DISTRO_NAME
+# Build arguments with specific versions for better reproducibility
+ARG RUBY_VERSION=3.1
+ARG DISTRO_NAME=slim-bookworm
-RUN apt-get update -yqq && apt-get install -yqq --no-install-recommends \
- openjdk-11-jre-headless \
- raptor2-utils \
- wait-for-it \
- libraptor2-dev \
- && rm -rf /var/lib/apt/lists/*
-
-RUN mkdir -p /srv/ontoportal/ontologies_api
-RUN mkdir -p /srv/ontoportal/bundle
-COPY Gemfile* /srv/ontoportal/ontologies_api/
+FROM ruby:${RUBY_VERSION}-${DISTRO_NAME}
WORKDIR /srv/ontoportal/ontologies_api
-RUN gem update --system 3.4.22 # the 3.4.22 can be removed if we support Ruby version > 3.0
+# Set environment variables
+ENV BUNDLE_PATH=/srv/ontoportal/bundle \
+ BUNDLE_JOBS=4 \
+ BUNDLE_RETRY=5 \
+ RAILS_ENV=production \
+ DEBIAN_FRONTEND=noninteractive
+
+# Install system dependencies
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ ca-certificates \
+ openjdk-17-jre-headless \
+ raptor2-utils \
+ wait-for-it \
+ libraptor2-dev \
+ build-essential \
+ libxml2 \
+ libxslt-dev \
+ libmariadb-dev \
+ git \
+ curl \
+ libffi-dev \
+ file \
+ pandoc \
+ pkg-config && \
+ apt-get clean && \
+ rm -rf /var/lib/apt/lists/*
+
RUN gem install bundler
-ENV BUNDLE_PATH=/srv/ontoportal/bundle
-RUN bundle install
-COPY . /srv/ontoportal/ontologies_api
-RUN cp /srv/ontoportal/ontologies_api/config/environments/config.rb.sample /srv/ontoportal/ontologies_api/config/environments/development.rb
-RUN cp /srv/ontoportal/ontologies_api/config/environments/config.rb.sample /srv/ontoportal/ontologies_api/config/environments/production.rb
+COPY Gemfile* ./
+
+# Install dependencies
+RUN bundle install --jobs ${BUNDLE_JOBS} --retry ${BUNDLE_RETRY}
+# Copy application code
+COPY . .
+
+# Copy config files
+RUN cp config/environments/config.rb.sample config/environments/development.rb && \
+ cp config/environments/config.rb.sample config/environments/production.rb
+
+# Expose port
EXPOSE 9393
+
+# Start command
CMD ["bundle", "exec", "rackup", "-p", "9393", "--host", "0.0.0.0"]
diff --git a/Gemfile b/Gemfile
index 5082ef4e8..4c3e56380 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,40 +1,43 @@
source 'https://rubygems.org'
-gem 'activesupport', '~> 5'
-# see https://github.com/ncbo/ontologies_api/issues/69
+gem 'activesupport'
gem 'bigdecimal'
-# gem 'faraday', '~> 1.9'
-gem 'json-schema', '~> 2.0'
+gem 'json-schema'
gem 'multi_json'
gem 'oj'
gem 'parseconfig'
gem 'rack'
-gem 'rake', '~> 10.0'
+gem 'rake'
gem 'rexml' # Investigate why unicorn fails to start under ruby 3 without adding rexml gem to the Gemfile
-gem 'sinatra', '~> 1.0'
-gem 'sinatra-advanced-routes'
-gem 'sinatra-contrib', '~> 1.0'
+gem 'sinatra'
+gem 'rackup'
+
+github 'sinatra/sinatra' do
+ gem 'sinatra-contrib'
+end
+
gem 'request_store'
gem 'parallel'
-gem 'json-ld'
-gem 'google-protobuf', '3.25.3'
+gem 'google-protobuf'
+gem 'net-ftp'
+gem 'json-ld', '~> 3.2.0'
+gem 'rdf-raptor', github:'ruby-rdf/rdf-raptor', ref: '6392ceabf71c3233b0f7f0172f662bd4a22cd534' # use version 3.3.0 when available
# Rack middleware
-gem 'ffi', '~> 1.16.3'
-gem 'rack-accept', '~> 0.4'
-gem 'rack-attack', '~> 6.6.1', require: 'rack/attack'
-gem 'rack-cache', '~> 1.13.0'
+gem 'ffi', '~> 1.15.0'
+gem 'rack-accept'
+gem 'rack-attack', require: 'rack/attack'
+gem 'rack-cache'
gem 'rack-cors', require: 'rack/cors'
# GitHub dependency can be removed when https://github.com/niko/rack-post-body-to-params/pull/6 is merged and released
gem 'rack-post-body-to-params', github: 'palexander/rack-post-body-to-params', branch: 'multipart_support'
gem 'rack-timeout'
-gem 'redis-rack-cache', '~> 2.0'
+gem 'redis-rack-cache'
# Data access (caching)
gem 'redis'
-gem 'redis-store', '~>1.10'
+gem 'redis-store'
# Monitoring
-gem 'cube-ruby', require: 'cube'
gem 'newrelic_rpm', group: [:default, :deployment]
# HTTP server
@@ -42,21 +45,21 @@ gem 'unicorn'
gem 'unicorn-worker-killer'
# Templating
-gem 'haml', '~> 5.2.2' # pin see https://github.com/ncbo/ontologies_api/pull/107
-gem 'redcarpet'
+gem 'haml', '~> 5.2.2'
+gem 'rack-contrib'
+gem 'pandoc-ruby'
# NCBO gems (can be from a local dev path or from rubygems/git)
gem 'ncbo_annotator', git: 'https://github.com/ontoportal-lirmm/ncbo_annotator.git', branch: 'development'
gem 'ncbo_cron', git: 'https://github.com/ontoportal-lirmm/ncbo_cron.git', branch: 'master'
gem 'ncbo_ontology_recommender', git: 'https://github.com/ontoportal-lirmm/ncbo_ontology_recommender.git', branch: 'development'
+gem 'ontologies_linked_data', github: 'ontoportal-lirmm/ontologies_linked_data', branch: 'development'
gem 'goo', github: 'ontoportal-lirmm/goo', branch: 'development'
gem 'sparql-client', github: 'ontoportal-lirmm/sparql-client', branch: 'development'
-gem 'ontologies_linked_data', git: 'https://github.com/ontoportal-lirmm/ontologies_linked_data.git', branch: 'development'
-
group :development do
# bcrypt_pbkdf and ed35519 is required for capistrano deployments when using ed25519 keys; see https://github.com/miloserdow/capistrano-deploy/issues/42
- gem 'shotgun', github: 'palexander/shotgun', branch: 'ncbo'
+ gem 'shotgun', github: 'syphax-bouazzouni/shotgun', branch: 'master'
gem 'rubocop'
end
@@ -77,12 +80,14 @@ end
group :test do
gem 'crack', '0.4.5'
- gem 'minitest', '~> 5.0'
- gem 'minitest-hooks', "~> 1.5"
+ gem 'minitest'
+ gem 'minitest-hooks'
gem 'minitest-stub_any_instance'
+ gem 'minitest-reporters'
+ gem 'minitest-fail-fast'
gem 'rack-test'
gem 'simplecov', require: false
gem 'simplecov-cobertura' # for codecov.io
- gem 'webmock', '~> 3.19.1'
+ gem 'webmock'
gem 'webrick'
end
diff --git a/Gemfile.lock b/Gemfile.lock
index e2a975019..8398ea56a 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,12 +1,12 @@
GIT
remote: https://github.com/ontoportal-lirmm/goo.git
- revision: f8ac7b00e8d8b46d1eea04de014175525c1cdd83
+ revision: e48a2d13a65cc2dd1c12d116cfc9da9061106861
branch: development
specs:
goo (0.0.2)
addressable (~> 2.8)
pry
- rdf (= 3.2.11)
+ rdf
rdf-raptor
rdf-rdfxml
rdf-vocab
@@ -29,7 +29,7 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/ncbo_cron.git
- revision: 37a9573c11978869a867050f8ec75e048c8b9b2b
+ revision: cc4cd9218db7181c4843772631b7f3a96c74a4aa
branch: master
specs:
ncbo_cron (0.0.1)
@@ -42,7 +42,7 @@ GIT
ncbo_annotator
ontologies_linked_data
redis
- rufus-scheduler (~> 2.0.24)
+ rufus-scheduler
GIT
remote: https://github.com/ontoportal-lirmm/ncbo_ontology_recommender.git
@@ -57,7 +57,7 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/ontologies_linked_data.git
- revision: 312ef426eeaa461e88fa23124ea5fd531f4276ba
+ revision: b321d73a28b4f60fc5969da7a071b3c19c1a84f3
branch: development
specs:
ontologies_linked_data (0.0.1)
@@ -77,7 +77,7 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/sparql-client.git
- revision: 59251e59346c9a69a67c88552ba55a1244eec602
+ revision: 736b7650e28db3ce5e3e49511ac30f958a29e8f1
branch: development
specs:
sparql-client (3.2.2)
@@ -93,33 +93,69 @@ GIT
activesupport (>= 2.3)
GIT
- remote: https://github.com/palexander/shotgun.git
- revision: db198224aaab2e4cb9b049adccb30e387d88bc3b
- branch: ncbo
+ remote: https://github.com/ruby-rdf/rdf-raptor.git
+ revision: 6392ceabf71c3233b0f7f0172f662bd4a22cd534
+ ref: 6392ceabf71c3233b0f7f0172f662bd4a22cd534
specs:
- shotgun (0.9)
- rack (>= 1.0)
+ rdf-raptor (3.3.0)
+ ffi (~> 1.15)
+ rdf (~> 3.3)
+
+GIT
+ remote: https://github.com/sinatra/sinatra.git
+ revision: c235249abaafa2780b540aca1813dfcf3d17c2dd
+ specs:
+ rack-protection (4.1.1)
+ base64 (>= 0.1.0)
+ logger (>= 1.6.0)
+ rack (>= 3.0.0, < 4)
+ sinatra-contrib (4.1.1)
+ multi_json (>= 0.0.2)
+ mustermann (~> 3.0)
+ rack-protection (= 4.1.1)
+ sinatra (= 4.1.1)
+ tilt (~> 2.0)
+
+GIT
+ remote: https://github.com/syphax-bouazzouni/shotgun.git
+ revision: 421f4d0bc2f3193f7cd4b634f5f8ccab09f6b0f7
+ branch: master
+ specs:
+ shotgun (0.9.2)
+ rack
+ rackup
GEM
remote: https://rubygems.org/
specs:
- activesupport (5.2.8.1)
- concurrent-ruby (~> 1.0, >= 1.0.2)
- i18n (>= 0.7, < 2)
- minitest (~> 5.1)
- tzinfo (~> 1.1)
+ activesupport (7.2.2.1)
+ base64
+ benchmark (>= 0.3)
+ bigdecimal
+ concurrent-ruby (~> 1.0, >= 1.3.1)
+ connection_pool (>= 2.2.5)
+ drb
+ i18n (>= 1.6, < 2)
+ logger (>= 1.4.2)
+ minitest (>= 5.1)
+ securerandom (>= 0.3)
+ tzinfo (~> 2.0, >= 2.0.5)
addressable (2.8.7)
public_suffix (>= 2.0.2, < 7.0)
airbrussh (1.5.3)
sshkit (>= 1.6.1, != 1.7.0)
+ ansi (1.5.0)
ast (2.4.2)
- backports (3.25.0)
base64 (0.2.0)
+ bcp47_spec (0.2.1)
bcrypt (3.1.20)
bcrypt_pbkdf (1.1.1)
- bigdecimal (3.1.8)
+ bcrypt_pbkdf (1.1.1-arm64-darwin)
+ bcrypt_pbkdf (1.1.1-x86_64-darwin)
+ benchmark (0.4.0)
+ bigdecimal (3.1.9)
builder (3.3.0)
- capistrano (3.19.1)
+ capistrano (3.19.2)
airbrussh (>= 1.0.0)
i18n
rake (>= 10.0.0)
@@ -132,44 +168,52 @@ GEM
capistrano (~> 3.1)
sshkit (~> 1.3)
coderay (1.1.3)
- concurrent-ruby (1.3.4)
- connection_pool (2.4.1)
+ concurrent-ruby (1.3.5)
+ connection_pool (2.5.0)
crack (0.4.5)
rexml
- cube-ruby (0.0.3)
dante (0.2.0)
- date (3.4.0)
+ date (3.4.1)
declarative (0.0.20)
docile (1.4.1)
domain_name (0.6.20240107)
+ drb (2.2.1)
ed25519 (1.3.0)
- faraday (2.8.1)
- base64
- faraday-net_http (>= 2.0, < 3.1)
- ruby2_keywords (>= 0.0.4)
- faraday-net_http (3.0.2)
+ et-orbi (1.2.11)
+ tzinfo
+ faraday (2.12.2)
+ faraday-net_http (>= 2.0, < 3.5)
+ json
+ logger
+ faraday-net_http (3.4.0)
+ net-http (>= 0.5.0)
faraday-retry (2.2.1)
faraday (~> 2.0)
- ffi (1.16.3)
- gapic-common (0.21.1)
+ ffi (1.15.5)
+ fugit (1.11.1)
+ et-orbi (~> 1, >= 1.2.11)
+ raabro (~> 1.4)
+ gapic-common (0.25.0)
faraday (>= 1.9, < 3.a)
faraday-retry (>= 1.0, < 3.a)
- google-protobuf (~> 3.18)
- googleapis-common-protos (>= 1.4.0, < 2.a)
- googleapis-common-protos-types (>= 1.11.0, < 2.a)
- googleauth (~> 1.9)
- grpc (~> 1.59)
+ google-cloud-env (~> 2.2)
+ google-logging-utils (~> 0.1)
+ google-protobuf (>= 3.25, < 5.a)
+ googleapis-common-protos (~> 1.6)
+ googleapis-common-protos-types (~> 1.15)
+ googleauth (~> 1.12)
+ grpc (~> 1.66)
get_process_mem (0.2.7)
ffi (~> 1.0)
- google-analytics-data (0.6.1)
+ google-analytics-data (0.7.0)
google-analytics-data-v1beta (>= 0.11, < 2.a)
google-cloud-core (~> 1.6)
- google-analytics-data-v1beta (0.13.1)
- gapic-common (>= 0.21.1, < 2.a)
+ google-analytics-data-v1beta (0.16.0)
+ gapic-common (>= 0.25.0, < 2.a)
google-cloud-errors (~> 1.0)
google-apis-analytics_v3 (0.16.0)
google-apis-core (>= 0.15.0, < 2.a)
- google-apis-core (0.15.1)
+ google-apis-core (0.16.0)
addressable (~> 2.5, >= 2.5.1)
googleauth (~> 1.9)
httpclient (>= 2.8.3, < 3.a)
@@ -180,50 +224,80 @@ GEM
google-cloud-core (1.7.1)
google-cloud-env (>= 1.0, < 3.a)
google-cloud-errors (~> 1.0)
- google-cloud-env (2.1.1)
+ google-cloud-env (2.2.1)
faraday (>= 1.0, < 3.a)
google-cloud-errors (1.4.0)
- google-protobuf (3.25.3-x86_64-linux)
+ google-logging-utils (0.1.0)
+ google-protobuf (4.29.3)
+ bigdecimal
+ rake (>= 13)
+ google-protobuf (4.29.3-arm64-darwin)
+ bigdecimal
+ rake (>= 13)
+ google-protobuf (4.29.3-x86_64-darwin)
+ bigdecimal
+ rake (>= 13)
+ google-protobuf (4.29.3-x86_64-linux)
+ bigdecimal
+ rake (>= 13)
googleapis-common-protos (1.6.0)
google-protobuf (>= 3.18, < 5.a)
googleapis-common-protos-types (~> 1.7)
grpc (~> 1.41)
- googleapis-common-protos-types (1.16.0)
+ googleapis-common-protos-types (1.18.0)
google-protobuf (>= 3.18, < 5.a)
- googleauth (1.11.2)
+ googleauth (1.13.1)
faraday (>= 1.0, < 3.a)
- google-cloud-env (~> 2.1)
+ google-cloud-env (~> 2.2)
+ google-logging-utils (~> 0.1)
jwt (>= 1.4, < 3.0)
multi_json (~> 1.11)
os (>= 0.9, < 2.0)
signet (>= 0.16, < 2.a)
- grpc (1.65.2-x86_64-linux)
+ grpc (1.70.1)
+ google-protobuf (>= 3.25, < 5.0)
+ googleapis-common-protos-types (~> 1.0)
+ grpc (1.70.1-arm64-darwin)
+ google-protobuf (>= 3.25, < 5.0)
+ googleapis-common-protos-types (~> 1.0)
+ grpc (1.70.1-x86_64-darwin)
+ google-protobuf (>= 3.25, < 5.0)
+ googleapis-common-protos-types (~> 1.0)
+ grpc (1.70.1-x86_64-linux)
google-protobuf (>= 3.25, < 5.0)
googleapis-common-protos-types (~> 1.0)
haml (5.2.2)
temple (>= 0.8.0)
tilt
- hashdiff (1.1.1)
+ hashdiff (1.1.2)
htmlentities (4.3.4)
http-accept (1.7.0)
- http-cookie (1.0.7)
+ http-cookie (1.0.8)
domain_name (~> 0.5)
- httpclient (2.8.3)
- i18n (1.14.6)
+ httpclient (2.9.0)
+ mutex_m
+ i18n (1.14.7)
concurrent-ruby (~> 1.0)
- json (2.7.6)
- json-ld (3.0.2)
- multi_json (~> 1.12)
- rdf (>= 2.2.8, < 4.0)
- json-schema (2.8.1)
- addressable (>= 2.4)
- jwt (2.9.3)
+ json (2.10.1)
+ json-canonicalization (0.4.0)
+ json-ld (3.2.5)
+ htmlentities (~> 4.3)
+ json-canonicalization (~> 0.3, >= 0.3.2)
+ link_header (~> 0.0, >= 0.0.8)
+ multi_json (~> 1.15)
+ rack (>= 2.2, < 4)
+ rdf (~> 3.2, >= 3.2.10)
+ json-schema (5.1.1)
+ addressable (~> 2.8)
+ bigdecimal (~> 3.1)
+ jwt (2.10.1)
base64
kgio (2.11.4)
- language_server-protocol (3.17.0.3)
+ language_server-protocol (3.17.0.4)
libxml-ruby (5.0.3)
link_header (0.0.8)
- logger (1.6.1)
+ lint_roller (1.1.0)
+ logger (1.6.6)
macaddr (1.7.2)
systemu (~> 2.6.5)
mail (2.8.1)
@@ -235,98 +309,117 @@ GEM
mime-types (3.6.0)
logger
mime-types-data (~> 3.2015)
- mime-types-data (3.2024.1001)
+ mime-types-data (3.2025.0220)
mini_mime (1.1.5)
- minitest (5.25.1)
+ minitest (5.25.4)
+ minitest-fail-fast (0.1.0)
+ minitest (~> 5)
minitest-hooks (1.5.2)
minitest (> 5.3)
+ minitest-reporters (1.7.1)
+ ansi
+ builder
+ minitest (>= 5.0)
+ ruby-progressbar
minitest-stub_any_instance (1.0.3)
mlanett-redis-lock (0.2.7)
redis
multi_json (1.15.0)
- mutex_m (0.2.0)
- net-http-persistent (4.0.4)
+ mustermann (3.0.3)
+ ruby2_keywords (~> 0.0.1)
+ mutex_m (0.3.0)
+ net-ftp (0.3.8)
+ net-protocol
+ time
+ net-http (0.6.0)
+ uri
+ net-http-persistent (4.0.5)
connection_pool (~> 2.2)
- net-imap (0.4.17)
+ net-imap (0.5.6)
date
net-protocol
net-pop (0.1.2)
net-protocol
net-protocol (0.2.2)
timeout
- net-scp (4.0.0)
+ net-scp (4.1.0)
net-ssh (>= 2.6.5, < 8.0.0)
net-sftp (4.0.0)
net-ssh (>= 5.0.0, < 8.0.0)
- net-smtp (0.5.0)
+ net-smtp (0.5.1)
net-protocol
net-ssh (7.3.0)
netrc (0.11.0)
- newrelic_rpm (9.15.0)
- oj (3.16.7)
+ newrelic_rpm (9.17.0)
+ oj (3.16.9)
bigdecimal (>= 3.0)
ostruct (>= 0.2)
omni_logger (0.1.4)
logger
os (1.1.4)
- ostruct (0.6.0)
+ ostruct (0.6.1)
+ pandoc-ruby (2.1.10)
parallel (1.26.3)
parseconfig (1.1.2)
- parser (3.3.5.1)
+ parser (3.3.7.1)
ast (~> 2.4.1)
racc
pony (1.13.1)
mail (>= 2.0)
- pry (0.14.2)
+ pry (0.15.2)
coderay (~> 1.1)
method_source (~> 1.0)
- public_suffix (5.1.1)
+ public_suffix (6.0.1)
+ raabro (1.4.0)
racc (1.8.1)
- rack (1.6.13)
+ rack (3.1.10)
rack-accept (0.4.5)
rack (>= 0.4)
- rack-attack (6.6.1)
- rack (>= 1.0, < 3)
- rack-cache (1.13.0)
+ rack-attack (6.7.0)
+ rack (>= 1.0, < 4)
+ rack-cache (1.17.0)
rack (>= 0.4)
- rack-cors (1.0.6)
- rack (>= 1.6.0)
+ rack-contrib (2.5.0)
+ rack (< 4)
+ rack-cors (2.0.2)
+ rack (>= 2.0.0)
rack-mini-profiler (3.3.1)
rack (>= 1.2.0)
- rack-protection (1.5.5)
- rack
- rack-test (2.1.0)
+ rack-session (2.1.0)
+ base64 (>= 0.1.0)
+ rack (>= 3.0.0)
+ rack-test (2.2.0)
rack (>= 1.3)
rack-timeout (0.7.0)
+ rackup (2.2.1)
+ rack (>= 3)
rainbow (3.1.1)
raindrops (0.20.1)
- rake (10.5.0)
- rdf (3.2.11)
+ rake (13.2.1)
+ rdf (3.3.2)
+ bcp47_spec (~> 0.2)
+ bigdecimal (~> 3.1, >= 3.1.5)
link_header (~> 0.0, >= 0.0.8)
- rdf-raptor (3.2.0)
- ffi (~> 1.15)
- rdf (~> 3.2)
- rdf-rdfxml (3.2.2)
- builder (~> 3.2)
+ rdf-rdfxml (3.3.0)
+ builder (~> 3.2, >= 3.2.4)
htmlentities (~> 4.3)
- rdf (~> 3.2)
- rdf-xsd (~> 3.2)
- rdf-vocab (3.2.7)
- rdf (~> 3.2, >= 3.2.4)
- rdf-xsd (3.2.1)
- rdf (~> 3.2)
+ rdf (~> 3.3)
+ rdf-xsd (~> 3.3)
+ rdf-vocab (3.3.2)
+ rdf (~> 3.3)
+ rdf-xsd (3.3.0)
+ rdf (~> 3.3)
rexml (~> 3.2)
- redcarpet (3.6.0)
- redis (5.3.0)
+ redis (5.4.0)
redis-client (>= 0.22.0)
- redis-client (0.22.2)
+ redis-client (0.23.2)
connection_pool
redis-rack-cache (2.2.1)
rack-cache (>= 1.10, < 2)
redis-store (>= 1.6, < 2)
redis-store (1.11.0)
redis (>= 4, < 6)
- regexp_parser (2.9.2)
+ regexp_parser (2.10.0)
representable (3.2.0)
declarative (< 0.1.0)
trailblazer-option (>= 0.1.1, < 0.2.0)
@@ -339,28 +432,30 @@ GEM
mime-types (>= 1.16, < 4.0)
netrc (~> 0.8)
retriable (3.1.2)
- rexml (3.3.9)
+ rexml (3.4.1)
rsolr (2.6.0)
builder (>= 2.1.2)
faraday (>= 0.9, < 3, != 2.0.0)
- rubocop (1.68.0)
+ rubocop (1.72.2)
json (~> 2.3)
- language_server-protocol (>= 3.17.0)
+ language_server-protocol (~> 3.17.0.2)
+ lint_roller (~> 1.1.0)
parallel (~> 1.10)
parser (>= 3.3.0.2)
rainbow (>= 2.2.2, < 4.0)
- regexp_parser (>= 2.4, < 3.0)
- rubocop-ast (>= 1.32.2, < 2.0)
+ regexp_parser (>= 2.9.3, < 3.0)
+ rubocop-ast (>= 1.38.0, < 2.0)
ruby-progressbar (~> 1.7)
- unicode-display_width (>= 2.4.0, < 3.0)
- rubocop-ast (1.34.0)
+ unicode-display_width (>= 2.4.0, < 4.0)
+ rubocop-ast (1.38.0)
parser (>= 3.3.1.0)
ruby-progressbar (1.13.0)
ruby-xxHash (0.4.0.2)
ruby2_keywords (0.0.5)
- rubyzip (2.3.2)
- rufus-scheduler (2.0.24)
- tzinfo (>= 0.3.22)
+ rubyzip (2.4.1)
+ rufus-scheduler (3.9.2)
+ fugit (~> 1.1, >= 1.11.1)
+ securerandom (0.4.1)
signet (0.19.0)
addressable (~> 2.8)
faraday (>= 0.17.5, < 3.a)
@@ -375,54 +470,56 @@ GEM
simplecov (~> 0.19)
simplecov-html (0.13.1)
simplecov_json_formatter (0.1.4)
- sinatra (1.4.8)
- rack (~> 1.5)
- rack-protection (~> 1.4)
- tilt (>= 1.3, < 3)
- sinatra-advanced-routes (0.5.3)
- sinatra (~> 1.0)
- sinatra-contrib (1.4.7)
- backports (>= 2.0)
- multi_json
- rack-protection
- rack-test
- sinatra (~> 1.4.0)
- tilt (>= 1.3, < 3)
- sshkit (1.23.2)
+ sinatra (4.1.1)
+ logger (>= 1.6.0)
+ mustermann (~> 3.0)
+ rack (>= 3.0.0, < 4)
+ rack-protection (= 4.1.1)
+ rack-session (>= 2.0.0, < 3)
+ tilt (~> 2.0)
+ sshkit (1.24.0)
base64
+ logger
net-scp (>= 1.1.2)
net-sftp (>= 2.1.2)
net-ssh (>= 2.8.0)
ostruct
systemu (2.6.5)
temple (0.10.3)
- thread_safe (0.3.6)
- tilt (2.4.0)
- timeout (0.4.1)
+ tilt (2.6.0)
+ time (0.4.1)
+ date
+ timeout (0.4.3)
trailblazer-option (0.1.2)
- tzinfo (1.2.11)
- thread_safe (~> 0.1)
+ tzinfo (2.0.6)
+ concurrent-ruby (~> 1.0)
uber (0.1.0)
- unicode-display_width (2.6.0)
+ unicode-display_width (3.1.4)
+ unicode-emoji (~> 4.0, >= 4.0.4)
+ unicode-emoji (4.0.4)
unicorn (6.1.0)
kgio (~> 2.6)
raindrops (~> 0.7)
unicorn-worker-killer (0.4.5)
get_process_mem (~> 0)
unicorn (>= 4, < 7)
+ uri (1.0.2)
uuid (2.3.9)
macaddr (~> 1.0)
- webmock (3.19.1)
+ webmock (3.25.0)
addressable (>= 2.8.0)
crack (>= 0.3.2)
hashdiff (>= 0.4.0, < 2.0.0)
- webrick (1.9.0)
+ webrick (1.9.1)
PLATFORMS
+ arm64-darwin
+ ruby
+ x86_64-darwin
x86_64-linux
DEPENDENCIES
- activesupport (~> 5)
+ activesupport
bcrypt_pbkdf (>= 1.0, < 2.0)
bigdecimal
capistrano (~> 3)
@@ -430,54 +527,58 @@ DEPENDENCIES
capistrano-locally
capistrano-rbenv
crack (= 0.4.5)
- cube-ruby
ed25519 (>= 1.2, < 2.0)
- ffi (~> 1.16.3)
+ ffi (~> 1.15.0)
goo!
- google-protobuf (= 3.25.3)
+ google-protobuf
haml (~> 5.2.2)
- json-ld
- json-schema (~> 2.0)
- minitest (~> 5.0)
- minitest-hooks (~> 1.5)
+ json-ld (~> 3.2.0)
+ json-schema
+ minitest
+ minitest-fail-fast
+ minitest-hooks
+ minitest-reporters
minitest-stub_any_instance
multi_json
ncbo_annotator!
ncbo_cron!
ncbo_ontology_recommender!
+ net-ftp
newrelic_rpm
oj
ontologies_linked_data!
+ pandoc-ruby
parallel
parseconfig
rack
- rack-accept (~> 0.4)
- rack-attack (~> 6.6.1)
- rack-cache (~> 1.13.0)
+ rack-accept
+ rack-attack
+ rack-cache
+ rack-contrib
rack-cors
rack-mini-profiler
rack-post-body-to-params!
rack-test
rack-timeout
- rake (~> 10.0)
- redcarpet
+ rackup
+ rake
+ rdf-raptor!
redis
- redis-rack-cache (~> 2.0)
- redis-store (~> 1.10)
+ redis-rack-cache
+ redis-store
request_store
rexml
rubocop
shotgun!
simplecov
simplecov-cobertura
- sinatra (~> 1.0)
- sinatra-advanced-routes
- sinatra-contrib (~> 1.0)
+ sinatra
+ sinatra-contrib!
sparql-client!
unicorn
unicorn-worker-killer
- webmock (~> 3.19.1)
+ webmock
webrick
BUNDLED WITH
- 2.4.22
+ 2.6.3
diff --git a/app.rb b/app.rb
index e09178bd0..be90bd342 100644
--- a/app.rb
+++ b/app.rb
@@ -1,10 +1,11 @@
+$VERBOSE = false
+
# sinatra-base
require 'sinatra'
# sinatra-contrib
require 'sinatra/respond_with'
require 'sinatra/namespace'
-require 'sinatra/advanced_routes'
require 'sinatra/multi_route'
# Other gem dependencies
@@ -24,54 +25,57 @@
require 'rack-timeout'
require 'rack/cors'
require_relative 'lib/rack/slow_requests'
-require_relative 'lib/rack/cube_reporter'
require_relative 'lib/rack/param_translator'
require_relative 'lib/rack/slice_detection'
require_relative 'lib/rack/request_lang'
# Logging setup
-require_relative "config/logging"
+require_relative 'config/logging'
# Inflector setup
-require_relative "config/inflections"
+require_relative 'config/inflections'
require 'request_store'
# Protection settings
-set :protection, :except => :path_traversal
+set :protection, except: :path_traversal
# Allow HTTP method overrides
set :method_override, true
# Setup root and static public directory
set :root, File.dirname(__FILE__)
+
+require 'rack/contrib'
use Rack::Static,
- :urls => ["/static"],
- :root => "public"
+ urls: ['/static'],
+ root: 'public'
+set :public_folder, File.expand_path('public', __dir__)
+set :static, true
# Setup the environment
environment = settings.environment.nil? ? :development : settings.environment
-require_relative "config/config"
+require_relative 'config/config'
if ENV['OVERRIDE_CONFIG'] == 'true'
LinkedData.config do |config|
- config.goo_backend_name = ENV['GOO_BACKEND_NAME']
- config.goo_host = ENV['GOO_HOST']
- config.goo_port = ENV['GOO_PORT'].to_i
- config.goo_path_query = ENV['GOO_PATH_QUERY']
- config.goo_path_data = ENV['GOO_PATH_DATA']
- config.goo_path_update = ENV['GOO_PATH_UPDATE']
- config.goo_redis_host = ENV['REDIS_HOST']
- config.goo_redis_port = ENV['REDIS_PORT']
- config.http_redis_host = ENV['REDIS_HOST']
- config.http_redis_port = ENV['REDIS_PORT']
+ config.goo_backend_name = ENV['GOO_BACKEND_NAME']
+ config.goo_host = ENV['GOO_HOST']
+ config.goo_port = ENV['GOO_PORT'].to_i
+ config.goo_path_query = ENV['GOO_PATH_QUERY']
+ config.goo_path_data = ENV['GOO_PATH_DATA']
+ config.goo_path_update = ENV['GOO_PATH_UPDATE']
+ config.goo_redis_host = ENV['REDIS_HOST']
+ config.goo_redis_port = ENV['REDIS_PORT']
+ config.http_redis_host = ENV['REDIS_HOST']
+ config.http_redis_port = ENV['REDIS_PORT']
end
Annotator.config do |config|
config.annotator_redis_host = ENV['ANNOTATOR_REDIS_HOST']
config.annotator_redis_port = ENV['ANNOTATOR_REDIS_PORT']
- config.mgrep_host = ENV['MGREP_HOST']
- config.mgrep_port = ENV['MGREP_PORT']
+ config.mgrep_host = ENV['MGREP_HOST']
+ config.mgrep_port = ENV['MGREP_PORT']
end
end
@@ -86,59 +90,18 @@
set :show_exceptions, false
end
-# mini-profiler sets the etag header to nil, so don't use when caching is enabled
-if [:development].include?(settings.environment) && !LinkedData.settings.enable_http_cache && LinkedData::OntologiesAPI.settings.enable_miniprofiler
- begin
- require 'rack-mini-profiler'
- Rack::MiniProfiler.config.storage = Rack::MiniProfiler::FileStore
- Rack::MiniProfiler.config.position = 'right'
- c = ::Rack::MiniProfiler.config
- c.pre_authorize_cb = lambda { |env|
- true
- }
- tmp = File.expand_path("../tmp/miniprofiler", __FILE__)
- FileUtils.mkdir_p(tmp) unless File.exists?(tmp)
- c.storage_options = {path: tmp}
- use Rack::MiniProfiler
- puts ">> rack-mini-profiler is enabled"
- rescue LoadError
- # profiler isn't there
- end
-end
-
use Rack::Cors do
allow do
origins '*'
- resource '*', :headers => :any, :methods => [:get, :post, :put, :patch, :delete, :options]
+ resource '*', headers: :any, methods: [:get, :post, :put, :patch, :delete, :options]
end
end
-# Use middleware (ORDER IS IMPORTANT)
-use Rack::Cors do
- allow do
- origins '*'
- resource '*', :headers => :any, :methods => [:get, :post, :put, :patch, :delete, :options]
- end
-end
-
-if Goo.queries_debug?
- use Goo::Debug
-end
-
-# Monitoring middleware
-if LinkedData::OntologiesAPI.settings.enable_monitoring
- cube_settings = {
- cube_host: LinkedData::OntologiesAPI.settings.cube_host,
- cube_port: LinkedData::OntologiesAPI.settings.cube_port
- }
- use Rack::CubeReporter, cube_settings
- use Rack::SlowRequests, log_path: LinkedData::OntologiesAPI.settings.slow_request_log
-end
-
# Show exceptions after timeout
if LinkedData::OntologiesAPI.settings.enable_req_timeout
use Rack::Timeout; Rack::Timeout.timeout = LinkedData::OntologiesAPI.settings.req_timeout # seconds, shorter than unicorn timeout
end
+
use Rack::SliceDetection
use Rack::Accept
use Rack::PostBodyToParams
@@ -160,10 +123,10 @@
redis_host_port = "#{LinkedData::OntologiesAPI.settings.http_redis_host}:#{LinkedData::OntologiesAPI.settings.http_redis_port}"
verbose = environment == :development
use Rack::Cache,
- verbose: verbose,
- allow_reload: true,
- metastore: "redis://#{redis_host_port}/0/metastore",
- entitystore: "redis://#{redis_host_port}/0/entitystore"
+ verbose: verbose,
+ allow_reload: true,
+ metastore: "redis://#{redis_host_port}/0/metastore",
+ entitystore: "redis://#{redis_host_port}/0/entitystore"
end
# Initialize unicorn Worker killer to mitigate unicorn worker memory bloat
@@ -181,7 +144,12 @@
# Enter console mode
if settings.environment == :console
require 'rack/test'
- include Rack::Test::Methods; def app() Sinatra::Application end
- Pry.start binding, :quiet => true
+ include Rack::Test::Methods;
+
+ def app()
+ Sinatra::Application
+ end
+
+ Pry.start binding, quiet: true
exit
end
diff --git a/bin/ontoportal b/bin/ontoportal
index 66f1a6540..9bf8cf5ba 100755
--- a/bin/ontoportal
+++ b/bin/ontoportal
@@ -3,10 +3,11 @@
# Function to display script usage information
show_help() {
cat << EOL
-Usage: $0 {dev|test|run|help} [--reset-cache] [--api-url API_URL] [--api-key API_KEY] [--old-path OLD_PATH] [--goo-path GOO_PATH] [--sparql-client-path SPARQL_CLIENT_PATH]
+Usage: $0 {dev|test|run|help} [--reset-cache] [--api-url API_URL] [--api-key API_KEY] [--old-path OLD_PATH] [--goo-path GOO_PATH] [--sparql-client-path SPARQL_CLIENT_PATH] [--with-provision]
dev : Start the Ontoportal API development server.
Example: $0 dev --api-url http://localhost:9393
Use --reset-cache to remove volumes: $0 dev --reset-cache
+       Use --with-provision to import and parse the starter ontology
test : Run tests. Specify either a test file or use 'all'.
Example: $0 test test/controllers/test_users_controller.rb -v --name=name_of_the_test
Example (run all tests): $0 test all -v
@@ -20,12 +21,13 @@ Description:
Options:
--reset-cache : Remove Docker volumes (used with 'dev').
+  --with-provision        : Import and parse the starter ontology.
--api-url API_URL : Specify the API URL.
--api-key API_KEY : Specify the API key.
--old-path OLD_PATH : Specify the path for ontologies_linked_data.
--goo-path GOO_PATH : Specify the path for goo.
--sparql-client-path : Specify the path for sparql-client.
- test_file | all : Specify either a test file or all the tests will be run.
+  test_file | all         : Specify a test file, or 'all' to run all the tests.
-v : Enable verbosity.
--name=name_of_the_test : Specify the name of the test.
@@ -101,6 +103,32 @@ build_docker_run_cmd() {
}
+provision() {
+ echo "[+] Running Cron provisioning"
+ source .env
+
+ echo "[+] Cleaning volumes"
+ docker compose -f docker-compose.yml --profile 4store down --volumes >/dev/null 2>&1
+ docker compose -p ontoportal_docker down --volumes >/dev/null 2>&1
+
+ commands=(
+ "bundle exec rake user:create[admin,admin@nodomain.org,password]"
+ "bundle exec rake user:adminify[admin]"
+ "bundle exec bin/ncbo_ontology_import --admin-user admin --ontologies $STARTER_ONTOLOGY --from-apikey $OP_API_KEY --from $OP_API_URL"
+ "bundle exec bin/ncbo_ontology_process -o ${STARTER_ONTOLOGY}"
+ )
+ for cmd in "${commands[@]}"; do
+ echo "[+] Run: $cmd"
+ docker_cron_cmd="docker compose -f docker-compose.yml -p ontoportal_docker run --remove-orphans --rm --name cron-service --service-ports ncbo_cron bash -c \"$cmd\" >/dev/null 2>&1"
+ if ! eval "$docker_cron_cmd"; then
+      echo "Error: Failed to run provisioning: $cmd"
+ exit 1
+ fi
+ done
+
+  echo "Cron provisioning completed successfully!"
+}
+
# Function to handle the "dev" and "test" options
run_command() {
local custom_command="$1"
@@ -110,6 +138,7 @@ run_command() {
local old_path=""
local goo_path=""
local sparql_client_path=""
+ local with_provision=""
shift
# Check for command line arguments
@@ -135,6 +164,10 @@ run_command() {
sparql_client_path="$2"
shift 2
;;
+ --with-provision)
+ with_provision="$1"
+ shift 1
+ ;;
*)
echo "Unknown option: $1"
show_help
@@ -147,6 +180,7 @@ run_command() {
if [ "$reset_cache" = true ]; then
echo "Resetting cache. Running: docker compose down --volumes"
docker compose down --volumes
+ docker compose -p ontoportal_docker down --volumes
fi
# Check if arguments are provided
@@ -168,6 +202,12 @@ run_command() {
fi
+ # run provision
+ if [ "$with_provision" == "--with-provision" ]; then
+ provision
+ else
+ echo "[+] Skipping Cron provisioning"
+ fi
# Build the Docker run command
echo "Run: $custom_command"
diff --git a/config/environments/config.rb.sample b/config/environments/config.rb.sample
index 4e7900b7e..0eabcee81 100644
--- a/config/environments/config.rb.sample
+++ b/config/environments/config.rb.sample
@@ -52,6 +52,10 @@ LinkedData.config do |config|
config.repository_folder = REPOSITORY_FOLDER.to_s
# config.enable_notifications = false
+ # SPARQL logging
+ config.log_file = './sparql.log'
+ config.logging = false
+
config.interportal_hash = {
"agroportal" => {
"api" => "http://data.agroportal.lirmm.fr",
@@ -138,4 +142,4 @@ NcboCron.config do |config|
config.redis_host = REDIS_PERSISTENT_HOST.to_s
config.redis_port = REDIS_PORT.to_i
config.ontology_report_path = REPORT_PATH
-end
\ No newline at end of file
+end
diff --git a/config/environments/test.rb b/config/environments/test.rb
index 2cef531ad..ac9c104ea 100644
--- a/config/environments/test.rb
+++ b/config/environments/test.rb
@@ -102,5 +102,6 @@
NcboCron.config do |config|
config.redis_host = REDIS_PERSISTENT_HOST.to_s
config.redis_port = REDIS_PORT.to_i
+ config.graphs_counts_report_path = './test/ontologies_report.json'
# config.ontology_report_path = REPORT_PATH
end
diff --git a/config/logging.rb b/config/logging.rb
index e37ba4aa3..66c323a57 100644
--- a/config/logging.rb
+++ b/config/logging.rb
@@ -1,23 +1,9 @@
require 'logger'
-class CustomLogger < Logger
- alias write <<
- def flush
- ((self.instance_variable_get :@logdev).instance_variable_get :@dev).flush
- end
-end
-
-# Setup global logging
-require 'rack/logger'
-# if [:development, :console, :test].include?(settings.environment)
-if [:development, :console].include?(settings.environment)
- LOGGER = CustomLogger.new(STDOUT)
- LOGGER.level = Logger::DEBUG
-else
- Dir.mkdir('log') unless File.exist?('log')
- log = File.new("log/#{settings.environment}.log", "a+")
- log.sync = true
- LOGGER = CustomLogger.new(log)
- LOGGER.level = Logger::INFO
- use Rack::CommonLogger, log
+configure do
+ log_file = File.new("log/#{settings.environment}.log", 'a+')
+ log_file.sync = true
+ LOGGER = Logger.new(log_file)
+ LOGGER.level = settings.development? ? Logger::DEBUG : Logger::INFO
+ set :logger, LOGGER
end
diff --git a/controllers/admin_graphs_controller.rb b/controllers/admin_graphs_controller.rb
new file mode 100644
index 000000000..89f172630
--- /dev/null
+++ b/controllers/admin_graphs_controller.rb
@@ -0,0 +1,34 @@
+require 'ncbo_cron/graphs_counts'
+class AdminGraphsController < ApplicationController
+
+ namespace '/admin' do
+ GRAPH_COUNT_REPORT_PATH = NcboCron.settings.graphs_counts_report_path
+ before do
+ if LinkedData.settings.enable_security && (!env['REMOTE_USER'] || !env['REMOTE_USER'].admin?)
+ error 403, 'Access denied'
+ end
+ end
+
+ get '/graphs' do
+ output = NcboCron::GraphsCounts.new(nil, GRAPH_COUNT_REPORT_PATH).read_graph_counts
+ reply output
+ end
+
+ post '/graphs' do
+ generate_graphs_counts
+ reply({ message: 'Graph counts generated', status: 200 })
+ end
+
+ delete '/graphs' do
+ url = params['url']
+ error 400, 'You must provide a valid URL for the graph to delete' if url.blank?
+ Goo.sparql_data_client.delete_graph(url)
+ generate_graphs_counts
+ reply({ message: "Graph #{url} deleted", status: 200 })
+ end
+
+ def generate_graphs_counts
+ NcboCron::GraphsCounts.new(nil, GRAPH_COUNT_REPORT_PATH).run
+ end
+ end
+end
diff --git a/controllers/artefacts.rb b/controllers/artefacts.rb
new file mode 100644
index 000000000..348f43340
--- /dev/null
+++ b/controllers/artefacts.rb
@@ -0,0 +1,62 @@
+class ArtefactsController < ApplicationController
+
+ namespace "/artefacts" do
+ # Get all Semantic Artefacts
+ get do
+ check_last_modified_collection(LinkedData::Models::SemanticArtefact)
+ attributes, page, pagesize, _, _ = settings_params(LinkedData::Models::SemanticArtefact)
+ pagesize = 20 if params["pagesize"].nil?
+ artefacts = LinkedData::Models::SemanticArtefact.all_artefacts(attributes, page, pagesize)
+ reply artefacts
+ end
+
+ # Get one semantic artefact by ID
+ get "/:artefactID" do
+ artefact = LinkedData::Models::SemanticArtefact.find(params["artefactID"])
+ error 404, "You must provide a valid `artefactID` to retrieve an artefact" if artefact.nil?
+ check_last_modified(artefact)
+ artefact.bring(*LinkedData::Models::SemanticArtefact.goo_attrs_to_load(includes_param))
+ reply artefact
+ end
+
+ # Display latest distribution
+ get "/:artefactID/distributions/latest" do
+ artefact = LinkedData::Models::SemanticArtefact.find(params["artefactID"])
+ error 404, "You must provide a valid artefactID to retrieve an artefact" if artefact.nil?
+ include_status = params["include_status"] && !params["include_status"].empty? ? params["include_status"].to_sym : :any
+ latest_distribution = artefact.latest_distribution(status: include_status)
+
+ if latest_distribution
+ check_last_modified(latest_distribution)
+ latest_distribution.bring(*LinkedData::Models::SemanticArtefactDistribution.goo_attrs_to_load(includes_param))
+ end
+ reply latest_distribution
+ end
+
+ # Display a distribution
+ get '/:artefactID/distributions/:distributionID' do
+ artefact = LinkedData::Models::SemanticArtefact.find(params["artefactID"])
+ error 422, "Semantic Artefact #{params["artefactID"]} does not exist" unless artefact
+ check_last_modified_segment(LinkedData::Models::SemanticArtefactDistribution, [params["artefactID"]])
+ artefact_distribution = artefact.distribution(params["distributionID"])
+ error 404, "Distribution with #{params['distributionID']} not found" if artefact_distribution.nil?
+ artefact_distribution.bring(*LinkedData::Models::SemanticArtefactDistribution.goo_attrs_to_load(includes_param))
+ reply artefact_distribution
+ end
+
+ # Display all distributions of an artefact
+ get '/:artefactID/distributions' do
+ artefact = LinkedData::Models::SemanticArtefact.find(params["artefactID"])
+ error 404, "Semantic Artefact #{params["artefactID"]} does not exist" unless artefact
+ check_last_modified_segment(LinkedData::Models::SemanticArtefactDistribution, [params["artefactID"]])
+ options = {
+ status: (params["include_status"] || "ANY"),
+ includes: LinkedData::Models::SemanticArtefactDistribution.goo_attrs_to_load([])
+ }
+ distros = artefact.all_distributions(options)
+ reply distros.sort {|a,b| b.distributionId.to_i <=> a.distributionId.to_i }
+ end
+
+ end
+
+end
\ No newline at end of file
diff --git a/controllers/batch_controller.rb b/controllers/batch_controller.rb
index 2ee9b88ca..33276cc8d 100644
--- a/controllers/batch_controller.rb
+++ b/controllers/batch_controller.rb
@@ -15,7 +15,7 @@ class BatchController < ApplicationController
goo_include = LinkedData::Models::Class.goo_attrs_to_load(incl)
class_id_by_ontology = {}
collection.each do |class_input|
- unless class_input.instance_of?(Hash)
+ unless class_input.is_a?(Hash)
error 422, "The collection param needs to be { 'class' : CLS_ID, 'ontology' : ont_id }"
end
unless class_input.include?("ontology") and class_input.include?("class")
diff --git a/controllers/categories_controller.rb b/controllers/categories_controller.rb
index 518c8e0f1..1306a7c3e 100644
--- a/controllers/categories_controller.rb
+++ b/controllers/categories_controller.rb
@@ -13,7 +13,8 @@ class CategoriesController < ApplicationController
# Display all categories
get do
check_last_modified_collection(LinkedData::Models::Category)
- categories = Category.where.include(Category.goo_attrs_to_load(includes_param)).to_a
+ categories = Category.where.include(*Category.goo_attrs_to_load(includes_param), ontologies: [:viewingRestriction]).to_a
+ categories = reject_private_ontologies(categories) unless current_user.admin?
reply categories
end
@@ -21,8 +22,9 @@ class CategoriesController < ApplicationController
get '/:acronym' do
check_last_modified_collection(LinkedData::Models::Category)
acronym = params["acronym"]
- category = Category.find(acronym).include(Category.goo_attrs_to_load(includes_param)).first
+ category = Category.find(acronym).include(*Category.goo_attrs_to_load(includes_param), ontologies: [:viewingRestriction]).first
error 404, "Category #{acronym} not found" if category.nil?
+ category = reject_private_ontologies([category]).first unless current_user.admin?
reply 200, category
end
@@ -82,5 +84,7 @@ def create_category
end
reply 201, category
end
+
+
end
end
\ No newline at end of file
diff --git a/controllers/groups_controller.rb b/controllers/groups_controller.rb
index 3e670fc39..e33b8b68a 100644
--- a/controllers/groups_controller.rb
+++ b/controllers/groups_controller.rb
@@ -13,7 +13,8 @@ class GroupsController < ApplicationController
# Display all groups
get do
check_last_modified_collection(LinkedData::Models::Group)
- groups = Group.where.include(Group.goo_attrs_to_load(includes_param)).to_a
+ groups = Group.where.include(*Group.goo_attrs_to_load(includes_param), ontologies: [:viewingRestriction]).to_a
+ groups = reject_private_ontologies(groups) unless current_user.admin?
reply groups
end
@@ -21,8 +22,9 @@ class GroupsController < ApplicationController
get '/:acronym' do
check_last_modified_collection(LinkedData::Models::Group)
acronym = params["acronym"]
- g = Group.find(acronym).include(Group.goo_attrs_to_load(includes_param)).first
+ g = Group.find(acronym).include(*Group.goo_attrs_to_load(includes_param), ontologies: [:viewingRestriction]).first
error 404, "Group #{acronym} not found" if g.nil?
+ g = reject_private_ontologies([g]).first unless current_user.admin?
reply 200, g
end
@@ -81,5 +83,7 @@ def create_group
end
reply 201, group
end
+
+
end
end
\ No newline at end of file
diff --git a/controllers/home_controller.rb b/controllers/home_controller.rb
index 29aa851c7..7f3280795 100644
--- a/controllers/home_controller.rb
+++ b/controllers/home_controller.rb
@@ -1,244 +1,89 @@
require 'haml'
-require 'redcarpet'
class HomeController < ApplicationController
CLASS_MAP = {
- Property: "LinkedData::Models::ObjectProperty"
+ Property: 'LinkedData::Models::ObjectProperty'
}
- namespace "/" do
+ namespace '/' do
get do
expires 3600, :public
last_modified @@root_last_modified ||= Time.now.httpdate
routes = routes_list
- #TODO: delete when ccv will be on production
- routes.delete("/ccv")
- if LinkedData.settings.enable_resource_index == false
- routes.delete("/resource_index")
- end
+ # TODO: delete when ccv is in production
+ routes.delete('/ccv')
+
+ routes.delete('/resource_index') if LinkedData.settings.enable_resource_index == false
routes.delete('/Agents')
routes_hash = {}
context = {}
+
routes.each do |route|
- next if route.length < 3 || route.split("/").length > 2
- route_no_slash = route.gsub("/", "")
- context[route_no_slash] = route_to_class_map[route].type_uri.to_s if route_to_class_map[route] && route_to_class_map[route].respond_to?(:type_uri)
+ next unless routes_by_class.key?(route)
+
+ route_no_slash = route.gsub('/', '')
+ context[route_no_slash] = routes_by_class[route].type_uri.to_s if routes_by_class[route].respond_to?(:type_uri)
routes_hash[route_no_slash] = LinkedData.settings.rest_url_prefix + route_no_slash
end
- config = LinkedData::Models::PortalConfig.current_portal_config
-
- federated_portals = config.federated_portals
- federated_portals. transform_values! { |v| v.delete(:apikey) ; v }
- config.init_federated_portals_settings(federated_portals)
- config.id = RDF::URI.new(LinkedData.settings.id_url_prefix)
- config.class.link_to *routes_hash.map { |key, url| LinkedData::Hypermedia::Link.new(key, url, context[key]) }
-
- reply config
- end
-
- get "documentation" do
- @metadata_all = metadata_all.sort { |a, b| a[0].name <=> b[0].name }
- haml "documentation/documentation".to_sym, :layout => "documentation/layout".to_sym
- end
-
- get "metadata/:class" do
- @metadata = metadata(params["class"])
- haml "documentation/metadata".to_sym, :layout => "documentation/layout".to_sym
- end
-
- def resource_collection_link(cls)
- resource = @metadata[:cls].name.split("::").last
- return "" if resource.nil?
-
- resource_path = "/" + resource.underscore.pluralize
-
- case
- when resource == "Class"
- "Example: "\
- ""\
- "/ontologies/SNOMEDCT/classes/http%3A%2F%2Fpurl.bioontology.org%2Fontology%2FSNOMEDCT%2F410607006"
- when resource == "Instance"
- "Example: "\
- ""\
- "/ontologies/CTX/classes/http%3A%2F%2Fwww.owl-ontologies.com%2FOntologyXCT.owl%23Eyelid/instances"
- when resource == "Mapping"
- "Example: "\
- ""\
- "/ontologies/SNOMEDCT/classes/http%3A%2F%2Fpurl.bioontology.org%2Fontology%2FSNOMEDCT%2F410607006/mappings"
- when resource == "Note"
- "Example: /ontologies/NCIT/notes"
- when resource == "OntologySubmission"
- "Example: "\
- ""\
- "/ontologies/NCIT/submissions?display=submissionId,version"
- when (routes_list().include? resource_path) == false
- "Example: coming soon"
- else
- "Resource collection: #{resource_path}"
+ catalog_class = LinkedData::Models::SemanticArtefactCatalog
+ catalog = catalog_class.all.first || create_catalog
+ attributes_to_include = includes_param[0] == :all ? catalog_class.attributes(:all) : catalog_class.goo_attrs_to_load(includes_param)
+ catalog.bring(*attributes_to_include)
+ if catalog.loaded_attributes.include?(:federated_portals)
+ catalog.federated_portals = catalog.federated_portals.map { |item| JSON.parse(item.gsub('=>', ':').gsub('\"', '"')) }
+ catalog.federated_portals.each { |item| item.delete('apikey') }
end
- end
-
- def metadata(cls)
- unless cls.is_a?(Class)
- cls = cls.singularize
- cls = LinkedData::Models.const_get(cls)
+ if catalog.loaded_attributes.include?(:fundedBy)
+ catalog.fundedBy = catalog.fundedBy.map { |item| JSON.parse(item.gsub('=>', ':').gsub('\"', '"')) }
end
- metadata_all[cls]
+ catalog.class.link_to *routes_hash.map { |key, url| LinkedData::Hypermedia::Link.new(key, url, context[key]) }
+
+ reply catalog
end
- def sample_objects
- ontology = LinkedData::Models::Ontology.read_only(id: LinkedData.settings.rest_url_prefix+"/ontologies/BRO", acronym: "BRO")
- submission = LinkedData::Models::OntologySubmission.read_only(id: LinkedData.settings.rest_url_prefix+"/ontologies/BRO/submissions/1", ontology: ontology)
- cls = LinkedData::Models::Class.read_only(id: "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Ontology_Development_and_Management", submission: submission)
- return {
- LinkedData::Models::Ontology.type_uri => ontology,
- LinkedData::Models::Class.type_uri => cls
- }
- end
-
- def metadata_all
- return @metadata_all_info if @metadata_all_info
- ld_classes = ObjectSpace.each_object(Class).select { |klass| klass < LinkedData::Hypermedia::Resource }
- info = {}
- ld_classes.each do |cls|
- next if routes_by_class[cls].nil? || routes_by_class[cls].empty?
- if cls.respond_to?(:attributes)
- attributes = (cls.attributes(:all) + cls.hypermedia_settings[:serialize_methods]).uniq
- else
- attributes = cls.instance_methods(false)
- end
- attributes_info = {}
- attributes.each do |attribute|
- next if cls.hypermedia_settings[:serialize_never].include?(attribute)
-
- if cls.ancestors.include?(LinkedData::Models::Base)
- model_cls = cls.range(attribute)
- if model_cls
- type = model_cls.type_uri if model_cls.respond_to?("type_uri")
- end
-
- shows_default = cls.hypermedia_settings[:serialize_default].empty? ? true : cls.hypermedia_settings[:serialize_default].include?(attribute)
-
- schema = cls.attribute_settings(attribute) rescue nil
- schema ||= {}
- attributes_info[attribute] = {
- type: type || "",
- shows_default: shows_default || " ",
- unique: cls.unique?(attribute) || " ",
- required: cls.required?(attribute) || " ",
- list: cls.list?(attribute) || " ",
- cardinality: (cls.cardinality(attribute) rescue nil) || " "
- }
- else
- attributes_info[attribute] = {
- type: "",
- shows_default: " ",
- unique: " ",
- required: " ",
- list: " ",
- cardinality: " "
- }
- end
- end
-
- cls_info = {
- attributes: attributes_info,
- uri: cls.type_uri,
- cls: cls
- }
-
- info[cls] = cls_info
- end
-
- # Sort by 'shown by default'
- info.each do |cls, cls_props|
- shown = {}
- not_shown = {}
- cls_props[:attributes].each {|attr,values| values[:shows_default] ? shown[attr] = values : not_shown[attr] = values}
- cls_props[:attributes] = shown.merge(not_shown)
+ patch do
+ catalog = LinkedData::Models::SemanticArtefactCatalog.where.first
+ error 422, "There are no catalog configs in the triple store" if catalog.nil?
+ populate_from_params(catalog, params)
+ if catalog.valid?
+ catalog.save
+ status 200
+ else
+ error 422, catalog.errors
end
-
- @metadata_all_info = info
- info
end
- def hypermedia_links(cls)
- cls.hypermedia_settings[:link_to]
+ get "doc/api" do
+ redirect "/documentation", 301
end
- def routes_by_class
- return @routes_by_class if @routes_by_class
- all_routes = Sinatra::Application.routes
- routes_by_file = {}
- all_routes.each do |method, routes|
- routes.each do |route|
- routes_by_file[route.file] ||= []
- routes_by_file[route.file] << route
- end
- end
- routes_by_class = {}
- routes_by_file.each do |file, routes|
- cls_name = file.split("/").last.gsub(".rb", "").classify.gsub("Controller", "").singularize
- cls = LinkedData::Models.const_get(cls_name) rescue nil
-
- # Check sub-modules for classes (IE LinkedData::Models::Notes for LinkedData::Models::Notes::Reply)
- if cls.nil?
- LinkedData::Models.constants.each do |const|
- sub_cls = LinkedData::Models.const_get(const).const_get(cls_name) rescue nil
- cls = sub_cls unless sub_cls.nil?
- end
- end
-
- # Check the map of NON-ONE-TO-ONE mappings
- if cls.nil?
- if CLASS_MAP.include?(cls_name.to_sym)
- cls = CLASS_MAP[cls_name.to_sym].constantize
- end
- end
-
- next if cls.nil?
-
- routes.each do |route|
- next if route.verb == "HEAD"
- routes_by_class[cls] ||= []
- routes_by_class[cls] << [route.verb, route.path]
- end
- end
- @routes_by_class = routes_by_class
- routes_by_class
+ get "documentation" do
+ @metadata_all = get_metadata_all.sort { |a, b| a[0].name <=> b[0].name }
+ haml "documentation/documentation".to_sym, :layout => "documentation/layout".to_sym
end
- def route_to_class_map
- return @route_to_class_map if @route_to_class_map
- map = {}
- routes_by_class.each do |cls, routes|
- routes.each do |route|
- map[route[1]] = cls
- end
- end
- @route_to_class_map = map
- map
- end
+ private
- def routes_list
- return @navigable_routes if @navigable_routes
- routes = Sinatra::Application.routes["GET"]
- navigable_routes = []
- Sinatra::Application.each_route do |route|
- if route.verb.eql?("GET")
- navigable_routes << route.path.split("?").first
- end
+ def create_catalog
+ catalog = nil
+ catalogs = LinkedData::Models::SemanticArtefactCatalog.all
+ if catalogs.nil? || catalogs.empty?
+ catalog = instance_from_params(LinkedData::Models::SemanticArtefactCatalog, {"test_attr_to_persist" => "test_to_persist"})
+ if catalog.valid?
+ catalog.save
+ else
+ error 422, catalog.errors
+ end
end
- @navigable_routes = navigable_routes
- navigable_routes
- end
+ catalog
+ end
+
end
end
-
diff --git a/controllers/logging_controller.rb b/controllers/logging_controller.rb
new file mode 100644
index 000000000..09ee23473
--- /dev/null
+++ b/controllers/logging_controller.rb
@@ -0,0 +1,40 @@
+require 'multi_json'
+
+module Admin
+
+ class LoggingController < ApplicationController
+
+ namespace "/admin" do
+ before {
+ if LinkedData.settings.enable_security && (!env["REMOTE_USER"] || !env["REMOTE_USER"].admin?)
+ error 403, "Access denied"
+ end
+ }
+
+ get '/latest_day_query_logs' do
+ logs = Goo.logger.get_logs
+ reply 200, paginate_logs(logs)
+ end
+
+ get '/last_n_s_query_logs' do
+ sec = params[:seconds] || 10
+ logs = Goo.logger.queries_last_n_seconds(sec.to_i)
+ reply 200, paginate_logs(logs)
+ end
+
+ get '/user_query_count' do
+ counts = Goo.logger.users_query_count
+ reply 200, counts
+ end
+
+ def paginate_logs(logs)
+ page, size = page_params
+ start = (page - 1) * size
+ page_end = [start + size - 1, logs.size].min
+ page_logs = logs[start..page_end] || []
+ page_object(page_logs, logs.size)
+ end
+
+ end
+ end
+end
diff --git a/controllers/metrics_controller.rb b/controllers/metrics_controller.rb
index cfc476bf6..98512bd15 100644
--- a/controllers/metrics_controller.rb
+++ b/controllers/metrics_controller.rb
@@ -4,31 +4,10 @@ class MetricsController < ApplicationController
# Display all metrics
get do
check_last_modified_collection(LinkedData::Models::Metric)
- submissions = retrieve_latest_submissions(params)
- submissions = submissions.values
-
- metrics_include = LinkedData::Models::Metric.goo_attrs_to_load(includes_param)
- LinkedData::Models::OntologySubmission.where.models(submissions)
- .include(metrics: metrics_include).all
-
- #just a fallback or metrics that are not really built.
- to_remove = []
- submissions.each do |x|
- if x.metrics
- begin
- x.metrics.submission
- rescue
- LOGGER.error("submission with inconsistent metrics #{x.id.to_s}")
- to_remove << x
- end
- end
- end
- to_remove.each do |x|
- submissions.delete x
- end
- #end fallback
-
- reply submissions.select { |s| !s.metrics.nil? }.map { |s| s.metrics }
+ latest_metrics = LinkedData::Models::Metric.where.include(LinkedData::Models::Metric.goo_attrs_to_load(includes_param)).all
+ .group_by { |x| x.id.split('/')[-4] }
+ .transform_values { |metrics| metrics.max_by { |x| x.id.split('/')[-2].to_i } }
+ reply latest_metrics.values
end
#
@@ -84,33 +63,23 @@ class MetricsController < ApplicationController
# Display metrics for ontology
get "/ontologies/:ontology/metrics" do
check_last_modified_collection(LinkedData::Models::Metric)
- ont, sub = get_ontology_and_submission
+ ont = Ontology.find(params['ontology']).first
error 404, "Ontology #{params['ontology']} not found" unless ont
- sub.bring(ontology: [:acronym], metrics: LinkedData::Models::Metric.goo_attrs_to_load(includes_param))
- reply sub.metrics || {}
- # ont_str = ""
- # LinkedData::Models::Ontology.all.each do |ont|
- # begin
- # sub = ont.latest_submission(status: :rdf)
- # sub.bring(ontology: [:acronym], metrics: LinkedData::Models::Metric.goo_attrs_to_load(includes_param))
- # if !sub.metrics
- # ont_str << "#{ont.acronym},"
- # puts ont_str
- # end
- # rescue Exception => e
- # puts "#{ont.acronym}: #{e.message}"
- # end
- # end
- # puts ont_str
- # reply {}
+ ontology_metrics = LinkedData::Models::Metric
+ .where(submission: {ontology: [acronym: params['ontology']]})
+ .order_by(submission: {submissionId: :desc})
+ .include(LinkedData::Models::Metric.goo_attrs_to_load(includes_param)).first
+ reply ontology_metrics || {}
end
get "/ontologies/:ontology/submissions/:ontology_submission_id/metrics" do
check_last_modified_collection(LinkedData::Models::Metric)
- ont, sub = get_ontology_and_submission
+ ont = Ontology.find(params['ontology']).first
error 404, "Ontology #{params['ontology']} not found" unless ont
- sub.bring(ontology: [:acronym], metrics: LinkedData::Models::Metric.goo_attrs_to_load(includes_param))
- reply sub.metrics || {}
+ ontology_submission_metrics = LinkedData::Models::Metric
+ .where(submission: { submissionId: params['ontology_submission_id'].to_i, ontology: [acronym: params['ontology']] })
+ .include(LinkedData::Models::Metric.goo_attrs_to_load(includes_param)).first
+ reply ontology_submission_metrics || {}
end
diff --git a/controllers/notes_controller.rb b/controllers/notes_controller.rb
index d0ca83f88..cecca1f69 100644
--- a/controllers/notes_controller.rb
+++ b/controllers/notes_controller.rb
@@ -1,7 +1,7 @@
class NotesController < ApplicationController
##
# Ontology notes
- get "/ontologies/:ontology/notes?:include_threads?" do
+ get '/ontologies/:ontology/notes' do
ont = Ontology.find(params["ontology"]).include(:acronym).first
error 404, "You must provide a valid id to retrieve notes for an ontology" if ont.nil?
check_last_modified_segment(LinkedData::Models::Note, [ont.acronym])
@@ -13,7 +13,7 @@ class NotesController < ApplicationController
##
# Class notes
- get "/ontologies/:ontology/classes/:cls/notes?:include_threads?" do
+ get "/ontologies/:ontology/classes/:cls/notes" do
ont = Ontology.find(params["ontology"]).include(:submissions, :acronym).first
error 404, "You must provide a valid id to retrieve notes for an ontology" if ont.nil?
check_last_modified_segment(LinkedData::Models::Note, [ont.acronym])
@@ -27,7 +27,7 @@ class NotesController < ApplicationController
namespace "/notes" do
# Display all notes
- get "?:include_threads?" do
+ get '' do
check_last_modified_collection(LinkedData::Models::Note)
notes = LinkedData::Models::Note.where.include(LinkedData::Models::Note.goo_attrs_to_load(includes_param)).to_a
recurse_replies(notes) if params["include_threads"]
@@ -35,7 +35,7 @@ class NotesController < ApplicationController
end
# Display a single note
- get '/:noteid?:include_threads?' do
+ get '/:noteid' do
noteid = params["noteid"]
note = LinkedData::Models::Note.find(noteid).include(relatedOntology: [:acronym]).first
error 404, "Note #{noteid} not found" if note.nil?
@@ -121,4 +121,4 @@ def clean_notes_hash(hash)
hash
end
end
-end
\ No newline at end of file
+end
diff --git a/controllers/ontology_analytics_controller.rb b/controllers/ontology_analytics_controller.rb
index 8ecd77d55..81218b84e 100644
--- a/controllers/ontology_analytics_controller.rb
+++ b/controllers/ontology_analytics_controller.rb
@@ -4,14 +4,14 @@ class OntologyAnalyticsController < ApplicationController
##
# get all ontology analytics for a given year/month combination
- namespace "/analytics" do
+ namespace '/analytics' do
get do
expires 86400, :public
year = year_param(params)
- error 400, "The year you supplied is invalid. Valid years start with 2 and contain 4 digits." if params["year"] && !year
+ error 400, 'The year you supplied is invalid. Valid years start with 2 and contain 4 digits.' if params['year'] && !year
month = month_param(params)
- error 400, "The month you supplied is invalid. Valid months are 1-12." if params["month"] && !month
+ error 400, 'The month you supplied is invalid. Valid months are 1-12.' if params['month'] && !month
acronyms = restricted_ontologies_to_acronyms(params)
analytics = Ontology.analytics(year, month, acronyms)
@@ -22,32 +22,31 @@ class OntologyAnalyticsController < ApplicationController
##
# get all analytics for a given ontology
- namespace "/ontologies/:acronym/analytics" do
+ namespace '/ontologies/:acronym/analytics' do
get do
expires 86400, :public
- ont = Ontology.find(params["acronym"]).first
+ ont = Ontology.find(params['acronym']).first
error 404, "No ontology exists with the acronym: #{params["acronym"]}" if ont.nil?
analytics = ont.analytics
- if params["format"].to_s.downcase.eql?("csv")
+ if params['format'].to_s.downcase.eql?('csv')
tf = Tempfile.new("analytics-#{params['acronym']}")
csv = CSV.new(tf, headers: true, return_headers: true, write_headers: true)
csv << [:month, :visits]
- years = analytics[params["acronym"]].keys.sort
+ years = analytics[params['acronym']].keys.sort
now = Time.now
years.each do |year|
- months = analytics[params["acronym"]][year].keys.sort
+ months = analytics[params['acronym']][year].keys.sort
months.each do |month|
next if now.year == year && now.month <= month || (year == 2013 && month < 10) # we don't have good data going back past Oct 2013
- visits = analytics[params["acronym"]][year][month]
- month = DateTime.parse("#{year}/#{month}").strftime("%b %Y")
+ visits = analytics[params['acronym']][year][month]
+ month = DateTime.parse("#{year}/#{month}").strftime('%b %Y')
csv << [month, visits]
end
end
csv.close
- content_type "text/csv"
- send_file tf.path, filename: "analytics-#{params['acronym']}.csv"
+ send_file tf.path, filename: "analytics-#{params['acronym']}.csv", type: 'text/csv', status: 200
else
reply analytics
end
diff --git a/controllers/replies_controller.rb b/controllers/replies_controller.rb
index 081238a17..9ee0fbd61 100644
--- a/controllers/replies_controller.rb
+++ b/controllers/replies_controller.rb
@@ -12,7 +12,7 @@ class RepliesController < ApplicationController
namespace "/replies" do
# Display all replies
- get "?:include_threads?" do
+ get "" do
check_last_modified_collection(LinkedData::Models::Notes::Reply)
replies = LinkedData::Models::Notes::Reply.where.include(LinkedData::Models::Notes::Reply.goo_attrs_to_load(includes_param)).to_a
reply replies
@@ -82,4 +82,4 @@ class RepliesController < ApplicationController
halt 204
end
end
-end
\ No newline at end of file
+end
diff --git a/controllers/search_controller.rb b/controllers/search_controller.rb
index ce34d51d9..682bd7bf7 100644
--- a/controllers/search_controller.rb
+++ b/controllers/search_controller.rb
@@ -184,7 +184,7 @@ class SearchController < ApplicationController
def search(model, query, params = {})
query = query.blank? ? "*" : query
- resp = model.search(query, search_params(params))
+ resp = model.search(query, search_params(**params))
total_found = resp["response"]["numFound"]
docs = resp["response"]["docs"]
diff --git a/controllers/submission_metadata_controller.rb b/controllers/submission_metadata_controller.rb
index db6fbb78c..7007f837c 100644
--- a/controllers/submission_metadata_controller.rb
+++ b/controllers/submission_metadata_controller.rb
@@ -13,4 +13,7 @@ class SubmissionMetadataController < ApplicationController
reply klass_metadata(LinkedData::Models::Ontology, "ontology_metadata")
end
+ get "/catalog_metadata" do
+ reply klass_metadata(LinkedData::Models::SemanticArtefactCatalog, "catalog_metadata")
+ end
end
\ No newline at end of file
diff --git a/helpers/application_helper.rb b/helpers/application_helper.rb
index 51bd4f08d..c65541410 100644
--- a/helpers/application_helper.rb
+++ b/helpers/application_helper.rb
@@ -8,13 +8,13 @@ module ApplicationHelper
##
# Escape text for use in html
def h(text)
- Rack::Utils.escape_html(text)
+ Rack::Utils.escape_html(text).gsub('&#x2F;', '/')
end
##
# Populate +obj+ using values from +params+
# Will also try to find related objects using a Goo lookup.
- # TODO: Currerntly, this allows for mass-assignment of everything, which will permit
+ # TODO: Currently, this allows for mass-assignment of everything, which will permit
# users to overwrite any attribute, including things like passwords.
def populate_from_params(obj, params)
return if obj.nil?
@@ -23,7 +23,7 @@ def populate_from_params(obj, params)
if obj.is_a?(LinkedData::Models::Base)
obj.bring_remaining if obj.exist?
no_writable_attributes = obj.class.attributes(:all) - obj.class.attributes
- params = params.reject {|k,v| no_writable_attributes.include? k.to_sym}
+ params = params.reject { |k, v| no_writable_attributes.include? k.to_sym }
end
params.each do |attribute, value|
next if value.nil?
@@ -63,7 +63,7 @@ def populate_from_params(obj, params)
elsif attr_cls && not_hash_or_array || (attr_cls && not_array_of_hashes)
# Replace the initial value with the object, handling Arrays as appropriate
if value.is_a?(Array)
- value = value.map {|e| attr_cls.find(uri_as_needed(e)).include(attr_cls.attributes).first}
+ value = value.map { |e| attr_cls.find(uri_as_needed(e)).include(attr_cls.attributes).first }
elsif !value.nil?
value = attr_cls.find(uri_as_needed(value)).include(attr_cls.attributes).first
end
@@ -72,6 +72,7 @@ def populate_from_params(obj, params)
if value.is_a?(Array)
retrieved_values = []
value.each do |e|
+ e = e.to_h
retrieved_value = attr_cls.where(e.symbolize_keys).first
if retrieved_value
retrieved_values << retrieved_value
@@ -80,7 +81,7 @@ def populate_from_params(obj, params)
end
end
else
- retrieved_values = attr_cls.where(value.symbolize_keys).to_a
+ retrieved_values = attr_cls.where(value.to_h.symbolize_keys).to_a
unless retrieved_values
retrieved_values = populate_from_params(attr_cls.new, e.symbolize_keys).save
end
@@ -89,7 +90,7 @@ def populate_from_params(obj, params)
elsif attribute_settings && attribute_settings[:enforce] && attribute_settings[:enforce].include?(:date_time)
# TODO: Remove this awful hack when obj.class.model_settings[:range][attribute] contains DateTime class
is_array = value.is_a?(Array)
- value = Array(value).map{ |v| DateTime.parse(v) }
+ value = Array(value).map { |v| DateTime.parse(v) }
value = value.first unless is_array
value
elsif attribute_settings && attribute_settings[:enforce] && attribute_settings[:enforce].include?(:uri) && attribute_settings[:enforce].include?(:list)
@@ -157,9 +158,19 @@ def halt(*response)
status = obj
obj = nil
end
- status, obj = response.first, response.last if response.length == 2
- status, headers, obj = response.first, response[1], response.last if response.length == 3
- if obj.is_a?(Rack::File) # Avoid the serializer when returning files
+
+ if response.length == 2
+ status = response.first
+ obj = response.last
+ end
+
+ if response.length == 3
+ status = response.first
+ headers = response[1]
+ obj = response.last
+ end
+
+ if obj.is_a?(Rack::Files) || obj.is_a?(Rack::Files::Iterator) # Avoid the serializer when returning files
super(response)
else
super(LinkedData::Serializer.build_response(@env, status: status, headers: headers, ld_object: obj))
@@ -184,7 +195,7 @@ def error(*message)
# Look for the includes parameter and provide a formatted list of attributes
def includes_param
if @params["display"]
- return @params["display"].split(",").map {|e| e.to_sym}
+ return @params["display"].split(",").map { |e| e.to_sym }
end
Array.new
end
@@ -192,14 +203,14 @@ def includes_param
##
# Look for the ontologies acronym and give back a formatted list of ontolody id uris
# This can be called without passing an argument and it will use the values from the current request
- def ontologies_param(params=nil)
+ def ontologies_param(params = nil)
params ||= @params
if params["ontologies"]
# Get list
- ontologies = params["ontologies"].split(",").map {|o| o.strip}
+ ontologies = params["ontologies"].split(",").map { |o| o.strip }
# When they aren't URIs, make them URIs
- ontologies.map! {|o| o.start_with?("http://") ? replace_url_prefix(o) : ontology_uri_from_acronym(o)}
+ ontologies.map! { |o| o.start_with?("http://") ? replace_url_prefix(o) : ontology_uri_from_acronym(o) }
if ontologies.include? nil
error 404, "The ontologies parameter `[#{params["ontologies"]}]` includes non-existent acronyms. Notice that acronyms are case sensitive."
end
@@ -208,7 +219,7 @@ def ontologies_param(params=nil)
Array.new
end
- def restricted_ontologies(params=nil)
+ def restricted_ontologies(params = nil)
params ||= @params
found_onts = false
@@ -237,23 +248,23 @@ def restricted_ontologies(params=nil)
return onts
end
- def restricted_ontologies_to_acronyms(params=nil, onts=nil)
+ def restricted_ontologies_to_acronyms(params = nil, onts = nil)
onts ||= restricted_ontologies(params)
- return onts.map {|o| o.acronym }
+ return onts.map { |o| o.acronym }
end
- def ontologies_param_to_acronyms(params=nil)
+ def ontologies_param_to_acronyms(params = nil)
ontResourceIds = ontologies_param(params)
- return ontResourceIds.map { |ontResourceId| ontResourceId.to_s.split('/')[-1]}
+ return ontResourceIds.map { |ontResourceId| ontResourceId.to_s.split('/')[-1] }
end
##
# Get semantic types parameter in the form [semantic_types=T099,T085,T345]
- def semantic_types_param(params=nil)
+ def semantic_types_param(params = nil)
params ||= @params
if params["semantic_types"]
- semanticTypes = params["semantic_types"].split(",").map {|o| o.strip}
+ semanticTypes = params["semantic_types"].split(",").map { |o| o.strip }
return semanticTypes
end
Array.new
@@ -261,21 +272,21 @@ def semantic_types_param(params=nil)
##
# Get cui parameter in the form [cui=C0302369,C0522224,C0176617]
- def cui_param(params=nil)
+ def cui_param(params = nil)
params ||= @params
if params["cui"]
- cui = params["cui"].split(",").map {|o| o.strip}
+ cui = params["cui"].split(",").map { |o| o.strip }
return cui
end
Array.new
end
# validates month for 1-12 or 01-09
- def month_param(params=nil)
+ def month_param(params = nil)
params ||= @params
if params["month"]
month = params["month"].strip
- if %r{(?<month>^(0[1-9]|[1-9]|1[0-2])$)}x === month
+ if /(?<month>^(0[1-9]|[1-9]|1[0-2])$)/x === month
return month.to_i.to_s
end
end
@@ -283,11 +294,11 @@ def month_param(params=nil)
end
# validates year for starting with 1 or 2 and containing 4 digits
- def year_param(params=nil)
+ def year_param(params = nil)
params ||= @params
if params["year"]
year = params["year"].strip
- if %r{(?<year>^([1-2]\d{3})$)}x === year
+ if /(?<year>^([1-2]\d{3})$)/x === year
return year.to_i.to_s
end
end
@@ -327,14 +338,14 @@ def ontology_from_acronym(acronym)
def ontology_objects_from_params(params = nil)
ontologies = Set.new(ontologies_param(params))
all_onts = LinkedData::Models::Ontology.where.include(LinkedData::Models::Ontology.goo_attrs_to_load).to_a
- all_onts.select {|o| ontologies.include?(o.id.to_s)}
+ all_onts.select { |o| ontologies.include?(o.id.to_s) }
end
def ontology_uri_acronym_map
cached_map = naive_expiring_cache_read(__method__)
return cached_map if cached_map
map = {}
- LinkedData::Models::Ontology.where.include(:acronym).all.each {|o| map[o.acronym] = o.id.to_s}
+ LinkedData::Models::Ontology.where.include(:acronym).all.each { |o| map[o.acronym] = o.id.to_s }
naive_expiring_cache_write(__method__, map)
map
end
@@ -343,7 +354,7 @@ def acronym_ontology_uri_map
cached_map = naive_expiring_cache_read(__method__)
return cached_map if cached_map
map = {}
- LinkedData::Models::Ontology.where.include(:acronym).all.each {|o| map[o.id.to_s] = o.acronym}
+ LinkedData::Models::Ontology.where.include(:acronym).all.each { |o| map[o.id.to_s] = o.acronym }
naive_expiring_cache_write(__method__, map)
map
end
@@ -381,10 +392,10 @@ def retrieve_latest_submissions(options = {})
def get_ontology_and_submission
ont = Ontology.find(@params["ontology"])
- .include(:acronym, :administeredBy, :acl, :viewingRestriction)
- .include(submissions:
- [:submissionId, submissionStatus: [:code], ontology: [:acronym], metrics: :classes])
- .first
+ .include(:acronym, :administeredBy, :acl, :viewingRestriction)
+ .include(submissions:
+ [:submissionId, submissionStatus: [:code], ontology: [:acronym], metrics: :classes])
+ .first
error(404, "Ontology '#{@params["ontology"]}' not found.") if ont.nil?
check_access(ont) if LinkedData.settings.enable_security # Security check
submission = nil
@@ -392,7 +403,7 @@ def get_ontology_and_submission
submission = ont.submission(@params[:ontology_submission_id])
if submission.nil?
error 404,
- "You must provide an existing submission ID for the #{@params["acronym"]} ontology"
+ "You must provide an existing submission ID for the #{@params["acronym"]} ontology"
end
else
submission = ont.latest_submission(status: [:RDF])
@@ -418,28 +429,29 @@ def include_param_contains?(str)
return class_params_include || params_include
end
-
##
# Checks to see if the request has a file attached
def request_has_file?
- @params.any? {|p,v| v.instance_of?(Hash) && v.key?(:tempfile) && v[:tempfile].instance_of?(Tempfile)}
+ @params.any? { |p, v| v.instance_of?(Hash) && v.key?(:tempfile) && v[:tempfile].instance_of?(Tempfile) }
end
##
# Looks for a file that was included as a multipart in a request
def file_from_request
- @params.each do |param, value|
- if value.instance_of?(Hash) && value.has_key?(:tempfile) && value[:tempfile].instance_of?(Tempfile)
+ @params.each_value do |value|
+ if value.is_a?(Hash) && value.key?(:tempfile) && value[:tempfile].instance_of?(Tempfile)
return value[:filename], value[:tempfile]
end
end
- return nil, nil
+
+ [nil, nil]
end
+
private
def naive_expiring_cache_write(key, object, timeout = 60)
@naive_expiring_cache ||= {}
- @naive_expiring_cache[key] = {timeout: Time.now + timeout, object: object}
+ @naive_expiring_cache[key] = { timeout: Time.now + timeout, object: object }
end
def naive_expiring_cache_read(key)
@@ -450,7 +462,6 @@ def naive_expiring_cache_read(key)
return object[:object]
end
-
def save_submission_language(submission, language_property = :naturalLanguage)
request_lang = RequestStore.store[:requested_lang]
@@ -463,7 +474,7 @@ def save_submission_language(submission, language_property = :naturalLanguage)
collection_natural_language = collection_natural_language.values.flatten if collection_natural_language.is_a?(Hash)
submissions_language = collection_natural_language.map { |natural_language| natural_language.to_s.split('/').last[0..1] }.compact.first
- RequestStore.store[:requested_lang] = submissions_language if submissions_language
+ RequestStore.store[:requested_lang] = submissions_language if submissions_language
end
end
diff --git a/helpers/home_helper.rb b/helpers/home_helper.rb
new file mode 100644
index 000000000..c794dc281
--- /dev/null
+++ b/helpers/home_helper.rb
@@ -0,0 +1,168 @@
+require 'sinatra/base'
+
+module Sinatra
+ module Helpers
+
+ module HomeHelper
+
+ def routes_list
+ return @navigable_routes if @navigable_routes
+
+ routes = Sinatra::Application.routes['GET']
+ navigable_routes = []
+ routes.each do |route|
+ navigable_routes << route[0].to_s.split('?').first
+ end
+ @navigable_routes = navigable_routes
+ navigable_routes
+ end
+
+ def routes_by_class
+ {
+ '/agents' => LinkedData::Models::Agent,
+ '/annotator' => nil,
+ '/categories' => LinkedData::Models::Category,
+ '/groups' => LinkedData::Models::Group,
+ '/documentation' => nil,
+ '/mappings' => LinkedData::Models::Mapping,
+ '/metrics' => LinkedData::Models::Metric,
+ '/notes' => LinkedData::Models::Note,
+ '/ontologies' => LinkedData::Models::Ontology,
+ '/ontologies_full' => LinkedData::Models::Ontology,
+ '/analytics' => nil,
+ '/submissions' => LinkedData::Models::OntologySubmission,
+ '/projects' => LinkedData::Models::Project,
+ '/property_search' => nil,
+ '/provisional_classes' => LinkedData::Models::ProvisionalClass,
+ '/provisional_relations' => LinkedData::Models::ProvisionalRelation,
+ '/recommender' => nil,
+ '/replies' => LinkedData::Models::Notes::Reply,
+ '/reviews' => LinkedData::Models::Review,
+ '/search' => nil,
+ '/slices' => LinkedData::Models::Slice,
+ '/submission_metadata' => nil,
+ '/ontology_metadata' => nil,
+ '/users' => LinkedData::Models::User
+ }
+ end
+
+ def resource_collection_link(cls)
+ resource = @metadata[:cls].name.split("::").last
+ return "" if resource.nil?
+
+ resource_path = "/" + resource.underscore.pluralize
+
+ case
+ when resource == "Class"
+ "Example: "\
+ ""\
+ "/ontologies/SNOMEDCT/classes/http%3A%2F%2Fpurl.bioontology.org%2Fontology%2FSNOMEDCT%2F410607006"
+ when resource == "Instance"
+ "Example: "\
+ ""\
+ "/ontologies/CTX/classes/http%3A%2F%2Fwww.owl-ontologies.com%2FOntologyXCT.owl%23Eyelid/instances"
+ when resource == "Mapping"
+ "Example: "\
+ ""\
+ "/ontologies/SNOMEDCT/classes/http%3A%2F%2Fpurl.bioontology.org%2Fontology%2FSNOMEDCT%2F410607006/mappings"
+ when resource == "Note"
+ "Example: /ontologies/NCIT/notes"
+ when resource == "OntologySubmission"
+ "Example: "\
+ ""\
+ "/ontologies/NCIT/submissions?display=submissionId,version"
+ when (routes_list().include? resource_path) == false
+ "Example: coming soon"
+ else
+ "Resource collection: #{resource_path}"
+ end
+ end
+
+
+ def sample_objects
+ ontology = LinkedData::Models::Ontology.read_only(id: LinkedData.settings.rest_url_prefix+"/ontologies/BRO", acronym: "BRO")
+ submission = LinkedData::Models::OntologySubmission.read_only(id: LinkedData.settings.rest_url_prefix+"/ontologies/BRO/submissions/1", ontology: ontology)
+ cls = LinkedData::Models::Class.read_only(id: "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Ontology_Development_and_Management", submission: submission)
+ return {
+ LinkedData::Models::Ontology.type_uri => ontology,
+ LinkedData::Models::Class.type_uri => cls
+ }
+ end
+
+
+ def hypermedia_links(cls)
+ cls.hypermedia_settings[:link_to]
+ end
+
+ def get_metadata_all
+ return @metadata_all_info if @metadata_all_info
+
+ ld_classes = ObjectSpace.each_object(Class).select { |klass| klass < LinkedData::Hypermedia::Resource }
+ info = {}
+
+ ld_classes.each do |cls|
+ next unless routes_by_class.value?(cls)
+
+ attributes = if cls.respond_to?(:attributes)
+ (cls.attributes(:all) + cls.hypermedia_settings[:serialize_methods]).uniq
+ else
+ cls.instance_methods(false)
+ end
+ attributes_info = {}
+ attributes.each do |attribute|
+ next if cls.hypermedia_settings[:serialize_never].include?(attribute)
+
+ if cls.ancestors.include?(LinkedData::Models::Base)
+ model_cls = cls.range(attribute)
+ type = model_cls.type_uri if model_cls.respond_to?('type_uri')
+
+ shows_default = cls.hypermedia_settings[:serialize_default].empty? ? true : cls.hypermedia_settings[:serialize_default].include?(attribute)
+
+ schema = cls.attribute_settings(attribute) rescue nil
+ schema ||= {}
+ attributes_info[attribute] = {
+ type: type || '',
+ shows_default: shows_default || ' ',
+ unique: cls.unique?(attribute) || ' ',
+ required: cls.required?(attribute) || ' ',
+ list: cls.list?(attribute) || ' ',
+ cardinality: (cls.cardinality(attribute) rescue nil) || ' '
+ }
+ else
+ attributes_info[attribute] = {
+ type: '',
+ shows_default: ' ',
+ unique: ' ',
+ required: ' ',
+ list: ' ',
+ cardinality: ' '
+ }
+ end
+ end
+
+ cls_info = {
+ attributes: attributes_info,
+ uri: cls.type_uri,
+ cls: cls
+ }
+
+ info[cls] = cls_info
+ end
+
+ # Sort by 'shown by default'
+ info.each_value do |cls_props|
+ shown = {}
+ not_shown = {}
+ cls_props[:attributes].each { |attr, values| values[:shows_default] ? shown[attr] = values : not_shown[attr] = values }
+ cls_props[:attributes] = shown.merge(not_shown)
+ end
+
+ @metadata_all_info = info
+ info
+ end
+ end
+
+ end
+end
+
+helpers Sinatra::Helpers::HomeHelper
diff --git a/helpers/ontology_helper.rb b/helpers/ontology_helper.rb
index 3d82939e2..23e485df7 100644
--- a/helpers/ontology_helper.rb
+++ b/helpers/ontology_helper.rb
@@ -75,6 +75,15 @@ def add_file_to_submission(ont, submission)
end
return filename, tmpfile
end
+
+ # reject private ontologies in groups and categories
+ def reject_private_ontologies(items)
+ items.each do |item|
+ public_ontologies = item.ontologies.reject { |ontology| ontology.viewingRestriction == "private" }
+ item.instance_variable_set(:@ontologies, public_ontologies)
+ end
+ end
+
end
end
end
diff --git a/helpers/users_helper.rb b/helpers/users_helper.rb
index fbb10d92e..92dccc843 100644
--- a/helpers/users_helper.rb
+++ b/helpers/users_helper.rb
@@ -38,12 +38,10 @@ def token(len)
end
def reset_password(email, username, token)
- user = LinkedData::Models::User.where(email: email, username: username).include(User.goo_attrs_to_load(includes_param)).first
+ user = LinkedData::Models::User.where(email: email, username: username).include(User.goo_attrs_to_load(includes_param) + [:resetToken, :passwordHash, :resetTokenExpireTime]).first
error 404, "User not found" unless user
- user.bring(:resetToken)
- user.bring(:passwordHash)
user.show_apikey = true
token_accepted = token.eql?(user.resetToken)
if token_accepted
diff --git a/init.rb b/init.rb
index 44a1eef52..0fd644a9c 100644
--- a/init.rb
+++ b/init.rb
@@ -1,34 +1,30 @@
-# Recursively require files from directories and their sub-directories
+# Recursively require files from directories
def require_dir(dir)
- Dir.glob("#{dir}/*.rb").each {|f| require_relative f }
- Dir.glob("#{dir}/*/").each {|d| require_dir(d.gsub(/\/+$/, '')) }
+ Dir.glob("#{dir}/**/*.rb").sort.each { |f| require_relative f }
end
-# Require controller base files
-require_relative "controllers/application_controller"
+# Require core files
+require_relative 'controllers/application_controller'
+require_dir('lib')
+require_dir('helpers')
+require_dir('models')
+require_dir('controllers')
-# Require known directories
-require_dir("lib")
-require_dir("helpers")
-require_dir("models")
-require_dir("controllers")
+# Add optional trailing slash to routes
+Sinatra.register do
+ def self.registered(app)
+ app.routes.each do |verb, routes|
+ routes.each do |route|
+ pattern = route[0]
+ next if pattern.to_s.end_with?('/')
-##
-# Look for routes without an optional trailing slash or existing trailing slash
-# and add the optional trailing slash so both /ontologies/ and /ontologies works
-def rewrite_routes_trailing_slash
- trailing_slash = Regexp.new(/.*\/\?\\z/)
- no_trailing_slash = Regexp.new(/(.*)\\z\//)
- Sinatra::Application.routes.each do |method, routes|
- routes.each do |r|
- route_regexp_str = r[0].inspect
- if trailing_slash.match(route_regexp_str)
- next
- else
- new_route = route_regexp_str.gsub(no_trailing_slash, "\\1\\/?\\z/")
- r[0] = eval(new_route)
+ http_verb = verb.to_s.downcase
+ app.public_send(http_verb, "#{pattern}/") do
+ pass unless request.path_info.end_with?('/')
+ redirect_path = request.path_info.chomp('/')
+ redirect redirect_path, 301
+ end
end
end
end
end
-rewrite_routes_trailing_slash()
\ No newline at end of file
diff --git a/lib/rack/cube_reporter.rb b/lib/rack/cube_reporter.rb
deleted file mode 100644
index d6694b874..000000000
--- a/lib/rack/cube_reporter.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-require 'cube'
-
-##
-# This enables collection of request statistics for anaylsis via cube.
-# A cube server is required. See http://square.github.io/cube/ for more info.
-module Rack
- class CubeReporter
-
- def initialize(app = nil, options = {})
- host = options[:cube_host] || "localhost"
- port = options[:cube_port] || 1180
- @app = app
- @cube = ::Cube::Client.new(host, port)
- end
-
- def call(env)
- start = Time.now
- data = @app.call(env)
- finish = Time.now
- cache_hit = !data[1]["X-Rack-Cache"].nil? && data[1]["X-Rack-Cache"].eql?("fresh")
- user = env["REMOTE_USER"]
- apikey = user.apikey if user
- username = user.username if user
- req_data = {
- duration_ms: ((finish - start)*1000).ceil,
- path: env["REQUEST_PATH"],
- cache_hit: cache_hit,
- status: data[0],
- user: {
- apikey: apikey,
- username: username,
- ip: env["REMOTE_ADDR"],
- user_agent: env["HTTP_USER_AGENT"]
- }
- }
- @cube.send "ontologies_api_request", DateTime.now, req_data
- data
- end
-
- end
-end
\ No newline at end of file
diff --git a/mise.toml b/mise.toml
new file mode 100644
index 000000000..a050f48bf
--- /dev/null
+++ b/mise.toml
@@ -0,0 +1,2 @@
+[tools]
+ruby = "3.1.0"
diff --git a/test/controllers/test_batch_controller.rb b/test/controllers/test_batch_controller.rb
index ca37b156e..72d0a98c3 100644
--- a/test/controllers/test_batch_controller.rb
+++ b/test/controllers/test_batch_controller.rb
@@ -22,7 +22,7 @@ def test_class_batch_one_ontology
"display" => "prefLabel,synonym"
}
}
- post "/batch/", call_params
+ post "/batch", call_params
assert last_response.ok?
data = MultiJson.load(last_response.body)
classes = data["http://www.w3.org/2002/07/owl#Class"]
@@ -48,7 +48,7 @@ def test_class_wrong_params
"display" => "prefLabel,synonym"
}
}
- post "/batch/", call_params
+ post "/batch", call_params
assert last_response.status = 422
end
@@ -72,7 +72,7 @@ def test_class_batch_multiple
"display" => "prefLabel"
}
}
- post "/batch/", call_params
+ post "/batch", call_params
assert last_response.ok?
data = MultiJson.load(last_response.body)
classes = data["http://www.w3.org/2002/07/owl#Class"]
@@ -101,7 +101,7 @@ def test_class_all_bro
"display" => "prefLabel"
}
}
- post "/batch/", call_params
+ post "/batch", call_params
assert last_response.ok?
# refute last_response.ok?
data = MultiJson.load(last_response.body)
diff --git a/test/controllers/test_classes_controller.rb b/test/controllers/test_classes_controller.rb
index 323d241d4..2def5e899 100644
--- a/test/controllers/test_classes_controller.rb
+++ b/test/controllers/test_classes_controller.rb
@@ -420,7 +420,7 @@ def test_calls_not_found
escaped_cls= CGI.escape("http://my.bogus.inexistent.class/that/this/is")
#404 on ontology
- get "/ontologies/NO-ONT-ZZZZZZ/classes/"
+ get "/ontologies/NO-ONT-ZZZZZZ/classes"
assert last_response.status == 404
get "/ontologies/NO-ONT-ZZZZZZ/classes/#{escaped_cls}/children"
assert last_response.status == 404
diff --git a/test/controllers/test_external_mappings_controller.rb b/test/controllers/test_external_mappings_controller.rb
index 0a18bf631..1bd0eaf59 100644
--- a/test/controllers/test_external_mappings_controller.rb
+++ b/test/controllers/test_external_mappings_controller.rb
@@ -65,7 +65,7 @@ def delete_external_mappings
creator: "tim"
}
- post "/mappings/", MultiJson.dump(mapping), "CONTENT_TYPE" => "application/json"
+ post "/mappings", MultiJson.dump(mapping), "CONTENT_TYPE" => "application/json"
assert last_response.status == 201, "Error creating the external mapping: #{last_response.body}"
response = MultiJson.load(last_response.body)
diff --git a/test/controllers/test_graphs_admin_controller.rb b/test/controllers/test_graphs_admin_controller.rb
new file mode 100644
index 000000000..e161aad30
--- /dev/null
+++ b/test/controllers/test_graphs_admin_controller.rb
@@ -0,0 +1,53 @@
+require_relative '../test_case'
+
+class TestGraphAdminController < TestCase
+ def setup
+ ontologies = LinkedData::Models::Ontology.all
+ if ontologies.empty?
+ LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
+ @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies(process_submission: false)
+ end
+ file_path = AdminGraphsController::GRAPH_COUNT_REPORT_PATH
+ File.delete(file_path) if File.exist?(file_path)
+ end
+
+ def test_initial_graphs_admin_actions
+ get '/admin/graphs'
+ assert last_response.ok?
+ response = MultiJson.load(last_response.body)
+ assert_empty response
+ end
+
+ def test_graph_creation_and_retrieval
+ post '/admin/graphs'
+
+ get '/admin/graphs'
+ assert last_response.ok?
+ response = MultiJson.load(last_response.body)
+ refute_empty response
+
+ response.each do |graph, count|
+ assert graph.is_a?(String)
+ assert count.is_a?(Array)
+ assert count[0].is_a?(Integer)
+ assert count[1].is_a?(TrueClass) || count[1].is_a?(FalseClass)
+ end
+ end
+
+ def test_graph_deletion
+ post '/admin/graphs'
+
+ get '/admin/graphs'
+ response = MultiJson.load(last_response.body)
+ refute_empty response
+
+ graph = 'http://data.bioontology.org/metadata/OntologySubmission'
+
+ delete '/admin/graphs', url: graph
+
+ get '/admin/graphs'
+ assert last_response.ok?
+ response = MultiJson.load(last_response.body)
+ assert_nil response[graph]
+ end
+end
diff --git a/test/controllers/test_logging_controller.rb b/test/controllers/test_logging_controller.rb
new file mode 100644
index 000000000..341621458
--- /dev/null
+++ b/test/controllers/test_logging_controller.rb
@@ -0,0 +1,59 @@
+require_relative '../test_case'
+require "multi_json"
+
+class TestLoggingController < TestCase
+
+ def setup
+ Goo.use_cache = true
+ Goo.redis_client.flushdb
+ Goo.add_query_logger(enabled: true, file: "./queries.log")
+ end
+
+ def teardown
+ Goo.add_query_logger(enabled: false, file: nil)
+ File.delete("./queries.log") if File.exist?("./queries.log")
+ Goo.redis_client.flushdb
+ Goo.use_cache = false
+ end
+
+ def test_logging_endpoint
+ (1..10).each do |_i|
+ LinkedData::Models::Ontology.where.include(:acronym).all
+ end
+
+ get '/admin/latest_day_query_logs?page=1&pagesize=9'
+ assert last_response.ok?
+ logs = MultiJson.load(last_response.body)
+ assert_equal 9, logs['collection'].size
+
+ get '/admin/latest_day_query_logs?page=2&pagesize=9'
+ assert last_response.ok?
+ logs = MultiJson.load(last_response.body)
+ refute_empty logs['collection']
+
+ get '/admin/latest_day_query_logs?page=3&pagesize=9'
+ assert last_response.ok?
+ logs = MultiJson.load(last_response.body)
+ assert_empty logs['collection']
+ end
+
+ def test_n_last_seconds_logs
+ Goo.logger.info("Test log")
+ (1..10).each do |_i|
+ LinkedData::Models::Ontology.where.include(:acronym).all
+ end
+
+ Goo.logger.info("Test log")
+ get '/admin/last_n_s_query_logs?seconds=2&page=1&pagesize=10'
+ assert last_response.ok?
+ logs = MultiJson.load(last_response.body)
+ assert_equal 10, logs['collection'].size
+
+ sleep 1
+ LinkedData::Models::Ontology.where.include(:acronym).all
+ get '/admin/last_n_s_query_logs?seconds=1&page=1&pagesize=10'
+ assert last_response.ok?
+ logs = MultiJson.load(last_response.body)
+ assert_equal 1, logs['collection'].size
+ end
+end
diff --git a/test/controllers/test_mappings_controller.rb b/test/controllers/test_mappings_controller.rb
index 2ab612616..9aa76b238 100644
--- a/test/controllers/test_mappings_controller.rb
+++ b/test/controllers/test_mappings_controller.rb
@@ -124,7 +124,7 @@ def commun_created_mappings_test(created, mapping_term_a, mapping_term_b, relati
end
assert rest_count == 3
- get "/mappings/recent/"
+ get "/mappings/recent"
assert last_response.status == 200
response = MultiJson.load(last_response.body)
assert (response.length == 5)
@@ -191,7 +191,7 @@ def mappings_between_ontologies
]
ontologies_params.each do |ontologies|
ont1, ont2 = ontologies.split(",")
- get "/mappings/?ontologies=#{ontologies}"
+ get "/mappings?ontologies=#{ontologies}"
assert last_response.ok?
mappings = MultiJson.load(last_response.body)
#pages
@@ -284,7 +284,7 @@ def create_mapping
created = []
mappings.each_with_index do |mapping, i|
- post '/mappings/',
+ post '/mappings',
MultiJson.dump(mapping),
"CONTENT_TYPE" => "application/json"
@@ -315,7 +315,7 @@ def delete_mapping
created = []
mappings.each do |mapping|
- post "/mappings/",
+ post "/mappings",
MultiJson.dump(mapping),
"CONTENT_TYPE" => "application/json"
@@ -351,7 +351,7 @@ def mappings_statistics
end
NcboCron::Models::QueryWarmer.new(Logger.new(TestLogFile.new)).run
assert LinkedData::Models::MappingCount.where.all.length > 2
- get "/mappings/statistics/ontologies/"
+ get "/mappings/statistics/ontologies"
assert last_response.ok?
stats = MultiJson.load(last_response.body)
data = {"CNO-TEST-MAP-0"=>19,
diff --git a/test/controllers/test_ontologies_controller.rb b/test/controllers/test_ontologies_controller.rb
index d05959e8f..970053397 100644
--- a/test/controllers/test_ontologies_controller.rb
+++ b/test/controllers/test_ontologies_controller.rb
@@ -97,7 +97,7 @@ def test_create_ontology
assert last_response.status == 201
delete "/ontologies/#{@@acronym}"
- post "/ontologies/", @@file_params.merge(acronym: @@acronym)
+ post "/ontologies", @@file_params.merge(acronym: @@acronym)
assert last_response.status == 201
end
diff --git a/test/controllers/test_ontology_submissions_controller.rb b/test/controllers/test_ontology_submissions_controller.rb
index 670658a72..f9130c85d 100644
--- a/test/controllers/test_ontology_submissions_controller.rb
+++ b/test/controllers/test_ontology_submissions_controller.rb
@@ -10,27 +10,27 @@ def before_suite
end
def self._set_vars
- @@acronym = "TST"
- @@name = "Test Ontology"
- @@test_file = File.expand_path("../../data/ontology_files/BRO_v3.1.owl", __FILE__)
+ @@acronym = 'TST'
+ @@name = 'Test Ontology'
+ @@test_file = File.expand_path('../../data/ontology_files/BRO_v3.1.owl', __FILE__)
@@file_params = {
name: @@name,
- hasOntologyLanguage: "OWL",
- administeredBy: "tim",
- "file" => Rack::Test::UploadedFile.new(@@test_file, ""),
+ hasOntologyLanguage: 'OWL',
+ administeredBy: 'tim',
+ 'file' => Rack::Test::UploadedFile.new(@@test_file, ''),
released: DateTime.now.to_s,
- contact: [{name: "test_name", email: "test3@example.org"}],
+ contact: [{name: 'test_name', email: 'test3@example.org'}],
URI: 'https://test.com/test',
status: 'production',
description: 'ontology description'
}
- @@status_uploaded = "UPLOADED"
- @@status_rdf = "RDF"
+ @@status_uploaded = 'UPLOADED'
+ @@status_rdf = 'RDF'
end
def self._create_user
- username = "tim"
- test_user = User.new(username: username, email: "#{username}@example.org", password: "password")
+ username = 'tim'
+ test_user = User.new(username: username, email: "#{username}@example.org", password: 'password')
test_user.save if test_user.valid?
@@user = test_user.valid? ? test_user : User.find(username).first
end
@@ -47,7 +47,7 @@ def setup
end
def test_submissions_for_given_ontology
- num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1)
+ _, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1)
ontology = created_ont_acronyms.first
get "/ontologies/#{ontology}/submissions"
assert last_response.ok?
@@ -59,104 +59,104 @@ def test_submissions_for_given_ontology
end
def test_create_new_submission_missing_file_and_pull_location
- post "/ontologies/#{@@acronym}/submissions", name: @@name, hasOntologyLanguage: "OWL"
- assert_equal(400, last_response.status, msg=get_errors(last_response))
- assert MultiJson.load(last_response.body)["errors"]
+ post "/ontologies/#{@@acronym}/submissions", name: @@name, hasOntologyLanguage: 'OWL'
+ assert_equal(400, last_response.status, get_errors(last_response))
+ assert MultiJson.load(last_response.body)['errors']
end
def test_create_new_submission_file
post "/ontologies/#{@@acronym}/submissions", @@file_params
- assert_equal(201, last_response.status, msg=get_errors(last_response))
+ assert_equal(201, last_response.status, get_errors(last_response))
sub = MultiJson.load(last_response.body)
get "/ontologies/#{@@acronym}"
ont = MultiJson.load(last_response.body)
- assert ont["acronym"].eql?(@@acronym)
+ assert ont['acronym'].eql?(@@acronym)
# Cleanup
delete "/ontologies/#{@@acronym}/submissions/#{sub['submissionId']}"
- assert_equal(204, last_response.status, msg=get_errors(last_response))
+ assert_equal(204, last_response.status, get_errors(last_response))
end
def test_create_new_ontology_submission
post "/ontologies/#{@@acronym}/submissions", @@file_params
- assert_equal(201, last_response.status, msg=get_errors(last_response))
+ assert_equal(201, last_response.status, get_errors(last_response))
# Cleanup
sub = MultiJson.load(last_response.body)
delete "/ontologies/#{@@acronym}/submissions/#{sub['submissionId']}"
- assert_equal(204, last_response.status, msg=get_errors(last_response))
+ assert_equal(204, last_response.status, get_errors(last_response))
end
def test_patch_ontology_submission
- num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1)
+ _, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1)
ont = Ontology.find(created_ont_acronyms.first).include(submissions: [:submissionId, ontology: :acronym]).first
assert(ont.submissions.length > 0)
submission = ont.submissions[0]
- new_values = {description: "Testing new description changes"}
- patch "/ontologies/#{submission.ontology.acronym}/submissions/#{submission.submissionId}", MultiJson.dump(new_values), "CONTENT_TYPE" => "application/json"
- assert_equal(204, last_response.status, msg=get_errors(last_response))
+ new_values = {description: 'Testing new description changes'}
+ patch "/ontologies/#{submission.ontology.acronym}/submissions/#{submission.submissionId}", MultiJson.dump(new_values), 'CONTENT_TYPE' => 'application/json'
+ assert_equal(204, last_response.status, get_errors(last_response))
get "/ontologies/#{submission.ontology.acronym}/submissions/#{submission.submissionId}"
submission = MultiJson.load(last_response.body)
- assert submission["description"].eql?("Testing new description changes")
+ assert submission['description'].eql?('Testing new description changes')
end
def test_delete_ontology_submission
- num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1, random_submission_count: false, submission_count: 5)
+ _, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1, random_submission_count: false, submission_count: 5)
acronym = created_ont_acronyms.first
submission_to_delete = (1..5).to_a.shuffle.first
delete "/ontologies/#{acronym}/submissions/#{submission_to_delete}"
- assert_equal(204, last_response.status, msg=get_errors(last_response))
+ assert_equal(204, last_response.status, get_errors(last_response))
get "/ontologies/#{acronym}/submissions/#{submission_to_delete}"
- assert_equal(404, last_response.status, msg=get_errors(last_response))
+ assert_equal(404, last_response.status, get_errors(last_response))
end
def test_download_submission
num_onts_created, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: false)
- assert_equal(1, num_onts_created, msg="Failed to create 1 ontology?")
- assert_equal(1, onts.length, msg="Failed to create 1 ontology?")
+ assert_equal(1, num_onts_created, 'Failed to create 1 ontology?')
+ assert_equal(1, onts.length, 'Failed to create 1 ontology?')
ont = onts.first
ont.bring(:submissions, :acronym)
- assert_instance_of(Ontology, ont, msg="ont is not a #{Ontology.class}")
- assert_equal(1, ont.submissions.length, msg="Failed to create 1 ontology submission?")
+ assert_instance_of(Ontology, ont, "ont is not a #{Ontology.class}")
+ assert_equal(1, ont.submissions.length, 'Failed to create 1 ontology submission?')
sub = ont.submissions.first
sub.bring(:submissionId)
- assert_instance_of(OntologySubmission, sub, msg="sub is not a #{OntologySubmission.class}")
+ assert_instance_of(OntologySubmission, sub, "sub is not a #{OntologySubmission.class}")
# Clear restrictions on downloads
LinkedData::OntologiesAPI.settings.restrict_download = []
# Download the specific submission
get "/ontologies/#{ont.acronym}/submissions/#{sub.submissionId}/download"
- assert_equal(200, last_response.status, msg='failed download for specific submission : ' + get_errors(last_response))
+ assert_equal(200, last_response.status, 'failed download for specific submission : ' + get_errors(last_response))
# Add restriction on download
acronym = created_ont_acronyms.first
LinkedData::OntologiesAPI.settings.restrict_download = [acronym]
# Try download
get "/ontologies/#{ont.acronym}/submissions/#{sub.submissionId}/download"
# download should fail with a 403 status
- assert_equal(403, last_response.status, msg='failed to restrict download for ontology : ' + get_errors(last_response))
+ assert_equal(403, last_response.status, 'failed to restrict download for ontology : ' + get_errors(last_response))
# Clear restrictions on downloads
LinkedData::OntologiesAPI.settings.restrict_download = []
# see also test_ontologies_controller::test_download_ontology
# Test downloads of nonexistent ontology
- get "/ontologies/BOGUS66/submissions/55/download"
- assert_equal(422, last_response.status, "failed to handle downloads of nonexistent ontology" + get_errors(last_response))
+ get '/ontologies/BOGUS66/submissions/55/download'
+ assert_equal(422, last_response.status, 'failed to handle downloads of nonexistent ontology: ' + get_errors(last_response))
end
def test_download_ontology_submission_rdf
- count, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: true)
+ _, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: true)
acronym = created_ont_acronyms.first
ont = onts.first
sub = ont.submissions.first
get "/ontologies/#{acronym}/submissions/#{sub.submissionId}/download?download_format=rdf"
- assert_equal(200, last_response.status, msg="Download failure for '#{acronym}' ontology: " + get_errors(last_response))
+ assert_equal(200, last_response.status, "Download failure for '#{acronym}' ontology: " + get_errors(last_response))
# Download should fail with a 400 status.
get "/ontologies/#{acronym}/submissions/#{sub.submissionId}/download?download_format=csr"
- assert_equal(400, last_response.status, msg="Download failure for '#{acronym}' ontology: " + get_errors(last_response))
+ assert_equal(400, last_response.status, "Download failure for '#{acronym}' ontology: " + get_errors(last_response))
end
def test_download_acl_only
- count, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: false)
+ _, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: false)
acronym = created_ont_acronyms.first
ont = onts.first.bring_remaining
ont.bring(:submissions)
@@ -165,65 +165,65 @@ def test_download_acl_only
begin
allowed_user = User.new({
- username: "allowed",
- email: "test4@example.org",
- password: "12345"
+ username: 'allowed',
+ email: 'test4@example.org',
+ password: '12345'
})
allowed_user.save
blocked_user = User.new({
- username: "blocked",
- email: "test5@example.org",
- password: "12345"
+ username: 'blocked',
+ email: 'test5@example.org',
+ password: '12345'
})
blocked_user.save
ont.acl = [allowed_user]
- ont.viewingRestriction = "private"
+ ont.viewingRestriction = 'private'
ont.save
LinkedData.settings.enable_security = true
get "/ontologies/#{acronym}/submissions/#{sub.submissionId}/download?apikey=#{allowed_user.apikey}"
- assert_equal(200, last_response.status, msg="User who is in ACL couldn't download ontology")
+ assert_equal(200, last_response.status, "User who is in ACL couldn't download ontology")
get "/ontologies/#{acronym}/submissions/#{sub.submissionId}/download?apikey=#{blocked_user.apikey}"
- assert_equal(403, last_response.status, msg="User who isn't in ACL could download ontology")
+ assert_equal(403, last_response.status, "User who isn't in ACL could download ontology")
admin = ont.administeredBy.first
admin.bring(:apikey)
get "/ontologies/#{acronym}/submissions/#{sub.submissionId}/download?apikey=#{admin.apikey}"
- assert_equal(200, last_response.status, msg="Admin couldn't download ontology")
+ assert_equal(200, last_response.status, "Admin couldn't download ontology")
ensure
LinkedData.settings.enable_security = false
- del = User.find("allowed").first
+ del = User.find('allowed').first
del.delete if del
- del = User.find("blocked").first
+ del = User.find('blocked').first
del.delete if del
end
end
def test_ontology_submissions_access_controller
- count, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 2, submission_count: 1, process_submission: false)
+ _, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 2, submission_count: 1, process_submission: false)
# case first submission is private
- acronym = created_ont_acronyms.first
+ created_ont_acronyms.first
ont = onts.first.bring_remaining
begin
allowed_user = User.new({
- username: "allowed",
- email: "test@example.org",
- password: "12345"
+ username: 'allowed',
+ email: 'test@example.org',
+ password: '12345'
})
allowed_user.save
blocked_user = User.new({
- username: "blocked",
- email: "test1254@example.org",
- password: "12345"
+ username: 'blocked',
+ email: 'test1254@example.org',
+ password: '12345'
})
blocked_user.save
ont.acl = [allowed_user]
- ont.viewingRestriction = "private"
+ ont.viewingRestriction = 'private'
ont.save
LinkedData.settings.enable_security = true
@@ -239,34 +239,34 @@ def test_ontology_submissions_access_controller
assert_equal 1, submissions.size
ensure
LinkedData.settings.enable_security = false
- del = User.find("allowed").first
+ del = User.find('allowed').first
del.delete if del
- del = User.find("blocked").first
+ del = User.find('blocked').first
del.delete if del
end
end
def test_submissions_pagination
- num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: 2, submission_count: 2)
+ create_ontologies_and_submissions(ont_count: 2, submission_count: 2)
- get "/submissions"
+ get '/submissions'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
assert_equal 2, submissions.length
- get "/submissions?page=1&pagesize=1"
+ get '/submissions?page=1&pagesize=1'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- assert_equal 1, submissions["collection"].length
+ assert_equal 1, submissions['collection'].length
end
def test_submissions_pagination_filter
num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: 10, submission_count: 1)
- group1 = LinkedData::Models::Group.new(acronym: 'group-1', name: "Test Group 1").save
- group2 = LinkedData::Models::Group.new(acronym: 'group-2', name: "Test Group 2").save
- category1 = LinkedData::Models::Category.new(acronym: 'category-1', name: "Test Category 1").save
- category2 = LinkedData::Models::Category.new(acronym: 'category-2', name: "Test Category 2").save
+ group1 = LinkedData::Models::Group.new(acronym: 'group-1', name: 'Test Group 1').save
+ group2 = LinkedData::Models::Group.new(acronym: 'group-2', name: 'Test Group 2').save
+ category1 = LinkedData::Models::Category.new(acronym: 'category-1', name: 'Test Category 1').save
+ category2 = LinkedData::Models::Category.new(acronym: 'category-2', name: 'Test Category 2').save
ontologies1 = ontologies[0..5].each do |o|
o.bring_remaining
@@ -287,28 +287,28 @@ def test_submissions_pagination_filter
# test filter by group and category
get "/submissions?page=1&pagesize=100&group=#{group1.acronym}"
assert last_response.ok?
- assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length
+ assert_equal ontologies1.size, MultiJson.load(last_response.body)['collection'].length
get "/submissions?page=1&pagesize=100&group=#{group2.acronym}"
assert last_response.ok?
- assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)['collection'].length
get "/submissions?page=1&pagesize=100&hasDomain=#{category1.acronym}"
assert last_response.ok?
- assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length
+ assert_equal ontologies1.size, MultiJson.load(last_response.body)['collection'].length
get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}"
assert last_response.ok?
- assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)['collection'].length
get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}&group=#{group1.acronym}"
assert last_response.ok?
- assert_equal 0, MultiJson.load(last_response.body)["collection"].length
+ assert_equal 0, MultiJson.load(last_response.body)['collection'].length
get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}&group=#{group2.acronym}"
assert last_response.ok?
- assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)['collection'].length
ontologies3 = ontologies[9]
ontologies3.bring_remaining
ontologies3.group = [group1, group2]
ontologies3.hasDomain = [category1, category2]
- ontologies3.name = "name search test"
+ ontologies3.name = 'name search test'
ontologies3.save
# test search with acronym
@@ -320,7 +320,7 @@ def test_submissions_pagination_filter
get "/submissions?page=1&pagesize=100&acronym=#{acronym_search}"
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- assert_equal count, submissions["collection"].length
+ assert_equal count, submissions['collection'].length
end
@@ -333,94 +333,94 @@ def test_submissions_pagination_filter
get "/submissions?page=1&pagesize=100&name=#{name_search}"
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- binding.pry unless submissions["collection"].length.eql?(count)
- assert_equal count, submissions["collection"].length
+ # NOTE(review): removed leftover binding.pry debug breakpoint; the assertion below reports the mismatch
+ assert_equal count, submissions['collection'].length
end
# test search with name and acronym
# search by name
- get "/submissions?page=1&pagesize=100&name=search&acronym=search"
+ get '/submissions?page=1&pagesize=100&name=search&acronym=search'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- assert_equal 1, submissions["collection"].length
+ assert_equal 1, submissions['collection'].length
# search by acronym
- get "/submissions?page=1&pagesize=100&name=9&acronym=9"
+ get '/submissions?page=1&pagesize=100&name=9&acronym=9'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- assert_equal 1, submissions["collection"].length
+ assert_equal 1, submissions['collection'].length
# search by acronym or name
- get "/submissions?page=1&pagesize=100&name=search&acronym=8"
+ get '/submissions?page=1&pagesize=100&name=search&acronym=8'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- assert_equal 2, submissions["collection"].length
+ assert_equal 2, submissions['collection'].length
- ontologies.first.name = "sort by test"
+ ontologies.first.name = 'sort by test'
ontologies.first.save
sub = ontologies.first.latest_submission(status: :any).bring_remaining
sub.status = 'retired'
- sub.description = "234"
+ sub.description = '234'
sub.creationDate = DateTime.yesterday.to_datetime
sub.hasOntologyLanguage = LinkedData::Models::OntologyFormat.find('SKOS').first
sub.save
#test search with sort
- get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=ontology_name"
+ get '/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=ontology_name'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- refute_empty submissions["collection"]
- assert_equal ontologies.map{|x| x.name}.sort, submissions["collection"].map{|x| x["ontology"]["name"]}
+ refute_empty submissions['collection']
+ assert_equal ontologies.map{|x| x.name}.sort, submissions['collection'].map{|x| x['ontology']['name']}
- get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=creationDate"
+ get '/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=creationDate'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- refute_empty submissions["collection"]
- assert_equal ontologies.map{|x| x.latest_submission(status: :any).bring(:creationDate).creationDate}.sort, submissions["collection"].map{|x| DateTime.parse(x["creationDate"])}.reverse
+ refute_empty submissions['collection']
+ assert_equal ontologies.map{|x| x.latest_submission(status: :any).bring(:creationDate).creationDate}.sort, submissions['collection'].map{|x| DateTime.parse(x['creationDate'])}.reverse
# test search with format
- get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=SKOS"
+ get '/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=SKOS'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- refute_empty submissions["collection"]
- assert_equal 1, submissions["collection"].size
+ refute_empty submissions['collection']
+ assert_equal 1, submissions['collection'].size
- get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=OWL"
+ get '/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=OWL'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- refute_empty submissions["collection"]
- assert_equal ontologies.size-1 , submissions["collection"].size
+ refute_empty submissions['collection']
+ assert_equal ontologies.size - 1, submissions['collection'].size
# test ontology filter with submission filter attributes
- get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&group=group-2&category=category-2&hasOntologyLanguage=OWL"
+ get '/submissions?page=1&pagesize=100&acronym=tes&name=tes&group=group-2&category=category-2&hasOntologyLanguage=OWL'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- refute_empty submissions["collection"]
- assert_equal ontologies2.size + 1 , submissions["collection"].size
+ refute_empty submissions['collection']
+ assert_equal ontologies2.size + 1, submissions['collection'].size
# test ontology filter with status
- get "/submissions?page=1&pagesize=100&status=retired"
+ get '/submissions?page=1&pagesize=100&status=retired'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- refute_empty submissions["collection"]
- assert_equal 1 , submissions["collection"].size
+ refute_empty submissions['collection']
+ assert_equal 1, submissions['collection'].size
- get "/submissions?page=1&pagesize=100&status=alpha,beta,production"
+ get '/submissions?page=1&pagesize=100&status=alpha,beta,production'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- refute_empty submissions["collection"]
- assert_equal ontologies.size - 1 , submissions["collection"].size
- get "/submissions?page=1&pagesize=100&description=234&acronym=234&name=234"
+ refute_empty submissions['collection']
+ assert_equal ontologies.size - 1, submissions['collection'].size
+ get '/submissions?page=1&pagesize=100&description=234&acronym=234&name=234'
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
- assert_equal 1 , submissions["collection"].size
+ assert_equal 1, submissions['collection'].size
end
def test_submissions_default_includes
ontology_count = 5
- num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: [])
+ _, created_ont_acronyms, = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: [])
submission_default_attributes = LinkedData::Models::OntologySubmission.hypermedia_settings[:serialize_default].map(&:to_s)
- get("/submissions?display_links=false&display_context=false&include_status=ANY")
+ get('/submissions?display_links=false&display_context=false&include_status=ANY')
assert last_response.ok?
submissions = MultiJson.load(last_response.body)
@@ -435,80 +435,59 @@ def test_submissions_default_includes
assert(submissions.all? { |sub| submission_default_attributes.eql?(submission_keys(sub)) })
end
+
def test_submissions_all_includes
ontology_count = 5
- num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: [])
- def submission_all_attributes
- attrs = OntologySubmission.goo_attrs_to_load([:all])
- embed_attrs = attrs.select { |x| x.is_a?(Hash) }.first
-
- attrs.delete_if { |x| x.is_a?(Hash) }.map(&:to_s) + embed_attrs.keys.map(&:to_s)
- end
- get("/submissions?include=all&display_links=false&display_context=false")
-
- assert last_response.ok?
- submissions = MultiJson.load(last_response.body)
- assert_equal ontology_count, submissions.size
+ _, created_ont_acronyms, = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: [])
- assert(submissions.all? { |sub| submission_all_attributes.sort.eql?(submission_keys(sub).sort) })
- assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) })
+ submission_all_attributes = begin
+ attrs = OntologySubmission.goo_attrs_to_load([:all])
+ embed_attrs = attrs.select { |x| x.is_a?(Hash) }.first || {}
+ attrs.reject { |x| x.is_a?(Hash) }.map(&:to_s) + embed_attrs.keys.map(&:to_s)
+ end.sort
- get("/ontologies/#{created_ont_acronyms.first}/submissions?include=all&display_links=false&display_context=false")
+ params = '?include=all&display_links=false&display_context=false'
- assert last_response.ok?
- submissions = MultiJson.load(last_response.body)
- assert_equal 1, submissions.size
-
- assert(submissions.all? { |sub| submission_all_attributes.sort.eql?(submission_keys(sub).sort) })
- assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) })
-
- get("/ontologies/#{created_ont_acronyms.first}/latest_submission?include=all&display_links=false&display_context=false")
- assert last_response.ok?
- sub = MultiJson.load(last_response.body)
+ [
+ "/submissions#{params}",
+ "/ontologies/#{created_ont_acronyms.first}/submissions#{params}",
+ "/ontologies/#{created_ont_acronyms.first}/latest_submission#{params}",
+ "/ontologies/#{created_ont_acronyms.first}/submissions/1#{params}"
+ ].each do |url|
+ get(url)
+ assert last_response.ok?
- assert(submission_all_attributes.sort.eql?(submission_keys(sub).sort))
- assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])))
+ response_body = MultiJson.load(last_response.body)
+ submissions = response_body.is_a?(Array) ? response_body : [response_body]
- get("/ontologies/#{created_ont_acronyms.first}/submissions/1?include=all&display_links=false&display_context=false")
- assert last_response.ok?
- sub = MultiJson.load(last_response.body)
-
- assert(submission_all_attributes.sort.eql?(submission_keys(sub).sort))
- assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])))
+ assert_equal(ontology_count, submissions.size) if url == "/submissions#{params}"
+ assert(submissions.all? { |sub| submission_all_attributes.eql?(submission_keys(sub).sort) })
+ assert(submissions.all? { |sub| sub['contact'] && (sub['contact'].first.nil? || sub['contact'].first.keys.sort.eql?(%w[name email id].sort)) })
+ end
end
def test_submissions_custom_includes
ontology_count = 5
- num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: [])
- include = 'ontology,contact,submissionId'
-
- get("/submissions?include=#{include}&display_links=false&display_context=false")
+ _, created_ont_acronyms, _ = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: [])
+ include_keys = %w[ontology contact submissionId]
+ params = "?include=#{include_keys.join(',')}&display_links=false&display_context=false"
- assert last_response.ok?
- submissions = MultiJson.load(last_response.body)
- assert_equal ontology_count, submissions.size
- assert(submissions.all? { |sub| include.split(',').eql?(submission_keys(sub)) })
- assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) })
-
- get("/ontologies/#{created_ont_acronyms.first}/submissions?include=#{include}&display_links=false&display_context=false")
-
- assert last_response.ok?
- submissions = MultiJson.load(last_response.body)
- assert_equal 1, submissions.size
- assert(submissions.all? { |sub| include.split(',').eql?(submission_keys(sub)) })
- assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) })
+ [
+ "/submissions#{params}",
+ "/ontologies/#{created_ont_acronyms.first}/submissions#{params}",
+ "/ontologies/#{created_ont_acronyms.first}/latest_submission#{params}",
+ "/ontologies/#{created_ont_acronyms.first}/submissions/1#{params}"
+ ].each do |url|
+ get(url)
+ assert last_response.ok?
- get("/ontologies/#{created_ont_acronyms.first}/latest_submission?include=#{include}&display_links=false&display_context=false")
- assert last_response.ok?
- sub = MultiJson.load(last_response.body)
- assert(include.split(',').eql?(submission_keys(sub)))
- assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])))
+ response_body = MultiJson.load(last_response.body)
+ submissions = response_body.is_a?(Array) ? response_body : [response_body]
- get("/ontologies/#{created_ont_acronyms.first}/submissions/1?include=#{include}&display_links=false&display_context=false")
- assert last_response.ok?
- sub = MultiJson.load(last_response.body)
- assert(include.split(',').eql?(submission_keys(sub)))
- assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])))
+ assert_equal(ontology_count, submissions.size) if url == "/submissions#{params}"
+ assert(submissions.all? { |sub| include_keys.eql?(submission_keys(sub)) })
+ assert(submissions.all? { |sub| sub['contact'] && (sub['contact'].first.nil? || sub['contact'].first.keys.sort.eql?(%w[name email id].sort)) })
+ end
end
def test_submissions_param_include
@@ -519,7 +498,7 @@ def test_submissions_param_include
end
def test_submission_diff
- num_onts_created, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 2,
+ _, _, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 2,
process_submission: true,
process_options: { process_rdf: true, extract_metadata: false, diff: true} )
diff --git a/test/helpers/test_application_helper.rb b/test/helpers/test_application_helper.rb
index 2315a677d..f15572433 100644
--- a/test/helpers/test_application_helper.rb
+++ b/test/helpers/test_application_helper.rb
@@ -9,22 +9,22 @@ def before_suite
def test_it_escapes_html
escaped_html = helper.h("http://testlink.com")
- assert escaped_html.eql?("<a>http://testlink.com</a>")
+ assert_equal "<a>http://testlink.com</a>", escaped_html
end
def test_ontologies_param
- ids = @@ontologies.map {|o| o.id.to_s}
- acronyms = @@ontologies.map {|o| o.id.to_s.split("/").last}
- params = {"ontologies" => acronyms.join(",")}
+ ids = @@ontologies.map { |o| o.id.to_s }
+ acronyms = @@ontologies.map { |o| o.id.to_s.split("/").last }
+ params = { "ontologies" => acronyms.join(",") }
ontologies = ontologies_param(params)
assert ontologies == ids
- params = {"ontologies" => ids.join(",")}
+ params = { "ontologies" => ids.join(",") }
ontologies = ontologies_param(params)
assert ontologies == ids
id_acronym = ids + acronyms
- params = {"ontologies" => id_acronym.join(",")}
+ params = { "ontologies" => id_acronym.join(",") }
ontologies = ontologies_param(params)
assert ontologies == (ids + ids)
end
@@ -48,16 +48,16 @@ def test_acronym_from_ontology_uri
def test_bad_accept_header_handling
# This accept header contains '*; q=.2', which isn't valid according to the spec, should be '*/*; q=.2'
bad_accept_header = "text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2"
- get "/ontologies", {}, {"HTTP_ACCEPT" => bad_accept_header}
+ get "/ontologies", {}, { "HTTP_ACCEPT" => bad_accept_header }
assert last_response.status == 400
assert last_response.body.include?("Accept header `#{bad_accept_header}` is invalid")
end
def test_http_method_override
- post "/ontologies", {}, {"HTTP_X_HTTP_METHOD_OVERRIDE" => "GET"}
+ post "/ontologies", {}, { "HTTP_X_HTTP_METHOD_OVERRIDE" => "GET" }
assert last_response.ok?
- acronyms = @@ontologies.map {|o| o.bring(:acronym).acronym}.sort
- resp_acronyms = MultiJson.load(last_response.body).map {|o| o["acronym"]}.sort
+ acronyms = @@ontologies.map { |o| o.bring(:acronym).acronym }.sort
+ resp_acronyms = MultiJson.load(last_response.body).map { |o| o["acronym"] }.sort
assert_equal acronyms, resp_acronyms
end
end
diff --git a/test/helpers/test_slices_helper.rb b/test/helpers/test_slices_helper.rb
index ae01aae75..7e8cfdac8 100644
--- a/test/helpers/test_slices_helper.rb
+++ b/test/helpers/test_slices_helper.rb
@@ -79,7 +79,7 @@ def test_search_slices
def test_mappings_slices
LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new))
- get "/mappings/statistics/ontologies/"
+ get "/mappings/statistics/ontologies"
expected_result_without_slice = ["PARSED-0",
"PARSED-1",
@@ -90,7 +90,7 @@ def test_mappings_slices
assert_equal expected_result_without_slice, MultiJson.load(last_response.body).keys.sort
- get "http://#{@@group_acronym}/mappings/statistics/ontologies/"
+ get "http://#{@@group_acronym}/mappings/statistics/ontologies"
expected_result_with_slice = ["PARSED-0",
"http://data.bioontology.org/metadata/ExternalMappings",
diff --git a/test/middleware/test_rack_attack.rb b/test/middleware/test_rack_attack.rb
index 92b4d6369..937c1bf06 100644
--- a/test/middleware/test_rack_attack.rb
+++ b/test/middleware/test_rack_attack.rb
@@ -40,7 +40,7 @@ def before_suite
# Fork the process to create two servers. This isolates the Rack::Attack configuration, which makes other tests fail if included.
@@pid1 = fork do
require_relative '../../config/rack_attack'
- Rack::Server.start(
+ Rackup::Server.start(
config: RACK_CONFIG,
Port: @@port1
)
@@ -50,7 +50,7 @@ def before_suite
@@port2 = unused_port
@@pid2 = fork do
require_relative '../../config/rack_attack'
- Rack::Server.start(
+ Rackup::Server.start(
config: RACK_CONFIG,
Port: @@port2
)
diff --git a/test/test_case.rb b/test/test_case.rb
index e9b8956d8..b1de654c8 100644
--- a/test/test_case.rb
+++ b/test/test_case.rb
@@ -28,21 +28,24 @@
require 'multi_json'
require 'oj'
require 'json-schema'
-
+require 'minitest/reporters'
+Minitest::Reporters.use! [Minitest::Reporters::SpecReporter.new(color: true), Minitest::Reporters::MeanTimeReporter.new]
MAX_TEST_REDIS_SIZE = 10_000
# Check to make sure you want to run if not pointed at localhost
safe_hosts = Regexp.new(/localhost|-ut|ncbo-dev*|ncbo-unittest*/)
+
def safe_redis_hosts?(sh)
return [LinkedData.settings.http_redis_host,
- Annotator.settings.annotator_redis_host,
- LinkedData.settings.goo_redis_host].select { |x|
+ Annotator.settings.annotator_redis_host,
+ LinkedData.settings.goo_redis_host].select { |x|
x.match(sh)
}.length == 3
end
+
unless LinkedData.settings.goo_host.match(safe_hosts) &&
- safe_redis_hosts?(safe_hosts) &&
- LinkedData.settings.search_server_url.match(safe_hosts)
+ safe_redis_hosts?(safe_hosts) &&
+ LinkedData.settings.search_server_url.match(safe_hosts)
print "\n\n================================== WARNING ==================================\n"
print "** TESTS CAN BE DESTRUCTIVE -- YOU ARE POINTING TO A POTENTIAL PRODUCTION/STAGE SERVER **\n"
print "Servers:\n"
@@ -77,8 +80,7 @@ def count_pattern(pattern)
def backend_4s_delete
if count_pattern("?s ?p ?o") < 400000
puts 'clear backend & index'
- raise StandardError, 'Too many triples in KB, does not seem right to run tests' unless
- count_pattern('?s ?p ?o') < 400000
+ raise StandardError, 'Too many triples in KB, does not seem right to run tests' unless count_pattern('?s ?p ?o') < 400000
graphs = Goo.sparql_query_client.query("SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o . } }")
graphs.each_solution do |sol|
@@ -114,8 +116,6 @@ def after_all
super
end
-
-
def _run_suite(suite, type)
begin
backend_4s_delete
@@ -160,12 +160,11 @@ def app
# @option options [TrueClass, FalseClass] :process_submission Parse the test ontology file
def create_ontologies_and_submissions(options = {})
if options[:process_submission] && options[:process_options].nil?
- options[:process_options] = { process_rdf: true, extract_metadata: false, generate_missing_labels: false }
+ options[:process_options] = { process_rdf: true, extract_metadata: false, generate_missing_labels: false }
end
LinkedData::SampleData::Ontology.create_ontologies_and_submissions(options)
end
-
def agent_data(type: 'organization')
schema_agencies = LinkedData::Models::AgentIdentifier::IDENTIFIER_SCHEMES.keys
users = LinkedData::Models::User.all
@@ -206,13 +205,13 @@ def delete_goo_models(gooModelArray)
# @param [String] jsonData a json string that will be parsed by MultiJson.load
# @param [String] jsonSchemaString a json schema string that will be parsed by MultiJson.load
# @param [boolean] list set it true for jsonObj array of items to validate against jsonSchemaString
- def validate_json(jsonData, jsonSchemaString, list=false)
+ def validate_json(jsonData, jsonSchemaString, list = false)
schemaVer = :draft3
jsonObj = MultiJson.load(jsonData)
jsonSchema = MultiJson.load(jsonSchemaString)
assert(
- JSON::Validator.validate(jsonSchema, jsonObj, :list => list, :version => schemaVer),
- JSON::Validator.fully_validate(jsonSchema, jsonObj, :list => list, :version => schemaVer, :validate_schema => true).to_s
+ JSON::Validator.validate(jsonSchema, jsonObj, list: list, version: schemaVer),
+ JSON::Validator.fully_validate(jsonSchema, jsonObj, list: list, version: schemaVer, validate_schema: true).to_s
)
end
@@ -236,11 +235,10 @@ def self.enable_security
LinkedData.settings.enable_security = true
end
- def self.reset_security(old_security = @@old_security_setting)
+ def self.reset_security(old_security = @@old_security_setting)
LinkedData.settings.enable_security = old_security
end
-
def self.make_admin(user)
user.bring_remaining
user.role = [LinkedData::Models::Users::Role.find(LinkedData::Models::Users::Role::ADMIN).first]
@@ -261,6 +259,7 @@ def unused_port
end
private
+
def port_in_use?(port)
server = TCPServer.new(port)
server.close
diff --git a/views/documentation/metadata.haml b/views/documentation/metadata.haml
index c14072d84..841ac492b 100644
--- a/views/documentation/metadata.haml
+++ b/views/documentation/metadata.haml
@@ -1,20 +1,19 @@
--routes = routes_by_class[@metadata[:cls]]
--return "" if routes.nil? || routes.empty?
+- return "" unless routes_by_class.value?(@metadata[:cls])
%h3.text-success{id: @metadata[:cls].name.split("::").last}= @metadata[:uri]
%div.resource
%div.collection_link
=resource_collection_link(@metadata[:cls])
- -if routes
- %h4 HTTP Methods for Resource
- %table.table.table-striped.table-bordered
- %tr
- %th HTTP Verb
- %th Path
- -routes.each do |route|
- %tr
- %td= route[0]
- %td= route[1]
+ -# -if routes
+ -# %h4 HTTP Methods for Resource
+ -# %table.table.table-striped.table-bordered
+ -# %tr
+ -# %th HTTP Verb
+ -# %th Path
+ -# -routes.each do |route|
+ -# %tr
+ -# %td= route[0]
+ -# %td= route[1]
%h4 Resource Description
%table.table.table-striped.table-bordered