diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 040cd9c..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-language: python
-matrix:
- include:
- - python: "2.7"
- env: TOX_ENV=pep8
- - python: "2.7"
- env: TOX_ENV=py27
- - python: "3.3"
- env: TOX_ENV=py33
- - python: "3.4"
- env: TOX_ENV=py34
- - python: "3.5"
- env: TOX_ENV=py35
-before_install:
- - sudo apt-get -qq update
- - sudo apt-get install -y build-essential libcap-dev
-install:
- - pip install tox
- - if [[ $TOX_ENV == py27 ]] || [[ $TOX_ENV == py35 ]]; then
- pip install coveralls;
- fi
-script:
- - tox -e $TOX_ENV
-after_success:
- - if [[ $TOX_ENV == py27 ]] || [[ $TOX_ENV == py35 ]]; then
- coveralls;
- fi
diff --git a/Dockerfile b/Dockerfile
index 6005872..f3dbe2c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,91 +1,19 @@
-FROM python:3.12.3 as build
-MAINTAINER RAMS Project "code@magfest.org"
-LABEL version.sideboard ="1.0"
+FROM python:3.12.3-slim as build
WORKDIR /app
-
-# This is actually the least bad way to compose two Dockerfile tech stacks right now.
-# The following is copied and pasted from the Node Dockerfile at
-# https://github.com/nodejs/docker-node/blob/main/12/buster/Dockerfile
-# Update this comment and change the entire copypasta section to upgrade Node version
-
-#########################################
-# START NODEJS DOCKERFILE COPYPASTA #
-# https://github.com/nodejs/docker-node #
-#########################################
-RUN groupadd --gid 1000 node \
- && useradd --uid 1000 --gid node --shell /bin/bash --create-home node
-
-ENV NODE_VERSION 12.22.3
-
-RUN ARCH= && dpkgArch="$(dpkg --print-architecture)" \
- && case "${dpkgArch##*-}" in \
- amd64) ARCH='x64';; \
- ppc64el) ARCH='ppc64le';; \
- s390x) ARCH='s390x';; \
- arm64) ARCH='arm64';; \
- armhf) ARCH='armv7l';; \
- i386) ARCH='x86';; \
- *) echo "unsupported architecture"; exit 1 ;; \
- esac \
- # gpg keys listed at https://github.com/nodejs/node#release-keys
- && set -ex \
- && for key in \
- 4ED778F539E3634C779C87C6D7062848A1AB005C \
- 94AE36675C464D64BAFA68DD7434390BDBE9B9C5 \
- 74F12602B6F1C4E913FAA37AD3A89613643B6201 \
- 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1 \
- 8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600 \
- C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8 \
- C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C \
- DD8F2338BAE7501E3DD5AC78C273792F7D83545D \
- A48C2BEE680E841632CD4E44F07496B3EB3C1762 \
- 108F52B48DB57BB0CC439B2997B01419BD92F80A \
- B9E2F5981AA6E0CD28160D9FF13993A75599653C \
- ; do \
- gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" || \
- gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
- done \
- && curl -fsSLO --compressed "https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-$ARCH.tar.xz" \
- && curl -fsSLO --compressed "https://nodejs.org/dist/v$NODE_VERSION/SHASUMS256.txt.asc" \
- && gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
- && grep " node-v$NODE_VERSION-linux-$ARCH.tar.xz\$" SHASUMS256.txt | sha256sum -c - \
- && tar -xJf "node-v$NODE_VERSION-linux-$ARCH.tar.xz" -C /usr/local --strip-components=1 --no-same-owner \
- && rm "node-v$NODE_VERSION-linux-$ARCH.tar.xz" SHASUMS256.txt.asc SHASUMS256.txt \
- # smoke tests
- && node --version \
- && npm --version
-
-RUN curl -SLO "https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-x64.tar.xz" \
- && curl -SLO "https://nodejs.org/dist/v$NODE_VERSION/SHASUMS256.txt.asc" \
- && gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
- && grep " node-v$NODE_VERSION-linux-x64.tar.xz\$" SHASUMS256.txt | sha256sum -c - \
- && tar -xJf "node-v$NODE_VERSION-linux-x64.tar.xz" -C /usr/local --strip-components=1 \
- && rm "node-v$NODE_VERSION-linux-x64.tar.xz" SHASUMS256.txt.asc SHASUMS256.txt \
- && ln -s /usr/local/bin/node /usr/local/bin/nodejs
-###################################
-# END NODEJS DOCKERFILE COPYPASTA #
-###################################
-
-# required for python-prctl
-RUN apt-get update && apt-get install -y libcap-dev && rm -rf /var/lib/apt/lists/*
-
-RUN pip3 install virtualenv \
- && virtualenv --always-copy /app/env \
- && /app/env/bin/pip3 install paver
+ENV PYTHONPATH=/app:/app/plugins/*/
ADD requirements.txt requirements.txt
-ADD test_requirements.txt test_requirements.txt
-ADD setup.py setup.py
-ADD sideboard/_version.py sideboard/_version.py
-ADD pavement.py pavement.py
-
-RUN /app/env/bin/paver install_deps
-ADD . /app/
+RUN --mount=type=cache,target=/root/.cache \
+ pip install -r requirements.txt
FROM build as test
-RUN /app/env/bin/pip install mock pytest
-CMD /app/env/bin/python3 -m pytest
+ADD test_requirements.txt test_requirements.txt
+RUN --mount=type=cache,target=/root/.cache \
+ pip install -r test_requirements.txt
+CMD python -m pytest
+ADD . /app/
FROM build as release
-CMD /app/env/bin/python3 /app/sideboard/run_server.py
-EXPOSE 8282
\ No newline at end of file
+CMD python /app/sideboard/run_server.py
+EXPOSE 80
+ADD . /app/
\ No newline at end of file
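
Note on the new `ENV PYTHONPATH=/app:/app/plugins/*/` line above: Python does not expand shell-style wildcards in PYTHONPATH, so the `*` entry presumably relies on the application or its entrypoint expanding it at startup. A minimal sketch of what such expansion could look like (the helper name and layout are assumptions, not code from this repo):

```python
# Hypothetical startup helper: expand the glob-style plugin path onto
# sys.path, since Python treats "/app/plugins/*/" in PYTHONPATH as a
# literal directory name and never expands wildcards itself.
import glob
import sys

def add_plugin_dirs(pattern="/app/plugins/*/"):
    for plugin_dir in sorted(glob.glob(pattern)):
        if plugin_dir not in sys.path:
            sys.path.append(plugin_dir)

add_plugin_dirs()
```
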
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index 9861bfd..0000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-include requirements.txt
-recursive-include sideboard *
\ No newline at end of file
diff --git a/data/__init__.py b/data/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/data/paver/__init__.py b/data/paver/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/data/paver/skeleton/__init__.py b/data/paver/skeleton/__init__.py
deleted file mode 100644
index a91f5a9..0000000
--- a/data/paver/skeleton/__init__.py
+++ /dev/null
@@ -1,89 +0,0 @@
-import os
-import re
-from datetime import datetime
-
-import six
-import jinja2
-import sphinx.quickstart
-
-env = jinja2.Environment()
-
-__here__ = os.path.dirname(os.path.abspath(__file__))
-
-
-def render(to_render, settings):
- if isinstance(to_render, six.string_types):
- return env.from_string(to_render).render(settings)
- else:
- with open(os.path.join(__here__, *to_render)) as template_file:
- return env.from_string(template_file.read()).render(settings)
-
-
-def create_plugin(plugins_dir, plugin, **settings):
- assert ' ' not in plugin, "plugins probably shouldn't have spaces; but either way we aren't specifically handling spaces"
- module = plugin.replace('-', '_')
- plugin = plugin.replace('_', '-')
- settings.update({'plugin': plugin, 'module': module, 'generated_date': datetime.utcnow()})
-
- package_dir = os.path.join(plugins_dir, plugin)
- assert not os.path.exists(package_dir), '{} plugin already exists at {}'.format(plugin, package_dir)
- os.makedirs(os.path.join(package_dir, module, 'tests'))
- for fname, template in TEMPLATES.items():
- fname = render(fname, settings)
- if fname:
- fpath = os.path.join(package_dir, fname)
- try:
- os.makedirs(os.path.dirname(fpath))
- except (OSError, IOError) as e:
- pass
-
- with open(fpath, 'w') as f:
- # our templates often have a lot of {% if %} clauses which lead to a lot of blank lines,
- # so we collapse those such that we never have more than 1 blank line in a row
- f.write(re.sub(r'\n{3,}', '\n\n', render(template, settings).strip() + '\n'))
-
- if settings.get('sphinx', True):
- sphinx_settings = dict(
- path=os.path.join(package_dir, 'docs'),
- sep=False,
- dot='_',
- project=plugin,
- author='{} Team'.format(plugin),
- release='0.1.0',
- version='0.1.0',
- suffix='.rst',
- master='index',
- epub=False,
- ext_autodoc=False,
- ext_doctest=False,
- ext_intersphinx=False,
- ext_todo=False,
- ext_coverage=False,
- ext_pngmath=False,
- ext_mathjax=False,
- ext_ifconfig=False,
- ext_viewcode=False,
- makefile=True,
- batchfile=False
- )
- sphinx.quickstart.generate(sphinx_settings)
-
-TEMPLATES = {
- '{{ module }}/_version.py': ('templates', '_version.py.template'),
- 'requirements.txt': ('templates', 'requirements.txt.template'),
- 'setup.cfg': ('templates', 'setup.cfg.template'),
- 'setup.py': ('templates', 'setup.py.template'),
- 'conftest.py': ('templates', 'conftest.py.template'),
- '{{ module }}/__init__.py': ('templates', '__init__.py.template'),
- '{{ module }}/tests/__init__.py': ('templates', 'tests-__init__.py.template'),
- '{% if sqlalchemy %}{{ module }}/sa.py{% endif %}': ('templates', 'sa.py.template'),
- '{% if service %}{{ module }}/service.py{% endif %}': ('templates', 'service.py.template'),
- '{{ module }}/configspec.ini': ('templates', 'configspec.ini.template'),
- 'development-defaults.ini': ('templates', 'development-defaults.ini'),
- '{% if webapp %}{{ module }}/templates/index.html{% endif %}': ('templates', 'index.html.template'),
- 'MANIFEST.in': ('templates', 'MANIFEST.in.template'),
- '.gitignore': ('templates', '.gitignore.template'),
- 'fabfile.py': ('templates', 'fabfile.py.template'),
- 'package-support/{{ plugin }}.cfg': ('templates', 'plugin_name.cfg.template'),
- '{% if cli %}{{ module }}/cli.py{% endif %}': ('templates', 'cli.py.template'),
-}
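
For context on the deleted skeleton generator: the keys of TEMPLATES are themselves rendered through Jinja2, so conditional filenames collapse to an empty string (and `create_plugin` skips writing that file) when their flag is off. A small self-contained illustration of that trick:

```python
# The deleted TEMPLATES mapping used Jinja2 in the *filenames* themselves:
# a false flag renders the key to "" and the file is skipped entirely.
import jinja2

env = jinja2.Environment()
name_tpl = env.from_string('{% if cli %}{{ module }}/cli.py{% endif %}')
print(name_tpl.render(cli=True, module='my_plugin'))         # my_plugin/cli.py
print(repr(name_tpl.render(cli=False, module='my_plugin')))  # ''
```
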
diff --git a/data/paver/skeleton/templates/.gitignore.template b/data/paver/skeleton/templates/.gitignore.template
deleted file mode 100644
index edc099b..0000000
--- a/data/paver/skeleton/templates/.gitignore.template
+++ /dev/null
@@ -1,5 +0,0 @@
-.idea
-env
-*.pyc
-*.egg-info
-dist
\ No newline at end of file
diff --git a/data/paver/skeleton/templates/MANIFEST.in.template b/data/paver/skeleton/templates/MANIFEST.in.template
deleted file mode 100644
index 48c6b99..0000000
--- a/data/paver/skeleton/templates/MANIFEST.in.template
+++ /dev/null
@@ -1,2 +0,0 @@
-include requirements.txt
-recursive-include {{ module }} *
\ No newline at end of file
diff --git a/data/paver/skeleton/templates/__init__.py.template b/data/paver/skeleton/templates/__init__.py.template
deleted file mode 100644
index ec09510..0000000
--- a/data/paver/skeleton/templates/__init__.py.template
+++ /dev/null
@@ -1,61 +0,0 @@
-from __future__ import unicode_literals{% if django %}
-import os
-import sys
-{% endif %}
-
-{% if webapp or django %}
-import cherrypy
-{% if django %}
-import django
-from django.core.handlers.wsgi import WSGIHandler
-{% endif %}
-{% endif %}
-
-from sideboard.lib import log, parse_config{% if webapp %}, render_with_templates{% endif %}{% if service or sqlalchemy %}, services{% endif %}
-from {{ module }}._version import __version__
-
-config = parse_config(__file__)
-
-{% if service %}
-from {{ module }} import service
-services.register(service, '{{ module }}')
-
-{% endif %}
-
-{% if sqlalchemy %}
-from {{ module }} import sa
-services.register(sa.Session.crud, '{{ module }}_crud')
-{% endif %}
-
-{% if webapp %}
-@render_with_templates(config['template_dir'])
-class Root(object):
- def index(self):
- log.debug('this data will be used to render a template')
- return {
- 'plugin': '{{ plugin }}',
- 'header': True
- }
-
-cherrypy.tree.mount(Root(), '/{{ module }}')
-{% endif %}
-
-{% if django %}
-# add our Django site to our Python path so we can import it
-sys.path.append(os.path.join(config['root'], '{{ django }}'))
-
-# since we're not using mod_wsgi we'll use the env var approach to setting up Django
-os.environ['DJANGO_SETTINGS_MODULE'] = '{{ django }}.settings'
-django.setup()
-cherrypy.tree.graft(WSGIHandler(), '/{{ django }}')
-
-# expose the static files used by the Django admin interface
-# NOTE: if you have Apache serving these files directly then you can remove this part
-class Static(object):
- admin = cherrypy.tools.staticdir.handler(
- section="/admin",
- dir=os.path.dirname(django.__file__) + '/contrib/admin/static/admin'
- )
-
-cherrypy.tree.mount(Static(), '/{{ django }}/static')
-{% endif %}
diff --git a/data/paver/skeleton/templates/_version.py.template b/data/paver/skeleton/templates/_version.py.template
deleted file mode 100644
index 748c016..0000000
--- a/data/paver/skeleton/templates/_version.py.template
+++ /dev/null
@@ -1,3 +0,0 @@
-from __future__ import unicode_literals
-
-__version__ = '0.1.0'
diff --git a/data/paver/skeleton/templates/cli.py.template b/data/paver/skeleton/templates/cli.py.template
deleted file mode 100644
index 587a3cc..0000000
--- a/data/paver/skeleton/templates/cli.py.template
+++ /dev/null
@@ -1,13 +0,0 @@
-from __future__ import unicode_literals
-
-from sideboard.lib import entry_point, log
-from {{ module }} import __version__
-
-@entry_point
-def {{ module }}(*args):
- """
- Basic CLI entry point; logs the version of the plugin and then exits. Override this after
- creating the plugin.
- """
-
- log.info('the version of {} is {}', '{{ module }}', __version__)
diff --git a/data/paver/skeleton/templates/configspec.ini.template b/data/paver/skeleton/templates/configspec.ini.template
deleted file mode 100644
index 72a90cf..0000000
--- a/data/paver/skeleton/templates/configspec.ini.template
+++ /dev/null
@@ -1,2 +0,0 @@
-{% if sqlalchemy %}sqlalchemy.url = string{% endif %}
-{% if webapp %}template_dir = string(default="%(module_root)s/templates"){% endif %}
diff --git a/data/paver/skeleton/templates/conftest.py.template b/data/paver/skeleton/templates/conftest.py.template
deleted file mode 100644
index 98f0d86..0000000
--- a/data/paver/skeleton/templates/conftest.py.template
+++ /dev/null
@@ -1,21 +0,0 @@
-import sideboard
-{% if sqlalchemy %}
-import shutil
-import pytest
-from {{ plugin }} import sa
-from sideboard.tests import patch_session
-
-@pytest.fixture(scope='session', autouse=True)
-def init_db(request):
- patch_session(sa.Session, request) # swap out your database for SQLite
- with sa.Session() as session:
- # You can initialize test data here, which will be inserted once when
- # your tests start (which might be really slow) but then before each
- # test case the database is restored again, which is really fast.
- pass
-
-@pytest.fixture(autouse=True)
-def db(request, init_db):
- shutil.copy('/tmp/{{ plugin }}.db', '/tmp/{{ plugin }}.db.backup')
- request.addfinalizer(lambda: shutil.move('/tmp/{{ plugin }}.db.backup', '/tmp/{{ plugin }}.db'))
-{% endif %}
diff --git a/data/paver/skeleton/templates/development-defaults.ini b/data/paver/skeleton/templates/development-defaults.ini
deleted file mode 100644
index dcd5af9..0000000
--- a/data/paver/skeleton/templates/development-defaults.ini
+++ /dev/null
@@ -1 +0,0 @@
-{% if sqlalchemy %}sqlalchemy.url = "sqlite:////tmp/{{ plugin }}.db"{% endif %}
\ No newline at end of file
diff --git a/data/paver/skeleton/templates/fabfile.py.template b/data/paver/skeleton/templates/fabfile.py.template
deleted file mode 100644
index aaa17f2..0000000
--- a/data/paver/skeleton/templates/fabfile.py.template
+++ /dev/null
@@ -1,44 +0,0 @@
-from __future__ import unicode_literals
-import time
-from os.path import abspath, basename, dirname, join
-
-from sh import pip, fpm, chmod
-
-__here__ = dirname(abspath(__file__))
-package_name = basename(__here__)
-plugin_name = package_name.replace('-', '_')
-
-POSTINSTALL = '/tmp/postinstall.sh'
-
-def _make_postinstall_script():
- with open(POSTINSTALL, 'w') as f:
- f.write('#!/bin/bash\n')
- f.write('set -e\n')
- f.write('source /opt/sideboard/bin/activate && /opt/sideboard/bin/pip install --use-wheel --find-links /opt/sideboard/plugins/{package_name}/wheelhouse/ -r /opt/sideboard/plugins/{package_name}/requirements.txt\n'
- .format(package_name=package_name))
- f.write('chown -R sideboard.sideboard /opt/sideboard\n')
- chmod('755', POSTINSTALL)
-
-def package(package_type, iteration='testing'):
- import sideboard
- plugin = __import__(plugin_name)
- pip('wheel', r='requirements.txt')
- _make_postinstall_script()
- if iteration == 'testing':
- iteration = '0.{}'.format(int(time.time()))
- fpm('-t', package_type,
- '-s', 'dir',
- '--{}-user'.format(package_type), 'sideboard',
- '--{}-group'.format(package_type), 'sideboard',
- '--name', 'sideboard-{}'.format(package_name),
- '--version', plugin.__version__,
- '--license', 'COMPANY-PROPRIETARY',
- '--iteration', iteration,
- '--depends', 'sideboard >= {}'.format(sideboard.__version__),
- '--after-install', POSTINSTALL,
- '--config-files', '/etc/sideboard/plugins.d/{package_name}.cfg'.format(package_name=package_name),
- './package-support/{package_name}.cfg=/etc/sideboard/plugins.d/{package_name}.cfg'.format(package_name=package_name),
- './requirements.txt=/opt/sideboard/plugins/{}/requirements.txt'.format(package_name),
- './wheelhouse=/opt/sideboard/plugins/{}'.format(package_name),{% if django %}
- './{{ django }}=/opt/sideboard/plugins/{}/{{ django }}'.format(package_name),{% endif %}
- './{}=/opt/sideboard/plugins/{}'.format(plugin_name, package_name))
diff --git a/data/paver/skeleton/templates/index.html.template b/data/paver/skeleton/templates/index.html.template
deleted file mode 100644
index 0bbc0f1..0000000
--- a/data/paver/skeleton/templates/index.html.template
+++ /dev/null
@@ -1,9 +0,0 @@
-{% raw %}
-<html>
-<body>
- <h1>{{ plugin }} skeleton page</h1>
- {% if header %}
- <h3>Hello {{ plugin }} developer!</h3>
- {% endif %}
-</body>
-{% endraw%}
diff --git a/data/paver/skeleton/templates/plugin_name.cfg.template b/data/paver/skeleton/templates/plugin_name.cfg.template
deleted file mode 100644
index e500cfb..0000000
--- a/data/paver/skeleton/templates/plugin_name.cfg.template
+++ /dev/null
@@ -1 +0,0 @@
-{% if sqlalchemy %}sqlalchemy.url = "sqlite:////opt/sideboard/db/{{ plugin }}.db"{% endif %}
diff --git a/data/paver/skeleton/templates/requirements.txt.template b/data/paver/skeleton/templates/requirements.txt.template
deleted file mode 100644
index e69de29..0000000
diff --git a/data/paver/skeleton/templates/sa.py.template b/data/paver/skeleton/templates/sa.py.template
deleted file mode 100644
index e204dae..0000000
--- a/data/paver/skeleton/templates/sa.py.template
+++ /dev/null
@@ -1,18 +0,0 @@
-from __future__ import unicode_literals
-import uuid
-
-import sqlalchemy
-from sqlalchemy.schema import Column
-
-from {{ module }} import config
-from sideboard.lib.sa import declarative_base, SessionManager, UUID
-
-
-@declarative_base
-class Base(object):
- id = Column(UUID(), primary_key=True, default=uuid.uuid4)
-
-# put your table declarations here and have them extend Base
-
-class Session(SessionManager):
- engine = sqlalchemy.create_engine(config['sqlalchemy.url'])
diff --git a/data/paver/skeleton/templates/service.py.template b/data/paver/skeleton/templates/service.py.template
deleted file mode 100644
index d18b3bc..0000000
--- a/data/paver/skeleton/templates/service.py.template
+++ /dev/null
@@ -1,9 +0,0 @@
-from __future__ import unicode_literals
-
-# underscore-prefixed functions are not exposed
-def _greeting_impl(s):
- return 'Hello {}!'.format(s)
-
-# all other methods are public
-def greeting():
- return _greeting_impl('World')
\ No newline at end of file
diff --git a/data/paver/skeleton/templates/setup.cfg.template b/data/paver/skeleton/templates/setup.cfg.template
deleted file mode 100644
index 9d29187..0000000
--- a/data/paver/skeleton/templates/setup.cfg.template
+++ /dev/null
@@ -1,2 +0,0 @@
-[easy_install]
-zip_ok = False
diff --git a/data/paver/skeleton/templates/setup.py.template b/data/paver/skeleton/templates/setup.py.template
deleted file mode 100644
index ed4571c..0000000
--- a/data/paver/skeleton/templates/setup.py.template
+++ /dev/null
@@ -1,28 +0,0 @@
-from __future__ import unicode_literals
-
-import os.path
-from setuptools import setup, find_packages
-
-pkg_name = '{{ plugin }}'
-__here__ = os.path.abspath(os.path.dirname(__file__))
-# Sideboard's implementation of http://stackoverflow.com/a/16084844/171094
-# after this, __version__ should exist in the namespace
-exec(open(os.path.join(__here__, pkg_name.replace('-', '_'), '_version.py')).read())
-req_data = open(os.path.join(__here__, 'requirements.txt')).read()
-requires = [r.strip() for r in req_data.split() if r.strip() != '']
-requires = list(reversed(requires))
-
-if __name__ == '__main__':
- setup(
- name=pkg_name,
- version=__version__,
- description='Sideboard ' + pkg_name + ' plugin',
- license='COMPANY-PROPRIETARY',
- scripts=[],
- setup_requires=['distribute'],
- install_requires=requires,
- packages=find_packages(),
- include_package_data=True,
- package_data={},
- zip_safe=False
- )
diff --git a/data/paver/skeleton/templates/tests-__init__.py.template b/data/paver/skeleton/templates/tests-__init__.py.template
deleted file mode 100644
index 5a75852..0000000
--- a/data/paver/skeleton/templates/tests-__init__.py.template
+++ /dev/null
@@ -1,11 +0,0 @@
-from __future__ import unicode_literals
-import pytest
-
-
-# you can write py.test-style test cases, with fixtures
-@pytest.fixture
-def boolean_true():
- return True
-
-def test_something_simple_the_pytest_way(boolean_true):
- assert boolean_true != False
diff --git a/data/sessions/.gitkeep b/data/sessions/.gitkeep
deleted file mode 100644
index e69de29..0000000
diff --git a/db/README.txt b/db/README.txt
deleted file mode 100644
index 2b09269..0000000
--- a/db/README.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Directory for plugins' sqlite files. Sideboard exposes this directory as a config option called 'sqlite_dir'.
-In production, it's located at '/opt/sideboard/db/'.
diff --git a/development-defaults.ini b/development-defaults.ini
deleted file mode 100644
index eb39f37..0000000
--- a/development-defaults.ini
+++ /dev/null
@@ -1,10 +0,0 @@
-debug = False
-ws.auth_required = False
-
-[cherrypy]
-server.socket_host = "0.0.0.0"
-engine.autoreload.on = True
-tools.cpstats.on = False
-
-[loggers]
-root = "DEBUG"
diff --git a/fabfile.py b/fabfile.py
deleted file mode 100644
index 04082a8..0000000
--- a/fabfile.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from __future__ import unicode_literals
-import time
-import os.path
-
-import yaml
-import ship_it
-
-
-__here__ = os.path.abspath(os.path.dirname(__file__))
-MANIFEST_YAML = os.path.join(__here__, 'manifest.yaml')
-MANIFEST_TEMPLATE = MANIFEST_YAML + '.template'
-
-
-def _populate_manifest_and_invoke_fpm(iteration):
- import sideboard
- with open(MANIFEST_TEMPLATE) as f:
- manifest = yaml.load(f)
- manifest[b'version'] = sideboard.__version__
- manifest[b'iteration'] = iteration
-
- with open(MANIFEST_YAML, 'w') as f:
- yaml.dump(manifest, f)
-
- ship_it.fpm(MANIFEST_YAML)
-
-
-def fpm_stable(iteration):
- _populate_manifest_and_invoke_fpm(iteration)
-
-
-def fpm_testing():
- _populate_manifest_and_invoke_fpm(b'0.{}'.format(int(time.time())))
diff --git a/manifest.yaml.template b/manifest.yaml.template
deleted file mode 100644
index 1a735a2..0000000
--- a/manifest.yaml.template
+++ /dev/null
@@ -1,10 +0,0 @@
-description: Sideboard Framework
-name: sideboard
-before_install: package-support/preinstall.sh
-after_install: package-support/postinstall.sh
-config_files:
- /etc/sideboard/sideboard-server.cfg: package-support/sideboard-server.cfg
- /etc/init.d/sideboard: package-support/init.d/sideboard
- /etc/sysconfig/sideboard: package-support/sysconfig/sideboard
-depends:
- - python27
diff --git a/package-support/init.d/sideboard b/package-support/init.d/sideboard
deleted file mode 100755
index 03383bd..0000000
--- a/package-support/init.d/sideboard
+++ /dev/null
@@ -1,181 +0,0 @@
-#!/bin/bash
-
-# chkconfig: - 88 14
-# description: Sideboard exit node manager
-# processname: sideboard
-#
-### BEGIN INIT INFO
-# Provides: sideboard
-# Required-Start: $local_fs $remote_fs $network $named
-# Required-Stop: $local_fs $remote_fs $network
-# Short-Description: start and stop sideboard
-# Description: Sideboard exit node manager
-### END INIT INFO
-
-RETVAL=0
-prog="sideboard"
-
-DESC=sideboard
-VENV=/opt/sideboard
-PYTHON=$VENV/bin/python
-SEP=$VENV/bin/sep
-CHERRYD=$VENV/bin/cherryd
-USER=sideboard
-PID_FILE=/var/run/sideboard/sideboard.pid
-COUNTDOWN=10
-
-SIDEBOARDMODE=server
-if [ -f /etc/sysconfig/sideboard ]; then
- # import $SIDEBOARDMODE from defaults file
-. /etc/sysconfig/sideboard
-fi
-
-OPTIONS="mainloop_daemon --pidfile=$PID_FILE"
-if [ "$SIDEBOARDMODE" == "server" ]; then
- OPTIONS="-d --pidfile=$PID_FILE --import=sideboard.server"
-fi
-
-procgrep() {
- grep "python.*${DESC}" | grep -v grep
-}
-
-filepid() {
- cat $PID_FILE 2>/dev/null
-}
-
-procpid() {
- ps aux | procgrep | awk '{print $2}'
-}
-
-allpids () {
- FP=$(filepid)
- PP=$(procpid)
- if [ "$PP" == "$FP" ]; then
- echo $PP
- else
- FP_IS_CORRECT_PROC=$(cat /proc/$FP/cmdline 2>/dev/null | procgrep)
- if [ -n "$FP_IS_CORRECT_PROC" ]; then
- echo $PP $FP
- else
- echo $PP
- fi
- fi
-}
-
-isrunning() {
- if [ -n "$(filepid)" ]; then
- RETVAL=0
- elif [ -n "$(procpid)" ]; then
- RETVAL=0
- else
- RETVAL=1
- fi
- return $RETVAL
-}
-
-start() {
- if isrunning; then
- echo "Starting $prog: $prog is already running [FAIL]"
- RETVAL=1
- else
- echo -n $"Starting $prog: "
- if [ "$SIDEBOARDMODE" == "server" ]; then
- sudo -u $USER $CHERRYD $OPTIONS
- else
- sudo -u $USER $SEP $OPTIONS
- fi
- RETVAL=$?
- if [ $RETVAL -eq 0 ]; then
- echo '[OK]'
- else
- echo '[FAIL]'
- fi
- fi
- return $RETVAL
-}
-
-exitcountdown() {
- while [ "$COUNTDOWN" -gt 0 ]; do
- if [ -z "$(procpid)" ]; then
- break
- fi
- echo -n .
- sleep 1
- COUNTDOWN=`expr $COUNTDOWN - 1`
- done
-}
-
-stop() {
- if isrunning; then
- echo -n "Shutting down $prog"
- kill $(allpids) 2>/dev/null
- exitcountdown
- if [ -n "$(procpid)" ]; then
- echo -n " $prog failed to exit cleanly, terminating"
- kill -9 $(allpids) 2>/dev/null
- COUNTDOWN=3
- exitcountdown
- fi
- rm -f $PID_FILE
- if isrunning; then
- echo ' [FAIL]'
- else
- echo ' [OK]'
- fi
- RETVAL=0
- else
- echo "$prog is not running"
- RETVAL=1
- fi
- return $RETVAL
-}
-
-restart() {
- stop
- start
-}
-
-condrestart() {
- if isrunning; then
- restart
- fi
-}
-
-status() {
- if isrunning; then
- FP=$(filepid)
- PP=$(procpid)
- if [ "$FP" == "$PP" ]; then
- RETVAL=0
- echo "$prog is running with pid $PP"
- else
- RETVAL=1
- echo "$prog has a pidfile with pid ${FP:-} but a running process with pid ${PP:-}"
- fi
- else
- RETVAL=3
- echo "$prog is not running"
- fi
- return $RETVAL
-}
-
-case "$1" in
- start)
- start
- ;;
- stop)
- stop
- ;;
- status)
- status
- ;;
- restart)
- restart
- ;;
- condrestart|try-restart)
- condrestart
- ;;
- *)
- echo $"Usage: $0 {start|stop|status|restart|condrestart}"
- RETVAL=1
-esac
diff --git a/package-support/postinstall.sh b/package-support/postinstall.sh
deleted file mode 100644
index dad1b91..0000000
--- a/package-support/postinstall.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-
-for dname in /var/run/sideboard /var/tmp/sideboard /var/tmp/sideboard/sessions /opt/sideboard /opt/sideboard/db /opt/sideboard/plugins; do
- mkdir -p $dname
- chmod 750 $dname
- chown sideboard.sideboard $dname
-done
-
-# unlike all of the other directories in the above loop, we want this directory (and also its contents) to be root.sideboard
-chown -R root.sideboard /etc/sideboard
-
-chown root.root /etc/init.d/sideboard
-chown root.root /etc/sysconfig/sideboard
-
-# TODO: instead of doing this in postinstall, we should eventually do --use-file-permissions
-chmod 700 /etc/init.d/sideboard
-chmod 600 /etc/sysconfig/sideboard
diff --git a/package-support/preinstall.sh b/package-support/preinstall.sh
deleted file mode 100644
index 36d0831..0000000
--- a/package-support/preinstall.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-
-if ! id -u sideboard &>/dev/null; then
- groupadd --force -r sideboard -g 600
- useradd -r --shell /sbin/nologin -uid 600 --gid sideboard sideboard
-fi
diff --git a/package-support/sideboard-server.cfg b/package-support/sideboard-server.cfg
deleted file mode 100644
index a1cf43f..0000000
--- a/package-support/sideboard-server.cfg
+++ /dev/null
@@ -1,22 +0,0 @@
-plugins_dir = "/opt/sideboard/plugins"
-
-[cherrypy]
-server.socket_host = "127.0.0.1"
-tools.sessions.storage_path = "/var/tmp/sideboard/sessions"
-
-[loggers]
-root = "INFO"
-
-[handlers]
-[[syslog]]
-class = "logging.handlers.SysLogHandler"
-address = "/dev/log"
-formatter = syslog
-
-[formatters]
-[[syslog]]
-format = "$$(levelname)-5.5s $$(threadName)s [$$(name)s] $$(message)s"
-
-[[default]]
-format = "$$(asctime)s,$$(msecs)03d $$(levelname)-5.5s $$(threadName)s [$$(name)s] $$(message)s"
-datefmt = "$$m-$$d $$H:$$M:$$S"
diff --git a/package-support/sysconfig/sideboard b/package-support/sysconfig/sideboard
deleted file mode 100644
index 6b81aa9..0000000
--- a/package-support/sysconfig/sideboard
+++ /dev/null
@@ -1,6 +0,0 @@
-# defaults for sideboard package
-# SIDEBOARDMODE=server
-SIDEBOARDMODE=daemon
-
-# DESC=sideboard
-DESC=sideboard-daemon
diff --git a/pavement.py b/pavement.py
deleted file mode 100644
index 404242b..0000000
--- a/pavement.py
+++ /dev/null
@@ -1,193 +0,0 @@
-from __future__ import unicode_literals, print_function
-import os
-import sys
-import glob
-from itertools import chain
-from os.path import abspath, dirname, exists, join
-
-import paver.virtual as virtual
-from paver.easy import * # paver docs pretty consistently want you to do this
-from paver.path import path # primarily here to support the rmtree method of a path object
-
-__here__ = path(abspath(dirname(__file__)))
-PLUGINS_DIR = __here__ / path('plugins')
-SIDEBOARD_DIR = __here__ / path('sideboard')
-
-
-def bootstrap_venv(intended_venv, bootstrap_name=None):
- # bootstrap wants options available in options.virtualenv which is a Bunch
- if exists(intended_venv):
- intended_venv.rmtree()
-
- venv = getattr(options, 'virtualenv', Bunch())
-
- with open(path(dirname(intended_venv)) / path('requirements.txt')) as reqs:
- # we expect this to be reversed in setup.py
- venv.packages_to_install = [line.strip() for line in reqs.readlines()[::-1] if line.strip()]
-
- venv.dest_dir = intended_venv
- if bootstrap_name:
- venv.script_name = '{}-bootstrap.py'.format(bootstrap_name)
-
- options.virtualenv = venv
- virtual.bootstrap()
- if sys.executable:
- # if we can figure out the python associated with this paver, execute the bootstrap script
- # and we can expect the virtual env will then exist
- sh('{python_path} "{script_name}"'.format(python_path=sys.executable,
- script_name=venv.script_name))
-
-
-def guess_plugin_module_name(containing_folder):
- """
- given a containing folder, guess what the plugin name should be
-
- :param containing_folder: the folder that possibly contains a plugin
- :type containing_folder: unicode
- :return:
- """
- # TODO: this only works as long as we insist that the plugin dir be the module name
- return os.path.split(containing_folder)[-1].replace('-', '_')
-
-
-def collect_plugin_dirs(module=False):
- """
- :param module: if True, return the module within a plugin directory, else (default) just return
- the plugin directory
- :return: the plugin folders in a form that can be iterated over
- :rtype: collections.Iterator
- """
- for potential_folder in glob.glob(PLUGINS_DIR / path('*')):
- if all(exists(join(potential_folder, req_file)) for req_file in ('setup.py', 'requirements.txt')):
- if module:
- yield join(potential_folder, guess_plugin_module_name(potential_folder))
- else:
- yield potential_folder
-
-
-@task
-def make_venv():
- """
- make a virtualenv for the sideboard project
- """
- bootstrap_venv(__here__ / path('env'), 'sideboard')
- develop_sideboard()
-
-
-def install_pip_requirements_in_dir(dir_of_requirements_txt):
- path_to_pip = __here__ / path('env/bin/pip')
-
- print("---- installing dependencies in {} ----"
- .format(dir_of_requirements_txt))
-
- sh('{pip} install -e {dir_of_requirements_txt}'
- .format(
- pip=path_to_pip,
- dir_of_requirements_txt=dir_of_requirements_txt))
-
-
-def run_setup_py(path):
- venv_python = str(__here__ / 'env' / 'bin' / 'python')
- sh('cd {path} && {python_path} {setup_path} develop'
- .format(
- path=path,
- python_path=venv_python if exists(venv_python) else sys.executable,
- setup_path=join(path, 'setup.py')))
-
-
-def develop_sideboard():
- run_setup_py(__here__)
-
-
-@task
-def pull_plugins():
- """
- invoke git pull from each plug-in directory; your global git config either needs to allow this to \
-happen auth-free, or you need to enter your credentials each time
- """
- for plugin_dir in collect_plugin_dirs():
- sh('cd "{}";git pull'.format(plugin_dir))
-
-
-@task
-def assert_all_projects_correctly_define_a_version():
- """
- error if there are plugins where we can't find a version defined
- """
- all_files_with_bad_versions = []
- # FIXME: should we try to execfile? that's what setup.py is going to do anyway
- cmd = (r'grep -xP "__version__\s*=\s*[\'\"][0-9]+\.[0-9]+(\.[0-9]+)?[\'\+]" {0}/_version.py')
- for test_dir in chain(['sideboard'], collect_plugin_dirs(module=True)):
- try:
- sh(cmd.format(test_dir))
- except BuildFailure:
- all_files_with_bad_versions.append(test_dir)
-
- if all_files_with_bad_versions:
- print('the following directories do not include a _version.py file with __version__ '
- 'specified:')
- print('\n'.join(all_files_with_bad_versions))
- print('Your plugin should be in agreement with this stack overflow post:')
- print('http://stackoverflow.com/questions/458550/'
- 'standard-way-to-embed-version-into-python-package/7071358#7071358')
-
- raise BuildFailure("there were projects that didn't include correctly specify __version__")
-
-
-@task
-@needs(['assert_all_files_import_unicode_literals',
- 'assert_all_projects_correctly_define_a_version'])
-def run_all_assertions():
- """
- run all the assertion tasks that sideboard supports
- """
-
-
-@task
-@cmdopts([
- ('name=', 'n', 'name of the plugin to create'),
- ('drop', 'd', 'delete existing plugin if present'),
- ('no_webapp', 'w', 'do not expose webpages in the plugin'),
- ('no_sqlalchemy', 'a', 'do not use SQLAlchemy in the plugin'),
- ('no_service', 'r', 'do not expose a service in the plugin'),
- ('no_sphinx', 's', 'do not generate Sphinx docs'),
- ('django=', 'j', 'create a Django project alongside the plugin with this name'),
- ('cli', 'c', 'make this a cli application; implies -w/-r')
-])
-def create_plugin(options):
- """create a plugin skeleton to start a new project"""
-
- plugin_name = options.create_plugin.name
-
- if getattr(options.create_plugin, 'drop', False) and (PLUGINS_DIR / path(plugin_name.replace('_', '-'))).exists():
- # rmtree fails if the dir doesn't exist apparently
- (PLUGINS_DIR / path(plugin_name.replace('_', '-'))).rmtree()
-
- kwargs = {}
- for opt in ['webapp', 'sqlalchemy', 'service', 'sphinx']:
- kwargs[opt] = not getattr(options.create_plugin, 'no_' + opt, False)
- kwargs['cli'] = getattr(options.create_plugin, 'cli', False)
- kwargs['django'] = getattr(options.create_plugin, 'django', None)
- if kwargs['cli']:
- kwargs['webapp'] = False
- kwargs['service'] = False
-
- from data.paver import skeleton
- skeleton.create_plugin(PLUGINS_DIR, plugin_name, **kwargs)
- print('{} successfully created'.format(options.create_plugin.name))
-
-
-@task
-def install_deps():
- install_pip_requirements_in_dir(__here__)
- for pdir in collect_plugin_dirs():
- install_pip_requirements_in_dir(pdir)
-
-
-@task
-def clean():
- """
- clean all pyc and __pycache__ files
- """
- sh("find . -name '*.pyc' | xargs rm -f")
- sh("find . -name __pycache__ | xargs rm -fr")
diff --git a/requirements.txt b/requirements.txt
index b701108..0fbb1c5 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,15 +1,5 @@
cherrypy==18.9.0
configobj==5.0.8
-Jinja2==3.1.3
-paver==1.3.4
-pip==24.0
psutil==5.9.8
-python-prctl==1.8.1; 'linux' in sys_platform
+pyyaml==6.0.1
redis==5.0.3
-requests==2.31.0
-rpctools==0.3.1
-sh==2.0.6
-six==1.16.0
-SQLAlchemy==1.4.52
-wheel==0.43.0
-ws4py==0.5.1
diff --git a/sideboard/__init__.py b/sideboard/__init__.py
index 825ec9a..ef3ee86 100644
--- a/sideboard/__init__.py
+++ b/sideboard/__init__.py
@@ -2,16 +2,12 @@
import os
import importlib
-import six
import cherrypy
from sideboard._version import __version__
import sideboard.server
from sideboard.internal.imports import _discover_plugins
-from sideboard.internal.logging import _configure_logging
-import sideboard.run_mainloop
if 'SIDEBOARD_MODULE_TESTING' not in os.environ:
_discover_plugins()
- _configure_logging()
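
As before this change, plugin discovery still runs at import time unless SIDEBOARD_MODULE_TESTING is set, so module-level tests can opt out. A minimal sketch of that usage, per the guard kept above:

```python
# Skip import-time plugin discovery during module tests; the variable must
# be set before sideboard is first imported.
import os

os.environ["SIDEBOARD_MODULE_TESTING"] = "1"
import sideboard  # _discover_plugins() is now skipped
```
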
diff --git a/sideboard/_version.py b/sideboard/_version.py
index 5b59135..946708c 100644
--- a/sideboard/_version.py
+++ b/sideboard/_version.py
@@ -1,3 +1,3 @@
from __future__ import unicode_literals
-__version__ = '1.0.14'
+__version__ = '2024.04'
diff --git a/sideboard/config.py b/sideboard/config.py
index d6a27b2..5d47ba4 100755
--- a/sideboard/config.py
+++ b/sideboard/config.py
@@ -1,9 +1,10 @@
from __future__ import unicode_literals
+import pathlib
+import json
+import yaml
import os
-import re
from os import unlink
-from collections.abc import Sized, Iterable, Mapping
from copy import deepcopy
from tempfile import NamedTemporaryFile
@@ -11,82 +12,6 @@
from validate import Validator
-def uniquify(xs):
- """
- Returns an order-preserved copy of `xs` with duplicate items removed.
-
- >>> uniquify(['a', 'z', 'a', 'b', 'a', 'y', 'a', 'c', 'a', 'x'])
- ['a', 'z', 'b', 'y', 'c', 'x']
-
- """
- is_listy = isinstance(xs, Sized) \
- and isinstance(xs, Iterable) \
- and not isinstance(xs, (Mapping, type(b''), type('')))
- assert is_listy, 'uniquify requires a listy argument'
-
- seen = set()
- return [x for x in xs if x not in seen and not seen.add(x)]
-
-
-class ConfigurationError(RuntimeError):
- pass
-
-
-def get_config_overrides():
- """
- Returns a list of config file paths used to override the default config.
-
- The SIDEBOARD_CONFIG_OVERRIDES environment variable may be set to a
- semicolon separated list of absolute and/or relative paths. If the
- SIDEBOARD_CONFIG_OVERRIDES is set, this function returns a list of its
- contents, split on semicolons::
-
- # SIDEBOARD_CONFIG_OVERRIDES='/absolute/config.ini;relative/config.ini'
- return ['/absolute/config.ini', 'relative/config.ini']
-
- If any of the paths listed in SIDEBOARD_CONFIG_OVERRIDES ends with the
- suffix "-defaults." then a similarly named path
- "." will also be included::
-
- # SIDEBOARD_CONFIG_OVERRIDES='test-defaults.ini'
- return ['test-defaults.ini', 'test.ini']
-
- If the SIDEBOARD_CONFIG_OVERRIDES environment variable is NOT set, this
- function returns a list with two relative paths::
-
- return ['development-defaults.ini', 'development.ini']
- """
- config_overrides = os.environ.get(
- 'SIDEBOARD_CONFIG_OVERRIDES',
- 'development-defaults.ini')
-
- defaults_re = re.compile(r'(.+)-defaults(\.\w+)$')
- config_paths = []
- for config_path in uniquify([s.strip() for s in config_overrides.split(';')]):
- config_paths.append(config_path)
- m = defaults_re.match(config_path)
- if m:
- config_paths.append(m.group(1) + m.group(2))
-
- return config_paths
-
-
-def get_config_root():
- """
- Returns the config root for the system, defaults to '/etc/sideboard'.
-
- If the SIDEBOARD_CONFIG_ROOT environment variable is set, its contents
- will be returned instead.
- """
- default_root = '/etc/sideboard'
- config_root = os.environ.get('SIDEBOARD_CONFIG_ROOT', default_root)
- if config_root != default_root and not os.path.isdir(config_root):
- raise AssertionError('cannot find {!r} directory'.format(config_root))
- elif os.path.isdir(config_root) and not os.access(config_root, os.R_OK):
- raise AssertionError('{!r} directory is not readable'.format(config_root))
- return config_root
-
-
def get_module_and_root_dirs(requesting_file_path, is_plugin):
"""
Returns the "module_root" and "root" directories for the given file path.
@@ -114,63 +39,28 @@ def get_module_and_root_dirs(requesting_file_path, is_plugin):
Sideboard itself is making the request.
Returns:
- tuple(str): The "module_root" and "root" directories for the
+ tuple(Path, Path, str): The "module_root" and "root" directories and the plugin name for the
given module.
"""
- module_dir = os.path.dirname(os.path.abspath(requesting_file_path))
+ module_dir = pathlib.Path(requesting_file_path).parents[0]
if is_plugin:
from sideboard.lib import config
- plugin_name = os.path.basename(module_dir)
- root_dir = os.path.join(config['plugins_dir'], plugin_name)
- if '_' in plugin_name and not os.path.exists(root_dir):
- root_dir = os.path.join(config['plugins_dir'], plugin_name.replace('_', '-'))
+ plugin_name = module_dir.name
+ root_dir = pathlib.Path(config['plugins_dir']) / plugin_name
+ if '_' in plugin_name and not root_dir.exists():
+ root_dir = pathlib.Path(config['plugins_dir']) / plugin_name.replace('_', '-')
else:
- root_dir = os.path.realpath(os.path.join(module_dir, '..'))
- return module_dir, root_dir
+ root_dir = module_dir.parents[0]
+ plugin_name = "sideboard"
+ return module_dir, root_dir, plugin_name
def get_config_files(requesting_file_path, is_plugin):
"""
Returns a list of absolute paths to config files for the given file path.
-
- When the returned config files are parsed by ConfigObj each subsequent
- file will override values in earlier files.
-
- If `is_plugin` is `True` the first of the returned files is:
-
- * /etc/sideboard/plugins.d/<plugin>.cfg, which is the config file we
- expect in production
-
-
- If `is_plugin` is `False` the first two returned files are:
-
- * /etc/sideboard/sideboard-core.cfg, which is the sideboard core config
- file we expect in production
-
- * /etc/sideboard/sideboard-server.cfg, which is the sideboard server config
- file we expect in production
-
-
- The rest of the files returned are as follows, though we wouldn't
- necessarily expect these to exist on a production install (these are
- controlled by SIDEBOARD_CONFIG_OVERRIDES):
-
- * <root_dir>/development-defaults.ini, which can be checked into source
- control and include whatever we want to be present in a development
- environment.
-
- * <root_dir>/development.ini, which shouldn't be checked into source
- control, allowing a developer to include local settings not shared with
- others.
-
-
- When developing on a machine with an installed production config file, we
- might want to ignore the "real" config file and limit ourselves to only the
- development files. This behavior is turned on by setting the environment
- variable SIDEBOARD_MODULE_TESTING to any value. (This environment variable
- is also used elsewhere to turn off automatically loading all plugins in
- order to facilitate testing modules which rely on Sideboard but which are
- not themselves Sideboard plugins.)
+
+ We check the environment variable <PLUGIN_NAME>_CONFIG_FILES (for example
+ SIDEBOARD_CONFIG_FILES for sideboard itself) and return any paths listed
+ there, separated by ";".
Args:
requesting_file_path (str): The Python __file__ of the module
@@ -182,24 +72,56 @@ def get_config_files(requesting_file_path, is_plugin):
Returns:
list(str): List of absolute paths to config files for the given module.
"""
- config_root = get_config_root()
- module_dir, root_dir = get_module_and_root_dirs(requesting_file_path, is_plugin)
- module_name = os.path.basename(module_dir)
-
- if 'SIDEBOARD_MODULE_TESTING' in os.environ:
- base_configs = []
- elif is_plugin:
- base_configs = [os.path.join(config_root, 'plugins.d', '{}.cfg'.format(module_name.replace('_', '-')))]
- else:
- assert module_name == 'sideboard', 'Unexpected module name {!r} requesting "non-plugin" configuration files'.format(module_name)
- base_configs = [
- os.path.join(config_root, 'sideboard-core.cfg'),
- os.path.join(config_root, 'sideboard-server.cfg')
- ]
-
- config_overrides = [os.path.join(root_dir, config_path) for config_path in get_config_overrides()]
- return base_configs + config_overrides
-
+ module_dir, root_dir, plugin_name = get_module_and_root_dirs(requesting_file_path, is_plugin)
+ config_files_str = os.environ.get(f"{plugin_name.upper()}_CONFIG_FILES", "")
+ absolute_config_files = []
+ if config_files_str:
+ config_files = [pathlib.Path(x) for x in config_files_str.split(";")]
+ for path in config_files:
+ if path.is_absolute():
+ if not path.exists():
+ raise ValueError(f"Config file {path} specified in {plugin_name.upper()}_CONFIG_FILES does not exist!")
+ absolute_config_files.append(path)
+ else:
+ if not (root_dir / path).exists():
+ raise ValueError(f"Config file {root_dir / path} specified in {plugin_name.upper()}_CONFIG_FILES does not exist!")
+ absolute_config_files.append(root_dir / path)
+ return absolute_config_files
+
+def normalize_name(name):
+ return name.replace(".", "_")
+
+def load_section_from_environment(path, section):
+ """
+ Looks for configuration in environment variables.
+
+ Args:
+ path (str): The prefix of the current config section. For example,
+ sideboard.ini:
+ [cherrypy]
+ server.thread_pool: 10
+ would translate to the environment variable sideboard_cherrypy_server_thread_pool
+ section (configobj.ConfigObj): The section of the configspec to search
+ for the current path in.
+ """
+ config = {}
+ for setting in section:
+ if setting == "__many__":
+ prefix = f"{path}_"
+ for envvar in os.environ:
+ if envvar.startswith(prefix) and not envvar.split(prefix, 1)[1] in [normalize_name(x) for x in section]:
+ config[envvar.split(prefix, 1)[1]] = os.environ[envvar]
+ else:
+ if isinstance(section[setting], configobj.Section):
+ child_path = f"{path}_{setting}"
+ child = load_section_from_environment(child_path, section[setting])
+ if child:
+ config[setting] = child
+ else:
+ name = normalize_name(f"{path}_{setting}")
+ if name in os.environ:
+ config[setting] = yaml.safe_load(os.environ.get(normalize_name(name)))
+ return config
def parse_config(requesting_file_path, is_plugin=True):
"""
@@ -225,10 +147,10 @@ def parse_config(requesting_file_path, is_plugin=True):
Returns:
ConfigObj: The resulting configuration object.
"""
- module_dir, root_dir = get_module_and_root_dirs(requesting_file_path, is_plugin)
+ module_dir, root_dir, plugin_name = get_module_and_root_dirs(requesting_file_path, is_plugin)
- specfile = os.path.join(module_dir, 'configspec.ini')
- spec = configobj.ConfigObj(specfile, interpolation=False, list_values=False, encoding='utf-8', _inspec=True)
+ specfile = module_dir / 'configspec.ini'
+ spec = configobj.ConfigObj(str(specfile), interpolation=False, list_values=False, encoding='utf-8', _inspec=True)
# to allow more/better interpolations
root_conf = ['root = "{}"\n'.format(root_dir), 'module_root = "{}"\n'.format(module_dir)]
@@ -236,7 +158,17 @@ def parse_config(requesting_file_path, is_plugin=True):
for config_path in get_config_files(requesting_file_path, is_plugin):
# this gracefully handles nonexistent files
- temp_config.merge(configobj.ConfigObj(config_path, encoding='utf-8', interpolation=False))
+ file_config = configobj.ConfigObj(str(config_path), encoding='utf-8', interpolation=False)
+ if os.environ.get("LOG_CONFIG", "false").lower() == "true":
+ print(f"File config for {plugin_name} from {config_path}")
+ print(json.dumps(file_config, indent=2, sort_keys=True))
+ temp_config.merge(file_config)
+
+ environment_config = load_section_from_environment(plugin_name, spec)
+ if os.environ.get("LOG_CONFIG", "false").lower() == "true":
+ print(f"Environment config for {plugin_name}")
+ print(json.dumps(environment_config, indent=2, sort_keys=True))
+ temp_config.merge(configobj.ConfigObj(environment_config, encoding='utf-8', interpolation=False))
# combining the merge files to one file helps configspecs with interpolation
with NamedTemporaryFile(delete=False) as config_outfile:
@@ -249,16 +181,12 @@ def parse_config(requesting_file_path, is_plugin=True):
unlink(temp_name)
if validation is not True:
- raise ConfigurationError('configuration validation error(s) (): {!r}'.format(
+ raise RuntimeError('configuration validation error(s) (): {!r}'.format(
configobj.flatten_errors(config, validation))
)
if is_plugin:
sideboard_config = globals()['config']
- config['plugins'] = deepcopy(sideboard_config['plugins'])
- if 'rpc_services' in config:
- from sideboard.lib._services import _register_rpc_services
- _register_rpc_services(config['rpc_services'])
if 'default_url' in config:
priority = config.get('default_url_priority', 0)
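
The new `load_section_from_environment` walks the configspec and looks up flattened variable names of the form `<plugin>_<section>_<setting>` with dots replaced by underscores, parsing values with `yaml.safe_load`. A self-contained sketch of that lookup rule (names mirror the diff; this is an illustration, not the function itself):

```python
# Standalone illustration of the new environment lookup in
# sideboard/config.py: the spec key server.thread_pool under [cherrypy]
# becomes sideboard_cherrypy_server_thread_pool; yaml.safe_load types it.
import os
import yaml
import configobj

spec = configobj.ConfigObj(
    ["[cherrypy]", "server.thread_pool = integer(default=10)"],
    _inspec=True, list_values=False, interpolation=False)

os.environ["sideboard_cherrypy_server_thread_pool"] = "25"

def normalize_name(name):
    return name.replace(".", "_")

for setting in spec["cherrypy"]:
    env_name = normalize_name(f"sideboard_cherrypy_{setting}")
    if env_name in os.environ:
        print(setting, "=", yaml.safe_load(os.environ[env_name]))  # 25 (int)
```
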
diff --git a/sideboard/configspec.ini b/sideboard/configspec.ini
index 87b9a6d..b357730 100644
--- a/sideboard/configspec.ini
+++ b/sideboard/configspec.ini
@@ -28,46 +28,10 @@ client_key = string(default="")
client_cert = string(default="")
ssl_version = string(default="PROTOCOL_TLSv1")
-ws.thread_pool = integer(default=25)
-ws.call_timeout = integer(default=10) # seconds
-ws.poll_interval = integer(default=300) # seconds
-ws.reconnect_interval = integer(default=60) # seconds
-
-# Sideboard exposes a websocket at /ws and by default requires a logged-in
-# user to work. This setting can turn off that authentication check, which is
-# useful for development or for applications which require no authentication.
-ws.auth_required = boolean(default=True)
-
-# When performing authentication for the /ws websocket endpoint, this setting
-# determines which session field must be set for the request to be considered
-# "logged in". If your application sets a session field other than "username"
-# when a user logs in, you should change this setting to the name of that field.
-ws.auth_field = string(default="username")
-
-# When an authenticated websocket is established on the /ws endpoint, we copy
-# this configurable list of session fields into the websocket and make them
-# available as threadlocal fields on every websocket RPC requests. By default
-# we only do this with the username of the logged-in user, but applications
-# which store other data for logged in users can add those fields to this list.
-ws.session_fields = string_list(default=list("username"))
-
-# When a frontend server performs authentication before proxying a request,
-# the username is often placed in an HTTP header. We copy this configurable
-# list of HTTP headers and make them available on every websocket RPC request
-# as fields inside threadlocal['headers']
-ws.header_fields = string_list(default=list())
-
# If the "debug" option is set, the default login form will allow people to log
# in with any username using this password.
debug_password = string(default="testpassword")
-# Sideboard has numerous background threads which wait on sideboard.lib.stopped
-# to either sleep or bail immediately on shutdown. Since these threads wait in
-# a loop, we don't want to set an interval too small or we'll eat a lot of CPU
-# while doing absolutely nothing. A hard-coded value of 1 second would probably
-# be fine for all workloads, but we've made it configurable just in case.
-thread_wait_interval = float(default=1)
-
# Plugins can register different authenticators, since different applications may
# have different ideas about what it means to be "logged in". The default
# authenticator is mainly used for the /ws and /json RPC endpoints, so this
@@ -76,10 +40,6 @@ thread_wait_interval = float(default=1)
default_authenticator = string(default="default")
-[plugins]
-sqlite_dir = string(default="%(root)s/db")
-
-
[cherrypy]
checker.check_skipped_app_config = boolean(default=False)
@@ -117,27 +77,30 @@ profiling.aggregate = boolean(default=False)
profiling.strip_dirs = boolean(default=False)
server.socket_host = string(default="127.0.0.1")
-server.socket_port = integer(default=8282)
-
-tools.reset_threadlocal.on = boolean(default=True)
+server.socket_port = integer(default=80)
+server.thread_pool = integer(default=10)
tools.sessions.on = boolean(default=True)
tools.sessions.path = string(default="/")
-tools.sessions.timeout = integer(default=30)
+tools.sessions.timeout = integer(default=60)
tools.sessions.storage_type = string(default="file")
tools.sessions.storage_path = string(default="%(root)s/data/sessions")
tools.sessions.secure = boolean(default=False)
+tools.sessions.prefix = string(default="sideboard")
+
+# RedisSession specific values
+tools.sessions.host = string(default="127.0.0.1")
+tools.sessions.port = integer(default=6379)
+tools.sessions.db = integer(default=0)
+tools.sessions.password = string(default=None)
+tools.sessions.tls_skip_verify = boolean(default=False)
+tools.sessions.is_sentinel = boolean(default=False)
+tools.sessions.ssl = boolean(default=False)
+tools.sessions.user = string(default="")
# Built-in CherryPy web server stats page
tools.cpstats.on = boolean(default=False)
-[rpc_services]
-___many___ = string
-
-[[__many__]]
-jsonrpc_only = boolean(default=False)
-
-
[loggers]
root = option("TRACE", "DEBUG", "INFO", "WARN", "WARNING", "ERROR", "CRITICAL", default="DEBUG")
cherrypy.error = option("TRACE", "DEBUG", "INFO", "WARNING", "WARN", "ERROR", "CRITICAL", default="DEBUG")
@@ -145,6 +108,10 @@ cherrypy.access = option("TRACE", "DEBUG", "INFO", "WARNING", "WARN", "ERROR", "
__many__ = option("TRACE", "DEBUG", "INFO", "WARN", "WARNING", "ERROR", "CRITICAL", default="INFO")
[handlers]
+[[stdout]]
+class = string(default="logging.StreamHandler")
+stream = string(default="ext://sys.stdout")
+formatter = string(default="indent_multiline")
[[__many__]]
formatter = string(default="default")
___many___ = string()
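
Combined with the environment-driven config shown for sideboard/config.py above, the new RedisSession settings can be supplied without a config file. Hypothetical values follow; whether `storage_type = "redis"` is what wires up RedisSession is an assumption here, not something this diff shows:

```python
# Hypothetical deployment values for the new RedisSession settings, using
# the flattened environment names derived from [cherrypy] tools.sessions.*:
import os

os.environ["sideboard_cherrypy_tools_sessions_storage_type"] = "redis"  # assumed wiring
os.environ["sideboard_cherrypy_tools_sessions_host"] = "redis.internal"  # assumed host
os.environ["sideboard_cherrypy_tools_sessions_port"] = "6379"
```
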
diff --git a/sideboard/internal/autolog.py b/sideboard/internal/autolog.py
deleted file mode 100755
index 7508f14..0000000
--- a/sideboard/internal/autolog.py
+++ /dev/null
@@ -1,243 +0,0 @@
-from __future__ import absolute_import, unicode_literals
-import sys
-import logging
-import inspect
-
-
-class EagerFormattingAdapter(logging.LoggerAdapter):
- """
- A `logging.LoggerAdapter` that adds unterpolation support but performs the
- evaluation immediately if the appropriate loglevel is set.
- """
-
- def __init__(self, logger, extra=None):
- """
- Initialize the adapter with a logger and a dict-like object which
- provides contextual information. This constructor signature allows
- easy stacking of LoggerAdapters, if so desired.
-
- You can effectively pass keyword arguments as shown in the
- following example:
-
- adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2"))
- """
- self.logger = logger
- self.extra = extra
-
- def _eagerFormat(self, msg, level, args):
- """
- Eagerly apply log formatting if the appropriate level is enabled.
-
- Otherwise we just drop the log message (and return a string indicating
- that it was suppressed).
- """
- if self.isEnabledFor(level):
- # Do the string formatting immediately.
- if args:
- return self._getUnterpolatedMessage(msg, args)
- else:
- return msg
- else:
- # Otherwise, just drop the message completely to avoid anything going
- # wrong in the future. This text should clue one in to what's going
- # on in the bizarre edge case where this ever does show up.
- return '(log message suppressed due to insufficient log level)'
-
- def _getUnterpolatedMessage(self, msg, args):
- """
- Returns the formatted string, will first attempt str.format and will
- fallback to msg % args as it was originally.
-
- This is lifted almost wholesale from logging_unterpolation.
- """
- original_msg = msg
- if isinstance(args, dict):
- # special case handing for unpatched logging supporting
- # statements like:
- # logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2})
- args = (args,)
-
- try:
- msg = msg.format(*args)
- except UnicodeEncodeError:
- # This is most likely due to formatting a non-ascii string argument
- # into a bytestring, which the %-operator automatically handles
- # by casting the left side (the "msg" variable) in this context
- # to unicode. So we'll do that here
-
- if sys.version_info >= (3, 0,):
- # this is most likely unnecessary on python 3, but it's here
- # for completeness, in the case of someone manually creating
- # a bytestring
- unicode_type = str
- else:
- unicode_type = unicode
-
- # handle the attempt to print utf-8 encoded data, similar to
- # %-interpolation's handling of unicode formatting non-ascii
- # strings
- msg = unicode_type(msg).format(*args)
-
- except ValueError:
- # From PEP-3101, value errors are of the type raised by the format
- # method itself, so see if we should fall back to original
- # formatting since there was an issue
- if '%' in msg:
- msg = msg % args
- else:
- # we should NOT fall back, since there's no possible string
- # interpolation happening and we want a meaningful error
- # message
- raise
-
- if msg == original_msg and '%' in msg:
- # there must have been no string formatting methods
- # used, given the presence of args without a change in the msg
- # fall back to original formatting, including the special case
- # for one passed dictionary argument
- msg = msg % args
-
- return msg
-
- def debug(self, msg, *args, **kwargs):
- """
- Delegate a debug call to the underlying logger, after adding
- contextual information from this adapter instance.
- """
- self.log(logging.DEBUG, msg, *args, **kwargs)
-
- def info(self, msg, *args, **kwargs):
- """
- Delegate an info call to the underlying logger, after adding
- contextual information from this adapter instance.
- """
- self.log(logging.INFO, msg, *args, **kwargs)
-
- def warning(self, msg, *args, **kwargs):
- """
- Delegate a warning call to the underlying logger, after adding
- contextual information from this adapter instance.
- """
- self.log(logging.WARNING, msg, *args, **kwargs)
-
- def warn(self, msg, *args, **kwargs):
- """
- Delegate a warning call to the underlying logger, after adding
- contextual information from this adapter instance.
- """
- self.log(logging.WARNING, msg, *args, **kwargs)
-
- def error(self, msg, *args, **kwargs):
- """
- Delegate an error call to the underlying logger, after adding
- contextual information from this adapter instance.
- """
- self.log(logging.ERROR, msg, *args, **kwargs)
-
- def exception(self, msg, *args, **kwargs):
- """
- Delegate an exception call to the underlying logger, after adding
- contextual information from this adapter instance.
- """
- kwargs["exc_info"] = 1
- self.log(logging.ERROR, msg, *args, **kwargs)
-
- def critical(self, msg, *args, **kwargs):
- """
- Delegate a critical call to the underlying logger, after adding
- contextual information from this adapter instance.
- """
- self.log(logging.CRITICAL, msg, *args, **kwargs)
-
- def log(self, level, msg, *args, **kwargs):
- """
- Delegate a log call to the underlying logger, after adding
- contextual information from this adapter instance.
- """
- msg, kwargs = self.process(msg, kwargs)
- # We explicitly do not pass the args into the log method here, since
- # they should be "used up" by the eagerFormat method.
- self.logger.log(level, self._eagerFormat(msg, level, args), **kwargs)
-
-
-class AutoLogger(object):
- """
- A logger proxy object, with all of the methods and attributes of C{Logger}.
-
- When an attribute (e.g., "debug") is requested, inspects the stack for the
- calling module's name, and passes that name to C{logging.getLogger}.
-
- What this means is that you can instantiate an C{AutoLogger} anywhere, and
- when you call it, the log entry shows the module where you called it, not
- where it was created.
-
- C{AutoLogger} also inspects the local variables where it is called, looking
- for C{self}. If C{self} exists, its classname is added to the module name.
- """
-
- def __init__(self, adapter_class=None, adapter_args=None,
- adapter_kwargs=None):
- if adapter_args is None:
- adapter_args = []
- if adapter_kwargs is None:
- adapter_kwargs = {}
-
- self.adapter_class = adapter_class
- self.adapter_args = adapter_args
- self.adapter_kwargs = adapter_kwargs
-
- def __getattr__(self, name):
- f_locals = inspect.currentframe().f_back.f_locals
- if 'self' in f_locals and f_locals['self'] is not None:
- other = f_locals['self']
- caller_name = '%s.%s' % (other.__class__.__module__, other.__class__.__name__)
- else:
- caller_name = inspect.currentframe().f_back.f_globals['__name__']
- logger = logging.getLogger(caller_name)
-
- if self.adapter_class:
- logger = self.adapter_class(logger, *self.adapter_args,
- **self.adapter_kwargs)
-
- return getattr(logger, name)
-
-
-log = AutoLogger()
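
For illustration, a hypothetical plugin module using this shared `log` object; records are attributed to the calling module (and class, when called from a method), not to the module where `log` was created:

    # myplugin/widgets.py (hypothetical module)
    from sideboard.lib import log

    class WidgetStore(object):
        def save(self):
            # AutoLogger finds 'self' in the calling frame, so this is
            # logged under "myplugin.widgets.WidgetStore"
            log.debug('saving widgets')

    def refresh():
        # no 'self' here, so this is logged under "myplugin.widgets"
        log.info('refreshing widget cache')
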
-
-
-def log_exceptions(fn):
- """ A decorator designed to wrap a function and log any exception that method produces.
-
- The exception will still be raised after being logged.
-
- Also logs (at the trace level) the arguments to every call.
-
-    Currently this is only designed for module-level functions; behavior is
-    undefined if a method is decorated with this (since the logger is resolved
-    from the module name).
- """
-
- def wrapper(*args, **kwargs):
- try:
- a = args or []
- a = [str(x)[:255] for x in a]
- kw = kwargs or {}
- kw = {str(k)[:255]: str(v)[:255] for k, v in kw.items()}
- log.debug('Calling %s.%s %r %r' % (fn.__module__, fn.__name__, a, kw))
- return fn(*args, **kwargs)
- except Exception as e:
- log.error('Error calling function %s: %s' % (fn.__name__, e))
- log.exception(e)
- raise
-
- wrapper.__name__ = fn.__name__
- return wrapper
-
-
-TRACE_LEVEL = 5
-logging.addLevelName(TRACE_LEVEL, "TRACE")
-
-
-def trace(self, message, *args, **kws):
- # Yes, logger takes its '*args' as 'args'.
- self._log(TRACE_LEVEL, message, args, **kws)
-logging.Logger.trace = trace
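
With the level registered and the method patched in, TRACE behaves like any built-in level once a logger is configured at level 5 or lower (a minimal sketch):

    import logging
    logging.basicConfig(level=TRACE_LEVEL)   # i.e. level=5
    logging.getLogger('example').trace('entered %s', 'some_function')
    # -> TRACE:example:entered some_function
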
diff --git a/sideboard/internal/connection_checker.py b/sideboard/internal/connection_checker.py
deleted file mode 100644
index eeb7673..0000000
--- a/sideboard/internal/connection_checker.py
+++ /dev/null
@@ -1,76 +0,0 @@
-from __future__ import unicode_literals, print_function
-import ssl
-import socket
-from contextlib import closing
-
-import six
-from six.moves.urllib_parse import urlparse
-
-from sideboard.lib import services, entry_point
-
-
-def _check(url, **ssl_params):
- status = ['checking {}'.format(url)]
-
- try:
- parsed = urlparse(url)
- except Exception as e:
- return status + ['failed to parse url: {!s}'.format(e)]
- else:
- host = parsed.hostname
- port = parsed.port or (443 if parsed.scheme in ['https', 'wss'] else 80)
- status.append('using hostname {} and port {}'.format(host, port))
-
- try:
- ip = socket.gethostbyname(host)
- except Exception as e:
- return status + ['failed to resolve host with DNS: {!s}'.format(e)]
- else:
- status.append('successfully resolved host {} to {}'.format(host, ip))
-
- sock = None
- try:
- sock = socket.create_connection((host, port))
- except Exception as e:
- return status + ['failed to establish a socket connection to {} on port {}: {!s}'.format(host, port, e)]
- else:
- status.append('successfully opened socket connection to {}:{}'.format(host, port))
-
- # check if any of the non-version SSL options have been set
- if any(val for val in ssl_params.values() if not isinstance(val, int)):
- try:
- wrapped = ssl.wrap_socket(sock, **ssl_params)
- except Exception as e:
- return status + ['failed to complete SSL handshake ({}): {!s}'.format(ssl_params, e)]
- else:
- status.append('succeeded at SSL handshake (without validating server cert)')
- finally:
- if sock:
- sock.close()
-
- try:
- with closing(socket.create_connection((host, port))) as sock:
- wrapped = ssl.wrap_socket(sock, **dict(ssl_params, cert_reqs=ssl.CERT_REQUIRED))
- status.append('succeeded at validating server cert')
- except Exception as e:
- return status + ['failed to validate server cert ({}): {!s}'.format(ssl_params, e)]
-
- status.append('everything seems to work')
- return status
-
-
-def check_all():
- checks = {}
- for name, jservice in services._jsonrpc.items():
- jproxy = jservice._send.im_self if six.PY2 else jservice._send.__self__ # ugly kludge to get the ServerProxy object
- url = '{}://{}/'.format(jproxy.type, jproxy.host)
- checks[name] = _check(url, **jproxy.ssl_opts)
- return checks
-
-
-@entry_point
-def check_connections():
- for service, results in sorted(check_all().items()):
- print(service)
- print('-' * len(service))
- print('\n'.join(results) + '\n')
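
Note that the module-level ssl.wrap_socket used above was deprecated in Python 3.7 and removed in 3.12; the cert-validating half of the check would be written against an SSLContext today (a sketch, not part of the deleted module):

    import socket
    import ssl
    from contextlib import closing

    def check_tls(host, port=443):
        # validates the server certificate and hostname against the
        # system trust store, like the final check in _check() above
        context = ssl.create_default_context()
        with closing(socket.create_connection((host, port))) as sock:
            with context.wrap_socket(sock, server_hostname=host) as tls:
                return tls.version()
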
diff --git a/sideboard/internal/logging.py b/sideboard/internal/logging.py
deleted file mode 100644
index 20e71b8..0000000
--- a/sideboard/internal/logging.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from __future__ import unicode_literals, absolute_import
-import os
-import logging.config
-
-from sideboard.config import config, get_config_root
-
-
-class IndentMultilinesLogFormatter(logging.Formatter):
- """
- Provide a formatter (unused by default) which adds indentation to messages
- which are split across multiple lines.
- """
- def format(self, record):
- s = super(IndentMultilinesLogFormatter, self).format(record)
-        # indent every line after the first so multi-line messages are easier for external log programs to parse
- s = s.rstrip('\n').replace('\n', '\n ')
- return s
-
-
-def _configure_logging():
- fname = os.path.join(get_config_root(), 'logging.cfg')
- if os.path.exists(fname):
- logging.config.fileConfig(fname, disable_existing_loggers=True)
- else:
- # ConfigObj doesn't support interpolation escaping, so we manually work around it here
- formatters = config['formatters'].dict()
- for formatter in formatters.values():
- formatter['format'] = formatter['format'].replace('$$', '%')
- formatter['datefmt'] = formatter['datefmt'].replace('$$', '%') or None
- formatters['indent_multiline'] = {
- '()': IndentMultilinesLogFormatter,
- 'format': formatters['default']['format']
- }
- logging.config.dictConfig({
- 'version': 1,
- 'root': {
- 'level': config['loggers']['root'],
- 'handlers': config['handlers'].dict().keys()
- },
- 'loggers': {
- name: {'level': level}
- for name, level in config['loggers'].items() if name != 'root'
- },
- 'handlers': config['handlers'].dict(),
- 'formatters': formatters
- })
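
Because ConfigObj claims % for its own interpolation, the stock config spells logging placeholders with $$, which the code above rewrites before handing the sections to dictConfig. An illustrative formatter section (not the shipped config):

    [formatters]
    [[default]]
    format = '$$(asctime)s [$$(levelname)s] $$(name)s: $$(message)s'
    datefmt = ''

reaches logging.config.dictConfig as the standard %(asctime)s [%(levelname)s] %(name)s: %(message)s.
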
diff --git a/sideboard/jsonrpc.py b/sideboard/jsonrpc.py
deleted file mode 100755
index 70b6e4d..0000000
--- a/sideboard/jsonrpc.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from __future__ import unicode_literals
-import json
-import traceback
-
-import cherrypy
-
-from sideboard.lib import log, config, serializer
-from sideboard.websockets import trigger_delayed_notifications
-
-
-ERR_INVALID_RPC = -32600
-ERR_MISSING_FUNC = -32601
-ERR_INVALID_PARAMS = -32602
-ERR_FUNC_EXCEPTION = -32603
-ERR_INVALID_JSON = -32700
-
-
-# TODO: this is ugly, it relies on the undocumented implementation of json_out so we should probably write our own force_json_out
-def json_handler(*args, **kwargs):
- value = cherrypy.serving.request._json_inner_handler(*args, **kwargs)
- return json.dumps(value, cls=serializer).encode('utf-8')
-
-
-def force_json_in():
- """A version of jsontools.json_in that forces all requests to be interpreted as JSON."""
- request = cherrypy.serving.request
- if not request.headers.get('Content-Length', ''):
- raise cherrypy.HTTPError(411)
-
- if cherrypy.request.method in ('POST', 'PUT'):
- body = request.body.fp.read()
- try:
- cherrypy.serving.request.json = json.loads(body.decode('utf-8'))
- except ValueError:
- raise cherrypy.HTTPError(400, 'Invalid JSON document')
-
-cherrypy.tools.force_json_in = cherrypy.Tool('before_request_body', force_json_in, priority=30)
-
-
-def _make_jsonrpc_handler(services, debug=config['debug'],
- precall=lambda body: None,
- errback=lambda err, message: log.error(message, exc_info=True)):
- @cherrypy.expose
- @cherrypy.tools.force_json_in()
- @cherrypy.tools.json_out(handler=json_handler)
- def jsonrpc_handler(self=None):
- id = None
-
- def error(code, message):
- body = {'jsonrpc': '2.0', 'id': id, 'error': {'code': code, 'message': message}}
- log.warning('returning error message: %s', body)
- return body
-
- body = cherrypy.request.json
- if not isinstance(body, dict):
- return error(ERR_INVALID_JSON, 'invalid json input {!r}'.format(cherrypy.request.body))
-
- log.debug('jsonrpc request body: %s', body)
-
- id, params = body.get('id'), body.get('params', [])
- if 'method' not in body:
- return error(ERR_INVALID_RPC, '"method" field required for jsonrpc request')
-
- method = body['method']
- if method.count('.') != 1:
- return error(ERR_MISSING_FUNC, 'invalid method ' + method)
-
- module, function = method.split('.')
- if module not in services:
- return error(ERR_MISSING_FUNC, 'no module ' + module)
-
- service = services[module]
- if not hasattr(service, function):
- return error(ERR_MISSING_FUNC, 'no function ' + method)
-
- if not isinstance(params, (list, dict)):
- return error(ERR_INVALID_PARAMS, 'invalid parameter list: {!r}'.format(params))
-
- args, kwargs = (params, {}) if isinstance(params, list) else ([], params)
-
- precall(body)
- try:
- response = {'jsonrpc': '2.0', 'id': id,
- 'result': getattr(service, function)(*args, **kwargs)}
- log.debug('returning success message: %s', response)
- return response
- except Exception as e:
- errback(e, 'unexpected jsonrpc error calling ' + method)
- message = 'unexpected error'
- if debug:
- message += ': ' + traceback.format_exc()
- return error(ERR_FUNC_EXCEPTION, message)
- finally:
- trigger_delayed_notifications()
-
- return jsonrpc_handler
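
On the wire, the handler above speaks ordinary JSON-RPC 2.0 with two-part module.function method names. A request/response exchange against a hypothetical widgets.list_widgets method:

    --> {"jsonrpc": "2.0", "id": 1, "method": "widgets.list_widgets", "params": {"owner": "bob"}}
    <-- {"jsonrpc": "2.0", "id": 1, "result": ["widget-1", "widget-2"]}

Calling an unregistered module instead returns an error object with code -32601 (ERR_MISSING_FUNC above) and the same id.
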
diff --git a/sideboard/lib/__init__.py b/sideboard/lib/__init__.py
index 2ad5752..213e28f 100644
--- a/sideboard/lib/__init__.py
+++ b/sideboard/lib/__init__.py
@@ -1,27 +1,10 @@
from __future__ import unicode_literals
-import six
-from sideboard.internal.autolog import log
-from sideboard.config import config, ConfigurationError, parse_config
-from sideboard.lib._utils import is_listy, listify, serializer, cached_property, request_cached_property, class_property, entry_point, RWGuard
-from sideboard.lib._cp import stopped, on_startup, on_shutdown, mainloop, ajax, renders_template, render_with_templates, restricted, all_restricted, register_authenticator
-from sideboard.lib._profiler import cleanup_profiler, profile, Profiler, ProfileAggregator
-from sideboard.lib._threads import DaemonTask, Caller, GenericCaller, TimeDelayQueue
-from sideboard.lib._websockets import WebSocket, Model, Subscription, MultiSubscription
-from sideboard.websockets import subscribes, locally_subscribes, notifies, notify, threadlocal
-from sideboard.lib._services import services
+from sideboard.config import parse_config, config
+from sideboard.lib._utils import serializer, entry_point
+import sideboard.lib._redissession
+import sideboard.lib._threads
-__all__ = ['log',
- 'services',
- 'ConfigurationError', 'parse_config',
- 'is_listy', 'listify', 'serializer', 'cached_property', 'class_property', 'entry_point',
- 'stopped', 'on_startup', 'on_shutdown', 'mainloop', 'ajax', 'renders_template', 'render_with_templates',
- 'restricted', 'all_restricted', 'register_authenticator',
- 'cleanup_profiler', 'profile', 'Profiler', 'ProfileAggregator',
- 'DaemonTask', 'Caller', 'GenericCaller', 'TimeDelayQueue',
- 'WebSocket', 'Model', 'Subscription', 'MultiSubscription',
- 'listify', 'serializer', 'cached_property', 'request_cached_property', 'is_listy', 'entry_point', 'RWGuard',
- 'threadlocal', 'subscribes', 'locally_subscribes', 'notifies', 'notify']
-if six.PY2:
- __all__ = [s.encode('ascii') for s in __all__]
+__all__ = ['parse_config', 'config',
+ 'serializer', 'entry_point']
diff --git a/sideboard/lib/_cp.py b/sideboard/lib/_cp.py
deleted file mode 100644
index d409533..0000000
--- a/sideboard/lib/_cp.py
+++ /dev/null
@@ -1,244 +0,0 @@
-from __future__ import unicode_literals
-import json
-from threading import Event
-from functools import wraps
-from collections import defaultdict
-
-from six.moves.urllib_parse import quote
-
-import jinja2
-import cherrypy
-
-from sideboard.lib._redissession import RedisSession
-cherrypy.lib.sessions.RedisSession = RedisSession
-
-import sideboard.lib
-from sideboard.lib import log, config, serializer
-
-auth_registry = {}
-_startup_registry = defaultdict(list)
-_shutdown_registry = defaultdict(list)
-
-
-def _on_startup(func, priority):
- _startup_registry[priority].append(func)
- return func
-
-
-def _on_shutdown(func, priority):
- _shutdown_registry[priority].append(func)
- return func
-
-
-def on_startup(func=None, priority=50):
- """
- Register a function to be called when Sideboard starts. Startup functions
- have a priority, and the functions are invoked in priority order, where
- low-priority-numbered functions are invoked before higher numbers.
-
- Startup functions may be registered in one of three ways:
-
- 1) A function can be passed directly, e.g.
- on_startup(callback_function)
- on_startup(callback_function, priority=25)
-
- 2) This function can be used as a decorator, e.g.
- @on_startup
- def callback_function():
- ...
-
- 3) This function can be used as a decorator with a priority value, e.g.
- @on_startup(priority=25)
- def callback_function():
- ...
-
- If instead of running a function when Sideboard starts, you need to run a
- function immediately after Sideboard loads your plugin, you may optionally
- declare an on_load() function in your plugin's top-level __init__.py
- module. If it exists, Sideboard will call on_load() immediately after
- loading the plugin, before attempting to load any subsequent plugins.
-
- """
- if func:
- return _on_startup(func, priority)
- else:
- return lambda func: _on_startup(func, priority)
-
-
-def on_shutdown(func=None, priority=50):
- """
- Register a function to be called when Sideboard exits. See the on_startup
- function above for how this is used.
- """
- if func:
- return _on_shutdown(func, priority)
- else:
- return lambda func: _on_shutdown(func, priority)
-
-
-def _run_startup():
- for priority, functions in sorted(_startup_registry.items()):
- for func in functions:
- func()
-
-
-def _run_shutdown():
- for priority, functions in sorted(_shutdown_registry.items()):
- for func in functions:
- try:
- func()
- except Exception:
- log.warning('Ignored exception during shutdown', exc_info=True)
-
-stopped = Event()
-on_startup(stopped.clear, priority=0)
-on_shutdown(stopped.set, priority=0)
-
-cherrypy.engine.subscribe('start', _run_startup, priority=98)
-cherrypy.engine.subscribe('stop', _run_shutdown, priority=98)
-
-
-def mainloop():
- """
- This function exists for Sideboard plugins which do not run CherryPy. It
- runs all of the functions registered with sideboard.lib.on_startup and then
- waits for shutdown, at which point it runs all functions registered with
- sideboard.lib.on_shutdown.
- """
- _run_startup()
- try:
- while not stopped.is_set():
- try:
- stopped.wait(config['thread_wait_interval'])
- except KeyboardInterrupt:
- break
- finally:
- _run_shutdown()
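
Taken together, a daemon-style plugin that runs no CherryPy server of its own would look roughly like this (hypothetical plugin code):

    from sideboard.lib import on_startup, on_shutdown, mainloop

    @on_startup
    def open_resources():
        print('plugin starting')

    @on_shutdown(priority=75)
    def close_resources():
        print('plugin stopping')

    if __name__ == '__main__':
        # runs all startup hooks, waits for shutdown or Ctrl-C,
        # then runs all shutdown hooks
        mainloop()
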
-
-
-def ajax(method):
- """
- Decorator for CherryPy page handler methods which sets the Content-Type
- to application/json and serializes your function's return value to json.
- """
- @wraps(method)
- def to_json(self, *args, **kwargs):
- cherrypy.response.headers['Content-Type'] = 'application/json'
- return json.dumps(method(self, *args, **kwargs), cls=sideboard.lib.serializer)
- return to_json
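
Typical usage of @ajax on a page handler (hypothetical class):

    import cherrypy

    class API(object):
        @cherrypy.expose
        @ajax
        def counts(self):
            # serialized via sideboard.lib.serializer and returned
            # with Content-Type: application/json
            return {'widgets': 42}
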
-
-
-def restricted(x):
- """
- Decorator for CherryPy page handler methods. This can either be called
- to provide an authenticator ident or called directly as a decorator, e.g.
-
- @restricted
- def some_page(self): ...
-
- is equivalent to
-
- @restricted(sideboard.lib.config['default_authenticator'])
- def some_page(self): ...
- """
- def make_decorator(ident):
- def decorator(func):
- @cherrypy.expose
- @wraps(func)
- def with_checking(*args, **kwargs):
- if not auth_registry[ident]['check']():
- raise cherrypy.HTTPRedirect(auth_registry[ident]['login_path'])
- else:
- return func(*args, **kwargs)
- return with_checking
- return decorator
-
- if hasattr(x, '__call__'):
- return make_decorator(config['default_authenticator'])(x)
- else:
- return make_decorator(x)
-
-
-def renders_template(method):
- """
-    Decorator for CherryPy page handler methods implementing default behaviors:
-    - if your page handler returns a string, that string is returned unmodified
-    - if your page handler returns a non-jsonrpc dictionary, a template is
-      rendered with that dictionary; the page handler my_page renders my_page.html
- """
- @cherrypy.expose
- @wraps(method)
- def renderer(self, *args, **kwargs):
- output = method(self, *args, **kwargs)
- if isinstance(output, dict) and output.get('jsonrpc') != '2.0':
- return self.env.get_template(method.__name__ + '.html').render(**output)
- else:
- return output
- return renderer
-
-
-# Lifted from Jinja2 docs. See http://jinja.pocoo.org/docs/api/#autoescaping
-def _guess_autoescape(template_name):
- if template_name is None or '.' not in template_name:
- return False
- ext = template_name.rsplit('.', 1)[1]
- return ext in ('html', 'htm', 'xml')
-
-
-class render_with_templates(object):
- """
-    Class decorator for CherryPy application objects which causes all of your page
-    handler methods which return dictionaries to render Jinja templates found in
-    the given template_dir using those dictionaries. So if you have a page handler
-    called my_page which returns a dictionary, the template my_page.html in the
-    template_dir directory will be rendered with that dictionary. An "env"
-    attribute, which is a Jinja environment, gets added to the class.
-
- For convenience, if the optional "restricted" parameter is passed, this class is
- also passed through the @all_restricted class decorator.
- """
- def __init__(self, template_dir, restricted=False):
- self.template_dir, self.restricted = template_dir, restricted
-
- def __call__(self, klass):
- klass.env = jinja2.Environment(autoescape=_guess_autoescape, loader=jinja2.FileSystemLoader(self.template_dir))
- klass.env.filters['jsonify'] = lambda x: klass.env.filters['safe'](json.dumps(x, cls=serializer))
-
- if self.restricted:
- all_restricted(self.restricted)(klass)
-
- for name, func in list(klass.__dict__.items()):
- if hasattr(func, '__call__'):
- setattr(klass, name, renders_template(func))
-
- return klass
-
-
-class all_restricted(object):
- """Invokes the @restricted decorator on all methods of a class."""
- def __init__(self, ident):
- self.ident = ident
- assert ident in auth_registry, '{!r} is not a recognized authenticator'.format(ident)
-
- def __call__(self, klass):
- for name, func in list(klass.__dict__.items()):
- if hasattr(func, '__call__'):
- setattr(klass, name, restricted(self.ident)(func))
- return klass
-
-
-def register_authenticator(ident, login_path, checker):
- """
- Register a new authenticator, which consists of three things:
- - A string ident, used to identify the authenticator in @restricted calls.
- - The path to the login page we should redirect to when not authenticated.
- - A function callable with no parameters which returns a truthy value if the
- user is logged in and a falsey value if they are not.
- """
- assert ident not in auth_registry, '{} is already a registered authenticator'.format(ident)
- auth_registry[ident] = {
- 'check': checker,
- 'login_path': login_path
- }
-
-register_authenticator('default', '/login', lambda: 'username' in cherrypy.session)
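
A plugin can register its own authenticator and protect handlers with it; a sketch which assumes the login page sets an 'is_admin' session key:

    import cherrypy
    from sideboard.lib import register_authenticator, restricted

    register_authenticator('admin', '/admin/login',
                           lambda: cherrypy.session.get('is_admin'))

    class AdminPages(object):
        @restricted('admin')
        def index(self):
            # unauthenticated users are redirected to /admin/login
            return 'admins only'
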
diff --git a/sideboard/lib/_profiler.py b/sideboard/lib/_profiler.py
deleted file mode 100644
index f189a92..0000000
--- a/sideboard/lib/_profiler.py
+++ /dev/null
@@ -1,268 +0,0 @@
-"""
-Adds profiling tools and a web interface for viewing profiling results.
-
-The Sideboard profiler borrows heavily from the CherryPy profiler
-(cherrypy/lib/profiler.py), but with a few added features and nicer formatting.
-
- * Adds the ability to sort results by different columns.
- * Adds the ability to cleanup profile data files.
- * Uses a better naming scheme for profile data files.
- * Uses `cProfile` instead of `profile` for better performance.
-
-Profiling data can be collected using the @profile decorator on functions and
-methods. The profiling results can be viewed at http://servername/profile/.
-
-Good candidates for profiling are the outermost functions that generate your
-web pages, usually exposed as cherrypy endpoints via @cherrypy.expose::
-
- import cherrypy
- from sideboard.lib import profile
-
- class Root(object):
- @cherrypy.expose
- @profile
- def index(self):
- # Create and return the index page
- return ''
-
-
-But any regular function can be profiled using the @profile decorator::
-
- from sideboard.lib import profile
-
- @profile
- def some_interesting_function():
- # Do some stuff
-
-
-The following config options control how the profiler operates, see
-configspec.ini for more details::
-
- [cherrypy]
- profiling.on = True
- profiling.path = "%(root)s/data/profiler"
- profiling.aggregate = False
- profiling.strip_dirs = False
-
-"""
-from __future__ import unicode_literals
-import io
-import os
-import os.path
-import cProfile
-import pstats
-from datetime import datetime
-from functools import wraps
-from glob import glob
-
-import cherrypy
-from sideboard.lib import config, entry_point, listify
-
-
-def _new_func_strip_path(func_name):
- """
- Adds the parent module to profiler output for `__init__.py` files.
-
- Copied verbatim from cherrypy/lib/profiler.py.
- """
- filename, line, name = func_name
- if filename.endswith('__init__.py'):
- return os.path.basename(filename[:-12]) + filename[-12:], line, name
- return os.path.basename(filename), line, name
-
-pstats.func_strip_path = _new_func_strip_path
-
-
-@entry_point
-def cleanup_profiler():
- """
- Deletes all `*.prof` files in the profiler's data directory.
-
- This is useful when you've created tons of profile files that you're no
- longer interested in. Exposed as a `sep` command::
-
- $ sep cleanup_profiler
-
- The profiler directory is specified in the config by::
-
- [cherrypy]
- profiling.path = 'path/to/profile/data'
-
- """
- profiling_path = config['cherrypy']['profiling.path']
- for f in glob(os.path.join(profiling_path, '*.prof')):
- os.remove(f)
-
-
-def profile(func):
- """
- Decorator to capture profile data from a method or function.
-
- If profiling is disabled then this decorator is a no-op, and the original
- function is returned unmodified. Since the original function is returned,
- this decorator does not incur any performance penalty if profiling is
- disabled. To enable or disable profiling use the following setting in your
- config::
-
- [cherrypy]
- profiling.on = True # Or False to disable
-
- Args:
- func (function): The function to profile.
-
- Returns:
- function: Either a wrapped version of `func` with profiling enabled,
- or `func` itself if profiling is disabled.
-
- See Also:
- configspec.ini
- """
- if config['cherrypy']['profiling.on']:
- profiling_path = config['cherrypy']['profiling.path']
- if config['cherrypy']['profiling.aggregate']:
- p = ProfileAggregator(profiling_path)
- else:
- p = Profiler(profiling_path)
-
- @wraps(func)
- def wrapper(*args, **kwargs):
- return p.run(func, *args, **kwargs)
- return wrapper
- else:
- return func
-
-
-class Profiler(object):
- """
- Mostly copied from cherrypy/lib/profiler.py.
-
- * Adds the ability to sort results by different columns.
- * Adds the ability to cleanup profile data files.
- * Uses a better naming scheme for profile data files.
- """
-
- # https://docs.python.org/3/library/profile.html#pstats.Stats.sort_stats
- sort_fields = [
- ('cumulative', 'Cumulative Time'),
- ('filename', 'File Name'),
- ('ncalls', 'Call Count'),
- ('pcalls', 'Primitive Call Count'),
- ('line', 'Line Number'),
- ('name', 'Function Name'),
- ('nfl', 'Function/File/Line'),
- ('stdname', 'Standard Name'),
- ('tottime', 'Total Time')]
-
- def __init__(self, path=config['cherrypy']['profiling.path']):
- self.path = path
- if not os.path.exists(path):
- os.makedirs(path)
-
- def new_filename(self, func):
- date = datetime.now().strftime("%Y-%m-%d_%H:%M:%S.%f")
- name = func.__name__ if func.__name__ else 'unknown'
- return '{}_{}.prof'.format(date, name)
-
- def run(self, func, *args, **params):
- """Dump profile data into self.path."""
- path = os.path.join(self.path, self.new_filename(func))
- prof = cProfile.Profile()
- result = prof.runcall(func, *args, **params)
- prof.dump_stats(path)
- return result
-
-    def statfiles(self):
-        """Return the list of available profile data filenames."""
-        return [f for f in os.listdir(self.path) if f.endswith('.prof')]
-
-    def stats(self, filename, sortby='cumulative'):
-        """Return the print_stats() output for the given profile data file."""
- sio = io.StringIO()
- s = pstats.Stats(os.path.join(self.path, filename), stream=sio)
- if config['cherrypy']['profiling.strip_dirs']:
- s.strip_dirs()
- s.sort_stats(sortby)
- s.print_stats()
- response = sio.getvalue()
- sio.close()
- return response
-
-    @cherrypy.expose
-    def index(self):
-        return '''
-            <html><head><title>Sideboard Profiler</title></head>
-            <frameset cols="200, 1*">
-                <frame src="menu"/>
-                <frame name="main" src=""/>
-            </frameset></html>
-            '''
-
-    @cherrypy.expose
-    def menu(self):
-        yield '<h2>Profiling Runs</h2>'
-        runs = self.statfiles()
-        if not runs:
-            yield 'No profiling runs'
-        else:
-            yield '<ul>'
-            runs.sort()
-            for run in runs:
-                yield '<li><a href="stats?filename={0}" target="main">{0}</a>' \
-                      '</li>'.format(run)
-            yield '</ul>'
-
- @cherrypy.expose
- def cleanup(self):
- """
- Deletes all `*.prof` files in the profiler's data directory.
-
- To delete all profile data files hit
- http://servername/profile/cleanup/.
-
- The profiler directory is specified by::
-
- [cherrypy]
- profiling.path = 'path/to/profile/data'
-
- See Also:
- `cleanup_profiler`
- """
- cleanup_profiler()
- raise cherrypy.HTTPRedirect('.')
-
-
-class ProfileAggregator(Profiler):
- """
- Mostly copied from cherrypy/lib/profiler.py.
-
- * Uses a better naming scheme for profile data files.
- """
-
-    def __init__(self, path=None):
-        super(ProfileAggregator, self).__init__(path or config['cherrypy']['profiling.path'])
- self.profiler = cProfile.Profile()
-
- def run(self, func, *args, **params):
- path = os.path.join(self.path, self.new_filename(func))
- result = self.profiler.runcall(func, *args, **params)
- self.profiler.dump_stats(path)
- return result
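
The files written by both classes are ordinary cProfile dumps, so they can also be inspected without the web UI (standard pstats usage; the filename is illustrative):

    import pstats

    stats = pstats.Stats('data/profiler/2024-01-01_12:00:00.000000_index.prof')
    stats.strip_dirs().sort_stats('cumulative').print_stats(10)
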
diff --git a/sideboard/lib/_redissession.py b/sideboard/lib/_redissession.py
index 548778d..39cb651 100644
--- a/sideboard/lib/_redissession.py
+++ b/sideboard/lib/_redissession.py
@@ -7,6 +7,7 @@
from cherrypy.lib.sessions import Session
import redis
+import cherrypy
from redis import Sentinel
class RedisSession(Session):
@@ -85,4 +86,6 @@ def acquire_lock(self):
def release_lock(self):
"""Release the lock on the currently-loaded session data."""
self.locks[self.prefix+self.id].release()
- self.locked = False
\ No newline at end of file
+ self.locked = False
+
+cherrypy.lib.sessions.RedisSession = RedisSession
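
With the class patched into cherrypy.lib.sessions, a deployment selects it through CherryPy's session tool. A sketch, hedged on CherryPy version: older releases resolve tools.sessions.storage_type = 'redis' to the RedisSession attribute set above, while CherryPy 18+ takes the class directly:

    import cherrypy
    from sideboard.lib._redissession import RedisSession

    cherrypy.config.update({
        'tools.sessions.on': True,
        'tools.sessions.storage_class': RedisSession,  # CherryPy 18+
    })
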
diff --git a/sideboard/lib/_services.py b/sideboard/lib/_services.py
deleted file mode 100644
index 38b3080..0000000
--- a/sideboard/lib/_services.py
+++ /dev/null
@@ -1,221 +0,0 @@
-from __future__ import unicode_literals
-import os
-import ssl
-
-from rpctools.jsonrpc import ServerProxy
-
-from sideboard.lib import log, config, threadlocal, WebSocket
-
-
-class _ServiceDispatcher(object):
- def __init__(self, services, name):
- self.services, self.name = services, name
-
- def __getattr__(self, method):
- from sideboard.lib import is_listy
- assert self.name in self.services, '{} is not registered as a service'.format(self.name)
- service = self.services[self.name]
- assert not is_listy(getattr(service, '__all__', None)) or method in service.__all__, 'unable to call non-whitelisted method {}.{}'.format(self.name, method)
- func = service.make_caller('{}.{}'.format(self.name, method)) if isinstance(service, WebSocket) else getattr(service, method, None)
- assert func and hasattr(func, '__call__') and not method.startswith('_'), 'no such method {}.{}'.format(self.name, method)
- return func
-
-
-class _JsonrpcServices(object):
- def __init__(self, services):
- self.services = services
-
- def __getattr__(self, name):
- return _ServiceDispatcher(self.services, name)
-
-
-class _Services(object):
- """
- This class is used by plugins to register services, and to call services
- registered by other plugins. You call services by attribute lookup, e.g.
-
- >>> from sideboard.lib import services
- >>> services.foo.bar()
- 'Hello World!'
-
- You may get a service which has not yet been registered; you'll only get
- an exception when calling a method on the service if it doesn't exist yet;
- this is to facilitate getting a namespace before the relevant plugin has
- been imported by Sideboard:
-
- >>> foo, baz = services.foo, services.baz
- >>> foo.bar()
- 'Hello World!'
- >>> baz.baf()
- AssertionError: baz is not registered as a service
-
- Services may be local or websocket, but they're called in the same way.
- If you know that service is remote, and you want to use Jsonrpc, you can
- use the .jsonrpc attribute of this class, e.g.
-
- >>> services.jsonrpc.foo.bar()
- 'Hello World!'
- >>> foo = services.jsonrpc.foo
- >>> foo.bar()
- 'Hello World!'
- """
- def __init__(self):
- self._services, self._jsonrpc, self._websockets = {}, {}, {}
- self.jsonrpc = _JsonrpcServices(self._jsonrpc)
-
- def register(self, service, name=None, _jsonrpc=None, _override=False):
- """
- Register an object with methods (usually a module) to be exposed under
- the given name. An exception is raised if you use a name already used
- by another service.
-
- This method takes the following parameters:
- - service: the object being registered; this is typically a module but
- can be anything with functions (e.g. a class instance)
- - name: the name of the service being registered; if omitted, this will
- default to the __name__ of the service object
-        - _jsonrpc: this should probably never be passed by plugins; Sideboard
-          uses this to register both WebSocket and Jsonrpc RPC clients
- """
- name = name or service.__name__
- if not _jsonrpc:
- assert name not in self._services, '{} has already been registered'.format(name)
- self._services[name] = service
- if _jsonrpc:
- self._jsonrpc[name] = _jsonrpc
-
- def get_services(self):
- """
- Returns the dictionary we use to store our registered services. This
- is NOT a copy, so it is NOT safe to modify this dictionary without
- copying; this is intentional because it means that once you call this
- method, the dictionary which is returned will contain all known services,
- even ones registered after you called this method.
- """
- return self._services
-
- def _register_websocket(self, url=None, connect_immediately=True, **ws_kwargs):
- if url not in self._websockets:
- self._websockets[url] = WebSocket(url, connect_immediately=connect_immediately, **ws_kwargs)
- return self._websockets[url]
-
- def get_websocket(self, service_name=None):
- """
- Return the websocket connection to the machine that the specified service
- is running on, or a websocket connection to localhost if the service is
- unknown or not provided.
- """
- for name, service in self._services.items():
- if name == service_name and isinstance(service, WebSocket):
- return service
- else:
- return self._register_websocket()
-
- def __getattr__(self, name):
- return _ServiceDispatcher(self._services, name)
-
-services = _Services()
-
-
-def _rpc_opts(host, service_config=None):
- """
- Sideboard uses client certs for backend service authentication. There's a
- global set of config options which determine the SSL settings we pass to our
- RPC libraries, but sometimes different services require client certs issued
- by different CAs. In those cases, we define a config subsection of the main
- [rpc_services] section to override those settings.
-
- This function takes a hostname and for each config option, it returns either
- the hostname-specific config option if it exists, or the global config option
- if it doesn't. Specifically, this returns a dict of option names/values.
-
- If the service_config parameter is passed, it uses that as the config section
- from which to draw the hostname-specific options. Otherwise it searches
- the [rpc_services] config section for Sideboard and for all Sideboard plugins
- which have a "config" object defined in order to find options for that host.
- """
- from sideboard.internal.imports import plugins
-    if service_config is not None:  # check explicitly for None because service_config might be {}
-        section = service_config
- else:
- rpc_sections = {host: section for host, section in config['rpc_services'].items() if isinstance(section, dict)}
- for plugin in plugins.values():
- plugin_config = getattr(plugin, 'config', None)
- if isinstance(plugin_config, dict):
- rpc_sections.update({host: section for host, section in plugin_config.get('rpc_services', {}).items() if isinstance(section, dict)})
- section = rpc_sections.get(host, {})
-
- opts = {}
- for setting in ['client_key', 'client_cert', 'ca', 'ssl_version']:
- path = section.get(setting, config[setting])
- if path and setting != 'ssl_version':
- assert os.path.exists(path), '{} config option set to path not found on the filesystem: {}'.format(setting, path)
-
- opts[setting] = path
- return opts
-
-
-def _ssl_opts(rpc_opts):
- """
- Given a dict of config options returned by _rpc_opts, return a dict of
- options which can be passed to the ssl module.
- """
- ssl_opts = {
- 'ca_certs': rpc_opts['ca'],
- 'keyfile': rpc_opts['client_key'],
- 'certfile': rpc_opts['client_cert'],
- 'cert_reqs': ssl.CERT_REQUIRED if rpc_opts['ca'] else None,
- 'ssl_version': getattr(ssl, rpc_opts['ssl_version'])
- }
- return {k: v for k, v in ssl_opts.items() if v}
-
-
-def _ws_url(host, rpc_opts):
- """
- Given a hostname and set of config options returned by _rpc_opts, return the
- standard URL websocket endpoint for a Sideboard remote service.
- """
- return '{protocol}://{host}/ws'.format(host=host, protocol='wss' if rpc_opts['ca'] else 'ws')
-
-
-def _register_rpc_services(rpc_services):
- """
- Sideboard has a config file, and it provides a parse_config method for its
- plugins to parse their own config files. In both cases, we check for the
- presence of an [rpc_services] config section, which we use to register any
- services defined there with our sideboard.lib.services API. Note that this
- means a server can provide information about a remote service in either the
- main Sideboard config file OR the config file of any plugin.
-
- This function takes the [rpc_services] config section from either Sideboard
- itself or one of its plugins and registers all remote services found there.
- """
- for service_name, host in rpc_services.items():
- if not isinstance(host, dict):
- rpc_opts = _rpc_opts(host, rpc_services.get(host, {}))
- ssl_opts = _ssl_opts(rpc_opts)
-
- jsonrpc_url = '{protocol}://{host}/jsonrpc'.format(host=host, protocol='https' if rpc_opts['ca'] else 'http')
- jproxy = ServerProxy(jsonrpc_url, ssl_opts=ssl_opts, validate_cert_hostname=bool(rpc_opts['ca']))
-            jservice = getattr(jproxy, service_name)
-            service = services._register_websocket(_ws_url(host, rpc_opts), connect_immediately=False, ssl_opts=ssl_opts)
-            if rpc_services.get(host, {}).get('jsonrpc_only'):
-                service = jservice
-
- services.register(service, service_name, _jsonrpc=jservice, _override=True)
-
-_register_rpc_services(config['rpc_services'])
-
-
-class _SideboardCoreServices(object):
- """
- Location of rpc methods we want Sideboard itself to expose in the "sideboard"
- namespace. Currently this only contains "poll" but we may add more
- methods, especially ones which allow you to list plugins, get version
- numbers, etc.
- """
- def poll(self):
- """empty method which exists only to help keep WebSockets alive"""
- log.debug('sideboard.poll by user %s', threadlocal.get('username'))
-
-services.register(_SideboardCoreServices(), 'sideboard')
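
A plugin would typically register its own top-level module, after which any other plugin can call it by attribute lookup (hypothetical plugin):

    # myplugin/service.py (hypothetical)
    import sys
    from sideboard.lib import services

    def bar():
        return 'Hello World!'

    services.register(sys.modules[__name__], name='foo')

    # elsewhere, in any plugin:
    #   services.foo.bar()  ->  'Hello World!'
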
diff --git a/sideboard/lib/_threads.py b/sideboard/lib/_threads.py
index 4d79c66..0b95fbd 100644
--- a/sideboard/lib/_threads.py
+++ b/sideboard/lib/_threads.py
@@ -1,205 +1,55 @@
from __future__ import unicode_literals
import sys
-import time
-import heapq
-import ctypes
-import platform
+import ctypes, ctypes.util
+import psutil
import traceback
import threading
-from warnings import warn
-from threading import Thread, Timer, Event, Lock
-import six
-from six.moves.queue import Queue, Empty
-
-from sideboard.lib import log, config, on_startup, on_shutdown
from sideboard.debugging import register_diagnostics_status_function
-try:
- import prctl
- import psutil
-except ImportError:
- prctl = psutil = None # For platforms without this support.
-
-
-def _get_linux_thread_tid():
- """
- Get the current linux thread ID as it appears in /proc/[pid]/task/[tid]
- :return: Linux thread ID if available, or -1 if any errors / not on linux
- """
- try:
- if not platform.system().startswith('Linux'):
- raise ValueError('Can only get thread id on Linux systems')
- syscalls = {
- 'i386': 224, # unistd_32.h: #define __NR_gettid 224
- 'x86_64': 186, # unistd_64.h: #define __NR_gettid 186
- }
- syscall_num = syscalls[platform.machine()]
- tid = ctypes.CDLL('libc.so.6').syscall(syscall_num)
- except:
- tid = -1
- return tid
+# Replaces the prior prctl implementation with a direct ctypes call to pthread_setname_np to change thread names
+libpthread_path = ctypes.util.find_library("pthread")
+pthread_setname_np = None
+if libpthread_path:
+ libpthread = ctypes.CDLL(libpthread_path)
+ if hasattr(libpthread, "pthread_setname_np"):
+ pthread_setname_np = libpthread.pthread_setname_np
+ pthread_setname_np.argtypes = [ctypes.c_void_p, ctypes.c_char_p]
+ pthread_setname_np.restype = ctypes.c_int
def _set_current_thread_ids_from(thread):
# thread ID part 1: set externally visible thread name in /proc/[pid]/tasks/[tid]/comm to our internal name
- if prctl and thread.name:
+ if pthread_setname_np and thread.name:
# linux doesn't allow thread names > 15 chars, and we ideally want to see the end of the name.
# attempt to shorten the name if we need to.
shorter_name = thread.name if len(thread.name) < 15 else thread.name.replace('CP Server Thread', 'CPServ')
- prctl.set_name(shorter_name)
-
- # thread ID part 2: capture linux-specific thread ID (TID) and store it with this thread object
- # if TID can't be obtained or system call fails, tid will be -1
- thread.linux_tid = _get_linux_thread_tid()
+ if thread.ident is not None:
+ pthread_setname_np(thread.ident, shorter_name.encode('ASCII'))
-# inject our own code at the start of every thread's start() method which sets the thread name via prctl().
+# inject our own code at the start of every thread's bootstrap which sets the thread name via pthread_setname_np().
# Python thread names will now be shown in external system tools like 'top', '/proc', etc.
def _thread_name_insert(self):
_set_current_thread_ids_from(self)
threading.Thread._bootstrap_inner_original(self)
-if six.PY3:
threading.Thread._bootstrap_inner_original = threading.Thread._bootstrap_inner
threading.Thread._bootstrap_inner = _thread_name_insert
-else:
- threading.Thread._bootstrap_inner_original = threading.Thread._Thread__bootstrap
- threading.Thread._Thread__bootstrap = _thread_name_insert
# set the ID's of the main thread
threading.current_thread().name = 'sideboard_main'
_set_current_thread_ids_from(threading.current_thread())
-class DaemonTask(object):
- def __init__(self, func, interval=None, threads=1, name=None):
- self.lock = Lock()
- self.threads = []
- self.stopped = Event()
- self.func, self.interval, self.thread_count = func, interval, threads
- self.name = name or self.func.__name__
-
- on_startup(self.start)
- on_shutdown(self.stop)
-
- @property
- def running(self):
- return any(t.is_alive() for t in self.threads)
-
- def run(self):
- while not self.stopped.is_set():
- try:
- self.func()
- except:
- log.error('unexpected error', exc_info=True)
-
- interval = config['thread_wait_interval'] if self.interval is None else self.interval
- if interval:
- self.stopped.wait(interval)
-
- def start(self):
- with self.lock:
- if not self.running:
- self.stopped.clear()
- del self.threads[:]
- for i in range(self.thread_count):
- t = Thread(target=self.run)
- t.name = '{}-{}'.format(self.name, i + 1)
- t.daemon = True
- t.start()
- self.threads.append(t)
-
- def stop(self):
- with self.lock:
- if self.running:
- self.stopped.set()
- for i in range(50):
- self.threads[:] = [t for t in self.threads if t.is_alive()]
- if self.threads:
- time.sleep(0.1)
- else:
- break
- else:
- log.warning('not all daemons have been joined: %s', self.threads)
- del self.threads[:]
-
-
-class TimeDelayQueue(Queue):
- def __init__(self, maxsize=0):
- self.delayed = []
- Queue.__init__(self, maxsize)
- self.task = DaemonTask(self._put_and_notify)
-
- def put(self, item, block=True, timeout=None, delay=0):
- Queue.put(self, (delay, item), block, timeout)
-
- def _put(self, item):
- delay, item = item
- if delay:
- if self.task.running:
- heapq.heappush(self.delayed, (time.time() + delay, item))
- else:
- message = 'TimeDelayQueue.put called with a delay parameter without background task having been started'
- log.warning(message)
- warn(message)
- else:
- Queue._put(self, item)
-
- def _put_and_notify(self):
- with self.not_empty:
- while self.delayed:
- when, item = heapq.heappop(self.delayed)
- if when <= time.time():
- Queue._put(self, item)
- self.not_empty.notify()
- else:
- heapq.heappush(self.delayed, (when, item))
- break
-
-
-class Caller(DaemonTask):
- def __init__(self, func, interval=0, threads=1, name=None):
- self.q = Queue()
- DaemonTask.__init__(self, self.call, interval=interval, threads=threads, name=name or func.__name__)
- self.callee = func
-
- def call(self):
- try:
- args, kwargs = self.q.get(timeout=config['thread_wait_interval'])
- self.callee(*args, **kwargs)
- except Empty:
- pass
-
- def defer(self, *args, **kwargs):
- self.q.put([args, kwargs])
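
A sketch of how Caller is meant to be used: wrap a function, let the startup hooks spin up the worker thread, and defer() work onto it (names are illustrative):

    from sideboard.lib import Caller

    def send_email(address):
        print('emailing', address)

    mailer = Caller(send_email)          # started/stopped via on_startup/on_shutdown
    mailer.defer('someone@example.com')  # executes on the background thread
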
-
-
-class GenericCaller(DaemonTask):
- def __init__(self, interval=0, threads=1, name=None):
- DaemonTask.__init__(self, self.call, interval=interval, threads=threads, name=name)
- self.q = Queue()
-
- def call(self):
- try:
- func, args, kwargs = self.q.get(timeout=config['thread_wait_interval'])
- func(*args, **kwargs)
- except Empty:
- pass
-
- def defer(self, func, *args, **kwargs):
- self.q.put([func, args, kwargs])
-
-
def _get_thread_current_stacktrace(thread_stack, thread):
out = []
- linux_tid = getattr(thread, 'linux_tid', -1)
status = '[unknown]'
- if psutil and linux_tid != -1:
- status = psutil.Process(linux_tid).status()
+    if psutil and thread.native_id is not None:
+        status = psutil.Process(thread.native_id).status()
out.append('\n--------------------------------------------------------------------------')
out.append('# Thread name: "%s"\n# Python thread.ident: %d\n# Linux Thread PID (TID): %d\n# Run Status: %s'
- % (thread.name, thread.ident, linux_tid, status))
+ % (thread.name, thread.ident, thread.native_id, status))
for filename, lineno, name, line in traceback.extract_stack(thread_stack):
out.append('File: "%s", line %d, in %s' % (filename, lineno, name))
if line:
diff --git a/sideboard/lib/_utils.py b/sideboard/lib/_utils.py
index 3f774bb..1225d0e 100644
--- a/sideboard/lib/_utils.py
+++ b/sideboard/lib/_utils.py
@@ -1,29 +1,6 @@
from __future__ import unicode_literals
-import os
import json
-from functools import wraps
from datetime import datetime, date
-from contextlib import contextmanager
-from threading import RLock, Condition, current_thread
-from collections.abc import Sized, Iterable, Mapping
-from collections import defaultdict
-
-
-def is_listy(x):
- """
- returns a boolean indicating whether the passed object is "listy",
- which we define as a sized iterable which is not a map or string
- """
- return isinstance(x, Sized) and isinstance(x, Iterable) and not isinstance(x, (Mapping, type(b''), type('')))
-
-
-def listify(x):
- """
- returns a list version of x if x is a non-string iterable, otherwise
- returns a list with x as its only element
- """
- return list(x) if is_listy(x) else [x]
-
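
For example:

    is_listy([1, 2])     # True
    is_listy('abc')      # False: strings are excluded
    is_listy({'a': 1})   # False: mappings are excluded
    listify('abc')       # ['abc']
    listify((1, 2))      # [1, 2]
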
class serializer(json.JSONEncoder):
"""
@@ -72,56 +49,6 @@ def register(cls, type, preprocessor):
serializer.register(set, lambda s: sorted(list(s)))
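
The same hook extends the serializer to arbitrary types; an illustrative registration (not one the module ships):

    import json
    from decimal import Decimal

    serializer.register(Decimal, str)   # serialize Decimals as strings
    json.dumps({'price': Decimal('1.50')}, cls=serializer)
    # -> '{"price": "1.50"}'
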
-def cached_property(func):
- """decorator for making readonly, memoized properties"""
- pname = '_cached_{}'.format(func.__name__)
-
- @property
- @wraps(func)
- def caching(self, *args, **kwargs):
- if not hasattr(self, pname):
- setattr(self, pname, func(self, *args, **kwargs))
- return getattr(self, pname)
- return caching
-
-
-def request_cached_property(func):
- """
- Sometimes we want a property to be cached for the duration of a request,
- with concurrent requests each having their own cached version. This does
- that via the threadlocal class, such that each HTTP request CherryPy serves
- and each RPC request served via websocket or JSON-RPC will have its own
- cached value, which is cleared and then re-generated on later requests.
- """
- from sideboard.lib import threadlocal
- name = func.__module__ + '.' + func.__name__
-
- @property
- @wraps(func)
- def with_caching(self):
- val = threadlocal.get(name)
- if val is None:
- val = func(self)
- threadlocal.set(name, val)
- return val
- return with_caching
-
-
-class _class_property(property):
- def __get__(self, cls, owner):
- return self.fget.__get__(None, owner)()
-
-
-def class_property(cls):
- """
- For whatever reason, the @property decorator isn't smart enough to recognize
- classmethods and behave differently on them than on instance methods. This
- property may be used to create a class-level property, useful for singletons
- and other one-per-class properties. Class properties are read-only.
- """
- return _class_property(classmethod(cls))
-
-
def entry_point(func):
"""
Decorator used to define entry points for command-line scripts. Sideboard
@@ -151,113 +78,3 @@ def some_action():
return func
_entry_points = {}
-
-
-class RWGuard(object):
- """
- This utility class provides the ability to perform read/write locking, such
- that we can have any number of readers OR a single writer. We give priority
- to writers, who will get the lock before any readers.
-
- These locks are reentrant, meaning that the same thread can acquire a read
- or write lock multiple times, and will then need to release the lock the
- same number of times it was acquired. A thread with an acquired read lock
- cannot acquire a write lock, or vice versa. Locks can only be released by
- the threads which acquired them.
-
- This class is named RWGuard rather than RWLock because it is not itself a
- lock, e.g. it doesn't have an acquire method, it cannot be directly used as
- a context manager, etc.
- """
- def __init__(self):
- self.lock = RLock()
- self.waiting_writer_count = 0
- self.acquired_writer = defaultdict(int)
- self.acquired_readers = defaultdict(int)
- self.ready_for_reads = Condition(self.lock)
- self.ready_for_writes = Condition(self.lock)
-
- @property
- @contextmanager
- def read_locked(self):
- """
- Context manager which acquires a read lock on entrance and releases it
- on exit. Any number of threads may acquire a read lock.
- """
- self.acquire_for_read()
- try:
- yield
- finally:
- self.release()
-
- @property
- @contextmanager
- def write_locked(self):
- """
- Context manager which acquires a write lock on entrance and releases it
- on exit. Only one thread may acquire a write lock at a time.
- """
- self.acquire_for_write()
- try:
- yield
- finally:
- self.release()
-
- def acquire_for_read(self):
- """
- NOTE: consumers are encouraged to use the "read_locked" context manager
- instead of this method where possible.
-
- This method acquires the read lock for the current thread, blocking if
- necessary until there are no other threads with the write lock acquired
- or waiting for the write lock to be available.
- """
- tid = current_thread().ident
- assert tid not in self.acquired_writer, 'Threads which have already acquired a write lock may not lock for reading'
- with self.lock:
- while self.acquired_writer or (self.waiting_writer_count and tid not in self.acquired_readers):
- self.ready_for_reads.wait()
- self.acquired_readers[tid] += 1
-
- def acquire_for_write(self):
- """
- NOTE: consumers are encouraged to use the "write_locked" context manager
- instead of this method where possible.
-
- This method acquires the write lock for the current thread, blocking if
- necessary until no other threads have the write lock acquired and no
- thread has the read lock acquired.
- """
- tid = current_thread().ident
- assert tid not in self.acquired_readers, 'Threads which have already acquired a read lock may not lock for writing'
- with self.lock:
- while self.acquired_readers or (self.acquired_writer and tid not in self.acquired_writer):
- self.waiting_writer_count += 1
- self.ready_for_writes.wait()
- self.waiting_writer_count -= 1
- self.acquired_writer[tid] += 1
-
- def release(self):
- """
- Release the read or write lock held by the current thread. Since these
- locks are reentrant, this method must be called once for each time the
- lock was acquired. This method raises an exception if called by a
- thread with no read or write lock acquired.
- """
- tid = current_thread().ident
- assert tid in self.acquired_readers or tid in self.acquired_writer, 'this thread does not hold a read or write lock'
- with self.lock:
- for counts in [self.acquired_readers, self.acquired_writer]:
- counts[tid] -= 1
- if counts[tid] <= 0:
- del counts[tid]
-
- wake_readers = not self.waiting_writer_count
- wake_writers = self.waiting_writer_count and not self.acquired_readers
-
- if wake_writers:
- with self.ready_for_writes:
- self.ready_for_writes.notify()
- elif wake_readers:
- with self.ready_for_reads:
- self.ready_for_reads.notify_all()
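
Typical RWGuard usage goes through the two context-manager properties (sketch):

    guard = RWGuard()
    shared = {}

    def lookup(key):
        with guard.read_locked:    # any number of concurrent readers
            return shared.get(key)

    def store(key, value):
        with guard.write_locked:   # one writer, and only with no readers
            shared[key] = value
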
diff --git a/sideboard/lib/_websockets.py b/sideboard/lib/_websockets.py
deleted file mode 100644
index 4868565..0000000
--- a/sideboard/lib/_websockets.py
+++ /dev/null
@@ -1,638 +0,0 @@
-from __future__ import unicode_literals
-import os
-import sys
-import json
-from copy import deepcopy
-from itertools import count
-from threading import RLock, Event
-from datetime import datetime, timedelta
-from collections.abc import Mapping, MutableMapping
-
-import six
-from ws4py.client.threadedclient import WebSocketClient
-
-import sideboard.lib
-from sideboard.lib import log, config, stopped, on_startup, on_shutdown, DaemonTask, Caller
-
-
-class _WebSocketClientDispatcher(WebSocketClient):
- def __init__(self, dispatcher, url, ssl_opts=None):
- self.connected = False
- self.dispatcher = dispatcher
- WebSocketClient.__init__(self, url, ssl_options=ssl_opts)
-
- def pre_connect(self):
- pass
-
- def connect(self, *args, **kwargs):
- self.pre_connect()
- WebSocketClient.connect(self, *args, **kwargs)
- self.connected = True
-
- def close(self, code=1000, reason=''):
- try:
- WebSocketClient.close(self, code=code, reason=reason)
- except:
- pass
- try:
- WebSocketClient.close_connection(self)
- except:
- pass
- self.connected = False
-
- def send(self, data):
- log.debug('sending %s', data)
- assert self.connected, 'tried to send data on closed websocket {!r}'.format(self.url)
- if isinstance(data, Mapping):
- data = json.dumps(data)
- return WebSocketClient.send(self, data)
-
- def received_message(self, message):
- message = message.data if isinstance(message.data, six.text_type) else message.data.decode('utf-8')
- log.debug('received %s', message)
- try:
- message = json.loads(message)
- except:
- log.debug('failed to parse incoming message', exc_info=True)
- finally:
- self.dispatcher.defer(message)
-
-
-class _Subscriber(object):
- def __init__(self, method, src_client, dst_client, src_ws, dest_ws):
- self.method, self.src_ws, self.dest_ws, self.src_client, self.dst_client = method, src_ws, dest_ws, src_client, dst_client
-
- def unsubscribe(self):
- self.dest_ws.unsubscribe(self.dst_client)
-
- def callback(self, data):
- self.src_ws.send(data=data, client=self.src_client)
-
- def errback(self, error):
- self.src_ws.send(error=error, client=self.src_client)
-
- def __call__(self, *args, **kwargs):
- self.dest_ws.subscribe({
- 'client': self.dst_client,
- 'callback': self.callback,
- 'errback': self.errback
- }, self.method, *args, **kwargs)
- return self.src_ws.NO_RESPONSE
-
- def __del__(self):
- self.unsubscribe()
-
-
-class WebSocket(object):
- """
- Utility class for making websocket connections. This improves on the ws4py
- websocket client classes mainly by adding several features:
- - automatically detecting dead connections and re-connecting
- - utility methods for making synchronous rpc calls and for making
- asynchronous subscription calls with callbacks
- - adding locking to make sending messages thread-safe
- """
- poll_method = 'sideboard.poll'
- WebSocketDispatcher = _WebSocketClientDispatcher
-
- def __init__(self, url=None, ssl_opts=None, connect_immediately=True, max_wait=2):
- self.ws = None
- self.url = url or 'ws://127.0.0.1:{}/ws'.format(config['cherrypy']['server.socket_port'])
- self._lock = RLock()
- self._callbacks = {}
- self._counter = count()
- self.ssl_opts = ssl_opts
- self._reconnect_attempts = 0
- self._last_poll, self._last_reconnect_attempt = None, None
- self._dispatcher = Caller(self._dispatch, threads=1)
- self._checker = DaemonTask(self._check, interval=1)
- if connect_immediately:
- self.connect(max_wait=max_wait)
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- self.close()
-
- def preprocess(self, method, params):
- """
- Each message we send has its parameters passed to this function and
- the actual parameters sent are whatever this function returns. By
- default this just returns the message unmodified, but plugins can
- override this to add whatever logic is needed. We pass the method
- name in its full "service.method" form in case the logic depends on
- the service being invoked.
- """
- return params
-
- @property
- def _should_reconnect(self):
- interval = min(config['ws.reconnect_interval'], 2 ** self._reconnect_attempts)
- cutoff = datetime.now() - timedelta(seconds=interval)
- return not self.connected and (self._reconnect_attempts == 0 or self._last_reconnect_attempt < cutoff)
-
- @property
- def _should_poll(self):
- cutoff = datetime.now() - timedelta(seconds=config['ws.poll_interval'])
- return self.connected and (self._last_poll is None or self._last_poll < cutoff)
-
- def _check(self):
- if self._should_reconnect:
- self._reconnect()
- if self._should_poll:
- self._poll()
-
- def _poll(self):
- assert self.ws and self.ws.connected, 'cannot poll while websocket is not connected'
- try:
- self.call(self.poll_method)
- except:
- log.warning('no poll response received from %s, closing connection, will attempt to reconnect', self.url, exc_info=True)
- self.ws.close()
- else:
- self._last_poll = datetime.now()
-
- def _refire_subscriptions(self):
- try:
- for cb in self._callbacks.values():
- if 'client' in cb:
- params = cb['paramback']() if 'paramback' in cb else cb['params']
- self._send(method=cb['method'], params=params, client=cb['client'])
- except:
- pass # self._send() already closes and logs on error
-
- def _reconnect(self):
- with self._lock:
- assert not self.connected, 'connection is still active'
- try:
- self.ws = self.WebSocketDispatcher(self._dispatcher, self.url, ssl_opts=self.ssl_opts)
- self.ws.connect()
- except Exception as e:
- log.warning('failed to connect to %s: %s', self.url, str(e))
- self._last_reconnect_attempt = datetime.now()
- self._reconnect_attempts += 1
- else:
- self._reconnect_attempts = 0
- self._refire_subscriptions()
-
- def _next_id(self, prefix):
- return '{}-{}'.format(prefix, next(self._counter))
-
- def _send(self, **kwargs):
- log.debug('sending %s', kwargs)
- with self._lock:
- assert self.connected, 'tried to send data on closed websocket {!r}'.format(self.url)
- try:
- return self.ws.send(kwargs)
- except:
- log.warning('failed to send %s on %s, closing websocket and will attempt to reconnect', kwargs, self.url)
- self.ws.close()
- raise
-
- def _dispatch(self, message):
- log.debug('dispatching %s', message)
- try:
- assert isinstance(message, Mapping), 'incoming message is not a dictionary'
- assert 'client' in message or 'callback' in message, 'no callback or client in message {}'.format(message)
- id = message.get('client') or message.get('callback')
- assert id in self._callbacks, 'unknown dispatchee {}'.format(id)
- except AssertionError:
- self.fallback(message)
- else:
- if 'error' in message:
- self._callbacks[id]['errback'](message['error'])
- else:
- self._callbacks[id]['callback'](message.get('data'))
-
- def fallback(self, message):
- """
- Handler method which is called for incoming websocket messages which
- aren't valid responses to an outstanding call or subscription. By
- default this just logs an error message. You can override this by
-        subclassing this class, or just by assigning a handler method, e.g.
-
- >>> ws = WebSocket()
- >>> ws.fallback = some_handler_function
- >>> ws.connect()
- """
- _, exc, _ = sys.exc_info()
- log.error('no callback registered for message %s, message ignored: %s', message, exc)
-
- @property
- def connected(self):
- """boolean indicating whether or not this connection is currently active"""
- return bool(self.ws) and self.ws.connected
-
- def connect(self, max_wait=0):
- """
- Start the background threads which connect this websocket and handle RPC
- dispatching. This method is safe to call even if the websocket is already
- connected. You may optionally pass a max_wait parameter if you want to
- wait for up to that amount of time for the connection to go through; if
- that amount of time elapses without successfully connecting, a warning
- message is logged.
- """
- self._checker.start()
- self._dispatcher.start()
- for i in range(10 * max_wait):
- if not self.connected:
- stopped.wait(0.1)
- else:
- break
- else:
- if max_wait:
- log.warning('websocket %s not connected after %s seconds', self.url, max_wait)
-
- def close(self):
- """
- Closes the underlying websocket connection and stops background tasks.
- This method is always safe to call; exceptions will be swallowed and
- logged, and calling close on an already-closed websocket is a no-op.
- """
- self._checker.stop()
- self._dispatcher.stop()
- if self.ws:
- self.ws.close()
-
- def subscribe(self, callback, method, *args, **kwargs):
- """
- Send a websocket request which you expect to subscribe you to a channel
- with a callback which will be called every time there is new data, and
- return the client id which uniquely identifies this subscription.
-
- Callback may be either a function or a dictionary in the form
- {
-            'callback': <function>,
-            'errback': <function>,    # optional
-            'paramback': <function>,  # optional
-            'client': <client id>    # optional
- }
- Both callback and errback take a single argument; for callback, this is
-        the return value of the method; for errback it is the error message
-        returned. If no errback is specified, we will log errors at the ERROR
- level and do nothing further.
-
- The paramback function exists for subscriptions where we might want to
- pass different parameters every time we reconnect. This might be used
- for e.g. time-based parameters. This function takes no arguments and
- returns the parameters which should be passed every time we connect
- and fire (or re-fire) all of our subscriptions.
-
- The client id is automatically generated if omitted, and you should not
- set this yourself unless you really know what you're doing.
-
- The positional and keyword arguments passed to this function will be
- used as the arguments to the remote method, unless paramback is passed,
- in which case that will be used to generate the params, and args/kwargs
- will be ignored.
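-
-        For example, subscribing with a paramback so that fresh parameters are
-        generated on every (re)connect (the method name is hypothetical):
-
-        >>> client = ws.subscribe({
-        ...     'callback': lambda data: log.info('got %s', data),
-        ...     'paramback': lambda: {'since': datetime.now().isoformat()}
-        ... }, 'foo.recent_changes')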
- """
- client = self._next_id('client')
- if isinstance(callback, Mapping):
- assert 'callback' in callback, 'callback is required'
- client = callback.setdefault('client', client)
- self._callbacks[client] = callback
- else:
- self._callbacks[client] = {
- 'client': client,
- 'callback': callback
- }
-
- paramback = self._callbacks[client].get('paramback')
- params = self.preprocess(method, paramback() if paramback else (args or kwargs))
- self._callbacks[client].setdefault('errback', lambda result: log.error('%s(*%s, **%s) returned an error: %s', method, args, kwargs, result))
- self._callbacks[client].update({
- 'method': method,
- 'params': params
- })
-
- try:
- self._send(method=method, params=params, client=client)
- except:
- log.warning('initial subscription to %s at %s failed, will retry on reconnect', method, self.url)
-
- return client
-
- def unsubscribe(self, client):
- """
- Cancel the websocket subscription identified by the specified client id.
- This id is returned from the subscribe() method, e.g.
-
- >>> client = ws.subscribe(some_callback, 'foo.some_function')
- >>> ws.unsubscribe(client)
- """
- self._callbacks.pop(client, None)
- try:
- self._send(action='unsubscribe', client=client)
- except:
- pass
-
- def call(self, method, *args, **kwargs):
- """
- Send a websocket rpc method call, then wait for and return the eventual
- response, or raise an exception if we get back an error. This method
-        will raise an AssertionError if no response of any kind is received
-        within the configured ws.call_timeout. The positional and keyword
-        arguments to this method are used as the arguments to the rpc function
-        call.
- """
- finished = Event()
- result, error = [], []
- callback = self._next_id('callback')
- self._callbacks[callback] = {
- 'callback': lambda response: (result.append(response), finished.set()),
- 'errback': lambda response: (error.append(response), finished.set())
- }
- params = self.preprocess(method, args or kwargs)
- try:
- self._send(method=method, params=params, callback=callback)
- except:
- self._callbacks.pop(callback, None)
- raise
-
- wait_until = datetime.now() + timedelta(seconds=config['ws.call_timeout'])
- while datetime.now() < wait_until:
- finished.wait(0.1)
- if stopped.is_set() or result or error:
- break
- self._callbacks.pop(callback, None)
- assert not stopped.is_set(), 'websocket closed before response was received'
-        assert result, error[0] if error else 'no response received within {} seconds'.format(config['ws.call_timeout'])
- return result[0]
-
- def make_caller(self, method):
- """
- Returns a function which calls the specified method; useful for creating
- callbacks, e.g.
-
- >>> authenticate = ws.make_caller('auth.authenticate')
- >>> authenticate('username', 'password')
- True
-
- Sideboard supports "passthrough subscriptions", e.g.
- -> a browser makes a subscription for the "foo.bar" method
- -> the server has "foo" registered as a remote service
- -> the server creates its own subscription to "foo.bar" on the remote
- service and passes all results back to the client as they arrive
-
- This method implements that by checking whether it was called from a
- thread with an active websocket as part of a subscription request. If
- so then in addition to returning a callable, it also registers the
- new subscription with the client websocket so it can be cleaned up when
- the client websocket closes and/or when its subscription is canceled.
- """
- client = sideboard.lib.threadlocal.get_client()
- originating_ws = sideboard.lib.threadlocal.get('websocket')
- if client and originating_ws:
- sub = originating_ws.passthru_subscriptions.get(client)
- if sub:
- sub.method = method
- else:
- sub = _Subscriber(method=method, src_client=client, dst_client=self._next_id('client'), src_ws=originating_ws, dest_ws=self)
- originating_ws.passthru_subscriptions[client] = sub
- return sub
- else:
- return lambda *args, **kwargs: self.call(method, *args, **kwargs)
-
-
-class Model(MutableMapping):
- """
- Utility class for representing database objects found in the databases of
- other Sideboard plugins. Instances of this class can have their values accessed
- as either attributes or dictionary keys.
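-
-    For example (the prefix and field names are hypothetical):
-
-    >>> m = Model({'extra_data': {}}, prefix='myplugin')
-    >>> m.foo = 'bar'   # stored in extra_data under 'myplugin_foo'
-    >>> m['foo']
-    'bar'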
- """
- _prefix = None
- _unpromoted = ()
- _defaults = None
-
- def __init__(self, data, prefix=None, unpromoted=None, defaults=None):
- assert prefix or self._prefix
- object.__setattr__(self, '_data', deepcopy(data))
- object.__setattr__(self, '_orig_data', deepcopy(data))
- object.__setattr__(self, '_prefix', (prefix or self._prefix) + '_')
- object.__setattr__(self, '_project_key', self._prefix + 'data')
- object.__setattr__(self, '_unpromoted', self._unpromoted if unpromoted is None else unpromoted)
- object.__setattr__(self, '_defaults', defaults or self._defaults or {})
-
- @property
- def query(self):
- assert self.id, 'id was not set'
- assert self._model, '_model was not set'
- return {'_model': self._model, 'field': 'id', 'value': self.id}
-
- @property
- def dirty(self):
- return {k: v for k, v in self._data.items() if v != self._orig_data.get(k)}
-
- def to_dict(self):
- data = deepcopy(self._data)
- serialized = {k: v for k, v in data.pop(self._project_key, {}).items()}
- for k in list(data.get('extra_data', {}).keys()):
- if k.startswith(self._prefix):
- serialized[k[len(self._prefix):]] = data['extra_data'].pop(k)
- elif k in self._unpromoted:
- serialized[k] = data['extra_data'].pop(k)
- serialized.update(data)
- return serialized
-
- @property
- def _extra_data(self):
- return self._data.setdefault('extra_data', {})
-
- def _extra_data_key(self, key):
- return ('' if key in self._unpromoted else self._prefix) + key
-
- def __len__(self):
- return len(self._data) + len(self._extra_data) + len(self._data.get(self._project_key, {}))
-
- def __setitem__(self, key, value):
- assert key != 'id' or value == self.id, 'id is not settable'
- if key in self._data:
- self._data[key] = value
- elif self._project_key in self._data:
- self._extra_data.pop(self._prefix + key, None)
- self._data[self._project_key][key] = value
- else:
- self._extra_data[self._extra_data_key(key)] = value
-
- def __getitem__(self, key):
- if key in self._data:
- return self._data[key]
- elif key in self._data.get(self._project_key, {}):
- return self._data[self._project_key][key]
- else:
- return self._extra_data.get(self._extra_data_key(key), self._defaults.get(key))
-
- def __delitem__(self, key):
- if key in self._data:
- del self._data[key]
- elif key in self._data.get(self._project_key, {}):
- del self._data[self._project_key][key]
- else:
- self._extra_data.pop(self._extra_data_key(key), None)
-
- def __iter__(self):
- return iter(k for k in self.to_dict() if k != 'extra_data')
-
- def __repr__(self):
- return repr(dict(self.items()))
-
- def __getattr__(self, name):
- return self.__getitem__(name)
-
- def __setattr__(self, name, value):
- return self.__setitem__(name, value)
-
- def __delattr__(self, name):
- self.__delitem__(name)
-
-
-class Subscription(object):
- """
- Utility class for opening a websocket to a given destination, subscribing to an rpc call,
- and processing the response.
-
- >>> logged_in_users = Subscription('admin.get_logged_in_users')
- >>> logged_in_users.result # this will always be the latest return value of your rpc method
-
- If you want to do postprocessing on the results, you can override the "callback" method:
-
- >>> class UserList(Subscription):
- ... def __init__(self):
- ... self.usernames = []
- ... Subscription.__init__(self, 'admin.get_logged_in_users')
- ...
- ... def callback(self, users):
- ... self.usernames = [user['username'] for user in users]
- ...
- >>> users = UserList()
-
- The above code gives you a "users" object with a "usernames" attribute; when Sideboard
- starts, it opens a websocket connection to whichever remote server defines the "admin"
- service (as defined in the rpc_services config section), then subscribes to the
- "admin.get_logged_in_users" method and calls the "callback" method on every response.
- """
-
- def __init__(self, rpc_method, *args, **kwargs):
- self.result = None
- connect_immediately = kwargs.pop('connect_immediately', False)
- self.method, self.args, self.kwargs = rpc_method, args, kwargs
- self.ws = sideboard.lib.services.get_websocket(rpc_method.split('.')[0])
- on_startup(self._subscribe)
- on_shutdown(self._unsubscribe)
- if connect_immediately:
- self.ws.connect(max_wait=2)
- self._subscribe()
-
- def _subscribe(self):
- self._client_id = self.ws.subscribe(self._callback, self.method, *self.args, **self.kwargs)
-
- def _unsubscribe(self):
- self.ws.unsubscribe(self._client_id)
-
- def refresh(self):
- """
- re-fire your subscription method and invoke the callback method with
- the response; this will manually check for changes if you are
- subscribed to a method which by design doesn't re-fire on every change
- """
- assert self.ws.connected, 'cannot refresh {}: websocket not connected'.format(self.method)
- self._callback(self.ws.call(self.method, *self.args, **self.kwargs))
-
- def _callback(self, response_data):
- self.result = response_data
- self.callback(response_data)
-
- def callback(self, response_data):
- """override this to define what to do with your rpc method return values"""
-
-
-class MultiSubscription(object):
- """
- A version of the Subscription utility class which subscribes to an arbitrary
- number of remote servers and aggregates the results from each. You invoke
-    this similarly to the Subscription class, with two main differences:
-
- 1) The first parameter is a list of hostnames to which we should connect.
- Each hostname will have a websocket registered for it if one does not
- already exist, using the standard config options under [rpc_services].
-
- 2) Unlike the Subscription class, we do not support the connect_immediately
- parameter. Because this class looks in the [rpc_services] config section
- of every plugin to find the client cert settings, we need to wait for all
- plugins to be loaded before trying to connect.
-
- Like the Subscription class, you can instantiate this class directly, e.g.
-
- >>> logged_in_users = MultiSubscription(['host1', 'host2'], 'admin.get_logged_in_users')
- >>> logged_in_users.results # this will always be the latest return values of your rpc method
-
- The "results" attribute is a dictionary whose keys are the websocket objects
- used to connect to each host, and whose values are the latest return values
- from each of those websockets. Hosts for which we have not yet received a
- response will have no key/value pair in the "results" dictionary.
-
- If you want to do postprocessing on the results, you can subclass this and
- override the "callback" method, e.g.
-
- >>> class UserList(MultiSubscription):
- ... def __init__(self):
- ... self.usernames = set()
- ... MultiSubscription.__init__(self, ['host1', 'host2'], 'admin.get_logged_in_users')
- ...
- ... def callback(self, users, ws):
- ... self.usernames.update(user['username'] for user in users)
- ...
- >>> users = UserList()
-
- The above code gives you a "users" object with a "usernames" attribute; when Sideboard
- starts, it opens websocket connections to 'host1' and 'host2', then subscribes to the
- "admin.get_logged_in_users" method and calls the "callback" method on every response.
- """
- def __init__(self, hostnames, rpc_method, *args, **kwargs):
- from sideboard.lib import listify
- self.hostnames, self.method, self.args, self.kwargs = listify(hostnames), rpc_method, args, kwargs
- self.results, self.websockets, self._client_ids = {}, {}, {}
- on_startup(self._subscribe)
- on_shutdown(self._unsubscribe)
-
- def _websocket(self, url, ssl_opts):
- from sideboard.lib import services
- return services._register_websocket(url, ssl_opts=ssl_opts)
-
- def _subscribe(self):
- from sideboard.lib._services import _ws_url, _rpc_opts, _ssl_opts
- for hostname in self.hostnames:
- rpc_opts = _rpc_opts(hostname)
- self.websockets[hostname] = self._websocket(_ws_url(hostname, rpc_opts), _ssl_opts(rpc_opts))
-
- for ws in self.websockets.values():
- self._client_ids[ws] = ws.subscribe(self._make_callback(ws), self.method, *self.args, **self.kwargs)
-
- def _unsubscribe(self):
- for ws in self.websockets.values():
- ws.unsubscribe(self._client_ids.get(ws))
-
- def _make_callback(self, ws):
- return lambda result_data: self._callback(result_data, ws)
-
- def _callback(self, response_data, ws):
- self.results[ws] = response_data
- self.callback(response_data, ws)
-
- def callback(self, result_data, ws):
- """override this to define what to do with your rpc method return values"""
-
- def refresh(self):
- """
- Sometimes we want to manually re-fire all of our subscription methods to
- get the latest data. This is useful in cases where the remote server
- isn't necessarily programmed to always push the latest data as soon as
- it's available, usually for performance reasons. This method allows the
- client to get the latest data more often than the server is programmed
- to provide it.
- """
- for ws in self.websockets.values():
- try:
-                self._callback(ws.call(self.method, *self.args, **self.kwargs), ws)
- except:
- log.warning('failed to fetch latest data from %s on %s', self.method, ws.url)
diff --git a/sideboard/lib/sa/__init__.py b/sideboard/lib/sa/__init__.py
deleted file mode 100644
index dd8dce7..0000000
--- a/sideboard/lib/sa/__init__.py
+++ /dev/null
@@ -1,331 +0,0 @@
-from __future__ import unicode_literals
-import re
-import json
-import uuid
-import types
-import inspect
-
-import six
-import sqlalchemy
-from sqlalchemy import event
-from sqlalchemy.ext import declarative
-from sqlalchemy.dialects import postgresql
-from sqlalchemy.orm import Query, sessionmaker, configure_mappers
-from sqlalchemy.orm.decl_base import _declarative_constructor
-from sqlalchemy.types import TypeDecorator, String, DateTime, CHAR, Unicode
-
-from sideboard.lib import log, config
-
-__all__ = ['UUID', 'JSON', 'CoerceUTF8', 'declarative_base', 'SessionManager',
- 'CrudException', 'crudable', 'crud_validation', 'text_length_validation', 'regex_validation']
-
-
-def _camelcase_to_underscore(value):
- """ Converts camelCase string to underscore_separated (aka joined_lower).
-
- >>> _camelcase_to_underscore('fooBarBaz')
- 'foo_bar_baz'
- >>> _camelcase_to_underscore('fooBarBazXYZ')
- 'foo_bar_baz_xyz'
- """
- s1 = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', value)
- return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
-
-
-def _underscore_to_camelcase(value, cap_segment=None):
- """ Converts underscore_separated string (aka joined_lower) into camelCase string.
-
- >>> _underscore_to_camelcase('foo_bar_baz')
- 'FooBarBaz'
- >>> _underscore_to_camelcase('foo_bar_baz', cap_segment=0)
- 'FOOBarBaz'
- >>> _underscore_to_camelcase('foo_bar_baz', cap_segment=1)
- 'FooBARBaz'
- >>> _underscore_to_camelcase('foo_bar_baz', cap_segment=1000)
- 'FooBarBaz'
- """
- return "".join([s.title() if idx != cap_segment else s.upper() for idx, s in enumerate(value.split('_'))])
-
-
-class CoerceUTF8(TypeDecorator):
- """
- Safely coerce Python bytestrings to Unicode
- before passing off to the database.
- """
- impl = Unicode
- cache_ok = True
-
- def process_bind_param(self, value, dialect):
- if isinstance(value, type(b'')):
- value = value.decode('utf-8')
- return value
-
-
-class UUID(TypeDecorator):
- """
- Platform-independent UUID type.
- Uses Postgresql's UUID type, otherwise uses
- CHAR(32), storing as stringified hex values.
- """
- impl = CHAR
- cache_ok = True
-
- def load_dialect_impl(self, dialect):
- if dialect.name == 'postgresql':
- return dialect.type_descriptor(postgresql.UUID())
- else:
- return dialect.type_descriptor(CHAR(32))
-
- def process_bind_param(self, value, dialect):
- if value is None:
- return value
- elif dialect.name == 'postgresql':
- return str(value)
- else:
- if not isinstance(value, uuid.UUID):
- return uuid.UUID(value).hex
- else:
- return value.hex
-
- def process_result_value(self, value, dialect):
- if value is None:
- return value
- else:
- return str(uuid.UUID(value))
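-
-# A column usage sketch for the UUID type (the model and column names are
-# hypothetical):
-#
-#     class Account(Base):
-#         id = Column(UUID(), primary_key=True, default=lambda: str(uuid.uuid4()))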
-
-
-class JSON(TypeDecorator):
- impl = String
- cache_ok = True
-
- def __init__(self, comparator=None):
- self.comparator = comparator
- super(JSON, self).__init__()
-
- def process_bind_param(self, value, dialect):
- if value is None:
- return None
- elif isinstance(value, six.string_types):
- return value
- else:
- return json.dumps(value)
-
- def process_result_value(self, value, dialect):
- if value is None:
- return None
- return json.loads(str(value))
-
- def copy_value(self, value):
- if self.mutable:
- return json.loads(json.dumps(value))
- else:
- return value
-
- def compare_values(self, x, y):
- if self.comparator:
- return self.comparator(x, y)
- else:
- return x == y
-
-
-try:
- from pytz import UTC
-except ImportError:
- pass
-else:
- class UTCDateTime(TypeDecorator):
- impl = DateTime
- cache_ok = True
-
- def process_bind_param(self, value, engine):
- if value is not None:
- return value.astimezone(UTC).replace(tzinfo=None)
-
- def process_result_value(self, value, engine):
- if value is not None:
- return value.replace(tzinfo=UTC)
-
- __all__.append('UTCDateTime')
-
-
-def check_constraint_naming_convention(constraint, table):
- """Creates a unique name for an unnamed CheckConstraint.
-
- The generated name is the SQL text of the CheckConstraint with
- non-alphanumeric, non-underscore operators converted to text, and all
- other non-alphanumeric, non-underscore substrings replaced by underscores.
-
- If the generated name is longer than 32 characters, a uuid5 based on the
- generated name will be returned instead.
-
- >>> check_constraint_naming_convention(CheckConstraint('failed_logins > 3'), Table('account', MetaData()))
- 'failed_logins_gt_3'
-
- See: http://docs.sqlalchemy.org/en/latest/core/constraints.html#configuring-constraint-naming-conventions
- """
- # The text of the replacements doesn't matter, so long as it's unique
- replacements = [
- ('||/', 'cr'), ('<=', 'le'), ('>=', 'ge'), ('<>', 'nq'), ('!=', 'ne'),
- ('||', 'ct'), ('<<', 'ls'), ('>>', 'rs'), ('!!', 'fa'), ('|/', 'sr'),
- ('@>', 'cn'), ('<@', 'cb'), ('&&', 'an'), ('<', 'lt'), ('=', 'eq'),
- ('>', 'gt'), ('!', 'ex'), ('"', 'qt'), ('#', 'hs'), ('$', 'dl'),
- ('%', 'pc'), ('&', 'am'), ('\'', 'ap'), ('(', 'lpr'), (')', 'rpr'),
- ('*', 'as'), ('+', 'pl'), (',', 'cm'), ('-', 'da'), ('.', 'pd'),
- ('/', 'sl'), (':', 'co'), (';', 'sc'), ('?', 'qn'), ('@', 'at'),
- ('[', 'lbk'), ('\\', 'bs'), (']', 'rbk'), ('^', 'ca'), ('`', 'tk'),
- ('{', 'lbc'), ('|', 'pi'), ('}', 'rbc'), ('~', 'td')]
-
- constraint_name = str(constraint.sqltext).strip()
- for operator, text in replacements:
- constraint_name = constraint_name.replace(operator, text)
-
- constraint_name = re.sub(r'[\W\s]+', '_', constraint_name)
- if len(constraint_name) > 32:
- constraint_name = uuid.uuid5(uuid.NAMESPACE_OID, str(constraint_name)).hex
- return constraint_name
-
-
-def declarative_base(*orig_args, **orig_kwargs):
- """
- Replacement for SQLAlchemy's declarative_base, which adds these features:
- 1) This is a decorator.
- 2) This allows your base class to set a constructor.
- 3) This provides a default constructor which automatically sets defaults
- instead of waiting to do that until the object is committed.
- 4) Automatically setting __tablename__ to snake-case.
- 5) Automatic integration with the SessionManager class.
- """
- orig_args = list(orig_args)
-
- def _decorate_base_class(klass):
-
- class Mixed(klass, CrudMixin):
- def __init__(self, *args, **kwargs):
- """
- Variant on SQLAlchemy model __init__ which sets default values on
- initialization instead of immediately before the model is saved.
- """
- if '_model' in kwargs:
- assert kwargs.pop('_model') == self.__class__.__name__
- _declarative_constructor(self, *args, **kwargs)
- for attr, col in self.__table__.columns.items():
- if kwargs.get(attr) is None and col.default:
- self.__dict__.setdefault(attr, col.default.arg(col))
-
- orig_kwargs['cls'] = Mixed
- if 'name' not in orig_kwargs:
- orig_kwargs['name'] = klass.__name__
- if 'constructor' not in orig_kwargs:
- orig_kwargs['constructor'] = klass.__init__ if '__init__' in klass.__dict__ else Mixed.__init__
-
- Mixed = declarative.declarative_base(*orig_args, **orig_kwargs)
- Mixed.BaseClass = _SessionInitializer._base_classes[klass.__module__] = Mixed
- Mixed.__tablename__ = declarative.declared_attr(lambda cls: _camelcase_to_underscore(cls.__name__))
- return Mixed
-
- is_class_decorator = not orig_kwargs and \
- len(orig_args) == 1 and \
- inspect.isclass(orig_args[0]) and \
- not isinstance(orig_args[0], sqlalchemy.engine.Connectable)
-
- if is_class_decorator:
- return _decorate_base_class(orig_args.pop())
- else:
- return _decorate_base_class
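-
-# Usage sketch for @declarative_base (the class and column names are hypothetical):
-#
-#     @declarative_base
-#     class Base(object):
-#         pass
-#
-#     class Player(Base):  # __tablename__ is automatically set to 'player'
-#         id = Column(UUID(), primary_key=True, default=lambda: str(uuid.uuid4()))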
-
-
-class _SessionInitializer(type):
- _base_classes = {}
-
- def __new__(cls, name, bases, attrs):
- SessionClass = type.__new__(cls, name, bases, attrs)
- if hasattr(SessionClass, 'engine'):
- if not hasattr(SessionClass, 'BaseClass'):
- for module, bc in _SessionInitializer._base_classes.items():
- if module == SessionClass.__module__:
- SessionClass.BaseClass = bc
- break
- else:
- raise AssertionError('no BaseClass specified and @declarative_base was never invoked in {}'.format(SessionClass.__module__))
- if not hasattr(SessionClass, 'session_factory'):
- SessionClass.session_factory = sessionmaker(bind=SessionClass.engine, autoflush=False, autocommit=False,
- query_cls=SessionClass.QuerySubclass)
- SessionClass.initialize_db()
- SessionClass.crud = make_crud_service(SessionClass)
- return SessionClass
-
-
-@six.add_metaclass(_SessionInitializer)
-class SessionManager(object):
- class SessionMixin(object):
- pass
-
- class QuerySubclass(Query):
- pass
-
- def __init__(self):
- self.session = self.session_factory()
- for name, val in self.SessionMixin.__dict__.items():
- if not name.startswith('__'):
- assert not hasattr(self.session, name) and hasattr(val, '__call__')
- setattr(self.session, name, types.MethodType(val, self.session))
-
- def __enter__(self):
- return self.session
-
- def __exit__(self, exc_type, exc_value, traceback):
- try:
- if exc_type is None:
- self.session.commit()
- finally:
- self.session.close()
-
- def __del__(self):
- if self.session.transaction._connections:
- log.error('SessionManager went out of scope without underlying connection being closed; did you forget to use it as a context manager?')
- self.session.close()
-
- @classmethod
- def initialize_db(cls, drop=False, create=True):
- configure_mappers()
- cls.BaseClass.metadata.bind = cls.engine
- if drop:
- cls.BaseClass.metadata.drop_all(cls.engine, checkfirst=True)
- if create:
- cls.BaseClass.metadata.create_all(cls.engine, checkfirst=True)
-
- @classmethod
- def all_models(cls):
- return cls.BaseClass.__subclasses__() # TODO: subclasses of subclasses; this needs to be recursive or something
-
- @classmethod
- def resolve_model(cls, name):
- if inspect.isclass(name) and issubclass(name, cls.BaseClass):
- return name
-
- subclasses = {ModelClass.__name__: ModelClass for ModelClass in cls.all_models()}
- permutations = [name, _underscore_to_camelcase(name), _underscore_to_camelcase(name, cap_segment=0)]
- for name in permutations:
- if name in subclasses:
- return subclasses[name]
-
- if name.lower().endswith('s'):
-            singular = name[:-1]  # strip the single trailing 's'
- if singular in subclasses:
- return subclasses[singular]
-
- if name.lower().endswith('ies'):
- singular = name[:-3] + 'y'
- if singular in subclasses:
- return subclasses[singular]
-
- for name in permutations:
- if name in cls.BaseClass.metadata.tables:
- return cls.BaseClass.metadata.tables[name]
-
- raise ValueError('Unrecognized model: {}'.format(name))
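-
-# Usage sketch for SessionManager (the engine URL and model are hypothetical):
-#
-#     class Session(SessionManager):
-#         engine = sqlalchemy.create_engine('sqlite://')
-#
-#     with Session() as session:
-#         session.add(Player(name='example'))  # committed on clean exit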
-
-if six.PY2:
- __all__ = [s.encode('ascii') for s in __all__]
-
-from sideboard.lib.sa._crud import CrudMixin, make_crud_service, crudable, CrudException, crud_validation, text_length_validation, regex_validation
diff --git a/sideboard/lib/sa/_crud.py b/sideboard/lib/sa/_crud.py
deleted file mode 100644
index de9ef47..0000000
--- a/sideboard/lib/sa/_crud.py
+++ /dev/null
@@ -1,1958 +0,0 @@
-"""
-The crud module defines a number of functions for finding SQLAlchemy model objects via a query parameter and displaying a desired portion of the resulting object graph via a data specification parameter. The total number of results returned can optionally be limited, potentially with an offset to support paging.
-
-
-QUERY PARAMETER
----------------
-The format of the query parameter needs to support logical operators and a certain amount of introspection into which model objects are involved in a given query. For this writeup, a "query" is any set of search parameters that will result in a known SQL search string capable of returning the desired model objects. Python syntax will be used to represent the expected format of the method parameters, with allowances for representing infinite nesting/lists as appropriate. Unless explicitly stated, pluralized forms like "queries" can be read as "query or queries" due to the support of one or more queries in all cases.
-
-The comprehensive form of the query parameter is as follows:
-
-query = [{
-    '_model': <model name>,
-    '_data': <data specification>,
-    '_label': <query label>,
-    # Either provide <logical operator> OR the items after it
-    <logical operator>: [<query>[, <query>]*],
-    # used IF AND ONLY IF <logical operator> is not provided
-    'comparison': <comparison function name>,
-    'field': <model field name>,
-    'value': <model field value>
-}]
-
-meaning an array of one or more dictionaries (a dictionary is equivalent to an array of length 1) of queries, one for each type of SQLAlchemy model object expected to be returned
-
-where:
-- '<model name>' - the string corresponding to the SQLAlchemy model class name which extends your @sideboard.lib.sa.declarative_base
-- '<query label>' - the optional string that signifies the purpose of this query and is only used as a convenience for the consumer of the crud method. This primarily supports counts, but can be used in client code to help cue the display of those results; defaults to the contents of _model
-- '<logical operator>' - the key is one of the following logical operators (with the value being one or more queries in a list):
--- and ("intersection")
--- or ("union")
---- meaning that the results of the provided queries will be the corresponding intersection/union of all the results of an individual query. Imagining a Venn Diagram is useful in this instance.
-- <query> - a dictionary identical to the dictionary taken in by the query parameter EXCEPT that _model is not included
-- <comparison function name> - a comparison operator function used to find the objects that would return "True" for the provided comparison for the value in the model_field_name. Some examples are:
--- 'lt' - is the field less than value?
--- 'gt' - is the field greater than value?
--- 'eq' - is the field equal to value? (default)
--- 'ne' - is the field not equal to value?
--- 'le' - is the field less than or equal to value?
--- 'ge' - is the field greater than or equal to value?
--- 'isnull' - does the field have a null value? (does not use the query's value parameter)
--- 'isnotnull' - does the field have a non null value? (does not use the query's value parameter)
--- 'in' - does the field appear in the given value? (value should be an array)
--- 'contains' - does the field contain this value? (would allow other characters before or after the value)
--- 'like' - same as contains
--- 'ilike' - same as contains, but case insensitive
--- 'startswith' - does the field start with value?
--- 'istartswith' - case insensitive startswith
--- 'endswith' - does the field end with value?
--- 'iendswith' - case insensitive endswith
-
-- <model field name> - the name of the field for the provided _model at the top level. Supports dot notation, e.g.:
--- making a comparison based off all of a Team's players' names would use a 'field' of 'player.name'
-- <model field value> - the value that the field comparison will be made against, e.g. a value of 'text' and a comparison of 'eq' will return all matching models with fields equal to 'text'.
-- <data specification> - specifies what parts of the results get returned; the following section covers the format of the data specification parameter
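-
-For example, a query for all Players either named "John" or on the team named "Foo" (the model and field names are hypothetical):
-
-query = {
-    '_model': 'Player',
-    'or': [
-        {'field': 'name', 'comparison': 'eq', 'value': 'John'},
-        {'field': 'team.name', 'comparison': 'eq', 'value': 'Foo'}
-    ]
-}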
-
-
-DATA SPECIFICATION
-------------------
-Where the query parameter is only used to search existing objects in the database, the data specification parameter has two separate meanings: in the 'read' function (as the _data key in the query dictionary) it specifies what information is returned in the results; in the 'update' and 'create' functions it specifies what model type will be created/updated with what values. This is encompassed in one format, so there is some amount of redundancy depending on what actions you're performing.
-
-The comprehensive form of the data specification parameter is as follows:
-
-data = [{
-    '_model': <model name>,
-    # a non-foreign key field
-    '<field name>': True (or the value of the field if the data parameter is used to create or update objects)
-    # a foreign key field is a special case and additional forms are supported
-    '<foreign key field name>': True (all readable fields of the referenced model object will be read; has no meaning if the data parameter is used to create or update objects)
-    '<foreign key field name>': {<data specification for the referenced model>}
-}]
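-
-For example, reading each Player's name and its team's name (the model and field names are hypothetical):
-
-data = {
-    '_model': 'Player',
-    'name': True,
-    'team': {'name': True}
-}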
-
-To prevent the client from always being forced to deal with the entire query result, there are three parameters in place for the crud.read method to simplify receiving only the information that's desired. At a high level:
-
-- 'Limit' takes a positive integer 'L' and when provided, the crud.read method will return at most L results, defaults to no limit
-- 'Ordering' takes a list of ordering specification dictionaries for sorting by specific fields and in a specified direction (ascending or descending), defaults to no reordering after being returned from the database
-- 'Offset' takes a positive integer 'F' and when provided, the crud.read method will skip the first (based on the ordering specification) F results before returning anything.
-
-The 'limit' parameter is used with the crud.read method to return only a subset of the information, allowing the client to receive only the amount of information it's interested in. Useful in conjunction with the offset and ordering parameters to finely tune the information received.
-
-The comprehensive form of the ordering parameter is as follows:
-
-ordering = [{
-    'dir': <'asc'/'desc'>,  # either in ascending (default) or descending order
-    'fields': [['<model name>.']'<model field name>']+
-}]+
-
-A single string in 'fields' is equivalent to a list with the string as the only element. If no model name is provided, the model_field_name is interpreted as the catch-all key for all model objects. If model_field_name isn't present on a model, or no catch-all is specified, 'id' will be used.
-
-The list of dictionaries is interpreted as being ordered in decreasing priority. An example:
-
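-ordering = [{'dir': 'asc', 'fields': 'last_name'},
-            {'dir': 'desc', 'fields': 'first_name'}]
-
-Applied to four hypothetical Human records, this sorts by last name ascending, breaking ties by first name descending:
-
-1. John Depp
-2. Jane Doe
-3. Bob Doe
-4. Alice Smith
-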
-The 'offset' parameter is used with the crud.read method to return only a subset of information, allowing the client to receive only the amount of information it's interested in. Useful in conjunction with the limit and ordering parameters to finely tune the information received.
-
-Using the 4 records in the ordering example (including the ordering specification):
-- a limit of 1 with an offset of 0 (the default if unspecified) would return only the John Depp Human.
-- a limit of 0 (unlimited, which is the default if unspecified) and an offset of 0 would be identical to the table in the ordering-only example
-- a limit of 0 and an offset of 1 would return everything except for the first result, so in this case, the last 3 results
-- a limit of 2 and an offset of 1 would return the 2nd and 3rd results, so in this case, the middle 2 results
-"""
-from __future__ import unicode_literals
-import functools
-import re
-import sys
-import json
-import uuid
-import inspect
-import collections
-from copy import deepcopy
-from collections.abc import Mapping
-from collections import defaultdict
-from datetime import datetime, date, time
-from itertools import chain
-from functools import wraps
-
-import six
-from sqlalchemy import orm, union, select, func
-from sqlalchemy.ext.associationproxy import AssociationProxy
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm.attributes import InstrumentedAttribute
-from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
-from sqlalchemy.orm.mapper import Mapper
-from sqlalchemy.orm.properties import ColumnProperty, RelationshipProperty
-from sqlalchemy.orm.util import class_mapper
-from sqlalchemy.schema import UniqueConstraint
-from sqlalchemy.sql import text, ClauseElement
-from sqlalchemy.sql.expression import alias, cast, label, bindparam, and_, or_, asc, desc, literal, text, union, join
-from sqlalchemy.types import Boolean, Text, Integer, String, UnicodeText, DateTime
-
-from sideboard.lib import log, notify, listify, threadlocal, serializer, is_listy, class_property
-
-
-class CrudException(Exception):
- pass
-
-
-def listify_with_count(x, count=None):
- x = listify(x)
- if count and len(x) < count:
- x.extend([None for i in range(count - len(x))])
- return x
-
-
-def mappify(value):
- if isinstance(value, six.string_types):
- return {value: True}
- elif isinstance(value, collections.abc.Mapping):
- return value
- elif isinstance(value, collections.abc.Iterable):
- return {v: True for v in value}
- else:
-        raise TypeError('unknown datatype: {}'.format(value))
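-
-# For example, mappify('foo') returns {'foo': True} and mappify(['a', 'b'])
-# returns {'a': True, 'b': True}; an existing mapping is passed through unchanged.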
-
-
-def generate_date_series(startDate=None, endDate=None, interval='1 month', granularity='day'):
- if granularity:
- granularity = '1 %s' % granularity
- else:
- granularity = '1 day'
-
- generate_series = None
- if startDate:
- if endDate:
- # If the startDate and the endDate are defined then we use those
- generate_series = func.generate_series(startDate, endDate, granularity)
- elif interval:
- # If the startDate and the interval are defined then we use those
- generate_series = func.generate_series(startDate,
- text("DATE :start_date_param_1 + INTERVAL :interval_param_1",
- bindparams=[
- bindparam("start_date_param_1", startDate),
- bindparam("interval_param_1", interval)]),
- granularity)
- else:
- # If ONLY the startDate is defined then we just use that
- generate_series = func.generate_series(startDate, datetime.utcnow(), granularity)
- elif endDate:
- if interval:
- # If the endDate and the interval are defined then we use those
- generate_series = func.generate_series(
- text("DATE :current_date_param_1 - INTERVAL :interval_param_1",
- bindparams=[
- bindparam("current_date_param_1", endDate),
- bindparam("interval_param_1", interval)]),
- endDate, granularity)
- else:
- # If ONLY the endDate is defined then we just use that
- generate_series = func.generate_series(
- text("DATE :current_date_param_1 - INTERVAL :interval_param_1",
- bindparams=[
- bindparam("current_date_param_1", endDate),
- bindparam("interval_param_1", "1 month")]),
- endDate, granularity)
- elif interval:
- # If ONLY the interval is defined then we default to the current date
- # minus the interval
- generate_series = func.generate_series(
- text("DATE :current_date_param_1 - INTERVAL :interval_param_1",
- bindparams=[
- bindparam("current_date_param_1", datetime.utcnow()),
- bindparam("interval_param_1", interval)]),
- datetime.utcnow(), granularity)
- else:
- # If NOTHING is defined then we return the query unmodified
- generate_series = func.generate_series(
- text("DATE :current_date_param_1 - INTERVAL :interval_param_1",
- bindparams=[
- bindparam("current_date_param_1", datetime.utcnow()),
- bindparam("interval_param_1", "1 month")]),
- datetime.utcnow(), granularity)
-
- return generate_series
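-
-# For example (the dates are hypothetical), generate_date_series(startDate=date(2020, 1, 1),
-# endDate=date(2020, 2, 1), granularity='week') produces
-# func.generate_series('2020-01-01', '2020-02-01', '1 week'), i.e. a PostgreSQL
-# generate_series() call yielding one row per week in that range.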
-
-
-def normalize_date_query(query, dateLabel, reportLabel, startDate=None, endDate=None, interval='1 month', granularity='day'):
- series = generate_date_series(startDate, endDate, interval, granularity)
- seriesQuery = select([
- series.label(dateLabel),
- literal(0).label(reportLabel)
- ])
-
- query = union(query, seriesQuery).alias()
- query = select([
- text(dateLabel),
- func.max(text(reportLabel)).label(reportLabel)
- ], from_obj=query).group_by(dateLabel).order_by(dateLabel)
-
- return query
-
-
-def normalize_object_graph(graph):
- """
- Returns a normalized object graph given a variety of different inputs.
-
- If graph is a string, we assume it is a single property of an object,
- and return a dict with just that property set to True.
-
- If graph is a dict, we assume it is already a normalized graph.
-
- If graph is iterable (and not a string), we assume that it's simple a
- list of properties, and we return a dict with those properties set to
- True.
-
- NOTE: This function is NOT recursive. It is intended to be repeatedly
- called from an external library as it traverses the object graph. We do
- this for performance reasons in case the caller decides not to traverse
- the entire graph.
-
- >>> normalize_object_graph('prop')
- {u'prop': True}
-
- >>> normalize_object_graph(['prop_one', 'prop_two'])
- {'prop_two': True, 'prop_one': True}
-
- >>> normalize_object_graph({'prop_one':'test_one', 'prop_two':'test_two'})
- {u'prop_two': u'test_two', u'prop_one': u'test_one'}
- """
- if isinstance(graph, six.string_types):
- return {graph: True}
- elif isinstance(graph, dict):
- return graph
- elif isinstance(graph, collections.abc.Iterable):
- return dict([(str(i), True) for i in graph])
- else:
- return None
-
-
-def collect_ancestor_classes(cls, terminal_cls=None, module=None):
- """
- Collects all the classes in the inheritance hierarchy of the given class,
- including the class itself.
-
- If module is an object or list, we only return classes that are in one
- of the given module/modules.This will exclude base classes that come
- from external libraries.
-
- If terminal_cls is encountered in the hierarchy, we stop ascending
- the tree.
- """
- if terminal_cls is None:
- terminal_cls = []
- elif not isinstance(terminal_cls, (list, set, tuple)):
- terminal_cls = [terminal_cls]
-
- if module is not None:
- if not isinstance(module, (list, set, tuple)):
- module = [module]
- module_strings = []
- for m in module:
- if isinstance(m, six.string_types):
- module_strings.append(m)
- else:
- module_strings.append(m.__name__)
- module = module_strings
-
- ancestors = []
- if (module is None or cls.__module__ in module) and cls not in terminal_cls:
- ancestors.append(cls)
- for base in cls.__bases__:
- ancestors.extend(collect_ancestor_classes(base, terminal_cls, module))
-
- return ancestors
-
-
-def collect_ancestor_attributes(cls, terminal_cls=None, module=None):
- """
- Collects all the attribute names of every class in the inheritance
- hierarchy of the given class, including the class itself.
- """
- classes = collect_ancestor_classes(cls, terminal_cls, module)
- attr_names = []
- for cls in classes:
- for attr_name in cls.__dict__.keys():
- attr_names.append(attr_name)
- return list(set(attr_names))
-
-
-def constrain_date_query(query, column, startDate=None, endDate=None, interval='1 month'):
- if startDate:
- if endDate:
- # If the startDate and the endDate are defined then we use those
- query = query.where(and_(column >= startDate, column <= endDate))
- return query
- elif interval:
- # If the startDate and the interval are defined then we use those
- query = query.where(and_(
- column >= startDate,
- column <= text("DATE :start_date_param_1 + INTERVAL :interval_param_1",
- bindparams=[
- bindparam("start_date_param_1", startDate),
- bindparam("interval_param_1", interval)])))
- return query
- else:
- # If ONLY the startDate is defined then we just use that
- query = query.where(column >= startDate)
- return query
- elif endDate:
- if interval:
- # If the endDate and the interval are defined then we use those
- query = query.where(and_(
- column <= endDate,
- column >= text("DATE :end_date_param_1 - INTERVAL :interval_param_1",
- bindparams=[
- bindparam("end_date_param_1", endDate),
- bindparam("interval_param_1", interval)])))
- return query
- else:
- # If ONLY the endDate is defined then we just use that
- query = query.where(column <= endDate)
- return query
- elif interval:
- # If ONLY the interval is defined then we default to the current date
- # minus the interval
- query = query.where(and_(
- column >= text("DATE :current_date_param_1 - INTERVAL :interval_param_1",
- bindparams=[
- bindparam("current_date_param_1", datetime.utcnow()),
- bindparam("interval_param_1", interval)])))
- return query
- else:
- # If NOTHING is defined then we return the query unmodified
- return query
-
-
-def extract_sort_field(model, value):
- field = None
- fields = listify(value)
- for f in fields:
- if isinstance(f, six.string_types):
- parts = f.split('.')
- if len(parts) == 1 and field is None:
- if not model or (model and hasattr(model, parts[0])):
- field = parts[0]
- elif len(parts) > 1 and model and parts[0] == model.__name__:
- field = parts[1]
- else:
- field = f
-
- if field and isinstance(field, six.string_types) and model:
- attr = getattr(model, field)
- if (not (isinstance(attr, InstrumentedAttribute) and isinstance(attr.property, ColumnProperty)) and
- not isinstance(attr, ClauseElement)):
- raise ValueError('SQLAlchemy model classes may only be sorted '
- 'by columns that exist in the database. '
- 'Provided: {}.{}'.format(model.__name__, field))
- return field or 'id'
-
-
-def normalize_sort(model, sort):
- if sort and isinstance(sort, six.string_types) and (sort.lstrip()[0] == '[' or sort.lstrip()[0] == '{'):
- sort = json.loads(sort)
-
- if isinstance(sort, six.string_types):
- return [{'field': extract_sort_field(model, sort), 'dir': 'asc'}]
- elif is_listy(sort):
- sorters = []
- for s in sort:
- sorters.extend(normalize_sort(model, s))
- return sorters
- elif isinstance(sort, dict):
- field = sort.get('property', sort.get('fields', sort.get('field', [])))
- direction = sort.get('direction', sort.get('dir', 'asc')).lower()
- return [{
- 'field': extract_sort_field(model, field),
- 'dir': direction
- }]
- else:
- return [{'field': 'id', 'dir': 'asc'}]
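-
-# For example, normalize_sort(None, 'name') returns [{'field': 'name', 'dir': 'asc'}],
-# and normalize_sort(None, {'property': 'name', 'direction': 'DESC'}) returns
-# [{'field': 'name', 'dir': 'desc'}].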
-
-
-def normalize_data(data, count=1):
- """
- A singular data can be a string, a list of strings, or a dict:
- 'attr'
- ['attr1', 'attr2']
- {'attr1':True, 'attr2':True}
-
- A plural data must be specified as a list of lists or a list of dicts:
- [['attr1', 'attr2'], ['attr1', 'attr2']]
- [{'attr1':True, 'attr2':True}, {'attr1':True, 'attr2':True}]
-
- Note that if data is specified as a list of strings, it is
- considered to be singular. Only a list of lists or a list of
- dicts is considered plural.
-
- Returns the plural form of data as the comprehensive form of a list of
- dictionaries mapping to True, extended to count length. If a
- singular data is given, the result will be padded by repeating
- that value. If a plural data is given, it will be padded with
- None, for example:
- >>> normalize_data('attr', 1)
- [{'attr': True}]
- >>> normalize_data('attr', 3)
- [{'attr': True}, {'attr': True}, {'attr': True}]
- >>> normalize_data(['attr1', 'attr2'], 1)
- [{'attr2': True, 'attr1': True}]
- >>> normalize_data(['attr1', 'attr2'], 3)
- [{'attr2': True, 'attr1': True}, {'attr2': True, 'attr1': True}, {'attr2': True, 'attr1': True}]
- >>> normalize_data({'attr1':True, 'attr2':True}, 1)
- [{'attr2': True, 'attr1': True}]
- >>> normalize_data({'attr1':True, 'attr2':True}, 3)
- [{'attr2': True, 'attr1': True}, {'attr2': True, 'attr1': True}, {'attr2': True, 'attr1': True}]
- >>> normalize_data([['attr1', 'attr2'], ['attr1', 'attr2']], 1)
- [{'attr2': True, 'attr1': True}, {'attr2': True, 'attr1': True}]
- >>> normalize_data([['attr1', 'attr2'], ['attr1', 'attr2']], 4)
- [{'attr2': True, 'attr1': True}, {'attr2': True, 'attr1': True}, None, None]
- >>> normalize_data([{'attr1':True, 'attr2':True}, {'attr1':True, 'attr2':True}], 1)
- [{'attr2': True, 'attr1': True}, {'attr2': True, 'attr1': True}]
- >>> normalize_data([{'attr1':True, 'attr2':True}, {'attr1':True, 'attr2':True}], 4)
- [{'attr2': True, 'attr1': True}, {'attr2': True, 'attr1': True}, None, None]
- >>> normalize_data([{'attr1':True, 'attr2':True}, ['attr1', 'attr2']], 4)
- [{'attr2': True, 'attr1': True}, {'attr2': True, 'attr1': True}, None, None]
- """
- if not data:
- return listify_with_count(None, count)
- else:
- if isinstance(data, six.string_types):
- data = [{data: True}]
- elif isinstance(data, collections.abc.Mapping):
- data = [data]
- elif isinstance(data, collections.abc.Iterable):
- if any(isinstance(element, six.string_types) for element in data):
- # this is the singular list of strings case, so wrap it and
- # go from there
- data = [data]
-            # now a list of strings/lists/dicts; normalize each element to a dict
- data = [mappify(v) for v in data]
- else:
-        raise TypeError('unknown datatype: {}'.format(data))
-
- if len(data) < count:
- if len(data) == 1:
- data.extend([deepcopy(data[0]) for i in range(count - len(data))])
- else:
- data.extend([None for i in range(count - len(data))])
- return data
-
-
-def normalize_query(query, top_level=True, supermodel=None):
- """
- Normalizes a variety of query formats to a known standard query format.
-
- The comprehensive form of the query parameter is as follows:
- {code:python}
-    query = [{
-        '_model': <model name>,
-        '_label': <optional identifier>,
-        # Either provide <logical operator> OR the items after it
-        <logical operator>: [<query>[, <query>]*],
-        # used IF AND ONLY IF <logical operator> is not provided
-        'comparison': <comparison function name>,
-        'field': <model field name>,
-        'value': <model field value>
-    }]+
- {code}
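-
-    For example:
-
-    >>> normalize_query('Player')
-    [{'_model': 'Player', '_label': 'Player'}]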
- """
- if query is None:
- raise ValueError('None passed for query parameter')
-
- query = listify(deepcopy(query))
-
- queries = []
- for q in query:
- if isinstance(q, six.string_types):
- queries.append({'_model': q, '_label': q})
- elif isinstance(q, dict):
- if 'distinct' in q:
- if isinstance(q['distinct'], six.string_types):
- q['distinct'] = [q['distinct']]
- if 'groupby' in q:
- if isinstance(q['groupby'], six.string_types):
- q['groupby'] = [q['groupby']]
- if 'and' in q or 'or' in q:
- op = 'or'
- if 'and' in q:
- op = 'and'
- if not isinstance(q[op], (list, set, tuple)):
- raise ValueError('Clause must be of type list, set, or tuple not {}, given {}'.format(type(q[op]), q[op]))
- q[op] = normalize_query(q[op], False, q.get('_model', supermodel))
- if len(q[op]) == 1:
- q = q[op][0]
- elif '_model' not in q:
- # Pull the _model up from the sub clauses. Technically the
- # query format requires the _model be declared in the
- # clause, but we are going to be liberal in what we accept.
- model = supermodel
- for clause in q[op]:
- if '_model' in clause:
- model = clause['_model']
- break
- if model is None:
- raise ValueError('Clause objects must have a "_model" attribute')
- q['_model'] = model
-
- if '_model' in q:
- queries.append(q)
- elif supermodel is not None:
- q['_model'] = supermodel
- queries.append(q)
- else:
- raise ValueError('Query objects must have a "_model" attribute')
- else:
- raise ValueError('Query objects must be either a dict or string')
- return queries
-
-
-def collect_fields(d):
- if 'field' in d:
- return {d['field']}
- elif 'and' in d or 'or' in d:
- attrs = set()
- for comp in ['and', 'or']:
- for subquery in d.get(comp, []):
- attrs.update(collect_fields(subquery))
- return attrs
- elif 'comparison' in d or 'value' in d:
- return {'id'}
- else:
- return d.keys()
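-
-# For example, collect_fields({'or': [{'field': 'name'}, {'field': 'team.name'}]})
-# returns {'name', 'team.name'}.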
-
-
-def get_queries(x):
- queries = []
- if isinstance(x, (list, tuple)):
- for e in x:
- queries.extend(get_queries(e))
- elif isinstance(x, dict):
- queries.append(x)
- for e in x.values():
- queries.extend(get_queries(e))
- return [d for d in queries if isinstance(d.get("_model"), six.string_types)]
-
-
-def crud_exceptions(fn):
- """A decorator designed to catch exceptions from the crud api methods."""
- @wraps(fn)
- def wrapped(*args, **kwargs):
- try:
- return fn(*args, **kwargs)
- except:
- a = [x for x in (args or [])]
- kw = {k: v for k, v in (kwargs or {}).items()}
-            log.error('Error calling %s.%s %s %s', fn.__module__, fn.__name__, a, kw, exc_info=True)
- exc_class, exc, tb = sys.exc_info()
-            six.reraise(CrudException, CrudException(str(exc)), tb)
- return wrapped
-
-
-def make_crud_service(Session):
-
- class Crud(object):
- @staticmethod
- def crud_subscribes(func):
- func = crud_exceptions(func)
-
- class subscriber(object):
- @property
- def subscribes(self):
- message = threadlocal.get('message', {})
- return Crud._get_models(message.get('params')) if message else []
-
- def __call__(self, *args, **kwargs):
- return func(*args, **kwargs)
-
- return wraps(func)(subscriber())
-
- @staticmethod
- def crud_notifies(func, **settings):
- func = crud_exceptions(func)
-
- class notifier(object):
- def __call__(self, *args, **kwargs):
- try:
- return func(*args, **kwargs)
- finally:
- models = Crud._get_models(args, kwargs)
- notify(models, trigger=func.__name__)
-
- return wraps(func)(notifier())
-
- @classmethod
- def _collect_models(cls, query):
- models = set()
- for d in listify(query):
- try:
- model = Session.resolve_model(d['_model'])
- except:
- log.debug('unable to resolve model %s in query %s', d.get('_model'), d)
- else:
- models.add(model)
- for attr_name in collect_fields(d):
- curr_model = model
- for prop_name in attr_name.split('.'):
- if hasattr(curr_model, prop_name):
- prop = getattr(curr_model, prop_name)
- if isinstance(prop, InstrumentedAttribute) and hasattr(prop.property, 'mapper'):
- curr_model = prop.property.mapper.class_
- models.update([curr_model])
- if prop_name in d:
- subquery = deepcopy(d[prop_name])
-                                if isinstance(subquery, (list, set, tuple)) and not any(isinstance(x, dict) for x in subquery):
- subquery = {i: True for i in subquery}
- elif isinstance(subquery, six.string_types):
- subquery = {subquery: True}
- if isinstance(subquery, dict):
- subquery['_model'] = curr_model.__name__
- models.update(cls._collect_models(subquery))
- else:
- break
- return models
-
- @classmethod
- def _get_models(cls, *args, **kwargs):
- return {model.__name__ for model in cls._collect_models(get_queries([args, kwargs]))}
-
- @classmethod
- def _sort_query(cls, query, model, sort):
- sort = normalize_sort(model, sort)
- for sorter in sort:
- dir = {'asc': asc, 'desc': desc}[sorter['dir']]
- field = sorter['field']
- if model:
- field = getattr(model, field)
- if issubclass(type(field.__clause_element__().type), String):
- field = func.lower(field)
- query = query.order_by(dir(field))
- return query
-
- @classmethod
- def _limit_query(cls, query, limit, offset):
- if offset is not None:
- query = query.offset(offset)
- if limit is not None and limit != 0:
- query = query.limit(limit)
- return query
-
- # this only works in postgresql
- @classmethod
- def _distinct_query(cls, query, filters):
- distinct_clause = filters.get('distinct', None)
- if distinct_clause:
- if isinstance(distinct_clause, bool):
- query = query.distinct()
- else:
- model = Session.resolve_model(filters.get('_model'))
- columns = [getattr(model, field) for field in distinct_clause]
- query = query.distinct(*columns)
- return query
-
- @classmethod
- def _groupby_query(cls, query, filters):
- groupby_clause = filters.get('groupby', None)
- if groupby_clause:
- model = Session.resolve_model(filters.get('_model'))
- columns = [getattr(model, field) for field in groupby_clause]
- query = query.group_by(*columns)
- return query
-
- @classmethod
- def _filter_query(cls, query, model, filters=None, limit=None, offset=None, sort=None):
- if filters:
- query = cls._distinct_query(query, filters)
- query = cls._groupby_query(query, filters)
- filters = cls._resolve_filters(filters, model)
- if filters is not None:
- query = query.filter(filters)
- if sort:
- query = cls._sort_query(query, model, sort)
- query = cls._limit_query(query, limit, offset)
- return query
-
- @classmethod
- def _resolve_comparison(cls, comparison, column, value):
- if isinstance(value, dict):
- model_class = Session.resolve_model(value.get('_model'))
- field = value.get('select', 'id')
- value = select(getattr(model_class, field)).where(cls._resolve_filters(value))
-
- return {
- 'eq': lambda field, val: field == val,
- 'ne': lambda field, val: field != val,
- 'lt': lambda field, val: field < val,
- 'le': lambda field, val: field <= val,
- 'gt': lambda field, val: field > val,
- 'ge': lambda field, val: field >= val,
- 'in': lambda field, val: field.in_(val),
- 'notin': lambda field, val: ~field.in_(val),
- 'isnull': lambda field, val: field == None,
- 'isnotnull': lambda field, val: field != None,
- 'contains': lambda field, val: field.like('%'+val+'%'),
- 'icontains': lambda field, val: field.ilike('%'+val+'%'),
- 'like': lambda field, val: field.like('%'+val+'%'),
- 'ilike': lambda field, val: field.ilike('%'+val+'%'),
- 'startswith': lambda field, val: field.startswith(val),
- 'endswith': lambda field, val: field.endswith(val),
- 'istartswith': lambda field, val: field.ilike(val+'%'),
- 'iendswith': lambda field, val: field.ilike('%'+val)
- }[comparison](column, value)
-
- @classmethod
- def _resolve_filters(cls, filters, model=None):
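-        # filters is a nested dict; a hypothetical example of the shape handled here:
-        #   {'_model': 'Player', 'or': [
-        #       {'field': 'name', 'comparison': 'eq', 'value': 'Alice'},
-        #       {'field': 'team.name', 'value': 'Reds'}]}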
- model = Session.resolve_model(filters.get('_model', model))
- table = class_mapper(model).persist_selectable
- and_clauses = filters.get('and', None)
- or_clauses = filters.get('or', None)
- if and_clauses:
- return and_(*[cls._resolve_filters(c, model) for c in and_clauses])
- elif or_clauses:
- return or_(*[cls._resolve_filters(c, model) for c in or_clauses])
- elif 'field' in filters or 'value' in filters or 'comparison' in filters:
- field = filters.get('field', 'id').split('.')
- value = filters.get('value')
- comparison = filters.get('comparison', 'eq')
-
- if len(field) == 1:
- column = getattr(model, field[0])
- return cls._resolve_comparison(comparison, column, value)
- elif len(field) == 2:
- property = field[0]
- field = field[1]
- related_table = class_mapper(model).get_property(property)
- related_model = related_table.argument
- if isinstance(related_model, Mapper):
- related_model = related_model.class_
- elif callable(related_model):
- related_model = related_model()
- related_field = getattr(related_model, field)
-
- clause = cls._resolve_comparison(comparison, related_field, value)
- if getattr(related_table, 'primaryjoin', None) is not None:
- clause = and_(
- clause,
- related_table.primaryjoin)
- if getattr(related_table, 'secondaryjoin', None) is not None:
- clause = and_(
- clause,
- related_table.secondaryjoin)
- return clause
- else:
- property = field[0]
- join_property = field[1]
- field = field[2]
-
- join_table = class_mapper(model).get_property(property)
- join_model = join_table.argument
-
- if isinstance(join_model, Mapper):
- join_model = join_model.class_
- elif callable(join_model):
- join_model = join_model()
-
- related_table = class_mapper(join_model).get_property(join_property)
- related_model = related_table.argument
- if isinstance(related_model, Mapper):
- related_model = related_model.class_
- elif callable(related_model):
- related_model = related_model()
- related_field = getattr(related_model, field)
-
- clause = cls._resolve_comparison(comparison, related_field, value)
- if getattr(join_table, 'primaryjoin', None) is not None:
- clause = and_(
- clause,
- join_table.primaryjoin)
- if getattr(join_table, 'secondaryjoin', None) is not None:
- clause = and_(
- clause,
- join_table.secondaryjoin)
-
- if getattr(related_table, 'primaryjoin', None) is not None:
- clause = and_(
- clause,
- related_table.primaryjoin)
- if getattr(related_table, 'secondaryjoin', None) is not None:
- clause = and_(
- clause,
- related_table.secondaryjoin)
-
- return clause
- else:
- return None
-
- def get_time_format_string(self):
- """
-        returns the Python format string used to serialize datetime objects
-        to and from a subscription via the crud API
- """
- return serializer._datetime_format
-
- @crud_subscribes.__func__
- def count(query):
- """
- Count the model objects matching the supplied query parameters
-
- @param query: Specifies the model types to count. May be a string, a list
- of strings, or a list of dicts with a "_model" key specified.
- @return: The count of each of the supplied model types, in a list of
- dicts, like so:
- [{
- '_model' : 'Player',
- '_label' : 'Player on a Team',
- 'count' : 12
- }]
- @rtype: [c{dict}]
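-
-        Example queries (hypothetical model names):
-            count('Player')
-            count(['Player', 'Team'])
-            count([{'_model': 'Player', '_label': 'Player on a Team'}])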
- """
- filters = normalize_query(query)
- results = []
- with Session() as session:
- for filter in filters:
- model = Session.resolve_model(filter['_model'])
- result = {'_model': filter['_model'],
- '_label': filter.get('_label', filter['_model'])}
- if getattr(model, '_crud_perms', {}).get('read', True):
- if filter.get('groupby', False):
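-                    # grouped counts: one {'count': n, <groupby attr>: value, ...}
-                    # dict per group, instead of a single integer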
- columns = []
- for attr in filter['groupby']:
- columns.append(getattr(model, attr))
-
- rows = Crud._filter_query(session.query(func.count(columns[0]), *columns), model, filter).all()
- result['count'] = []
- for row in rows:
- count = {'count': row[0]}
- index = 1
- for attr in filter['groupby']:
- count[attr] = row[index]
- index += 1
- result['count'].append(count)
- else:
- result['count'] = Crud._filter_query(session.query(model), model, filter).count()
- results.append(result)
- return results
-
- @crud_subscribes.__func__
- def read(query, data=None, order=None, limit=None, offset=0):
- """
- Get the model objects matching the supplied query parameters,
- optionally setting which part of the objects are in the returned dictionary
- using the supplied data parameter
-
- @param query: one or more queries (as c{dict} or [c{dict}]), corresponding
- to the format of the query parameter described in the module-level
- docstrings. This query parameter will be normalized
-        @param data: one or more data specifications (as c{dict} or [c{dict}]),
-            corresponding to the format of the data specification parameter
-            described in the module-level docstrings. The data parameter should
-            either be of length 1, in which case it is used as the spec for
-            every query, OR of length N, where N is the number of queries after
-            normalization. If not provided, the _data parameter is expected in
-            each query
-        @param limit: when provided with a positive integer "L", at most "L"
-            results will be returned. Defaults to no limit
-        @param offset: when provided with a positive integer "F", the first "F"
-            results (based on ordering) are skipped, and at most "L" results
-            are returned after that
-        @return: one or more data specification dictionaries with models that
-            match the provided queries, including all readable fields without
-            following foreign keys (the default if no data parameter is
-            included), OR the keys/values specified by the data specification
-            parameter. The number of items returned and the order in which
-            they appear are controlled by the limit, offset and order
-            parameters. Represented as:
-            return {
-                total: <count of ALL matching objects, ignoring limit/offset>,
-                results: [c{dict}, ..., c{dict}]  # subject to limit/offset/order
-            }
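-
-        Example (hypothetical model and field names):
-            read({'_model': 'Player'}, data={'name': True}, limit=10)
-            returns {'total': 42, 'results': [{'_model': 'Player', 'id': '...', 'name': '...'}, ...]}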
- """
- with Session() as session:
- filters = normalize_query(query)
- data = normalize_data(data, len(filters))
- if len(filters) == 1:
- filter = filters[0]
- model = Session.resolve_model(filter['_model'])
- total = 0
- results = []
- if getattr(model, '_crud_perms', {}).get('read', True):
- total = Crud._filter_query(session.query(model), model, filter).count()
- results = Crud._filter_query(session.query(model), model, filter, limit, offset, order).all()
-
- return {'total': total, 'results': [r.crud_read(data[0]) for r in results]}
-
- elif len(filters) > 1:
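-                # heterogeneous queries: UNION per-model queries of (id, table
-                # name, query index, sort fields) so that results can be ordered
-                # and paginated globally before fetching the full rows per model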
- queries = []
- count_queries = []
- queried_models = []
- sort_field_types = {}
- for filter_index, filter in enumerate(filters):
- model = Session.resolve_model(filter['_model'])
- if getattr(model, '_crud_perms', {}).get('read', True):
- queried_models.append(model)
- query_fields = [model.id, cast(literal(model.__name__), Text).label("_table_name"), cast(literal(filter_index), Integer)]
- for sort_index, sort in enumerate(normalize_sort(model, order)):
- sort_field = getattr(model, sort['field'])
- sort_field_types[sort_index] = type(sort_field.__clause_element__().type)
- query_fields.append(sort_field.label('anon_sort_{}'.format(sort_index)))
- queries.append(Crud._filter_query(session.query(*query_fields), model, filter))
- count_queries.append(Crud._filter_query(session.query(model.id), model, filter))
-
- total = count_queries[0].union(*(count_queries[1:])).count()
- query = queries[0].union(*(queries[1:]))
- normalized_sort_fields = normalize_sort(None, order)
- for sort_index, sort in enumerate(normalized_sort_fields):
- dir = {'asc': asc, 'desc': desc}[sort['dir']]
- sort_field = 'anon_sort_{}'.format(sort_index)
- if issubclass(sort_field_types[sort_index], String):
- sort_field = 'lower({})'.format(sort_field)
- query = query.order_by(dir(sort_field))
- if normalized_sort_fields:
- query = query.order_by("_table_name")
- rows = Crud._limit_query(query, limit, offset).all()
-
- result_table = {}
- result_order = {}
- query_index_table = {}
- for i, row in enumerate(rows):
- id = str(row[0])
- model = Session.resolve_model(row[1])
- query_index = row[2]
- result_table.setdefault(model, []).append(id)
- result_order[id] = i
- query_index_table[id] = query_index
-
- for model, ids in result_table.items():
- result_table[model] = session.query(model).filter(model.id.in_(ids)).all()
-
- ordered_results = len(result_order) * [None]
- for model, instances in result_table.items():
- for instance in instances:
- ordered_results[result_order[instance.id]] = instance
- results = [r for r in ordered_results if r is not None]
-
- return {'total': total, 'results': [r.crud_read(data[query_index_table[r.id]]) for r in results]}
- else:
- return {'total': 0, 'results': []}
-
- @crud_notifies.__func__
- def create(data):
- """
- Create a model object using the provided data specifications.
-
- @param data: one or more data specification (as c{dict} or [c{dict}]),
- corresponding to the format of the data specification parameter
- described in the module-level docstrings. A new object will be created
- for each data specification dictionary provided.
-        @return: a list of the created objects, as returned by crud_read
- """
- data = normalize_data(data)
- if any('_model' not in attrs for attrs in data):
- raise CrudException('_model is required to create a new item')
-
- created = []
- with Session() as session:
- for attrs in data:
- model = Session.resolve_model(attrs['_model'])
- instance = model()
- session.add(instance)
- instance.crud_create(**attrs)
- session.flush() # any items that were created should now be queryable
- created.append(instance.crud_read())
- return created
-
- @crud_notifies.__func__
- def update(query, data):
- """
- Get the model objects matching the supplied query parameters,
- setting the fields of the resulting objects to the values specified in
- the data specification parameter
-
-        @param query: one or more queries (as c{dict} or [c{dict}]), corresponding
- to the format of the query parameter described in the module-level
- docstrings. This query parameter will be normalized
- @param data: one or more data specification (as c{dict} or [c{dict}]),
- corresponding to the format of the data specification parameter
- described in the module-level docstrings. The length of the data
- parameter should be N, where N is the number of queries after
- normalization
- @return: True if the objects were successfully updated
- """
- filters = normalize_query(query)
- data = normalize_data(data, len(filters))
- with Session() as session:
- for filter, attrs in zip(filters, data):
- model = Session.resolve_model(filter['_model'])
- for instance in Crud._filter_query(session.query(model), model, filter):
- instance.crud_update(**attrs)
- # any items that were created should now be queryable
- session.flush()
- return True
-
- @crud_notifies.__func__
- def delete(query):
- """
- Delete the model objects matching the supplied query parameters
-
-        @param query: one or more queries (as c{dict} or [c{dict}]), corresponding
-            to the format of the query parameter described in the module-level
-            docstrings. This query parameter will be normalized
-        @return: the number of objects that were deleted
- """
- deleted = 0
- filters = normalize_query(query)
- with Session() as session:
- for filter in filters:
- model = Session.resolve_model(filter['_model'])
- if getattr(model, '_crud_perms', {}).get('can_delete', False):
- to_delete = Crud._filter_query(session.query(model), model, filter)
- count = to_delete.count()
- assert count in [0, 1], "each query passed to crud.delete must return at most 1 item"
- if count == 1:
- # don't log if there wasn't actually a deletion
- item_to_delete = to_delete.one()
- session.delete(item_to_delete)
- deleted += count
- return deleted
-
- return Crud()
-
-
-class memoized(object):
- """
- Decorator. Caches a function's return value each time it is called.
- If called later with the same arguments, the cached value is returned
- (not reevaluated).
-
- from http://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
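-
-    Example:
-        @memoized
-        def fib(n):
-            return n if n < 2 else fib(n - 1) + fib(n - 2)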
- """
- def __init__(self, func):
- self.func = func
- self.cache = {}
-
- def __call__(self, *args):
- try:
- return self.cache[args]
- except KeyError:
- value = self.func(*args)
- self.cache[args] = value
- return value
- except TypeError:
- # uncachable -- for instance, passing a list as an argument.
- # Better to not cache than to blow up entirely.
- return self.func(*args)
-
- def __repr__(self):
- """Return the function's docstring."""
- return self.func.__doc__
-
- def __get__(self, obj, objtype):
- """Support instance methods."""
- return functools.partial(self.__call__, obj)
-
-
-class CrudMixin(object):
- extra_defaults = []
- type_casts = {uuid.UUID: str}
- type_map = {}
- type_map_defaults = {
- int: 'int',
- six.binary_type: 'string',
- six.text_type: 'string',
- float: 'float',
- datetime: 'date',
- date: 'date',
- time: 'date',
- bool: 'boolean',
- uuid.UUID: 'string',
- String: 'string',
- UnicodeText: 'string',
- Text: 'string',
- DateTime: 'date',
- Integer: 'int',
- Boolean: 'boolean',
- }
-
- # override what attribute names will show in the repr (defaults to primary keys and unique constraints)
- _repr_attr_names = ()
- # in addition to any default attributes, also show these in the repr
- _additional_repr_attr_names = ()
-
- @classmethod
- def _get_unique_constraint_column_names(cls):
- """
- Utility function for getting and then caching the column names
- associated with all the unique constraints for a given model object.
- This assists in fetching an existing object using the value of unique
- constraints in addition to the primary key of id.
- """
- if not hasattr(cls, '_unique_constraint_attributes'):
- cls._unique_constraint_attributes = [[column.name for column in constraint.columns]
- for constraint in cls.__table__.constraints
- if isinstance(constraint, UniqueConstraint)]
- return cls._unique_constraint_attributes
-
- @classmethod
- def _get_primary_key_names(cls):
- if not hasattr(cls, '_pk_names'):
- cls._pk_names = [column.name for column in cls.__table__.primary_key.columns]
- return cls._pk_names
-
- @classmethod
- def _create_or_fetch(cls, session, value, **backref_mapping):
- """
-        Fetch an existing instance of this class, or create a new one. Fetching
-        uses the value positional argument (the id if available, otherwise any
-        keys that correspond to unique constraints). In both cases the instance
-        will still need to be updated with whatever new values you want.
-
- @param cls: The class object we're going to fetch or create a new one of
- @param session: the session object
- @param value: the dictionary value to fetch with
- @param backref_mapping: the backref key name and value of the "parent"
- object of the object you're fetching or about to create. If the
- backref value of a fetched instance is not the same as the value
- of what's passed in, we will instead create a new instance. This is
- because we want to prevent "stealing" an existing object in a
- one-to-one relationship unless an id is explicitly passed
- @return: a previously existing or new (and added to the session) model
- instance
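-
-        Example (hypothetical Team/Player models, as in _merge_relations below):
-            player = Player._create_or_fetch(session, {'name': 'x'}, team_id=team.id)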
- """
- assert len(backref_mapping) <= 1, 'only one backref key is allowed at this time: {}'.format(backref_mapping)
- if backref_mapping:
- backref_name = list(backref_mapping.keys())[0]
- parent_id = backref_mapping[backref_name]
- else:
- backref_name, parent_id = None, None
-
- id = None
- if isinstance(value, Mapping):
- id = value.get('id', None)
- elif isinstance(value, six.string_types):
- id = value
-
- instance = None
- if id is not None:
- try:
- instance = session.query(cls).filter(cls.id == id).first()
-            except Exception:
- log.error('Unable to fetch instance based on id value %s', value, exc_info=True)
- raise TypeError('Invalid instance ID type for relation: {0.__name__} (value: {1})'.format(cls, value))
- elif isinstance(value, Mapping):
- # if there's no id, check to see if we're provided a dictionary
- # that includes all of the columns associated with a UniqueConstraint.
- for column_names in cls._get_unique_constraint_column_names():
- if all((name in value and value[name]) for name in column_names):
- # all those column names are provided,
- # use that to query by chaining together all the necessary
- # filters to construct that query
- q = session.query(cls)
- filter_kwargs = {name: value[name] for name in column_names}
- try:
- instance = q.filter_by(**filter_kwargs).one()
- except NoResultFound:
- continue
- except MultipleResultsFound:
- log.error('multiple results found for %s unique constraint: %s', cls.__name__, column_names)
- raise
- else:
- break
- else:
- log.debug('unable to search using unique constraints: %s with %s', column_names, value)
-
- if instance and id is None and backref_mapping and getattr(instance, backref_name, None) != parent_id:
- log.warning('attempting to change the owner of %s without an explicitly passed id; a new %s instance will be used instead', instance, cls.__name__)
- instance = None
-
- if not instance:
- log.debug('creating new: %s with id %s', cls.__name__, id)
- if id is None:
- instance = cls()
- else:
- instance = cls(id=id)
- session.add(instance)
- return instance
-
- @property
- def _type_casts_for_to_dict(self):
- if not hasattr(self, '_to_dict_type_cast_mapping'):
- type_casts = CrudMixin.type_casts.copy()
- type_casts.update(self.type_casts)
- self._to_dict_type_cast_mapping = defaultdict(lambda: lambda x: x, type_casts)
- return self._to_dict_type_cast_mapping
-
- @class_property
- def to_dict_default_attrs(cls):
- attr_names = []
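-        # collect plain data attributes: mapped columns qualify, while hybrid
-        # properties, regular properties, clause elements, association proxies,
-        # and other callables are excluded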
- for name in collect_ancestor_attributes(cls, terminal_cls=cls.BaseClass):
- if not name.startswith('_') or name in cls.extra_defaults:
- attr = getattr(cls, name)
-
- is_column_property = isinstance(attr, InstrumentedAttribute) and isinstance(attr.property, ColumnProperty)
- is_hybrid_property = isinstance(getattr(attr, 'descriptor', None), hybrid_property)
- is_property = isinstance(attr, (property, InstrumentedAttribute, ClauseElement, AssociationProxy))
- is_callable = callable(attr)
-
- if is_column_property or not (is_hybrid_property or is_property or is_callable):
- attr_names.append(name)
- return attr_names
-
- def to_dict(self, attrs=None, validator=lambda self, name: True):
- obj = {}
- attrs = normalize_object_graph(attrs)
-
- # it's still possible for the client to blacklist this, but by default
- # we're going to include them
- if attrs is None or attrs.get('_model', True):
- obj['_model'] = self.__class__.__name__
- if attrs is None or attrs.get('id', True):
- obj['id'] = self.id
-
- def cast_type(value):
- # ensure that certain types are cast appropriately for daily usage
- # e.g. we want the result of HashedPasswords to be the string
- # representation instead of the object
- return self._type_casts_for_to_dict[value.__class__](value)
-
- if attrs is None:
- for name in self.to_dict_default_attrs:
- if validator(self, name):
- obj[name] = cast_type(getattr(self, name))
- else:
- for name in self.extra_defaults + list(attrs.keys()):
-                # if we're not supposed to get the attribute according to the
-                # validator, OR the client intentionally blacklisted it, skip this value
- if not validator(self, name) or not attrs.get(name, True):
- continue
- attr = getattr(self, name, None)
- if isinstance(attr, self.BaseClass):
- obj[name] = attr.to_dict(attrs[name], validator)
- elif isinstance(attr, (list, set, tuple, frozenset)):
- obj[name] = []
- for item in attr:
- if isinstance(item, self.BaseClass):
- obj[name].append(item.to_dict(attrs[name], validator))
- else:
- obj[name].append(item)
- elif callable(attr):
- obj[name] = cast_type(attr())
- else:
- obj[name] = cast_type(attr)
-
- return obj
-
- def from_dict(self, attrs, validator=lambda self, name, val: True):
- relations = []
- # merge_relations modifies the dictionaries that are passed to it in
- # order to support updates in deeply-nested object graphs. To ensure
- # that we don't have dirty state between applying updates to different
- # model objects, we need a fresh copy
- attrs = deepcopy(attrs)
- for name, value in attrs.items():
- if not name.startswith('_') and validator(self, name, value):
- attr = getattr(self.__class__, name)
- if isinstance(attr, InstrumentedAttribute) and isinstance(attr.property, RelationshipProperty):
- relations.append((name, value))
- else:
- setattr(self, name, value)
-
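-        # sort so that relations backed by a single non-nullable, non-primary-key
-        # foreign key (i.e. required relations) are merged first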
- def required(kv):
- cols = list(getattr(self.__class__, kv[0]).property.local_columns)
- return len(cols) != 1 or cols[0].primary_key or cols[0].nullable
- relations.sort(key=required)
-
- for name, value in relations:
- self._merge_relations(name, value, validator)
-
- return self
-
- @classmethod
- @memoized
- def _get_one_to_many_foreign_key_attr_name_if_applicable(cls, name):
- attr = getattr(cls, name, None)
- if attr is None:
- return None
-
- remote_side = getattr(attr.property, 'remote_side', None)
- if remote_side is None:
- return None
-
- if len(remote_side) != 1:
- # there's a lookup table involved here, and we're not going to handle that
- return None
- [remote_column] = remote_side
-
- if not getattr(remote_column, 'foreign_keys', set()):
- # tags don't actually have foreign keys set, but they need to be treated as the same
- if name == 'tags':
- log.debug('special-case handling for tags, returning: %s', remote_column.name)
- return remote_column.name
- else:
-                # the implication here could be that we're the many side of a
-                # many-to-one or many-to-many. That hasn't been borne out in
-                # testing, but we'll skip it just in case
- return None
- else:
- # return "our" attribute name for the remote model object
- return remote_column.name
-
- def _merge_relations(self, name, value, validator=lambda self, name, val: True):
- attr = getattr(self.__class__, name)
- if (not isinstance(attr, InstrumentedAttribute) or
- not isinstance(attr.property, RelationshipProperty)):
- return
-
- session = orm.Session.object_session(self)
- assert session, "cannot call _merge_relations on objects not attached to a session"
-
- property = attr.property
- relation_cls = property.mapper.class_
-
-        # e.g., if this is a Team with many Players and we're handling the
-        # attribute name "players", we want to set the team_id on all
-        # dictionary representations of those players
- backref_id_name = self._get_one_to_many_foreign_key_attr_name_if_applicable(name)
- original_value = getattr(self, name)
-
- if is_listy(original_value):
- new_insts = []
- if value is None:
- value = []
-
- if isinstance(value, six.string_types):
- value = [value]
-
- for i in value:
- if backref_id_name is not None and isinstance(i, dict) and not i.get(backref_id_name):
- i[backref_id_name] = self.id
- relation_inst = relation_cls._create_or_fetch(session, i, **{backref_id_name: self.id} if backref_id_name else {})
- if isinstance(i, dict):
- relation_inst.from_dict(i, _crud_write_validator if relation_inst._sa_instance_state.identity else _crud_create_validator)
- new_insts.append(relation_inst)
-
- relation = original_value
- remove_insts = [stale_inst for stale_inst in relation if stale_inst not in new_insts]
-
- for stale_inst in remove_insts:
- relation.remove(stale_inst)
- if property.cascade.delete_orphan:
- session.delete(stale_inst)
-
- for new_inst in new_insts:
- if new_inst.id is None or new_inst not in relation:
- relation.append(new_inst)
-
- elif isinstance(value, (collections.abc.Mapping, six.string_types)):
-            if backref_id_name is not None and isinstance(value, collections.abc.Mapping) and not value.get(backref_id_name):
- # if this is a dictionary, it's possible we're going to be
- # creating a new thing, if so, we'll add a backref to the
- # "parent" if one isn't already set
- value[backref_id_name] = self.id
-
- relation_inst = relation_cls._create_or_fetch(session, value)
- stale_inst = original_value
- if stale_inst is None or stale_inst.id != relation_inst.id:
- if stale_inst is not None and property.cascade.delete_orphan:
- session.delete(stale_inst)
-
- if isinstance(value, collections.abc.Mapping):
- relation_inst.from_dict(value, validator)
-                session.flush([relation_inst])  # we want this to be queryable for other things
-
- setattr(self, name, relation_inst)
-
- elif value is None:
- # the first branch handles the case of setting a many-to-one value
- # to None. So this is for the one-to-one-mapping case
- # Setting a relation to None is nullifying the relationship, which
- # has potential side effects in the case of cascades, etc.
- setattr(self, name, value)
- stale_inst = original_value
- if stale_inst is not None and property.cascade.delete_orphan:
- session.delete(stale_inst)
-
- else:
-            raise TypeError('merging relations on {1} not supported for values '
-                            'of type: {0.__class__.__name__} '
-                            '(value: {0})'.format(value, name))
-
- def __setattr__(self, name, value):
- if name in getattr(self, '_validators', {}):
- for val_dict in self._validators[name]:
- if not val_dict['model_validator'](self, value):
- raise ValueError('validation failed for {.__class__.__name__}'
- '.{} with value {!r}: {}'.format(self, name, value,
- val_dict.get('validator_message')))
- object.__setattr__(self, name, value)
-
- def crud_read(self, attrs=None):
- return self.to_dict(attrs, validator=_crud_read_validator)
-
- def crud_create(self, **kwargs):
- return self.from_dict(kwargs, validator=_crud_create_validator)
-
- def crud_update(self, **kwargs):
- return self.from_dict(kwargs, validator=_crud_write_validator)
-
- def __repr__(self):
- """
- useful string representation for logging. Reprs do NOT return unicode
- on Python 2, since python decodes it using the default encoding:
- http://bugs.python.org/issue5876
- """
- # if no repr attr names have been set, default to the set of all
- # unique constraints. This is unordered normally, so we'll order and
- # use it here
- if not self._repr_attr_names:
- # this flattens the unique constraint list
- _unique_attrs = chain.from_iterable(self._get_unique_constraint_column_names())
- _primary_keys = self._get_primary_key_names()
-
- attr_names = tuple(sorted(set(chain(_unique_attrs,
- _primary_keys,
- self._additional_repr_attr_names))))
- else:
- attr_names = self._repr_attr_names
-
- if not attr_names and hasattr(self, 'id'):
- # there should be SOMETHING, so use id as a fallback
- attr_names = ('id',)
-
- if attr_names:
- _kwarg_list = ' '.join('%s=%s' % (name, repr(getattr(self, name, 'undefined')))
- for name in attr_names)
- kwargs_output = ' %s' % _kwarg_list
- else:
- kwargs_output = ''
-
- # specifically using the string interpolation operator and the repr of
- # getattr so as to avoid any "hilarious" encode errors for non-ascii
- # characters
- u = '<%s%s>' % (self.__class__.__name__, kwargs_output)
- return u if six.PY3 else u.encode('utf-8')
-
-
-def _crud_read_validator(self, name):
- _crud_perms = getattr(self, '_crud_perms', None)
- if _crud_perms is not None and not _crud_perms.get('read', True):
- raise ValueError('Attempt to read non-readable model {}'.format(self.__class__.__name__))
- elif name in self.extra_defaults:
- return True
- elif _crud_perms is None:
- return not name.startswith('_')
- else:
- return name in _crud_perms.get('read', {})
-
-
-def _crud_write_validator(self, name, value=None):
- _crud_perms = getattr(self, '_crud_perms', None)
- if getattr(self, name, None) == value:
- return True
- elif not _crud_perms or not _crud_perms.get('update', False):
- raise ValueError('Attempt to update non-updateable model {}'.format(self.__class__.__name__))
- elif name not in _crud_perms.get('update', {}):
- raise ValueError('Attempt to update non-updateable attribute {}.{}'.format(self.__class__.__name__, name))
- else:
- return name in _crud_perms.get("update", {})
-
-
-def _crud_create_validator(self, name, value=None):
- _crud_perms = getattr(self, '_crud_perms', {})
- if not _crud_perms or not _crud_perms.get('can_create', False):
- raise ValueError('Attempt to create non-createable model {}'.format(self.__class__.__name__))
- else:
- return name in _crud_perms.get("create", {})
-
-
-def _isdata(obj):
- """
-    Stolen from inspect's classify_class_attrs function; determines whether
-    the provided object is plain data (as opposed to a method, classmethod,
-    staticmethod, or property) provided at the class level. If True, it is
-    assumed that this obj does not have a meaningful __doc__ attribute, and a
-    description should be provided via the data_spec initialization argument
- """
- # Classify the object.
- if isinstance(obj, staticmethod):
- return False
- elif isinstance(obj, classmethod):
- return False
- elif isinstance(obj, property):
- return False
- elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj):
- return False
- else:
- return True
-
-
-class crudable(object):
- """
- Convenience decorator for specifying what methods of a model object
- instance can be interacted with via the CRUD API
-
- Intended to be used in the sa module for SQLAlchemy model classes i.e.:
- @crudable(
- create=True,
- read=['__something'],
- no_read=['password'],
- update=[],
- no_update=[],
- delete=True,
- data_spec={
- attr={
- read=True,
- update=True,
- desc="description"
- defaultValue=
- validators={
- '',
- }
- })
- class MyModelObject(Base):
- ...
-
-
- and the resulting object will have a class attribute of "crud_spec" holding
- a dictionary of:
-
-    {create: True/False,
-     read: {...},
-     update: {...},
-     delete: True/False,
-     data_spec: {
-         manually_specified_attr: {
-             desc: "description",
-             type: "",
-             read: True/False,  # only needed if attribute is unspecified
-             update: True/False
-         },
-         attr_with_manual_description: {
-             desc: "description",
-             type: ""
-         }
-     }
-    }
-
- @cvar never_read: a tuple of attribute names that default to being
- not readable
- @cvar never_update: a tuple of attribute names that default to being
- not updatable
- @cvar always_create: a tuple of attribute names that default to being
- always creatable
- @cvar default_labels: a dict of attribute name and desired label pairs,
- to simplify setting the same label for each and every instance of an
- attribute name
- """
-
- never_read = ('metadata',)
- never_update = ('id',)
- always_create = ('id',)
- default_labels = {'addr': 'Address'} # TODO: allow plugins to define this; Sideboard core is not the place to encode addr/Address
-
- def __init__(self, can_create=True,
- create=None, no_create=None,
- read=None, no_read=None,
- update=None, no_update=None,
- can_delete=True,
- data_spec=None):
- """
- @param can_create: if True (default), the decorated class can be
- created
- @type can_create: C{bool}
-        @param create: if provided, interpreted as the attribute names that can
-            be specified when the object is created, in addition to the items
-            that are updateable. If not provided (default), all attributes that
-            can be updated, plus id, are allowed to be passed to the create method
- @param no_create: if provided, interpreted as the attribute names that
- will not be allowed to be passed to create, taking precedence over
- anything specified in the create parameter. If not provided
- (default) everything allowed by the create parameter will be
- acceptable.
- @param read: if provided, interpreted as the attribute names that can
- be read, and ONLY these names can be read. If not provided
- (default) all attributes not starting with an underscore
- (e.g. __str__, or _hidden) will be readable
- @type read: C{collections.abc.Iterable}
- @param no_read: if provided, interpreted as the attribute names that
- can't be read, taking precedence over anything specified in the
- read parameter. If not provided (default) everything allowed by
- the read parameter will be readable
- @type no_read: C{collections.abc.Iterable}
-        @param update: if provided, interpreted as the attribute names that can
-            be updated, in addition to the list of items that are readable. If
-            None (default), this defaults to the list of readable attributes.
-            Pass an empty iterable to use the default behavior listed under the
-            read docstring if there were attributes passed to read that you
-            don't want update to default to
- @type update: C{collections.abc.Iterable}
-        @param no_update: if provided, interpreted as the attribute names that
-            can't be updated, taking precedence over anything specified in the
-            update parameter. If None (default), this defaults to the list of
-            non-readable attributes. Pass an empty iterable to use the default
-            behavior listed under the no_read docstring if there were
-            attributes passed to no_read that you don't want no_update to
-            default to
- @type no_update: C{collections.abc.Iterable}
- @param can_delete: if True (default), the decorated class can be
- deleted
- @type can_delete: C{bool}
- @param data_spec: any additional information that should be added to
- the L{model.get_crud_definition}. See that function for
- complete documentation, but key items are:
- "desc" - Human-readable description, will default to docstrings if
- available, else not be present in the final spec
- "label" - a Human-readable short label to help remember the purpose
- of a particular field, without going into detail. If not
- specifically provided, it will not be present in the spec
- "type" - the human-readable "type" for an attribute meaning that a
- conversion to this type will be performed on the server. If
- possible this will be determined automatically using
- isinstance(), otherwise "auto" will be set:
- auto (default) - no type conversion
- string - C{str}
- boolean - C{bool}
- int - C{int}
- float - C{float}
- "defaultValue" - the value that is considered the default, either
- because a model instance will use this default value if
- unspecified, or a client should present this option as the
- default for a user
- "validators" - a c{dict} mapping a validator name (e.g. "max") and
- the value to be used in validation (e.g. 1000, for a max value
- of 1000). This is intended to support client side validation
- """
-
- self.can_create = can_create
- self.can_delete = can_delete
- if no_update is not None and create is None:
- create = deepcopy(no_update)
- self.read = read or []
- self.no_read = no_read or []
- self.update = update or []
- self.no_update = no_update or [x for x in self.no_read if x not in self.update]
- self.create = create or []
- self.no_create = no_create or [x for x in self.no_update if x not in self.create]
-
- self.no_read.extend(self.never_read)
- self.no_update.extend(self.never_update)
-
- self.data_spec = data_spec or {}
-
- def __call__(self, cls):
- def _get_crud_perms(cls):
- if getattr(cls, '_cached_crud_perms', False):
- return cls._cached_crud_perms
-
- crud_perms = {
- 'can_create': self.can_create,
- 'can_delete': self.can_delete,
- 'read': [],
- 'update': [],
- 'create': []
- }
-
-            read = list(self.read)  # copy so we don't mutate the decorator's own list
- for name in collect_ancestor_attributes(cls):
- if not name.startswith('_'):
- attr = getattr(cls, name)
- if (isinstance(attr, (InstrumentedAttribute, property, ClauseElement)) or
- isinstance(attr, (int, float, bool, datetime, date, time, six.binary_type, six.text_type, uuid.UUID))):
- read.append(name)
- read = list(set(read))
- for name in read:
- if not self.no_read or name not in self.no_read:
- crud_perms['read'].append(name)
-
- update = self.update + deepcopy(crud_perms['read'])
- update = list(set(update))
- for name in update:
- if not self.no_update or name not in self.no_update:
- if name in cls.__table__.columns:
- crud_perms['update'].append(name)
- else:
- attr = getattr(cls, name)
- if isinstance(attr, property) and getattr(attr, 'fset', False):
- crud_perms['update'].append(name)
- elif (isinstance(attr, InstrumentedAttribute) and
- isinstance(attr.property, RelationshipProperty) and
- attr.property.viewonly != True):
- crud_perms['update'].append(name)
-
- create = self.create + deepcopy(crud_perms['update'])
- for name in self.always_create:
- create.append(name)
- if name in self.no_create:
- self.no_create.remove(name)
- create = list(set(create))
- for name in create:
- if not self.no_create or name not in self.no_create:
- crud_perms['create'].append(name)
-
- cls._cached_crud_perms = crud_perms
- return cls._cached_crud_perms
-
- def _get_crud_spec(cls):
- if getattr(cls, '_cached_crud_spec', False):
- return cls._cached_crud_spec
-
- crud_perms = cls._crud_perms
-
- field_names = list(set(crud_perms['read']) | set(crud_perms['update']) |
- set(crud_perms['create']) | set(self.data_spec.keys()))
- fields = {}
- for name in field_names:
-                # json is implicitly unicode, and since this will eventually
-                # be serialized as json, it's convenient to have it in that
-                # form early
-
-                # collect each validator's spec kwargs for this attribute; if
-                # different validation decorators or data spec entries specify
-                # the same key (e.g. two different expected values for
-                # maxLength), the last one encountered wins, so avoid
-                # specifying conflicting values
- field_validator_kwargs = {
- spec_key_name: spec_value
- # collect each spec_kwarg for all validators of an attribute
- for crud_validator_dict in getattr(cls, '_validators', {}).get(name, [])
- for spec_key_name, spec_value in crud_validator_dict.get('spec_kwargs', {}).items()
- }
-
- if field_validator_kwargs:
- self.data_spec.setdefault(name, {})
- # manually specified crud validator keyword arguments
- # overwrite the decorator-supplied keyword arguments
- field_validator_kwargs.update(self.data_spec[name].get('validators', {}))
- self.data_spec[name]['validators'] = field_validator_kwargs
-
- name = six.text_type(name)
- field = deepcopy(self.data_spec.get(name, {}))
- field['name'] = name
- try:
- attr = getattr(cls, name)
- except AttributeError:
- # if the object doesn't have the attribute, AND it's in the field
- # list, that means we're assuming it was manually specified in the
- # data_spec argument
- fields[name] = field
- continue
-
- field['read'] = name in crud_perms['read']
- field['update'] = name in crud_perms['update']
- field['create'] = name in crud_perms['create']
-
- if field['read'] or field['update'] or field['create']:
- fields[name] = field
- elif name in fields:
- del fields[name]
- continue
-
- if 'desc' not in field and not _isdata(attr):
-                # no desc specified, and there's a relevant docstring, so use it
-
- # if there's 2 consecutive newlines, assume that there's a
- # separator in the docstring and that the top part only
- # is the description, if there's not, use the whole thing.
- # Either way, replace newlines with spaces since docstrings often
- # break the same sentence over new lines due to space
- doc = inspect.getdoc(attr)
- if doc:
- doc = doc.partition('\n\n')[0].replace('\n', ' ').strip()
- field['desc'] = doc
-
- if 'type' not in field:
- if isinstance(attr, InstrumentedAttribute) and isinstance(attr.property, ColumnProperty):
- field['type'] = cls._type_map.get(type(attr.property.columns[0].type), 'auto')
- field_default = getattr(attr.property.columns[0], 'default', None)
-                        # only put the default here if it exists, and it's not an automatic thing like datetime.utcnow()
- if field_default is not None and field['type'] != 'auto' and not isinstance(field_default.arg, (collections.abc.Callable, property)):
- field['defaultValue'] = field_default.arg
- elif hasattr(attr, "default"):
- field['defaultValue'] = attr.default
- else:
- field['type'] = cls._type_map.get(type(attr), 'auto')
- # only set a default if this isn't a property or some other kind of "constructed attribute"
- if field['type'] != 'auto' and not isinstance(attr, (collections.abc.Callable, property)):
- field['defaultValue'] = attr
- if isinstance(attr, InstrumentedAttribute) and isinstance(attr.property, RelationshipProperty):
- field['_model'] = attr.property.mapper.class_.__name__
-
- crud_spec = {'fields': fields}
- cls._cached_crud_spec = crud_spec
- return cls._cached_crud_spec
-
- def _type_map(cls):
- return dict(cls.type_map_defaults, **cls.type_map)
-
- cls._type_map = class_property(_type_map)
- cls._crud_spec = class_property(_get_crud_spec)
- cls._crud_perms = class_property(_get_crud_perms)
- return cls
-
-
-class crud_validation(object):
- """
- Base class for adding validators to a model, supporting adding to the crud
- spec, or to the save action
- """
- def __init__(self, attribute_name, model_validator, validator_message, **spec_kwargs):
- """
- @param attribute_name: the name of the attribute to set this validator
- for
-        @param model_validator: the c{collections.abc.Callable} that will accept
-            the value of the attribute and return False or None if invalid, or
-            True if the value is valid. This is used when setting the attribute
-            on the python instance
-        @param validator_message: message to print if the model validation fails
-        @param spec_kwargs: the key/value pairs that should be added to the
-            crud spec for this attribute name. This generally supports making
-            the same sorts of validations in a client (e.g. javascript)
- """
- self.attribute_name = attribute_name
- self.model_validator = model_validator
- self.validator_message = validator_message
- self.spec_kwargs = spec_kwargs
-
- def __call__(self, cls):
- if not hasattr(cls, '_validators'):
- cls._validators = {}
- else:
- # in case we subclass something with a _validators attribute
- cls._validators = deepcopy(cls._validators)
-
- cls._validators.setdefault(self.attribute_name, []).append({
- 'model_validator': self.model_validator,
- 'validator_message': self.validator_message,
- 'spec_kwargs': self.spec_kwargs
- })
- return cls
-
-
-class text_length_validation(crud_validation):
- def __init__(self, attribute_name, min_length=None, max_length=None,
- min_text='The minimum length of this field is {0}.',
- max_text='The maximum length of this field is {0}.',
- allow_none=True):
-
- def model_validator(instance, text):
- if not text:
- return allow_none
- text_length = len(six.text_type(text))
- return all([min_length is None or text_length >= min_length,
- max_length is None or text_length <= max_length])
-
- kwargs = {}
- if min_length is not None:
- kwargs['minLength'] = min_length
-        if min_text is not None:
- kwargs['minLengthText'] = min_text
- if max_length is not None:
- kwargs['maxLength'] = max_length
- if max_text is not None:
- kwargs['maxLengthText'] = max_text
-
- message = 'Length of value should be between {} and {} (inclusive; None means no min/max).'.format(min_length, max_length)
- crud_validation.__init__(self, attribute_name, model_validator, message, **kwargs)
-
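-# Example usage (a hypothetical sketch, not from the original source):
-#   @text_length_validation('name', min_length=1, max_length=100)
-#   class Widget(Base):
-#       name = Column(UnicodeText())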
-
-class regex_validation(crud_validation):
- def __init__(self, attribute_name, regex, message):
-
- def regex_validator(instance, text):
- # if the field isn't nullable, that will trigger an error later at the sqla level,
- # but since None can't be passed to a re.search we want to pass this validation check
- if text is None:
- return True
-
- # we don't want to actually send across the match object if it did match,
- # so leverage the fact that failing searches or matches return None types
- return re.search(regex, text) is not None
-
- crud_validation.__init__(self, attribute_name, regex_validator, message,
- regexText=message, regexString=regex)
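-
-# Example usage (a hypothetical sketch, not from the original source):
-#   @regex_validation('zip_code', r'^\d{5}$', 'zip code must be exactly 5 digits')
-#   class Address(Base):
-#       zip_code = Column(UnicodeText())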
diff --git a/sideboard/run_debug_server.py b/sideboard/run_debug_server.py
deleted file mode 100644
index 9f80019..0000000
--- a/sideboard/run_debug_server.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from __future__ import unicode_literals
-from sideboard.debugging import debugger_helpers_all_init
-
-import cherrypy
-
-if __name__ == '__main__':
- debugger_helpers_all_init()
-
- cherrypy.engine.start()
- cherrypy.engine.block()
diff --git a/sideboard/run_mainloop.py b/sideboard/run_mainloop.py
deleted file mode 100644
index 2b3b59d..0000000
--- a/sideboard/run_mainloop.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from __future__ import unicode_literals
-import os
-import argparse
-
-from sideboard.lib import mainloop, entry_point, log
-
-parser = argparse.ArgumentParser(description='Run Sideboard as a daemon without starting CherryPy')
-parser.add_argument('--pidfile', required=True, help='absolute path of file where process pid will be stored')
-
-
-@entry_point
-def mainloop_daemon():
- log.info('starting Sideboard daemon process')
- args = parser.parse_args()
- if os.fork() == 0:
- pid = os.fork()
- if pid == 0:
- mainloop()
- else:
- log.debug('writing pid (%s) to pidfile (%s)', pid, args.pidfile)
- try:
- with open(args.pidfile, 'w') as f:
- f.write('{}'.format(pid))
-        except Exception:
- log.error('unexpected error writing pid (%s) to pidfile (%s)', pid, args.pidfile, exc_info=True)
-
-
-@entry_point
-def mainloop_foreground():
- mainloop()
diff --git a/sideboard/server.py b/sideboard/server.py
index 4606020..c89fc45 100755
--- a/sideboard/server.py
+++ b/sideboard/server.py
@@ -1,26 +1,14 @@
from __future__ import unicode_literals
-import os
-import sys
-
-import six
import cherrypy
-from sideboard.lib import config, threadlocal
-
-
-def reset_threadlocal():
- threadlocal.reset(**{field: cherrypy.session.get(field) for field in config['ws.session_fields']})
-
-cherrypy.tools.reset_threadlocal = cherrypy.Tool('before_handler', reset_threadlocal, priority=51)
+from sideboard.lib import config
cherrypy_config = {}
for setting, value in config['cherrypy'].items():
- if isinstance(value, six.string_types):
+ if isinstance(value, str):
if value.isdigit():
value = int(value)
elif value.lower() in ['true', 'false']:
value = value.lower() == 'true'
- elif six.PY2:
- value = value.encode('utf-8')
cherrypy_config[setting] = value
cherrypy.config.update(cherrypy_config)
diff --git a/sideboard/static/angular/sideboard.js b/sideboard/static/angular/sideboard.js
deleted file mode 100644
index e67fd95..0000000
--- a/sideboard/static/angular/sideboard.js
+++ /dev/null
@@ -1,284 +0,0 @@
-
-angular.module('sideboard', [])
- .factory('WebSocketService', function ($q, $log, $window, $timeout, $rootScope) {
- var self = {
- WS_URL: ($window.location.protocol === 'https:' ? 'wss' : 'ws') + '://' + $window.location.host + '/ws',
-
- CONNECTING: WebSocket.CONNECTING,
- OPEN: WebSocket.OPEN,
- CLOSING: WebSocket.CLOSING,
- CLOSED: WebSocket.CLOSED,
-
- POLL_TIMEOUT: 3000,
- POLL_INTERVAL: 30000,
- CALL_TIMEOUT: 10000,
-
- currReconnectWait: 1000,
- MIN_RECONNECT_WAIT: 1000,
- MAX_RECONNECT_WAIT: 30000,
-
- requests: {},
-
- currId: 1,
- nextId: function () {
- return self.currId++;
- },
-
- objectify: function (x) {
- return typeof(x) === 'string' ? {client: x} :
- angular.isObject(x) && !angular.isArray(x) ? x : {};
- },
-
- removeIgnoredField: function (request, field) {
- if (request[field]) {
- $log.warn('ignoring "' + field + '" field in WebSocket RPC request');
- }
- delete request[field];
- },
-
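-            // "single" requests are one-shot RPC calls tracked by a callbackId;
-            // everything else is a subscription tracked by a persistent client id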
- normalizeRequest: function (request, opts) {
- opts = opts || {};
- request = angular.extend({
- error: opts.error || angular.identity,
- callback: opts.callback || angular.identity
- }, self.objectify(request));
- if (opts.single) {
- self.removeIgnoredField(request, 'client');
- request.callbackId = request.callbackId || ('callback-' + self.nextId());
- } else {
- self.removeIgnoredField(request, 'callbackId');
- request.client = request.client || ('client-' + self.nextId());
- }
- return request;
- },
-
- getStatus: function() {
- if (self.ws) {
- return self.ws.readyState;
- } else {
- return self.CLOSED;
- }
- },
- getStatusString: function() {
- return self.isConnecting() ? 'CONNECTING' :
- self.isOpen() ? 'OPEN' :
- self.isClosing() ? 'CLOSING' : 'CLOSED';
- },
- isOpen: function () { return self.getStatus() === self.OPEN; },
- isConnecting: function () { return self.getStatus() === self.CONNECTING; },
- isClosing: function () { return self.getStatus() === self.CLOSING; },
- isClosed: function () { return self.getStatus() === self.CLOSED; },
-
- onNext: function (eventName, callback) {
- var un = $rootScope.$on('WebSocketService.' + eventName, function () {
- try {
- callback();
- } catch(ex) {
- $log.error('error invoking', eventName, 'callback', ex);
- }
- un();
- });
- },
-
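-            // keepalive: periodically call sideboard.poll; if the call times
-            // out, assume the socket is dead and force a reconnect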
- poll: function () {
- self.call({
- method: 'sideboard.poll',
- timeout: self.POLL_TIMEOUT
- }).then(self.schedulePoll, function () {
- $log.error('closing websocket due to poll failure; will attempt to reconnect');
- self.close(1002, 'poll failed');
- self.connect();
- });
- },
- schedulePoll: function () {
- self.stopPolling();
- self._poller = $timeout(self.poll, self.POLL_INTERVAL);
- },
- stopPolling: function () {
- $timeout.cancel(self._poller);
- },
-
- _connect: function () {
- self.ws = new WebSocket(self.WS_URL);
- self.ws.onopen = self.onOpen;
- self.ws.onclose = self.onClose;
- self.ws.onerror = self.onError;
- self.ws.onmessage = self.onMessage;
- },
- connect: function(callback) {
- callback = callback || angular.noop;
- if (self.isConnecting()) {
- self.onNext('open', callback);
- } else if (self.isClosing()) {
- self.onNext('close', function () {
- self.connect(callback);
- });
- } else if (self.isClosed()) {
- self._connect();
- self.onNext('open', callback);
- } else if (self.isOpen()) {
- callback();
- } else {
- $log.error('Error which should never happen: websocket is in an unknown state', self.getStatus());
- }
- },
-
- close: function (code, reason) {
- if (self.ws) {
- try {
- if (!self.isClosed()) {
- self.ws.onopen = self.ws.onclose = self.ws.onerror = self.ws.onmessage = null;
- self.ws.close(code || 1000, reason || 'manual close');
- }
- self.onClose();
- } catch (ex) {
- $log.error('error calling close on', self.getStatusString(), 'websocket', ex);
- }
- delete self.ws;
- }
- },
-
- refireSubscriptions: function () {
- angular.forEach(self.requests, function (request) {
- if (request.method && request.client) {
- self.send(request);
- }
- });
- },
-
- onOpen: function () {
- self.currReconnectWait = self.MIN_RECONNECT_WAIT;
- self.schedulePoll();
- self.refireSubscriptions();
- $rootScope.$broadcast('WebSocketService.open');
- },
- onError: function (event) {
- $log.error('websocket error', event);
- self.close();
- },
- onClose: function () {
- self.stopPolling();
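-                // schedule a reconnect attempt, doubling the wait on each
-                // failure (exponential backoff) up to MAX_RECONNECT_WAIT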
- $timeout(self.connect, self.currReconnectWait);
- self.currReconnectWait = Math.min(self.MAX_RECONNECT_WAIT, 2 * self.currReconnectWait);
- $rootScope.$broadcast('WebSocketService.close');
- },
- onMessage: function (event) {
- var json;
- try {
- json = JSON.parse(event.data || 'null');
- } catch (ex) {
- $log.error('websocket message parse error', event, ex);
- return;
- }
- if (!json || !angular.isObject(json)) {
- $log.error('websocket message parsed to a non-object', json);
- } else {
- self.handleMessage(json);
- }
- },
-
- handleMessage: function (message) {
- var request = self.requests[message.client || message.callback];
- if (request) {
- $log.debug('websocket received', message);
- var funcAttr = message.error ? 'error' : 'callback',
- dataAttr = message.error ? 'error' : 'data';
- try {
- request[funcAttr](message[dataAttr]);
- } catch(ex) {
- $log.error('Error executing websocket', funcAttr, 'function:', ex);
- }
- if (request.callbackId) {
- delete self.requests[request.callbackId];
- }
- $rootScope.$digest();
- } else {
- $log.error('unknown client and/or callback id', message);
- }
- },
-
- send: function(request) {
- if (request.method && (request.client || request.callbackId)) {
- self.requests[request.client || request.callbackId] = request;
- }
- var message = JSON.stringify({
- action: request.action,
- method: request.method,
- params: request.params,
- client: request.client,
- callback: request.callbackId
- });
- $log.debug('websocket send', message);
- self.ws.send(message);
- },
- connectAndSend: function (request) {
- self.connect(function () {
- self.send(request);
- });
- },
-
- subscribe: function(request) {
- request = self.normalizeRequest(request, {single: false});
- if (request.method) {
- self.connectAndSend(request);
- return request.client;
- } else {
- $log.error('"method" is a required field for WebSocketService.subscribe()');
- }
- },
-
- unsubscribe: function() {
- var clients = [];
- angular.forEach(arguments, function (request) {
- request = self.objectify(request);
- if (request.client && self.requests[request.client]) {
- if (request.callback) {
- $log.warn('ignoring callback field, which is invalid for unsubscribe', request);
- }
- clients.push(request.client);
- delete self.requests[request.client];
- } else {
- $log.error('Unsubscribe called with unknown client id', request);
- }
- });
- if (self.isOpen() && clients.length) {
- self.send({action: 'unsubscribe', client: clients});
- }
- },
-
- call: function(request) {
- if (typeof(request) === 'string') {
- request = {
- method: request,
- params: Array.prototype.slice.call(arguments, 1)
- };
- }
- request = self.objectify(request);
- var errorMessage = !request.method ? '"method" required for WebSocketService.call()' :
- request.callback ? '"callback" is not a valid field for WebSocketService.call()' :
- request.error ? '"error" is not a valid field for WebSocketService.call()' : null;
- if (!errorMessage) {
- var deferred = $q.defer();
- request = self.normalizeRequest(request, {
- single: true,
- error: deferred.reject,
- callback: deferred.resolve
- });
- request.timeout = request.timeout || self.CALL_TIMEOUT;
- var rejectAfterTimeout = $timeout(function () {
- $log.error('no response received for', request.timeout, 'milliseconds', request);
- deferred.reject('websocket call timed out');
- }, request.timeout);
- self.connectAndSend(request);
- return deferred.promise.finally(function () {
- $timeout.cancel(rejectAfterTimeout);
- delete self.requests[request.callbackId];
- });
- } else {
- $log.error(errorMessage);
- return $q.reject(errorMessage);
- }
- }
- };
- return self;
- });
diff --git a/sideboard/templates/connections.html b/sideboard/templates/connections.html
deleted file mode 100644
index f62ac69..0000000
--- a/sideboard/templates/connections.html
+++ /dev/null
@@ -1,17 +0,0 @@
-<html>
-<head>
-    <title>Sideboard Connection Tests</title>
-</head>
-<body>
-    <h1>Sideboard Connection Tests</h1>
- {% for service, results in connections.items()|sort %}
-