diff --git a/config.bat b/config.bat new file mode 100644 index 00000000..7b003886 --- /dev/null +++ b/config.bat @@ -0,0 +1,9 @@ +@echo off + +IF "%~1"=="" ( + echo "No docker container name given!" + EXIT /B 2 +) ELSE ( + docker run -it --name configuration-inator %1 python config/make_config.py + docker commit configuration-inator %1 +) \ No newline at end of file diff --git a/config.sh b/config.sh new file mode 100644 index 00000000..aa9cea1d --- /dev/null +++ b/config.sh @@ -0,0 +1,7 @@ +#! /bin/bash +if [ $# -eq 0 ]; then + echo "No docker image name given!" +else + docker run -it --name configuration-inator $1 python config/make_config.py; + docker commit configuration-inator $1; +fi \ No newline at end of file diff --git a/config/make_config.py b/config/make_config.py index 50eaacc2..cd4ed6b8 100644 --- a/config/make_config.py +++ b/config/make_config.py @@ -1,8 +1,9 @@ #!/usr/bin/python ''' -Executable script to create the configuration file for the BMI3D code, a text file called '$BMI3D/config_files/config' +Executable script to create the configuration file for the BMI3D code, a text file called '$BMI3D/config/config' ''' import os +import sys from collections import OrderedDict stuff = OrderedDict() @@ -14,24 +15,32 @@ stuff['plexon IP address'] = dict(addr='10.0.0.13', port=6000) stuff['update_rates'] = dict(hdf_hz=60) +# Add an optional commandline flag to make setup non-interactive +use_defaults = '-y' in sys.argv or '--use-defaults' in sys.argv + from db import settings databases = list(settings.DATABASES.keys()) -for dbname in databases: - stuff['db_config_%s' % dbname] = dict(data_path='/storage') +for db_name in databases: + stuff[f'db_config_{db_name}'] = dict(data_path='/storage') -config_filename = '$BMI3D/config_files/config' +config_filename = '$BMI3D/config/config' config_fh = open(os.path.expandvars(config_filename), 'w') for system_name, system_opts in list(stuff.items()): - config_fh.write('[%s]\n' % system_name) + 
config_fh.write(f'[{system_name}]\n') print(system_name) for option, default in list(system_opts.items()): - print(option, default) - opt_val = input("Enter value for '%s' (default=%s): " % (option, str(default))) - if opt_val == '': + + if use_defaults: + print(f" Using default ({default}) for {option}") opt_val = default - config_fh.write('%s = %s\n' % (option, opt_val)) + else: + opt_val = input(f" Enter value for '{option}' (default={default}): ") + if opt_val == '': + opt_val = default + + config_fh.write(f'{option} = {opt_val}\n') config_fh.write('\n') print() diff --git a/config/namelist.py b/config/namelist.py index de2c86c2..3f72c3d7 100644 --- a/config/namelist.py +++ b/config/namelist.py @@ -2,12 +2,17 @@ Lookup table for features, generators and tasks for experiments ''' -## Get the list of experiment features -from .featurelist import features - -## Get the list of tasks -from .tasklist import tasks +# Get the list of experiment features +try: + from .featurelist import features +except (ImportError, ModuleNotFoundError): + features = {} +# Get the list of tasks +try: + from .tasklist import tasks +except (ImportError, ModuleNotFoundError): + tasks = {} # Derive generator functions from the tasklist (all generatorfunctions should be staticmethods of a task) generator_names = [] @@ -67,4 +72,4 @@ def __getitem__(self, name): ################################################################################ ################################################################################ -from .bmilist import * \ No newline at end of file +# from .bmilist import * \ No newline at end of file diff --git a/db/__init__.py b/db/__init__.py index bb35accc..18ab444a 100644 --- a/db/__init__.py +++ b/db/__init__.py @@ -1,2 +1,8 @@ # from . import websocket -from .tracker import tasktrack \ No newline at end of file +from .tracker import tasktrack + +# This will make sure the app is always imported when +# Django starts so that shared_task will use this app. 
+from .celery_base import app as celery_app + +__all__ = ('celery_app',) \ No newline at end of file diff --git a/db/celery_base.py b/db/celery_base.py new file mode 100644 index 00000000..7d159d64 --- /dev/null +++ b/db/celery_base.py @@ -0,0 +1,22 @@ +from __future__ import absolute_import, unicode_literals +import os +from celery import Celery + +# set the default Django settings module for the 'celery' program. +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'db.settings') + +app = Celery('db') + +# Using a string here means the worker doesn't have to serialize +# the configuration object to child processes. +# - namespace='CELERY' means all celery-related configuration keys +# should have a `CELERY_` prefix. +app.config_from_object('django.conf:settings', namespace='CELERY') + +# Load task modules from all registered Django app configs. +app.autodiscover_tasks() + + +@app.task(bind=True) +def debug_task(self): + print('Request: {0!r}'.format(self.request)) diff --git a/db/runserver.sh b/db/runserver.sh index d0e37a2d..06bfc14b 100755 --- a/db/runserver.sh +++ b/db/runserver.sh @@ -15,7 +15,7 @@ if [ -z "$BMI3D" ] fi #Check /storage (exist ) -storage=$(python $BMI3D/config_files/check_storage.py 2>&1) +storage=$(python $BMI3D/config/check_storage.py 2>&1) if [ $storage == 'False' ]; then echo "/storage does not exist --> if on Ismore, must mount" exit 1 @@ -28,8 +28,8 @@ if [ `ps aux | grep "manage.py runserver" | grep python | wc -l` -gt 0 ]; then fi # Check that a config file is in the correct place, $BMI3D/config -if [ ! -e $BMI3D/config_files/config ]; then - echo "ERROR: cannot find config file! Did you run $BMI3D/config_files/make_config.py?" +if [ ! -e $BMI3D/config/config ]; then + echo "ERROR: cannot find config file! Did you run $BMI3D/config/make_config.py?" 
exit 1 fi diff --git a/db/settings.py b/db/settings.py index 61b45a75..66a386c0 100644 --- a/db/settings.py +++ b/db/settings.py @@ -1,4 +1,3 @@ - ''' Django config file mostly auto-generated when a django project is created. See https://docs.djangoproject.com/en/dev/intro/tutorial01/ for an introduction @@ -8,10 +7,7 @@ import os cwd = os.path.split(os.path.abspath(__file__))[0] -import djcelery -djcelery.setup_loader() # Django settings for db project. - DEBUG = True TEMPLATE_DEBUG = DEBUG @@ -126,7 +122,6 @@ 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', - 'djcelery', 'tracker.apps.TrackerConfig', 'trainbmi' ) diff --git a/db/tracker/__init__.py b/db/tracker/__init__.py index a3bd7ac2..9d2ae1d6 100644 --- a/db/tracker/__init__.py +++ b/db/tracker/__init__.py @@ -1 +1,2 @@ -default_app_config = "tracker.apps.TrackerConfig" \ No newline at end of file +default_app_config = "tracker.apps.TrackerConfig" + diff --git a/db/tracker/admin.py b/db/tracker/admin.py index f2e83f3d..ad1f3ffb 100644 --- a/db/tracker/admin.py +++ b/db/tracker/admin.py @@ -1,20 +1,21 @@ -''' +""" Declare which database tables are visible from Django's default admin interface. 
This file was initially created by Django -''' -from tracker.models import Task, Feature, System, TaskEntry, Calibration, DataFile, Subject, Sequence, Generator, AutoAlignment, Decoder -from django.contrib import admin - +""" +from db.tracker.models import Task, Feature, System, TaskEntry, Calibration, DataFile, Subject, Sequence, Generator, AutoAlignment, Decoder +from django.contrib import admin from django.db.models.signals import post_delete from django.dispatch.dispatcher import receiver + @receiver(post_delete, sender=DataFile) def _mymodel_delete(sender, instance, **kwargs): instance.remove() - + + admin.site.register(Task) admin.site.register(Feature) admin.site.register(System) diff --git a/db/tracker/ajax.py b/db/tracker/ajax.py index b25267c1..0bbd566b 100644 --- a/db/tracker/ajax.py +++ b/db/tracker/ajax.py @@ -1,7 +1,7 @@ -''' +""" Handlers for AJAX (Javascript) functions used in the web interface to start experiments and train BMI decoders -''' +""" import json import numpy as np @@ -10,21 +10,20 @@ from riglib import experiment -from .json_param import Parameters +from db import trainbmi +from db.tracker import exp_tracker +from db.tracker.json_param import Parameters +from db.tracker.models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder -from .models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder - -import trainbmi import logging import traceback -from . import exp_tracker http_request_queue = [] def train_decoder_ajax_handler(request, idx): - ''' + """ AJAX handler for creating a new decoder. Parameters @@ -38,8 +37,8 @@ def train_decoder_ajax_handler(request, idx): ------- Django HttpResponse Indicates 'success' if all commands initiated without error. 
- ''' - ## Check if the name of the decoder is already taken + """ + # Check if the name of the decoder is already taken collide = Decoder.objects.filter(entry=idx, name=request.POST['bminame']) if len(collide) > 0: return _respond(dict(status='error', msg='Name collision -- please choose a different name')) @@ -64,9 +63,9 @@ def train_decoder_ajax_handler(request, idx): class encoder(json.JSONEncoder): - ''' + """ Encoder for JSON data that defines how the data should be returned. - ''' + """ def default(self, o): if isinstance(o, np.ndarray): return o.tolist() @@ -75,8 +74,9 @@ def default(self, o): else: return super(encoder, self).default(o) + def _respond(data): - ''' + """ Generic HTTPResponse to return JSON-formatted dictionary values Parameters @@ -88,11 +88,12 @@ def _respond(data): ------- HttpResponse JSON-encoded version of the input dictionary - ''' + """ return HttpResponse(json.dumps(data, cls=encoder), content_type="application/json") + def task_info(request, idx, dbname='default'): - ''' + """ Get information about the task Parameters @@ -105,7 +106,7 @@ def task_info(request, idx, dbname='default'): Returns ------- JSON-encoded dictionary - ''' + """ task = Task.objects.using(dbname).get(pk=idx) feats = [] for name, isset in list(request.GET.items()): @@ -121,8 +122,9 @@ def task_info(request, idx, dbname='default'): return _respond(task_info) + def exp_info(request, idx, dbname='default'): - ''' + """ Get information about the task Parameters @@ -136,7 +138,7 @@ def exp_info(request, idx, dbname='default'): ------- JSON-encoded dictionary Data containing features, parameters, and any report data from the TaskEntry - ''' + """ entry = TaskEntry.objects.using(dbname).get(pk=idx) try: entry_data = entry.to_json() @@ -150,44 +152,49 @@ def exp_info(request, idx, dbname='default'): else: return _respond(entry_data) + def hide_entry(request, idx): - ''' + """ See documentation for exp_info - ''' + """ print("hide_entry") entry = 
TaskEntry.objects.get(pk=idx) entry.visible = False entry.save() return _respond(dict()) + def show_entry(request, idx): - ''' + """ See documentation for exp_info - ''' + """ print("hide_entry") entry = TaskEntry.objects.get(pk=idx) entry.visible = True entry.save() return _respond(dict()) + def backup_entry(request, idx): - ''' + """ See documentation for exp_info - ''' + """ entry = TaskEntry.objects.get(pk=idx) entry.backup = True entry.save() return _respond(dict()) + def unbackup_entry(request, idx): - ''' + """ See documentation for exp_info - ''' + """ entry = TaskEntry.objects.get(pk=idx) entry.backup = False entry.save() return _respond(dict()) + def gen_info(request, idx): try: gen = Generator.objects.get(pk=idx) @@ -195,6 +202,7 @@ def gen_info(request, idx): except: traceback.print_exc() + def start_next_exp(request): try: req, save = http_request_queue.pop(0) @@ -202,13 +210,14 @@ def start_next_exp(request): except IndexError: return _respond(dict(status="error", msg="No experiments in queue!")) + @csrf_exempt def start_experiment(request, save=True): - ''' + """ Handles presses of the 'Start Experiment' and 'Test' buttons in the browser interface - ''' - #make sure we don't have an already-running experiment + """ + # Make sure we don't have an already-running experiment tracker = exp_tracker.get() if len(tracker.status.value) != 0: print("exp_tracker.status.value", tracker.status.value) @@ -218,8 +227,8 @@ def start_experiment(request, save=True): try: data = json.loads(request.POST['data']) - task = Task.objects.get(pk=data['task']) - Exp = task.get(feats=list(data['feats'].keys())) + task = Task.objects.get(pk=data['task']) + exp = task.get(feats=list(data['feats'].keys())) entry = TaskEntry.objects.create(subject_id=data['subject'], task_id=task.id) params = Parameters.from_html(data['params']) @@ -228,7 +237,7 @@ def start_experiment(request, save=True): params=params) # Save the target sequence to the database and link to the task entry, if 
the task type uses target sequences - if issubclass(Exp, experiment.Sequence): + if issubclass(exp, experiment.Sequence): print("creating seq") print("data['sequence'] POST data") print(data['sequence']) @@ -273,8 +282,9 @@ def start_experiment(request, save=True): err.seek(0) return _respond(dict(status="error", msg=err.read())) + def rpc(fn): - ''' + """ Generic remote procedure call function Parameters @@ -286,8 +296,8 @@ def rpc(fn): Returns ------- JSON-encoded dictionary - ''' - #make sure that there exists an experiment to stop + """ + # make sure that there exists an experiment to stop tracker = exp_tracker.get() if tracker.status.value not in [b"running", b"testing"]: print("rpc not possible", str(tracker.status.value)) @@ -301,8 +311,9 @@ def rpc(fn): traceback.print_exc() return _respond_err(e) + def _respond_err(e): - ''' + """ Default error response from server to webclient Parameters @@ -314,7 +325,7 @@ def _respond_err(e): ------- JSON-encoded dictionary Sets status to "error" and provides the specific error message - ''' + """ import io import traceback err = io.StringIO() @@ -322,22 +333,27 @@ def _respond_err(e): err.seek(0) return _respond(dict(status="error", msg=err.read())) + @csrf_exempt def stop_experiment(request): return rpc(lambda tracker: tracker.stoptask()) + def enable_clda(request): return rpc(lambda tracker: tracker.task_proxy.enable_clda()) + def disable_clda(request): return rpc(lambda tracker: tracker.task_proxy.disable_clda()) + def set_task_attr(request, attr, value): - ''' + """ Generic function to change a task attribute while the task is running. 
- ''' + """ return rpc(lambda tracker: tracker.task_proxy.remote_set_attr(attr, value)) + @csrf_exempt def save_notes(request, idx): te = TaskEntry.objects.get(pk=idx) @@ -345,10 +361,11 @@ def save_notes(request, idx): te.save() return _respond(dict(status="success")) + def reward_drain(request, onoff): - ''' + """ Start/stop the "drain" of a solenoid reward remotely - ''' + """ from riglib import reward r = reward.Basic() @@ -360,6 +377,7 @@ def reward_drain(request, onoff): r.drain_off() return HttpResponse('Turning reward %s' % onoff) + def populate_models(request): """ Database initialization code. When 'db.tracker' is imported, it goes through the database and ensures that 1) at least one subject is present @@ -378,6 +396,7 @@ def populate_models(request): return HttpResponse("Updated Tasks, features generators, and systems") + @csrf_exempt def add_new_task(request): from . import models @@ -387,6 +406,7 @@ def add_new_task(request): return HttpResponse("Added new task: %s" % task.name) + @csrf_exempt def add_new_subject(request): from . 
import models @@ -396,13 +416,14 @@ def add_new_subject(request): return HttpResponse("Added new subject: %s" % subj.name) + @csrf_exempt def get_report(request): - ''' + """ Handles presses of the 'Start Experiment' and 'Test' buttons in the browser interface - ''' - #make sure we don't have an already-running experiment + """ + # make sure we don't have an already-running experiment tracker = exp_tracker.get() tracker.task_proxy.update_report_stats() reportstats = tracker.task_proxy.reportstats diff --git a/db/tracker/apps.py b/db/tracker/apps.py index 3786843d..8dd1721b 100644 --- a/db/tracker/apps.py +++ b/db/tracker/apps.py @@ -1,6 +1,6 @@ - from django.apps import AppConfig + class TrackerConfig(AppConfig): name = 'tracker' - verbose_name = "exp_log_tracker" \ No newline at end of file + verbose_name = "exp_log_tracker" diff --git a/db/tracker/dbq.py b/db/tracker/dbq.py index 47a111f3..674246a2 100644 --- a/db/tracker/dbq.py +++ b/db/tracker/dbq.py @@ -1,37 +1,34 @@ -''' +""" Methods for remotely interacting with the sqlite3 database using remote procedure call (RPC) For example, linking HDF file to a particular task entry. 
-''' +""" import os import time import json import shutil import datetime -from xmlrpc.server import SimpleXMLRPCDispatcher -import django +from xmlrpc.server import SimpleXMLRPCDispatcher from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt -from distutils.version import StrictVersion +from db.tracker.models import TaskEntry, Subject, Calibration, System, DataFile, Decoder -from riglib import experiment -from .models import TaskEntry, Subject, Calibration, System, DataFile, Decoder -import pickle -import tempfile def save_log(idx, log, dbname='default'): entry = TaskEntry.objects.using(dbname).get(pk=idx) entry.report = json.dumps(log) entry.save() + def save_calibration(subject, system, name, params, dbname='default'): print(subject, system) subj = Subject.objects.using(dbname).get(name=subject) sys = System.objects.using(dbname).get(name=system) Calibration(subject=subj, system=sys, name=name, params=params).save(using=dbname) + def save_data(curfile, system, entry, move=True, local=True, custom_suffix=None, dbname='default'): suffix = dict(supp_hdf="supp.hdf", eyetracker="edf", hdf="hdf", plexon="plx", bmi="pkl", bmi_params="npz", juice_log="png", video="avi") if system in suffix: @@ -82,10 +79,11 @@ def save_data(curfile, system, entry, move=True, local=True, custom_suffix=None, DataFile(local=local, path=permfile, system=sys, entry=entry).save(using=dbname) print("Saved datafile for file=%s -> %s, system=%s, id=%d)..." 
% (curfile, permfile, system, entry.id)) + def save_bmi(name, entry, filename, dbname='default'): - ''' + """ Save BMI objects to database - ''' + """ entry = TaskEntry.objects.using(dbname).get(pk=entry) now = entry.date today = datetime.date(now.year, now.month, now.day) @@ -101,8 +99,8 @@ def save_bmi(name, entry, filename, dbname='default'): num=num, name=name) base = System.objects.using(dbname).get(name='bmi').path - #Make sure decoder name doesn't exist already: - #Make sure new decoder name doesn't already exist: + # Make sure decoder name doesn't exist already: + # Make sure new decoder name doesn't already exist: import os.path dec_ix = 0 @@ -115,7 +113,7 @@ def save_bmi(name, entry, filename, dbname='default'): shutil.copy2(filename, os.path.join(base, pklname)) - Decoder(name=name,entry=entry,path=pklname).save(using=dbname) + Decoder(name=name, entry=entry, path=pklname).save(using=dbname) try: decoder_entry = Decoder.objects.using(dbname).get(entry=entry) except: @@ -127,15 +125,15 @@ def save_bmi(name, entry, filename, dbname='default'): print(d.pk, d.name) print("Saved decoder to %s"%os.path.join(base, pklname)) + def hide_task_entry(entry, dbname='default'): te = TaskEntry.objects.using(dbname).get(id=entry) te.visible = False te.save() - ############################################################################# -##### Register functions for remote procedure call from other processes ##### +# Register functions for remote procedure call from other processes # ############################################################################# dispatcher = SimpleXMLRPCDispatcher(allow_none=True) dispatcher.register_function(save_log, 'save_log') @@ -144,6 +142,7 @@ def hide_task_entry(entry, dbname='default'): dispatcher.register_function(save_bmi, 'save_bmi') dispatcher.register_function(hide_task_entry, 'hide_task_entry') + @csrf_exempt def rpc_handler(request): response = HttpResponse(content_type="application/xml") diff --git 
a/db/tracker/exp_tracker.py b/db/tracker/exp_tracker.py index 75a45335..9548cda7 100644 --- a/db/tracker/exp_tracker.py +++ b/db/tracker/exp_tracker.py @@ -1,6 +1,8 @@ """ Singleton for interactions between the browser and riglib """ + from .tasktrack import Track exp_tracker = Track(use_websock=False) + def get(): - return exp_tracker \ No newline at end of file + return exp_tracker diff --git a/db/tracker/jinja2.py b/db/tracker/jinja2.py index 881a2008..16face91 100644 --- a/db/tracker/jinja2.py +++ b/db/tracker/jinja2.py @@ -3,6 +3,7 @@ from jinja2 import Environment + def environment(**options): env = Environment(**options) env.globals.update({ diff --git a/db/tracker/json_param.py b/db/tracker/json_param.py index d19da8c3..969afa42 100644 --- a/db/tracker/json_param.py +++ b/db/tracker/json_param.py @@ -1,18 +1,19 @@ -''' +""" This module contains functions which convert parameter sets stored as JSON blobs into python dictionaries or vice versa. -''' +""" import builtins import ast import json import numpy as np from riglib import calibrations -# from . import namelist +from config import namelist import os + def param_objhook(obj): - ''' + """ A custom JSON "decoder" which can recognize certain types of serialized python objects (django models, function calls, object constructors) and re-create the objects @@ -27,13 +28,13 @@ def param_objhook(obj): If possible, a python object based on the JSON data is created. If not, the original dictionary is simply returned. - ''' + """ from . 
import models if '__django_model__' in obj: model = getattr(models, obj['__django_model__']) - return model(pk = obj['pk']) + return model(pk=obj['pk']) elif '__builtin__' in obj: - func = getattr(__builtin__, obj['__builtin__']) + func = getattr(builtins, obj['__builtin__']) return func(*obj['args']) elif '__class__' in obj: # look up the module @@ -41,11 +42,12 @@ def param_objhook(obj): # get the class with the 'getattr' and then run the class constructor on the class data return getattr(mod, obj['__class__'])(obj['__dict__']) - else: # the type of object is unknown, just return the original dictionary + else: # the type of object is unknown, just return the original dictionary return obj + def norm_trait(trait, value): - ''' + """ Take user input and convert to the type of the trait. For example, a user might select a decoder's name/id but the ID needs to be mapped to an object for type checking when the experiment is constructed) @@ -60,7 +62,7 @@ def norm_trait(trait, value): Returns ------- typecast value of trait - ''' + """ from . import models ttype = trait.trait_type.__class__.__name__ if ttype == 'Instance': diff --git a/db/tracker/models.py b/db/tracker/models.py index 7f8b29f5..20b1f710 100644 --- a/db/tracker/models.py +++ b/db/tracker/models.py @@ -1,14 +1,20 @@ -''' +""" Classes here which inherit from django.db.models.Model define the structure of the database Django database modules. 
See https://docs.djangoproject.com/en/dev/intro/tutorial01/ for a basic introduction -''' +""" import os -os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings' +# I don't believe this is needed anymore (py3): os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings' import json -import pickle, pickle +import pickle +import importlib +import subprocess +import traceback +import tables +import tempfile +import shutil import inspect from collections import OrderedDict from django.db import models @@ -18,28 +24,23 @@ from riglib import calibrations, experiment from config import config -import importlib -import subprocess -import traceback -import imp -import tables -import tempfile -import shutil -import importlib + def _get_trait_default(trait): - ''' + """ Function which tries to determine the default value for a trait in the class declaration - ''' + """ _, default = trait.default_value() if isinstance(default, tuple) and len(default) > 0: try: func, args, _ = default default = func(*args) except: + # This will catch and squash everything, including BaseException pass return default + class Task(models.Model): name = models.CharField(max_length=128) visible = models.BooleanField(default=True, blank=True) @@ -49,10 +50,9 @@ def __unicode__(self): return self.name def get_base_class(self): - if not self.import_path is None: - import importlib + if self.import_path is not None: path_components = self.import_path.split(".") - module_name = (".").join(path_components[:-1]) + module_name = ".".join(path_components[:-1]) class_name = path_components[-1] module = importlib.import_module(module_name) task_cls = getattr(module, class_name) @@ -65,12 +65,13 @@ def get_base_class(self): raise ValueError("Could not find base class for task!") def get(self, feats=(), verbose=False): - if verbose: print("models.Task.get()") + if verbose: + print("models.Task.get()") feature_classes = Feature.getall(feats) task_cls = self.get_base_class() - if not None in feature_classes: + if None not 
in feature_classes: try: # reload the module which contains the base task class # task_cls = tasks[self.name] @@ -105,9 +106,9 @@ def get(self, feats=(), verbose=False): @staticmethod def populate(): - ''' + """ Automatically create a new database record for any tasks added to db/namelist.py - ''' + """ from config.namelist import tasks real = set(tasks.keys()) db = set(task.name for task in Task.objects.all()) @@ -145,7 +146,7 @@ def add_new_task(task_name, class_path): Task(name=task_name, import_path=class_path).save() def params(self, feats=(), values=None): - ''' + """ Parameters ---------- @@ -154,8 +155,8 @@ def params(self, feats=(), values=None): values : dict Values for the task parameters - ''' - #from namelist import instance_to_model, instance_to_model_filter_kwargs + """ + # from namelist import instance_to_model, instance_to_model_filter_kwargs if values is None: values = dict() @@ -252,6 +253,7 @@ def get_generators(self): print("missing generator %s" % seqgen_name) return exp_generators + class Feature(models.Model): name = models.CharField(max_length=128) visible = models.BooleanField(blank=True, default=True) @@ -263,7 +265,7 @@ def __unicode__(self): @property def desc(self): feature_cls = self.get() - if not feature_cls is None: + if feature_cls is not None: return feature_cls.__doc__ else: return '' @@ -306,6 +308,7 @@ def getall(feats): feature_class_list.append(feat) return feature_class_list + class System(models.Model): name = models.CharField(max_length=128) path = models.TextField() @@ -347,11 +350,14 @@ def save_to_file(self, obj, filename, obj_name=None, entry_id=-1): df.entry_id = entry_id df.save() + class Subject(models.Model): name = models.CharField(max_length=128) + def __unicode__(self): return self.name + class Generator(models.Model): name = models.CharField(max_length=128) params = models.TextField() @@ -362,9 +368,9 @@ def __unicode__(self): return self.name def get(self): - ''' + """ Retrieve the function that can be used 
to construct the ..... generator? sequence? - ''' + """ from config.namelist import generators return generators[self.name] @@ -429,12 +435,13 @@ def to_json(self, values=None): return dict(name=self.name, params=params) + class Sequence(models.Model): date = models.DateTimeField(auto_now_add=True) generator = models.ForeignKey(Generator, on_delete=models.PROTECT) name = models.CharField(max_length=128) - params = models.TextField() #json data - sequence = models.TextField(blank=True) #pickle data + params = models.TextField() # json data + sequence = models.TextField(blank=True) # pickle data task = models.ForeignKey(Task, on_delete=models.PROTECT) def __unicode__(self): @@ -463,15 +470,15 @@ def to_json(self): @classmethod def from_json(cls, js): - ''' + """ Construct a models.Sequence instance from JSON data (e.g., generated by the web interface for starting experiments) - ''' + """ from .json_param import Parameters # Error handling when input argument 'js' actually specifies the primary key of a Sequence object already in the database try: seq = Sequence.objects.get(pk=int(js)) - print("retreiving sequence from POSTed ID") + print("Retrieving sequence from POSTed ID") return seq except: pass @@ -500,6 +507,7 @@ def from_json(cls, js): seq.sequence = pickle.dumps(seq_data) return seq + class TaskEntry(models.Model): subject = models.ForeignKey(Subject, on_delete=models.PROTECT) date = models.DateTimeField(auto_now_add=True) @@ -594,9 +602,9 @@ def offline_report(self): return rpt def to_json(self): - ''' + """ Create a JSON dictionary of the metadata associated with this block for display in the web interface - ''' + """ print("starting TaskEntry.to_json()") from .json_param import Parameters @@ -721,9 +729,9 @@ def to_json(self): @property def plx_file(self): - ''' + """ Returns the name of the plx file associated with the session. 
- ''' + """ plexon = System.objects.get(name='plexon') try: df = DataFile.objects.get(system=plexon, entry=self.id) @@ -735,9 +743,9 @@ def plx_file(self): @property def nev_file(self): - ''' + """ Return the name of the nev file associated with the session. - ''' + """ try: df = DataFile.objects.get(system__name="blackrock", path__endswith=".nev", entry=self.id) return df.get_path() @@ -754,11 +762,11 @@ def nev_file(self): @property def nsx_files(self): - '''Return a list containing the names of the nsx files (there could be more + """Return a list containing the names of the nsx files (there could be more than one) associated with the session. nsx files extensions are .ns1, .ns2, ..., .ns6 - ''' + """ try: dfs = [] for k in range(1, 7): @@ -778,15 +786,15 @@ def nsx_files(self): @property def name(self): - ''' + """ Return a string representing the 'name' of the block. Note that the block does not really have a unique name in the current implementation. Thus, the 'name' is a hack this needs to be hacked because the current way of determining a a filename depends on the number of things in the database, i.e. if after the fact a record is removed, the number might change. 
read from the file instead - ''' - # import config + """ + if config.recording_sys['make'] == 'plexon': try: return str(os.path.basename(self.plx_file).rstrip('.plx')) @@ -812,6 +820,7 @@ def get_decoder(self): decoder_id = params['bmi'] return Decoder.objects.get(id=decoder_id) + class Calibration(models.Model): subject = models.ForeignKey(Subject, on_delete=models.PROTECT) date = models.DateTimeField(auto_now_add=True) @@ -828,6 +837,7 @@ def get(self): from .json_param import Parameters return getattr(calibrations, self.name)(**Parameters(self.params).params) + class AutoAlignment(models.Model): date = models.DateTimeField(auto_now_add=True) name = models.TextField() @@ -879,7 +889,7 @@ def load(self, db_name=None): fh = open(decoder_fname, 'r') unpickler = pickle.Unpickler(fh) unpickler.find_global = decoder_unpickler - dec = unpickler.load() # object will now contain the new class path reference + dec = unpickler.load() # object will now contain the new class path reference fh.close() dec.name = self.name @@ -913,27 +923,28 @@ def to_json(self): return decoder_data + def parse_blackrock_file_n2h5(nev_fname, nsx_files): - ''' + """ # convert .nev file to hdf file using Blackrock's n2h5 utility (if it doesn't exist already) # this code goes through the spike_set for each channel in order to: # 1) determine the last timestamp in the file # 2) create a list of units that had spikes in this file - ''' + """ nev_hdf_fname = nev_fname + '.hdf' if not os.path.isfile(nev_hdf_fname): subprocess.call(['n2h5', nev_fname, nev_hdf_fname]) - import tables #Previously import h5py -- pytables works fine too + import tables # Previously import h5py -- pytables works fine too nev_hdf = tables.openFile(nev_hdf_fname, 'r') last_ts = 0 units = [] - #for key in [key for key in nev_hdf.get('channel').keys() if 'channel' in key]: + # for key in [key for key in nev_hdf.get('channel').keys() if 'channel' in key]: chans = nev_hdf.root.channel - chan_names= chans._v_children + chan_names 
= chans._v_children for key in [key for key in list(chan_names.keys()) if 'channel' in key]: chan_tab = nev_hdf.root.channel._f_getChild(key) if 'spike_set' in chan_tab: @@ -956,6 +967,9 @@ def parse_blackrock_file_n2h5(nev_fname, nsx_files): nsx_lengths = [] if nsx_files is not None: + + import h5py + nsx_fs = dict() nsx_fs['.ns1'] = 500 nsx_fs['.ns2'] = 1000 @@ -989,15 +1003,16 @@ def parse_blackrock_file_n2h5(nev_fname, nsx_files): length = max([nev_length] + nsx_lengths) return length, units, + def parse_blackrock_file(nev_fname, nsx_files, task_entry, nsx_chan = np.arange(96) + 1): - ''' Method to parse blackrock files using new + """ Method to parse blackrock files using new brpy from blackrock (with some modifications). Files are saved as a ____ file? # this code goes through the spike_set for each channel in order to: # 1) determine the last timestamp in the file # 2) create a list of units that had spikes in this file - ''' + """ from riglib.blackrock.brpylib import NevFile, NsxFile # First parse the NEV file: @@ -1158,6 +1173,7 @@ def make_hdf_spks(data, nev_hdf_fname): return last_ts, units2, h5file + def make_hdf_cts(data, nsx_hdf_fname, nsx_file): last_ts = [] channels = [] @@ -1186,19 +1202,23 @@ def make_hdf_cts(data, nsx_hdf_fname, nsx_file): print('successfully made HDF file from NSX file: %s' %nsx_hdf_fname) return t[-1] + class spike_set(tables.IsDescription): TimeStamp = tables.Int32Col() Unit = tables.Int8Col() Wave = tables.Int32Col(shape=(48,)) + class digital_set(tables.IsDescription): TimeStamp = tables.Int32Col() Value = tables.Int16Col() + class continuous_set(tables.IsDescription): TimeStamp = tables.Int32Col() Value = tables.Int16Col() + class mini_attr(tables.IsDescription): last_ts = tables.Float64Col() units = tables.Int16Col(shape=(500, 2)) @@ -1223,9 +1243,9 @@ def to_json(self): return dict(system=self.system.name, path=self.path) def get(self): - ''' + """ Open the datafile, if it's of a known type - ''' + """ if self.system.name 
== 'hdf': import tables return tables.open_file(self.get_path()) @@ -1236,9 +1256,9 @@ def get(self): raise ValueError("models.DataFile does not know how to open this type of file: %s" % self.path) def get_path(self, check_archive=False): - ''' + """ Get the full path to the file - ''' + """ if not check_archive and not self.archived: text_file = open("path.txt", "w") text_file.write("path: %s" % os.path.join(self.system.path, self.path)) @@ -1282,9 +1302,9 @@ def delete(self, **kwargs): super(DataFile, self).delete(**kwargs) def is_backed_up(self, backup_root): - ''' + """ Return a boolean indicating whether a copy of the file is available on the backup - ''' + """ fname = self.get_path() rel_datafile = os.path.relpath(fname, '/storage') backup_fname = os.path.join(backup_root, rel_datafile) diff --git a/db/tracker/tasktrack.py b/db/tracker/tasktrack.py index 83e68f4e..14bcd0d7 100644 --- a/db/tracker/tasktrack.py +++ b/db/tracker/tasktrack.py @@ -1,8 +1,8 @@ -''' +""" Web browser GUI-launched tasks run in a separate process. This module provides mechanisms for interacting withe the task running in another process, e.g., calling functions to start/stop the task, enabling/disabling decoder adaptation, etc. -''' +""" import os import sys @@ -23,12 +23,15 @@ import traceback log_filename = os.path.join(config.log_path, "tasktrack_log") + + def log_error(err, mode='a'): traceback.print_exc(None, err) with open(log_filename, mode) as fp: err.seek(0) fp.write(err.read()) + def log_str(s, mode="a", newline=True): if newline and not s.endswith("\n"): s += "\n" @@ -37,9 +40,9 @@ def log_str(s, mode="a", newline=True): class Track(object): - ''' + """ Tracker for task instantiation running in a separate process. This is a singleton. 
- ''' + """ def __init__(self, use_websock=True): # shared memory to store the status of the task in a char array self.status = mp.Array('c', 256) @@ -56,9 +59,9 @@ def notify(self, msg): self.status.value = b"" def runtask(self, **kwargs): - ''' + """ Begin running of task - ''' + """ log_str("Running new task: \n", mode="w") # initialize task status @@ -86,10 +89,10 @@ def runtask(self, **kwargs): self.proc.start() def __del__(self): - ''' + """ Destructor for Track object. Not sure if this function ever gets called since Track is a singleton created upon import of the db.tracker.ajax module... - ''' + """ if not self.websock is None: self.websock.stop() @@ -97,9 +100,9 @@ def __del__(self): # self.status.value = bytes(self.task_proxy.pause()) def stoptask(self): - ''' + """ Terminate the task gracefully by running riglib.experiment.Experiment.end_task - ''' + """ assert self.status.value in [b"testing", b"running"] try: self.task_proxy.end_task() @@ -130,9 +133,9 @@ def task_running(self): def remote_runtask(tracker_end_of_pipe, task_end_of_pipe, websock, **kwargs): - ''' + """ Target function to execute in the spawned process to start the task - ''' + """ log_str("remote_runtask") print("*************************** STARTING TASK *****************************") @@ -236,22 +239,23 @@ def remote_runtask(tracker_end_of_pipe, task_end_of_pipe, websock, **kwargs): else: cleanup_successful = task_wrapper.cleanup() - # inform the user in the browser that the task is done! if cleanup_successful: - if use_websock: websock.write("\n\n...done!\n") + if use_websock: + websock.write("\n\n...done!\n") else: - if use_websock: websock.write("\n\nError! Check for errors in the terminal!\n") + if use_websock: + websock.write("\n\nError! 
Check for errors in the terminal!\n") print("*************************** EXITING TASK *****************************") class TaskWrapper(object): - ''' + """ Wrapper for Experiment classes launched from the web interface - ''' + """ def __init__(self, subj, base_class, feats, params, seq=None, seq_params=None, saveid=None): - ''' + """ Parameters ---------- subj : tracker.models.Subject instance @@ -271,7 +275,7 @@ def __init__(self, subj, base_class, feats, params, seq=None, seq_params=None, s ID number of db.tracker.models.TaskEntry associated with this task if None specified, then the data saved will not be linked to the database entry and will be lost after the program exits - ''' + """ log_str("TaskWrapper constructor") self.saveid = saveid self.subj = subj @@ -281,7 +285,6 @@ def __init__(self, subj, base_class, feats, params, seq=None, seq_params=None, s self.params = Parameters(params) elif isinstance(params, dict): self.params = Parameters.from_dict(params) - if None in feats: raise Exception("Features not found properly in database!") diff --git a/db/tracker/tests.py b/db/tracker/tests.py index 34d0a1c6..d8c78e18 100644 --- a/db/tracker/tests.py +++ b/db/tracker/tests.py @@ -6,14 +6,16 @@ """ from django.test import TestCase, Client import json -import time, sys +import time +import sys -from tracker import models -from tracker import exp_tracker +from db.tracker import models +from db.tracker import exp_tracker # import psutil from riglib.experiment import LogExperiment + class TestDataFile(TestCase): def setUp(self): subj = models.Subject(name="test_subject") @@ -55,8 +57,10 @@ class TestModels(TestCase): def test_add_new_task_to_table(self): c = Client() - post_data = {"name": "test_add_new_task_to_table", - "import_path": "riglib.experiment.LogExperiment"} + post_data = { + "name": "test_add_new_task_to_table", + "import_path": "riglib.experiment.LogExperiment" + } resp = c.post("/setup/add/new_task", post_data) task = 
models.Task.objects.get(name="test_add_new_task_to_table") @@ -86,6 +90,7 @@ def test_create_task_entry(self): te = models.TaskEntry(subject_id=subj.id, task_id=task.id) te.save() + class TestTaskStartStop(TestCase): def test_start_experiment_python(self): subj = models.Subject(name="test_subject") @@ -117,7 +122,6 @@ def test_start_experiment_ajax(self): post_data = {"data": json.dumps(task_start_data)} - import sys # if sys.platform == "win32": start_resp = c.post("/test", post_data) start_resp_obj = json.loads(start_resp.content.decode("utf-8")) @@ -137,11 +141,9 @@ def test_start_experiment_ajax(self): reportstats = tracker.task_proxy.reportstats self.assertTrue(len(reportstats.keys()) > 0) - import time time.sleep(2) stop_resp = c.post("/exp_log/stop/") - - import time + time.sleep(2) self.assertFalse(tracker.task_running()) diff --git a/db/tracker/views.py b/db/tracker/views.py index f06a66b8..72193495 100644 --- a/db/tracker/views.py +++ b/db/tracker/views.py @@ -1,25 +1,28 @@ -''' +""" HTML rendering 'view' functions for Django web interface. Retreive data from database to put into HTML format. -''' +""" import json from django.template import RequestContext from django.shortcuts import render_to_response, render from django.http import HttpResponse +from django.views.decorators.csrf import csrf_exempt -from .models import TaskEntry, Task, Subject, Feature, Generator +from db.tracker.models import TaskEntry, Task, Subject, Feature, Generator, DataFile, System +from db.tracker import exp_tracker from config import namelist -from . 
import exp_tracker import datetime + def main(request): return render(request, "main.html", dict()) + def list_exp_history(request): - ''' + """ Top-level view called when browser pointed at webroot Parameters @@ -30,7 +33,7 @@ def list_exp_history(request): Returns ------- Django HTTPResponse instance - ''' + """ print("views.list: new root request received") td = datetime.timedelta(days=60) start_date = datetime.date.today() - td @@ -38,7 +41,6 @@ def list_exp_history(request): # entries = TaskEntry.objects.all()[:200][::-1] entries = TaskEntry.objects.filter(visible=True).order_by('-date')[:200] - for k in range(0, len(entries)): ent = entries[k] if k == 0 or not entries[k].date.date() == entries[k-1].date.date(): @@ -47,7 +49,7 @@ def list_exp_history(request): ent.html_date = None ent.html_time = ent.date.time() - ## Determine how many rows the date should span + # Determine how many rows the date should span last = -1 for k, ent in enumerate(entries[::-1]): if ent.html_date: @@ -100,8 +102,9 @@ def list_exp_history(request): print("views.list: resp done!") return resp + def listall(request): - ''' + """ Top-level view called when browser pointed at WEBROOT/all Parameters @@ -112,7 +115,7 @@ def listall(request): Returns ------- Django HTTPResponse instance - ''' + """ entries = TaskEntry.objects.all().order_by("-date") epoch = datetime.datetime.utcfromtimestamp(0) @@ -144,8 +147,9 @@ def listall(request): fields['running'] = tracker.task_proxy.saveid return render_to_response('list.html', fields, RequestContext(request)) + def listdb(request, dbname='default', subject=None, task=None): - ''' + """ Top-level view called when browser pointed at WEBROOT/dbname/DBNAME, to list the task entries in a particular database @@ -157,7 +161,7 @@ def listdb(request, dbname='default', subject=None, task=None): Returns ------- Django HTTPResponse instance - ''' + """ filter_kwargs = dict(visible=True) if not (subject is None) and isinstance(subject, str): 
filter_kwargs['subject__name'] = subject @@ -189,6 +193,7 @@ def listdb(request, dbname='default', subject=None, task=None): fields['running'] = tracker.task_proxy.saveid return render_to_response('list.html', fields, RequestContext(request)) + def setup(request): from . import models subjects = models.Subject.objects.all() @@ -197,6 +202,7 @@ def setup(request): return render(request, "setup.html", dict(subjects=subjects, tasks=tasks, features=features)) + def _color_entries(entries): epoch = datetime.datetime.utcfromtimestamp(0) @@ -210,11 +216,12 @@ def _color_entries(entries): last_tdiff = tdiff entry.bgcolor = colors[color_idx] + def get_sequence(request, idx): - ''' + """ Pointing browser to WEBROOT/sequence_for/(?P\d+)/ returns a pickled file with the 'sequence' used in the specified id - ''' + """ import pickle entry = TaskEntry.objects.get(pk=idx) seq = pickle.loads(str(entry.sequence.sequence)) @@ -228,21 +235,21 @@ def get_sequence(request, idx): idx=idx) return response + def link_data_files_view_generator(request, task_entry_id): - from . import models - systems = models.System.objects.all() + systems = System.objects.all() display_data = dict(systems=systems, task_entry_id=task_entry_id) return render(request, "link_data_files.html", display_data) -from django.views.decorators.csrf import csrf_exempt + @csrf_exempt def link_data_files_response_handler(request, task_entry_id): - from . 
import models print("link_data_files_response_handler", request.POST) file_path = request.POST["file_path"] data_system_id = request.POST["data_system_id"] - data_file = models.DataFile(local=True, archived=False, path=file_path, + data_file = DataFile( + local=True, archived=False, path=file_path, system_id=data_system_id, entry_id=task_entry_id) data_file.save() return HttpResponse("Added new data file") \ No newline at end of file diff --git a/db/tracker/websocket.py b/db/tracker/websocket.py index 16a147dd..216fd1b3 100644 --- a/db/tracker/websocket.py +++ b/db/tracker/websocket.py @@ -1,7 +1,7 @@ -''' +""" An extension of Tornado's web socket which enables the task to print data to the web interface while the task is running. -''' +""" import os import sys @@ -15,6 +15,7 @@ sockets = [] + class ClientSocket(websocket.WebSocketHandler): def open(self): sockets.append(self) @@ -25,7 +26,7 @@ def on_close(self): sockets.remove(self) def check_origin(self, origin): - ''' + """ Returns a boolean indicating whether the requesting URL is one that the handler will respond to. For this websocket, everyone with access gets a response since we're running the server locally (or over ssh tunnels) and not over the regular internet. @@ -41,14 +42,14 @@ def check_origin(self, origin): Returns True if the request originates from a valid URL See websocket.WebSocketHandler.check_origin for additional documentation - ''' + """ return True class Server(mp.Process): - ''' + """ Spawn a process to deal with the websocket asynchronously, without halting other webserver operations. - ''' + """ def __init__(self, notify=None): super(self.__class__, self).__init__() self._pipe, self.pipe = os.pipe() @@ -58,9 +59,9 @@ def __init__(self, notify=None): self.start() def run(self): - ''' + """ Main function to run in the process. See mp.Process.run() for additional documentation. 
- ''' + """ print("Running websocket service") application = tornado.web.Application([ (r"/connect", ClientSocket), @@ -85,9 +86,9 @@ def send(self, msg): os.write(self.pipe, struct.pack('I', len(msg)) + bytes(msg, 'utf8')) def _stdout(self, fd, event): - ''' + """ Handler for self._pipe; Read the data from the input pipe and propagate the data to all the listening sockets - ''' + """ nbytes, = struct.unpack('I', os.read(fd, 4)) msg = os.read(fd, nbytes) @@ -103,7 +104,7 @@ def stop(self): ##### Currently unused functions below this line ##### def write(self, data): - '''Used for stdout hooking''' + """Used for stdout hooking""" self.outqueue += data self.flush() @@ -123,9 +124,9 @@ def _send(self, fd, event): class NotifyFeat(object): - ''' + """ Send task report and state data to display on the web inteface - ''' + """ def __init__(self, *args, **kwargs): super(NotifyFeat, self).__init__(*args, **kwargs) self.websock = kwargs.pop('websock') diff --git a/install/docker/bmi3d.base.dockerfile b/install/docker/bmi3d.base.dockerfile new file mode 100644 index 00000000..67483763 --- /dev/null +++ b/install/docker/bmi3d.base.dockerfile @@ -0,0 +1,54 @@ +##### ################################################################### #### +##### ---- Starting point when any os dependencies have been changed ---- #### +##### ################################################################### #### +FROM python:3 + +#### Connect up third-party repositories (rabbitmq and erlang) +RUN curl -s https://packagecloud.io/install/repositories/rabbitmq/rabbitmq-server/script.deb.sh | bash +RUN echo "deb http://dl.bintray.com/rabbitmq-erlang/debian bionic erlang" \ + >> /etc/apt/sources.list.d/bintray.erlang.list +RUN apt-get -y update + + +##### Install required ubuntu packages +RUN apt-get install -y \ + smbclient \ + cifs-utils \ + bison \ + flex \ + openssh-server \ + libusb-dev\ + libcomedi-dev \ + python-comedilib \ + swig \ + isc-dhcp-server \ + sqlite3 \ + vim + +# Install rabbitmq 
with it's erlang dependencies +RUN apt-get install -y --allow-unauthenticated \ + erlang-base-hipe \ + erlang-asn1 \ + erlang-crypto \ + erlang-eldap \ + erlang-ftp \ + erlang-inets \ + erlang-mnesia \ + erlang-os-mon \ + erlang-parsetools \ + erlang-public-key \ + erlang-runtime-tools \ + erlang-snmp \ + erlang-ssl \ + erlang-syntax-tools \ + erlang-tftp \ + erlang-tools \ + erlang-xmerl \ + rabbitmq-server + +####### Set up directories +RUN mkdir -v -p /code/src/ +RUN mkdir -v -p /backup && chown root /backup +RUN mkdir -v -p /storage/plots && chown -R root /storage + + diff --git a/install/docker/bmi3d.code.dockerfile b/install/docker/bmi3d.code.dockerfile new file mode 100644 index 00000000..47ffabd2 --- /dev/null +++ b/install/docker/bmi3d.code.dockerfile @@ -0,0 +1,17 @@ +#### ##################################################################### ##### +#### ---- Starting point when only small local changes have been made ---- ##### +#### ##################################################################### ##### +FROM bmi3d:python +COPY bmi3d/ /code/bmi3d/ +COPY bmi3d_tasks_analysis/ /code/bmi3d_tasks_analysis/ + +RUN python config/make_config.py --use-defaults && \ + python db/manage.py makemigrations && \ + python db/manage.py migrate + +# Fix all .sh files that might have aquired windows line endings +RUN for f in $(find /code/ -name "*.sh"); \ + do echo "fixing: $f" && sed -i 's/\r$//' $f; \ + done + +CMD [ "/bin/bash", "./db/runserver.sh" ] \ No newline at end of file diff --git a/install/docker/bmi3d.python.dockerfile b/install/docker/bmi3d.python.dockerfile new file mode 100644 index 00000000..3946fc4c --- /dev/null +++ b/install/docker/bmi3d.python.dockerfile @@ -0,0 +1,20 @@ +##### ################################################################### #### +##### ---- Starting point when python dependencies have been changed ---- #### +##### ################################################################### #### +FROM bmi3d:base + +# --- Expect cache 
invalidation here if source files have changed --- # +COPY bmi3d/ /code/bmi3d/ +COPY bmi3d_tasks_analysis/ /code/bmi3d_tasks_analysis/ + +WORKDIR /code/bmi3d/ +RUN mkdir -v log + +###### Install python dependencies +RUN pip install --upgrade pip +RUN pip install -r requirements.txt + +# Set env vars for future reference +ENV BMI3D="/code/bmi3d" \ + PYTHONPATH="${PYTHONPATH}:/code/bmi3d/:/code/bmi3d_tasks_analysis" + diff --git a/install/docker/src_code_install.sh b/install/docker/src_code_install.sh new file mode 100644 index 00000000..641465f8 --- /dev/null +++ b/install/docker/src_code_install.sh @@ -0,0 +1,70 @@ +#!/bin/bash +####### Declare environment variables +CODE=/code +BMI3D=$CODE/bmi3d ### Directory in which to install the bmi3d software +USER=root # We're in a docker container so root is safe + +####### Download any src code +git clone https://github.com/sgowda/plot $HOME/code/plotutil +git clone https://github.com/sgowda/robotics_toolbox $HOME/code/robotics +# pygame +hg clone https://bitbucket.org/pygame/pygame $HOME/code/pygame +# Phidgets code +#wget https://www.phidgets.com/downloads/phidget22/libraries/linux/libphidget22/libphidget22-1.1.20190417.tar.gz +#wget https://www.phidgets.com/downloads/phidget22/libraries/any/Phidget22Python/Phidget22Python_1.1.20190418.zip + + +####### Install source code, configure software +# plexread module +#cd $BMI3D/riglib +which python +#python setup.py install + +# pygame +cd $HOME/code/pygame +python setup.py install + +# NIDAQ software -- deprecated! 
+# $HOME/code/bmi3d/riglib/nidaq/build.sh + +echo "TESTED IF HERE" + +# Phidgets libraries +#cd $CODE/src/ +#tar xzf libphidget.tar.gz +#cd libphidget* +#./configure +#make +#make install + +#cd $CODE/src/ +#unzip PhidgetsPython.zip +#cd PhidgetsPython +#python setup.py install + + + +####### Configure udev rules, permissions +# Phidgets +#cp $CODE/src/libphidget*/udev/99-phidgets.rules /etc/udev/rules.d +#chmod a+r /etc/udev/rules.d/99-phidgets.rules +# NIDAQ +cp $HOME/code/bmi3d/install/udev/comedi.rules /etc/udev/rules.d/ +chmod a+r /etc/udev/rules.d/comedi.rules +udevadm control --reload-rules +# Group permissions +usermod -a -G iocard $USER # NIDAQ card belongs to iocard group +usermod -a -G dialout $USER # Serial ports belong to 'dialout' group + + +####### Reconfigure .bashrc +sed -i '$a export PYTHONPATH=$PYTHONPATH:$HOME/code/robotics' $HOME/.bashrc +sed -i '$a export BMI3D=/home/lab/code/bmi3d' $HOME/.bashrc +sed -i '$a source $HOME/code/bmi3d/pathconfig.sh' $HOME/.bashrc +source $HOME/.bashrc + +chown -R $USER ~/.matplotlibs + +cd $HOME/code/bmi3d/db +python manage.py syncdb +# Add superuser 'lab' with password 'lab' diff --git a/requirements.txt b/requirements.txt index 2812921f..12f6d086 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,13 +3,12 @@ scipy matplotlib numexpr cython -django-celery traits pandas patsy statsmodels -PyOpenGL -PyOpenGL_accelerate +PyOpenGL +PyOpenGL-accelerate Django pylibftdi nitime @@ -17,7 +16,10 @@ sphinx numpydoc tornado tables +sklearn pyserial h5py pygame -ipdb \ No newline at end of file +ipdb +celery +jinja2 \ No newline at end of file