diff --git a/README.md b/README.md index bdb0ea9d4..3fde65b31 100644 --- a/README.md +++ b/README.md @@ -35,9 +35,9 @@ Clipper is a prediction serving system that sits between user-facing applications ``` $ pip install clipper_admin $ python ->>> import clipper_admin.clipper_manager as cm, numpy as np +>>> from clipper_admin import Clipper; import numpy as np # Start a Clipper instance on localhost ->>> clipper = cm.Clipper("localhost") +>>> clipper = Clipper("localhost") Checking if Docker is running... # Start Clipper. Running this command for the first time will @@ -62,7 +62,7 @@ Success! return [str(np.sum(x)) for x in xs] # Deploy the model, naming it "feature_sum_model" and giving it version 1 ->>> clipper.deploy_predict_function("feature_sum_model", 1, feature_sum_function, ["quickstart"], "doubles") +>>> clipper.deploy_predict_function("feature_sum_model", 1, feature_sum_function, "doubles") ``` diff --git a/bin/run_unittests.sh b/bin/run_unittests.sh index b019657b1..215cfa4e6 100755 --- a/bin/run_unittests.sh +++ b/bin/run_unittests.sh @@ -100,22 +100,19 @@ function run_rpc_container_tests { } function run_libclipper_tests { + cd $DIR/../debug echo -e "\nRunning libclipper tests\n\n" ./src/libclipper/libclippertests --redis_port $REDIS_PORT } function run_management_tests { + cd $DIR/../debug echo -e "\nRunning management tests\n\n" ./src/management/managementtests --redis_port $REDIS_PORT } -function run_clipper_admin_tests { - echo -e "Running clipper admin tests" - cd $DIR - python ../clipper_admin/tests/clipper_manager_test.py -} - function run_frontend_tests { + cd $DIR/../debug echo -e "\nRunning frontend tests\n\n" ./src/frontends/frontendtests --redis_port $REDIS_PORT } @@ -123,7 +120,8 @@ function run_frontend_tests { function run_integration_tests { echo -e "\nRunning integration tests\n\n" cd $DIR - python ../integration-tests/light_load_all_functionality.py 2 3 + python ../integration-tests/clipper_manager_tests.py + python 
../integration-tests/many_apps_many_models.py 2 3 } function run_all_tests { @@ -133,9 +131,6 @@ function run_all_tests { redis-cli -p $REDIS_PORT "flushall" run_management_tests redis-cli -p $REDIS_PORT "flushall" - sleep 5 - run_clipper_admin_tests - redis-cli -p $REDIS_PORT "flushall" run_jvm_container_tests redis-cli -p $REDIS_PORT "flushall" run_rpc_container_tests @@ -160,9 +155,6 @@ case $args in -m | --management ) set_test_environment run_management_tests ;; - -c | --clipperadmin ) set_test_environment - run_clipper_admin_tests - ;; -f | --frontend ) set_test_environment run_frontend_tests ;; diff --git a/clipper_admin/__init__.py b/clipper_admin/__init__.py index e69de29bb..259d6f87f 100644 --- a/clipper_admin/__init__.py +++ b/clipper_admin/__init__.py @@ -0,0 +1,8 @@ +import sys +if sys.version_info >= (3, 0): + sys.stdout.write( + "Sorry, clipper_admin requires Python 2.x, but you are running Python 3.x\n" + ) + sys.exit(1) + +from clipper_manager import Clipper diff --git a/clipper_admin/clipper_manager.py b/clipper_admin/clipper_manager.py index ea0036776..ff9581c87 100644 --- a/clipper_admin/clipper_manager.py +++ b/clipper_admin/clipper_manager.py @@ -41,6 +41,9 @@ CLIPPER_LOGS_PATH = "/tmp/clipper-logs" CLIPPER_DOCKER_LABEL = "ai.clipper.container.label" +CLIPPER_MODEL_CONTAINER_LABEL = "ai.clipper.model_container.model_version" + +DEFAULT_LABEL = ["DEFAULT"] aws_cli_config = """ [default] @@ -335,6 +338,9 @@ def start(self): yaml.dump( self.docker_compost_dict, default_flow_style=False)) + print( + "Note: Docker must download the Clipper Docker images if they are not already cached. This may take awhile." + ) self._execute_root("docker-compose up -d query_frontend") print("Clipper is running") @@ -441,8 +447,8 @@ def deploy_model(self, version, model_data, container_name, - labels, input_type, + labels=DEFAULT_LABEL, num_containers=1): """Registers a model with Clipper and deploys instances of it in containers. 
@@ -463,10 +469,10 @@ def deploy_model(self, be the path you provided to the serialize method call. container_name : str The Docker container image to use to run this model container. - labels : list of str - A set of strings annotating the model input_type : str One of "integers", "floats", "doubles", "bytes", or "strings". + labels : list of str, optional + A list of strings annotating the model num_containers : int, optional The number of replicas of the model to create. More replicas can be created later as well. Defaults to 1. @@ -545,7 +551,11 @@ def deploy_model(self, for r in range(num_containers) ]) - def register_external_model(self, name, version, labels, input_type): + def register_external_model(self, + name, + version, + input_type, + labels=DEFAULT_LABEL): """Registers a model with Clipper without deploying it in any containers. Parameters @@ -554,10 +564,10 @@ def register_external_model(self, name, version, labels, input_type): The name to assign this model. version : int The version to assign this model. - labels : list of str - A set of strings annotating the model input_type : str One of "integers", "floats", "doubles", "bytes", or "strings". + labels : list of str, optional + A list of strings annotating the model. """ return self._publish_new_model(name, version, labels, input_type, EXTERNALLY_MANAGED_MODEL, @@ -567,8 +577,8 @@ def deploy_predict_function(self, name, version, predict_function, - labels, input_type, + labels=DEFAULT_LABEL, num_containers=1): """Deploy an arbitrary Python function to Clipper. @@ -586,16 +596,18 @@ def deploy_predict_function(self, predict_function : function The prediction function. Any state associated with the function should be captured via closure capture. - labels : list of str - A set of strings annotating the model input_type : str One of "integers", "floats", "doubles", "bytes", or "strings". 
+ labels : list of str, optional + A list of strings annotating the model num_containers : int, optional The number of replicas of the model to create. More replicas can be created later as well. Defaults to 1. Example ------- + Define a feature function ``center()`` and train a model on the featurized input:: + def center(xs): means = np.mean(xs, axis=0) return xs - means @@ -612,7 +624,6 @@ def centered_predict(inputs): "example_model", 1, centered_predict, - ["example"], "doubles", num_containers=1) """ @@ -668,8 +679,8 @@ def centered_predict(inputs): # Deploy function deploy_result = self.deploy_model(name, version, serialization_dir, - default_python_container, labels, - input_type, num_containers) + default_python_container, input_type, + labels, num_containers) # Remove temp files shutil.rmtree(serialization_dir) @@ -797,7 +808,9 @@ def get_container_info(self, model_name, model_version, replica_id): print(r.text) return None - def inspect_selection_policy(self, app_name, uid): + def _inspect_selection_policy(self, app_name, uid): + # NOTE: This method is private (it's still functional, but it won't be documented) + # until Clipper supports different selection policies """Fetches a human-readable string with the current selection policy state. 
Parameters @@ -934,8 +947,9 @@ def add_container(self, model_name, model_version): key=model_key, db=REDIS_MODEL_DB_NUM), capture=True) + print(result) - if "nil" in result.stdout: + if "empty list or set" in result.stdout: # Model not found warn("Trying to add container but model {mn}:{mv} not in " "Redis".format(mn=model_name, mv=model_version)) @@ -954,7 +968,7 @@ def add_container(self, model_name, model_version): add_container_cmd = ( "docker run -d --network={nw} --restart={restart_policy} -v {path}:/model:ro " "-e \"CLIPPER_MODEL_NAME={mn}\" -e \"CLIPPER_MODEL_VERSION={mv}\" " - "-e \"CLIPPER_IP=query_frontend\" -e \"CLIPPER_INPUT_TYPE={mip}\" -l \"{clipper_label}\" " + "-e \"CLIPPER_IP=query_frontend\" -e \"CLIPPER_INPUT_TYPE={mip}\" -l \"{clipper_label}\" -l \"{mv_label}\" " "{image}".format( path=model_data_path, nw=DOCKER_NW, @@ -963,6 +977,8 @@ def add_container(self, model_name, model_version): mv=model_version, mip=model_input_type, clipper_label=CLIPPER_DOCKER_LABEL, + mv_label="%s=%s:%d" % (CLIPPER_MODEL_CONTAINER_LABEL, + model_name, model_version), restart_policy=restart_policy)) result = self._execute_root(add_container_cmd) return result.return_code == 0 @@ -1058,6 +1074,45 @@ def set_model_version(self, model_name, model_version, num_containers=0): for r in range(num_containers): self.add_container(model_name, model_version) + def remove_inactive_containers(self, model_name): + """Removes all containers serving stale versions of the specified model. + + Parameters + ---------- + model_name : str + The name of the model whose old containers you want to clean. 
+ + """ + # Get all Docker containers tagged as model containers + num_containers_removed = 0 + with hide("output", "warnings", "running"): + containers = self._execute_root( + "docker ps -aq --filter label={model_container_label}".format( + model_container_label=CLIPPER_MODEL_CONTAINER_LABEL)) + if len(containers) > 0: + container_ids = [l.strip() for l in containers.split("\n")] + for container in container_ids: + # returns a string formatted as "<model_name>:<model_version>" + container_model_name_and_version = self._execute_root( + "docker inspect --format \"{{ index .Config.Labels \\\"%s\\\"}}\" %s" + % (CLIPPER_MODEL_CONTAINER_LABEL, container)) + splits = container_model_name_and_version.split(":") + container_model_name = splits[0] + container_model_version = int(splits[1]) + if container_model_name == model_name: + # check if container_model_version is the currently deployed version + model_info = self.get_model_info( + container_model_name, container_model_version) + if model_info is None or not model_info["is_current_version"]: + self._execute_root("docker stop {container}". + format(container=container)) + self._execute_root("docker rm {container}".format( + container=container)) + num_containers_removed += 1 + print("Removed %d inactive containers for model %s" % + (num_containers_removed, model_name)) + return num_containers_removed + def stop_all(self): """Stops and removes all Clipper Docker containers on the host. diff --git a/clipper_admin/tests/__init__.py b/clipper_admin/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/index.rst b/docs/index.rst index 36818c2de..400050937 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,7 +2,7 @@ Clipper Manager API Reference ============================== -.. automodule:: clipper_admin.clipper_manager +.. 
autoclass:: clipper_admin.Clipper :members: :undoc-members: :show-inheritance: diff --git a/examples/basic_query/README.md b/examples/basic_query/README.md index e9686f0d7..e67b0ee17 100644 --- a/examples/basic_query/README.md +++ b/examples/basic_query/README.md @@ -1,13 +1,14 @@ # Basic Query Example Requirements -The examples in this directory depend on a few Python packages. +The examples in this directory assume you have the `clipper_admin` pip package installed: + +```sh +pip install clipper_admin +``` We recommend using [Anaconda](https://www.continuum.io/downloads) to install Python packages. -+ [`requests`](http://docs.python-requests.org/en/master/) -+ [`numpy`](http://www.numpy.org/) - # Running the example query 1. Start Clipper locally diff --git a/examples/basic_query/example_client.py b/examples/basic_query/example_client.py index a7a4c61ce..4150f8d61 100644 --- a/examples/basic_query/example_client.py +++ b/examples/basic_query/example_client.py @@ -1,9 +1,7 @@ from __future__ import print_function import sys import os -cur_dir = os.path.dirname(os.path.abspath(__file__)) -sys.path.append(os.path.abspath('%s/../../management/' % cur_dir)) -import clipper_manager as cm +from clipper_admin import Clipper import json import requests from datetime import datetime @@ -24,11 +22,10 @@ def predict(host, uid, x): if __name__ == '__main__': host = "localhost" - clipper = cm.Clipper(host, check_for_docker=False) + clipper = Clipper(host, check_for_docker=False) clipper.register_application("example_app", "example_model", "doubles", "-1.0", 40000) - clipper.register_external_model("example_model", 1, ["l1", "l2"], - "doubles") + clipper.register_external_model("example_model", 1, "doubles") time.sleep(1.0) uid = 0 while True: diff --git a/examples/tutorial/tutorial_part_one.ipynb b/examples/tutorial/tutorial_part_one.ipynb index 8741405db..1a4ab3259 100644 --- a/examples/tutorial/tutorial_part_one.ipynb +++ b/examples/tutorial/tutorial_part_one.ipynb @@ -121,9 
+121,16 @@ "source": [ "# Start Clipper\n", "\n", - "Now you're ready to start Clipper! You will be using the `clipper_manager` client library to perform admninistrative commands.\n", + "Now you're ready to start Clipper! You will be using the `clipper_admin` client library to perform administrative commands.\n", "\n", - "> *Remember, Docker and Docker-Compose must be installed before deploying Clipper. Visit https://docs.docker.com/compose/install/ for instructions on how to do so.*\n", + "\n", + "> *Remember, Docker and Docker-Compose must be installed before deploying Clipper. Visit https://docs.docker.com/compose/install/ for instructions on how to do so. In addition, we recommend using [Anaconda](https://www.continuum.io/downloads) and Anaconda environments to manage Python.*\n", + "\n", + "Start by installing the library with `pip`:\n", + "\n", + "```sh\n", + "pip install clipper_admin\n", + "```\n", "\n", "Clipper uses Docker to manage application configurations and to deploy machine-learning models. Make sure your Docker daemon, local or remote, is up and running. You can check this by running `docker ps` in your command line – if your Docker daemon is not running, you will be told explicitly.\n", "\n", @@ -131,7 +138,7 @@ "\n", "If you'd like to deploy Clipper locally, you can leave the `user` and `key` variables blank and set `host=\"localhost\"`. Otherwise, you can deploy Clipper remotely to a machine that you have SSH access to. Set the `user` variable to your SSH username, the `key` variable to the path to your SSH key, and the `host` variable to the remote hostname or IP address.\n", "\n", - "> If your SSH server is running on a non-standard port, you can specify the SSH port to use as another argument to the Clipper constructor. For example, `clipper = cm.Clipper(host, user, key, ssh_port=9999)`." + "> If your SSH server is running on a non-standard port, you can specify the SSH port to use as another argument to the Clipper constructor. 
For example, `clipper = Clipper(host, user, key, ssh_port=9999)`." ] }, { @@ -145,17 +152,16 @@ }, "outputs": [], "source": [ - "# clipper_manager must be on your path:\n", "import sys\n", "import os\n", - "import clipper_admin.clipper_manager as cm\n", + "from clipper_admin import Clipper\n", "# Change the username if necessary\n", "user = \"\"\n", "# Set the path to the SSH key\n", "key = \"\"\n", "# Set the SSH host\n", "host = \"\"\n", - "clipper = cm.Clipper(host, user, key)\n", + "clipper = Clipper(host, user, key)\n", "\n", "clipper.start()" ] diff --git a/examples/tutorial/tutorial_part_two.ipynb b/examples/tutorial/tutorial_part_two.ipynb index 9d8900f3f..7b7dbd44c 100644 --- a/examples/tutorial/tutorial_part_two.ipynb +++ b/examples/tutorial/tutorial_part_two.ipynb @@ -22,17 +22,16 @@ }, "outputs": [], "source": [ - "# clipper_manager must be on your path:\n", "import sys\n", "import os\n", - "import clipper_admin.clipper_manager as cm\n", + "from clipper_admin import Clipper\n", "# Change the username if necessary\n", "user = \"\"\n", "# Set the path to the SSH key\n", "key = \"\"\n", "# Set the SSH host\n", "host = \"\"\n", - "clipper = cm.Clipper(host, user, key)" + "clipper = Clipper(host, user, key)" ] }, { @@ -120,7 +119,6 @@ " 1,\n", " lr_model,\n", " \"clipper/sklearn_cifar_container:latest\",\n", - " [\"cifar\", \"sklearn\"],\n", " \"doubles\",\n", " num_containers=1\n", ")\n", @@ -201,7 +199,6 @@ " 2,\n", " os.path.abspath(\"tf_cifar_model\"),\n", " \"clipper/tf_cifar_container:latest\",\n", - " [\"cifar\", \"tf\"],\n", " \"doubles\",\n", " num_containers=1\n", ")\n", diff --git a/clipper_admin/tests/clipper_manager_test.py b/integration-tests/clipper_manager_tests.py similarity index 77% rename from clipper_admin/tests/clipper_manager_test.py rename to integration-tests/clipper_manager_tests.py index da0cb4493..12fc73729 100644 --- a/clipper_admin/tests/clipper_manager_test.py +++ b/integration-tests/clipper_manager_tests.py @@ -8,12 +8,12 @@ 
from argparse import ArgumentParser cur_dir = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, os.path.abspath('%s/../' % cur_dir)) -import clipper_manager +from clipper_admin.clipper_manager import Clipper import random import socket """ Executes a test suite consisting of two separate cases: short tests and long tests. -Before each case, an instance of clipper_manager.Clipper is created. Tests +Before each case, an instance of Clipper is created. Tests are then performed by invoking methods on this instance, often resulting in the execution of docker commands. """ @@ -39,8 +39,9 @@ def find_unbound_port(): class ClipperManagerTestCaseShort(unittest.TestCase): @classmethod def setUpClass(self): - self.clipper_inst = clipper_manager.Clipper( + self.clipper_inst = Clipper( "localhost", redis_port=find_unbound_port()) + self.clipper_inst.stop_all() self.clipper_inst.start() self.app_name = "app1" self.model_name = "m1" @@ -56,10 +57,9 @@ def tearDownClass(self): def test_external_models_register_correctly(self): name = "m1" version1 = 1 - tags = ["test"] input_type = "doubles" result = self.clipper_inst.register_external_model( - self.model_name, self.model_version_1, tags, input_type) + self.model_name, self.model_version_1, input_type) self.assertTrue(result) registered_model_info = self.clipper_inst.get_model_info( self.model_name, self.model_version_1) @@ -67,7 +67,7 @@ def test_external_models_register_correctly(self): version2 = 2 result = self.clipper_inst.register_external_model( - self.model_name, self.model_version_2, tags, input_type) + self.model_name, self.model_version_2, input_type) self.assertTrue(result) registered_model_info = self.clipper_inst.get_model_info( self.model_name, self.model_version_2) @@ -128,11 +128,10 @@ def test_model_deploys_successfully(self): # that will be deployed to a no-op container model_data = svm.SVC() container_name = "clipper/noop-container" - labels = ["test"] input_type = "doubles" result = 
self.clipper_inst.deploy_model( self.deploy_model_name, self.deploy_model_version, model_data, - container_name, labels, input_type) + container_name, input_type) self.assertTrue(result) model_info = self.clipper_inst.get_model_info( self.deploy_model_name, self.deploy_model_version) @@ -152,14 +151,60 @@ def test_add_container_for_deployed_model_succeeds(self): split_output = running_containers_output.split("\n") self.assertGreaterEqual(len(split_output), 2) + def test_remove_inactive_containers_succeeds(self): + # Initialize a support vector classifier + # that will be deployed to a no-op container + self.clipper_inst.stop_all() + self.clipper_inst.start() + model_data = svm.SVC() + container_name = "clipper/noop-container" + input_type = "doubles" + model_name = "remove_inactive_test_model" + result = self.clipper_inst.deploy_model( + model_name, + 1, + model_data, + container_name, + input_type, + num_containers=2) + self.assertTrue(result) + running_containers_output = self.clipper_inst._execute_standard( + "docker ps -q --filter \"ancestor=clipper/noop-container\"") + self.assertIsNotNone(running_containers_output) + num_running_containers = running_containers_output.split("\n") + print("RUNNING CONTAINERS: %s" % str(num_running_containers)) + self.assertEqual(len(num_running_containers), 2) + + result = self.clipper_inst.deploy_model( + model_name, + 2, + model_data, + container_name, + input_type, + num_containers=3) + self.assertTrue(result) + running_containers_output = self.clipper_inst._execute_standard( + "docker ps -q --filter \"ancestor=clipper/noop-container\"") + self.assertIsNotNone(running_containers_output) + num_running_containers = running_containers_output.split("\n") + self.assertEqual(len(num_running_containers), 5) + + num_containers_removed = self.clipper_inst.remove_inactive_containers( + model_name) + self.assertEqual(num_containers_removed, 2) + running_containers_output = self.clipper_inst._execute_standard( + "docker ps -q --filter 
\"ancestor=clipper/noop-container\"") + self.assertIsNotNone(running_containers_output) + num_running_containers = running_containers_output.split("\n") + self.assertEqual(len(num_running_containers), 3) + def test_predict_function_deploys_successfully(self): model_name = "m2" model_version = 1 predict_func = lambda inputs: ["0" for x in inputs] - labels = ["test"] input_type = "doubles" result = self.clipper_inst.deploy_predict_function( - model_name, model_version, predict_func, labels, input_type) + model_name, model_version, predict_func, input_type) self.assertTrue(result) model_info = self.clipper_inst.get_model_info(model_name, model_version) @@ -173,8 +218,9 @@ def test_predict_function_deploys_successfully(self): class ClipperManagerTestCaseLong(unittest.TestCase): @classmethod def setUpClass(self): - self.clipper_inst = clipper_manager.Clipper( + self.clipper_inst = Clipper( "localhost", redis_port=find_unbound_port()) + self.clipper_inst.stop_all() self.clipper_inst.start() self.app_name_1 = "app3" self.app_name_2 = "app4" @@ -200,30 +246,29 @@ def test_deployed_model_queried_successfully(self): # that will be deployed to a no-op container model_data = svm.SVC() container_name = "clipper/noop-container" - labels = ["test"] result = self.clipper_inst.deploy_model( self.model_name_2, model_version, model_data, container_name, - labels, self.input_type) + self.input_type) self.assertTrue(result) time.sleep(30) url = "http://localhost:1337/{}/predict".format(self.app_name_2) test_input = [99.3, 18.9, 67.2, 34.2] - req_json = json.dumps({'uid': 0, 'input': test_input}) + req_json = json.dumps({'input': test_input}) headers = {'Content-type': 'application/json'} response = requests.post(url, headers=headers, data=req_json) - parsed_response = json.loads(response.text) + parsed_response = response.json() + print(parsed_response) self.assertNotEqual(parsed_response["output"], self.default_output) self.assertFalse(parsed_response["default"]) def 
test_deployed_predict_function_queried_successfully(self): model_version = 1 predict_func = lambda inputs: [str(len(x)) for x in inputs] - labels = ["test"] input_type = "doubles" result = self.clipper_inst.deploy_predict_function( - self.model_name_1, model_version, predict_func, labels, input_type) + self.model_name_1, model_version, predict_func, input_type) self.assertTrue(result) time.sleep(60) @@ -231,11 +276,12 @@ def test_deployed_predict_function_queried_successfully(self): received_non_default_prediction = False url = "http://localhost:1337/{}/predict".format(self.app_name_1) test_input = [101.1, 99.5, 107.2] - req_json = json.dumps({'uid': 0, 'input': test_input}) + req_json = json.dumps({'input': test_input}) headers = {'Content-type': 'application/json'} for i in range(0, 40): response = requests.post(url, headers=headers, data=req_json) - parsed_response = json.loads(response.text) + parsed_response = response.json() + print(parsed_response) output = parsed_response["output"] if output == self.default_output: time.sleep(20) @@ -253,17 +299,17 @@ def test_deployed_predict_function_queried_successfully(self): 'get_app_info_for_registered_app_returns_info_dictionary', 'get_app_info_for_nonexistent_app_returns_none', 'test_add_container_for_external_model_fails', - 'test_model_version_sets_correctly', - 'test_get_logs_creates_log_files', + 'test_model_version_sets_correctly', 'test_get_logs_creates_log_files', 'test_inspect_instance_returns_json_dict', 'test_model_deploys_successfully', 'test_add_container_for_deployed_model_succeeds', - # 'test_predict_function_deploys_successfully' + 'test_remove_inactive_containers_succeeds', + 'test_predict_function_deploys_successfully' ] LONG_TEST_ORDERING = [ 'test_deployed_model_queried_successfully', - # 'test_deployed_predict_function_queried_successfully' + 'test_deployed_predict_function_queried_successfully' ] if __name__ == '__main__': diff --git a/integration-tests/light_load_all_functionality.py 
b/integration-tests/many_apps_many_models.py similarity index 93% rename from integration-tests/light_load_all_functionality.py rename to integration-tests/many_apps_many_models.py index 000f0f847..6a1d9fed3 100644 --- a/integration-tests/light_load_all_functionality.py +++ b/integration-tests/many_apps_many_models.py @@ -5,8 +5,8 @@ import json import numpy as np cur_dir = os.path.dirname(os.path.abspath(__file__)) -sys.path.insert(0, os.path.abspath('%s/../clipper_admin/' % cur_dir)) -import clipper_manager as cm +sys.path.insert(0, os.path.abspath('%s/../' % cur_dir)) +from clipper_admin.clipper_manager import Clipper import time import subprocess32 as subprocess import pprint @@ -48,7 +48,7 @@ def find_unbound_port(): def init_clipper(): - clipper = cm.Clipper("localhost", redis_port=find_unbound_port()) + clipper = Clipper("localhost", redis_port=find_unbound_port()) clipper.start() time.sleep(1) return clipper @@ -71,7 +71,7 @@ def deploy_model(clipper, name, version): model_name, version, fake_model_data, - "clipper/noop-container", [name], + "clipper/noop-container", "doubles", num_containers=1) time.sleep(10) @@ -82,7 +82,6 @@ def deploy_model(clipper, name, version): "http://localhost:1337/%s/predict" % app_name, headers=headers, data=json.dumps({ - 'uid': 0, 'input': list(np.random.random(30)) })) result = response.json() @@ -106,7 +105,6 @@ def create_and_test_app(clipper, name, num_models): "http://localhost:1337/%s/predict" % app_name, headers=headers, data=json.dumps({ - 'uid': 0, 'input': list(np.random.random(30)) })) result = response.json() @@ -148,6 +146,6 @@ def create_and_test_app(clipper, name, num_models): else: clipper.stop_all() except: - clipper = cm.Clipper("localhost") + clipper = Clipper("localhost") clipper.stop_all() sys.exit(1) diff --git a/src/frontends/src/query_frontend.hpp b/src/frontends/src/query_frontend.hpp index a35b487cc..7d1dbebaf 100644 --- a/src/frontends/src/query_frontend.hpp +++ b/src/frontends/src/query_frontend.hpp 
@@ -49,7 +49,6 @@ const std::string PREDICTION_ERROR_NAME_QUERY_PROCESSING = const std::string PREDICTION_JSON_SCHEMA = R"( { - "uid" := string, "input" := [double] | [int] | [string] | [byte] | [float], } )"; @@ -438,7 +437,6 @@ class RequestHandler { /* * JSON format for prediction query request: * { - * "uid" := string, * "input" := [double] | [int] | [string] | [byte] | [float] * } */ @@ -448,7 +446,10 @@ class RequestHandler { long latency_slo_micros, InputType input_type) { rapidjson::Document d; clipper::json::parse_json(json_content, d); - long uid = clipper::json::get_long(d, "uid"); + long uid = 0; + // NOTE: We will eventually support personalization again so this commented + // out code is intentionally left in as a placeholder. + // long uid = clipper::json::get_long(d, "uid"); std::shared_ptr input = clipper::json::parse_input(input_type, d); auto prediction = query_processor_.predict( Query{name, uid, input, latency_slo_micros, policy, models}); diff --git a/src/frontends/src/query_frontend_tests.cpp b/src/frontends/src/query_frontend_tests.cpp index 17e8927d9..cb0e141aa 100644 --- a/src/frontends/src/query_frontend_tests.cpp +++ b/src/frontends/src/query_frontend_tests.cpp @@ -61,7 +61,7 @@ class QueryFrontendTest : public ::testing::Test { }; TEST_F(QueryFrontendTest, TestDecodeCorrectInputInts) { - std::string test_json_ints = "{\"uid\": 23, \"input\": [1,2,3,4]}"; + std::string test_json_ints = "{\"input\": [1,2,3,4]}"; Response response = rh_.decode_and_handle_predict(test_json_ints, "test", {}, "test_policy", 30000, InputType::Ints) @@ -69,7 +69,6 @@ TEST_F(QueryFrontendTest, TestDecodeCorrectInputInts) { Query parsed_query = response.query_; - EXPECT_EQ(parsed_query.user_id_, 23); const std::vector& parsed_input = std::static_pointer_cast(parsed_query.input_)->get_data(); std::vector expected_input{1, 2, 3, 4}; @@ -80,8 +79,7 @@ TEST_F(QueryFrontendTest, TestDecodeCorrectInputInts) { } TEST_F(QueryFrontendTest, TestDecodeCorrectInputDoubles) { 
- std::string test_json_doubles = - "{\"uid\": 23, \"input\": [1.4,2.23,3.243242,0.3223424]}"; + std::string test_json_doubles = "{\"input\": [1.4,2.23,3.243242,0.3223424]}"; Response response = rh_.decode_and_handle_predict(test_json_doubles, "test", {}, "test_policy", 30000, InputType::Doubles) @@ -89,7 +87,6 @@ TEST_F(QueryFrontendTest, TestDecodeCorrectInputDoubles) { Query parsed_query = response.query_; - EXPECT_EQ(parsed_query.user_id_, 23); const std::vector& parsed_input = std::static_pointer_cast(parsed_query.input_)->get_data(); std::vector expected_input{1.4, 2.23, 3.243242, 0.3223424}; @@ -101,7 +98,7 @@ TEST_F(QueryFrontendTest, TestDecodeCorrectInputDoubles) { TEST_F(QueryFrontendTest, TestDecodeCorrectInputString) { std::string test_json_string = - "{\"uid\": 23, \"input\": \"hello world. This is a test string with " + "{\"input\": \"hello world. This is a test string with " "punctionation!@#$Y#;}#\"}"; Response response = rh_.decode_and_handle_predict(test_json_string, "test", {}, "test_policy", @@ -110,7 +107,6 @@ TEST_F(QueryFrontendTest, TestDecodeCorrectInputString) { Query parsed_query = response.query_; - EXPECT_EQ(parsed_query.user_id_, 23); const std::string& parsed_input = std::static_pointer_cast(parsed_query.input_) ->get_data(); @@ -140,7 +136,8 @@ TEST_F(QueryFrontendTest, TestDecodeMalformedJSON) { } TEST_F(QueryFrontendTest, TestDecodeMissingJsonField) { - std::string json_missing_field = "{\"input\": [1.4,2.23,3.243242,0.3223424]}"; + std::string json_missing_field = + "{\"other_field\": [1.4,2.23,3.243242,0.3223424]}"; ASSERT_THROW( rh_.decode_and_handle_predict(json_missing_field, "test", {}, "test_policy", 30000, InputType::Doubles),