diff --git a/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentControllerImpl.java b/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentControllerImpl.java index 4fc15fbcd1..cc0e42d691 100644 --- a/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentControllerImpl.java +++ b/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentControllerImpl.java @@ -269,6 +269,7 @@ public void createOrReplaceSeldonDeployment(SeldonDeployment mlDep) { if (existing == null || !existing.getSpec().equals(mlDep.getSpec())) { logger.debug("Running updates for "+mlDep.getMetadata().getName()); + SeldonDeployment mlDepStatusUpdated = operator.updateStatus(mlDep); SeldonDeployment mlDep2 = operator.defaulting(mlDep); operator.validate(mlDep2); mlCache.put(mlDep2); @@ -281,13 +282,13 @@ public void createOrReplaceSeldonDeployment(SeldonDeployment mlDep) { //removeServices(client,namespace, mlDep2, resources.services); //Proto Client not presently working for deletion ApiClient client2 = clientProvider.getClient(); removeServices(client2,namespace, mlDep2, resources.services); - if (!mlDep.getSpec().equals(mlDep2.getSpec())) + if (!mlDep.getSpec().equals(mlDepStatusUpdated.getSpec())) { - logger.debug("Pushing updated SeldonDeployment "+mlDep2.getMetadata().getName()+" back to kubectl"); - crdHandler.updateSeldonDeployment(mlDep2); + logger.debug("Pushing updated SeldonDeployment "+mlDepStatusUpdated.getMetadata().getName()+" back to kubectl"); + crdHandler.updateSeldonDeployment(mlDepStatusUpdated); } else - logger.debug("Not pushing an update as no change to spec for SeldonDeployment "+mlDep2.getMetadata().getName()); + logger.debug("Not pushing an update as no change to status for SeldonDeployment "+mlDep2.getMetadata().getName()); } else { diff --git a/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentOperator.java b/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentOperator.java index ea12b7486a..664178928b 100644 --- a/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentOperator.java +++ b/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentOperator.java @@ -21,6 +21,7 @@ public interface SeldonDeploymentOperator { + public SeldonDeployment updateStatus(SeldonDeployment mlDep); public SeldonDeployment defaulting(SeldonDeployment mlDep); public void validate(SeldonDeployment mlDep) throws SeldonDeploymentException; public DeploymentResources createResources(SeldonDeployment mlDep) throws SeldonDeploymentException; diff --git a/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentOperatorImpl.java b/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentOperatorImpl.java index 683807c9c9..e31ed32dd9 100644 --- a/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentOperatorImpl.java +++ b/cluster-manager/src/main/java/io/seldon/clustermanager/k8s/SeldonDeploymentOperatorImpl.java @@ -356,7 +356,19 @@ public String getSeldonServiceName(SeldonDeployment dep,PredictorSpec pred,Strin else return svcName; } - + + @Override + public SeldonDeployment updateStatus(SeldonDeployment mlDep) { + SeldonDeployment.Builder mlBuilder = SeldonDeployment.newBuilder(mlDep); + + if (!mlBuilder.hasStatus()) + { + mlBuilder.getStatusBuilder().setState(Constants.STATE_CREATING); + } + + return mlBuilder.build(); + } + @Override public SeldonDeployment defaulting(SeldonDeployment mlDep) 
{ @@ -378,32 +390,34 @@ public SeldonDeployment defaulting(SeldonDeployment mlDep) { for(int cIdx = 0;cIdx < spec.getSpec().getContainersCount();cIdx++) { V1.Container c = spec.getSpec().getContainers(cIdx); - String containerServiceKey = getPredictorServiceNameKey(c.getName()); - String containerServiceValue = getSeldonServiceName(mlDep, p, c.getName()); - metaBuilder.putLabels(containerServiceKey, containerServiceValue); - - int portNum; - if (servicePortMap.containsKey(c.getName())) - portNum = servicePortMap.get(c.getName()); - else + // Only update graph and container if container is referenced in the inference graph + V1.Container c2; + if(isContainerInGraph(p.getGraph(), c)) { - portNum = currentServicePortNum; - servicePortMap.put(c.getName(), portNum); - currentServicePortNum++; + String containerServiceKey = getPredictorServiceNameKey(c.getName()); + String containerServiceValue = getSeldonServiceName(mlDep, p, c.getName()); + metaBuilder.putLabels(containerServiceKey, containerServiceValue); + + int portNum; + if (servicePortMap.containsKey(c.getName())) + portNum = servicePortMap.get(c.getName()); + else + { + portNum = currentServicePortNum; + servicePortMap.put(c.getName(), portNum); + currentServicePortNum++; + } + c2 = this.updateContainer(c, findPredictiveUnitForContainer(mlDep.getSpec().getPredictors(pbIdx).getGraph(),c.getName()),portNum,deploymentName,predictorName); + updatePredictiveUnitBuilderByName(mlBuilder.getSpecBuilder().getPredictorsBuilder(pbIdx).getGraphBuilder(),c2,containerServiceValue); } - V1.Container c2 = this.updateContainer(c, findPredictiveUnitForContainer(mlDep.getSpec().getPredictors(pbIdx).getGraph(),c.getName()),portNum,deploymentName,predictorName); + else + c2 = c; mlBuilder.getSpecBuilder().getPredictorsBuilder(pbIdx).getComponentSpecsBuilder(ptsIdx).getSpecBuilder().addContainers(cIdx, c2); - updatePredictiveUnitBuilderByName(mlBuilder.getSpecBuilder().getPredictorsBuilder(pbIdx).getGraphBuilder(),c2,containerServiceValue); } mlBuilder.getSpecBuilder().getPredictorsBuilder(pbIdx).getComponentSpecsBuilder(ptsIdx).setMetadata(metaBuilder); } } - if (!mlBuilder.hasStatus()) - { - mlBuilder.getStatusBuilder().setState(Constants.STATE_CREATING); - } - return mlBuilder.build(); } @@ -504,6 +518,26 @@ private String getAmbassadorAnnotation(SeldonDeployment mlDep,String serviceName return restMapping + grpcMapping; } + /** + * + * @param pu - A predictiveUnit + * @param container - a container + * @return True if container name can be found in graph of pu + */ + private boolean isContainerInGraph(PredictiveUnit pu,V1.Container container) + { + if (pu.getName().equals(container.getName())) + { + return true; + } + else + { + for(int i=0;i<pu.getChildrenCount();i++) + { + if (isContainerInGraph(pu.getChildren(i),container)) + return true; + } + } + return false; + } diff --git a/docs/wrappers/nodejs.md b/docs/wrappers/nodejs.md index 4eb5fe4c8f..e09826cf2c 100644 --- a/docs/wrappers/nodejs.md +++ b/docs/wrappers/nodejs.md @@ -136,13 +136,14 @@ The name of the JS file containing the model. ### API_TYPE -API type to create. Can be REST only at present. +API type to create. Can be REST or GRPC. ### SERVICE_TYPE The service type being created. 
Available options are: - MODEL +- TRANSFORMER ### PERSISTENCE diff --git a/examples/models/nodejs_mnist/.s2i/environment_grpc b/examples/models/nodejs_mnist/.s2i/environment_grpc new file mode 100644 index 0000000000..cd38583149 --- /dev/null +++ b/examples/models/nodejs_mnist/.s2i/environment_grpc @@ -0,0 +1,4 @@ +MODEL_NAME=MnistClassifier.js +API_TYPE=GRPC +SERVICE_TYPE=MODEL +PERSISTENCE=0 diff --git a/examples/models/nodejs_mnist/nodejs_mnist.ipynb b/examples/models/nodejs_mnist/nodejs_mnist.ipynb index e2c170caea..04dc33f85a 100644 --- a/examples/models/nodejs_mnist/nodejs_mnist.ipynb +++ b/examples/models/nodejs_mnist/nodejs_mnist.ipynb @@ -51,7 +51,7 @@ "metadata": {}, "outputs": [], "source": [ - "!s2i build . seldonio/seldon-core-s2i-nodejs:0.1 node-s2i-mnist-model:0.1" + "!s2i build . seldonio/seldon-core-s2i-nodejs:0.2-SNAPSHOT node-s2i-mnist-model:0.1" ] }, { @@ -97,6 +97,65 @@ "!docker rm nodejs_mnist_predictor --force" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Prediction using GRPC API on the docker container" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!s2i build -E ./.s2i/environment_grpc . seldonio/seldon-core-s2i-nodejs:0.2-SNAPSHOT node-s2i-mnist-model:0.2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!docker run --name \"nodejs_mnist_predictor\" -d --rm -p 5000:5000 node-s2i-mnist-model:0.2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!cd ../../../wrappers/testing && make build_protos" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Send some random features that conform to the contract" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!python ../../../wrappers/testing/tester.py contract.json 0.0.0.0 5000 -p -t --grpc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!docker rm nodejs_mnist_predictor --force" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -147,7 +206,7 @@ "metadata": {}, "outputs": [], "source": [ - "!eval $(minikube docker-env) && s2i build . seldonio/seldon-core-s2i-nodejs:0.1 node-s2i-mnist-model:0.1" + "!eval $(minikube docker-env) && s2i build . seldonio/seldon-core-s2i-nodejs:0.2-SNAPSHOT node-s2i-mnist-model:0.1" ] }, { diff --git a/examples/models/nodejs_tensorflow/.s2i/environment_grpc b/examples/models/nodejs_tensorflow/.s2i/environment_grpc new file mode 100644 index 0000000000..b50695cae7 --- /dev/null +++ b/examples/models/nodejs_tensorflow/.s2i/environment_grpc @@ -0,0 +1,4 @@ +MODEL_NAME=MyModel.js +API_TYPE=GRPC +SERVICE_TYPE=MODEL +PERSISTENCE=0 diff --git a/examples/models/nodejs_tensorflow/nodejs_tensorflow.ipynb b/examples/models/nodejs_tensorflow/nodejs_tensorflow.ipynb index bfbe083a45..f2135949d3 100644 --- a/examples/models/nodejs_tensorflow/nodejs_tensorflow.ipynb +++ b/examples/models/nodejs_tensorflow/nodejs_tensorflow.ipynb @@ -50,7 +50,7 @@ "metadata": {}, "outputs": [], "source": [ - "!s2i build . seldonio/seldon-core-s2i-nodejs:0.1 node-s2i-model-image:0.1" + "!s2i build . 
seldonio/seldon-core-s2i-nodejs:0.2-SNAPSHOT node-s2i-model-image:0.1" ] }, { @@ -96,6 +96,72 @@ "!docker rm nodejs_tensorflow_predictor --force" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Prediction using GRPC API on the docker container" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!s2i build -E ./.s2i/environment_grpc . seldonio/seldon-core-s2i-nodejs:0.2-SNAPSHOT node-s2i-model-image:0.2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!docker run --name \"nodejs_tensorflow_predictor\" -d --rm -p 5000:5000 node-s2i-model-image:0.2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!cd ../../../wrappers/testing && make build_protos" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Send some random features that conform to the contract" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!python ../../../wrappers/testing/tester.py contract.json 0.0.0.0 5000 -p -t --grpc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!docker rm nodejs_tensorflow_predictor --force" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Prediction using Minikube" + ] + }, { "cell_type": "code", "execution_count": null, @@ -139,7 +205,7 @@ "metadata": {}, "outputs": [], "source": [ - "!eval $(minikube docker-env) && s2i build . seldonio/seldon-core-s2i-nodejs:0.1 node-s2i-model-image:0.1" + "!eval $(minikube docker-env) && s2i build . seldonio/seldon-core-s2i-nodejs:0.2-SNAPSHOT node-s2i-model-image:0.1" ] }, { diff --git a/examples/models/nvidia-mnist/.s2i/environment b/examples/models/nvidia-mnist/.s2i/environment new file mode 100644 index 0000000000..752bf2fd05 --- /dev/null +++ b/examples/models/nvidia-mnist/.s2i/environment @@ -0,0 +1,4 @@ +MODEL_NAME=MnistTransformer +API_TYPE=REST +SERVICE_TYPE=TRANSFORMER +PERSISTENCE=0 diff --git a/examples/models/nvidia-mnist/Makefile b/examples/models/nvidia-mnist/Makefile new file mode 100644 index 0000000000..25e5552595 --- /dev/null +++ b/examples/models/nvidia-mnist/Makefile @@ -0,0 +1,14 @@ +TRANSFORMER_IMAGE=seldonio/mnist-caffe2-transformer:0.1 + +clean: + rm -f rm -f tensorrt_mnist/1/model.plan + rm -rf MNIST_data + rm -f mnist.json + rm -f tmp.json + +build_transformer: + s2i build . 
seldonio/seldon-core-s2i-python3:0.2 ${TRANSFORMER_IMAGE} + +push_transformer: + docker push ${TRANSFORMER_IMAGE} + diff --git a/examples/models/nvidia-mnist/MnistTransformer.py b/examples/models/nvidia-mnist/MnistTransformer.py new file mode 100644 index 0000000000..84b097a077 --- /dev/null +++ b/examples/models/nvidia-mnist/MnistTransformer.py @@ -0,0 +1,27 @@ +import numpy as np + +MEANS=np.array([255.0,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,254,254,254,253,252,252,251,251,252,252,253,254,254,255,255,255,255,255,255,255,255,255,255,255,255,255,254,254,253,251,249,248,245,243,242,242,243,246,248,251,253,254,255,255,255,255,255,255,255,255,255,255,255,254,253,250,247,242,235,228,220,213,210,211,216,224,232,240,246,251,253,254,255,255,255,255,255,255,255,255,254,251,248,242,234,223,211,196,181,170,164,166,175,189,205,221,233,243,248,252,254,255,255,255,255,255,255,254,252,248,241,231,217,202,184,166,149,136,131,134,143,159,180,201,220,234,243,249,253,255,255,255,255,255,254,253,249,243,233,219,201,181,161,143,130,122,120,122,129,141,161,185,208,227,240,248,252,254,255,255,255,255,254,251,246,238,226,208,187,164,146,135,131,132,133,132,133,139,154,178,202,223,239,248,252,255,255,255,255,254,253,251,245,236,221,200,177,156,144,144,150,156,156,151,144,144,156,178,202,224,240,249,253,255,255,255,255,254,253,251,245,235,218,195,172,155,152,161,172,176,170,161,150,149,161,183,207,227,242,250,254,255,255,255,255,255,254,251,246,234,215,191,168,156,160,173,182,179,169,157,147,149,166,190,213,230,243,251,254,255,255,255,255,255,254,252,246,233,212,186,165,157,164,175,176,165,153,142,137,147,170,196,217,231,242,251,255,255,255,255,255,255,254,252,245,230,207,182,163,158,164,168,158,143,131,125,128,146,174,200,218,231,241,250,254,255,255,255,255,255,255,252,243,227,205,181,164,159,161,157,139,124,115,118,127,148,176,199,216,230,240,249,254,255,255,255,255,255,254,251,241,224,204,184,169,163,160,150,132,119,116,123,133,153,177,197,214,228,240,249,254,255,255,255,255,255,254,251,239,222,205,189,177,171,166,154,139,129,128,134,144,159,177,195,213,228,241,249,254,255,255,255,255,255,254,249,237,222,207,195,186,180,175,166,153,143,140,142,150,162,178,195,214,230,242,250,254,255,255,255,255,255,253,247,235,220,207,197,189,183,179,172,160,148,142,143,150,161,178,198,217,233,244,250,254,255,255,255,255,255,253,246,233,218,204,192,184,177,172,165,153,142,137,139,148,163,183,204,222,236,246,251,254,255,255,255,255,255,253,247,234,218,201,186,174,165,157,148,137,130,129,137,151,171,194,214,230,242,248,252,254,255,255,255,255,255,253,249,238,222,203,184,168,154,143,132,124,123,130,145,165,188,209,227,239,247,251,253,255,255,255,255,255,255,254,251,244,232,214,194,174,156,142,132,130,134,148,167,189,210,226,238,246,250,253,254,255,255,255,255,255,255,255,253,250,243,231,215,196,178,163,155,156,164,179,197,215,230,240,247,251,253,254,255,255,255,255,255,255,255,255,254,253,251,246,238,228,217,208,203,204,210,218,228,236,243,248,251,253,254,255,255,255,255,255,255,255,255,255,255,255,254,252,249,245,241,238,237,237,239,242,245,247,250,252,253,254,255,255,255,255,255,255,255,255,255,255,255,255,254,254,253,252,250,249,248,249,249,250,252,253,253,254,254,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,254,254,254,254,255,255,255,255,255,255,255,255,255,255,255,255]) + + +class 
MnistTransformer(object): + + def __init__(self): + print("init"); + + def preProcessMNIST(self,X): + ''' + Convert values assumed to be in 0-1 range to a value in 0-255. + The remove the training mean needed by the Caffe2 model. + Finally reshape the output to that expected by the model + ''' + X = X * 255 + X = 255 - X + X = (X.reshape(784) - MEANS).reshape(28,28,1) + X = np.transpose(X, (2, 0, 1)) + return X + + def transform_input(self,X,names): + return self.preProcessMNIST(X) + + def transform_output(self,X,names): + return X.reshape(1,10) diff --git a/examples/models/nvidia-mnist/fetch-model.sh b/examples/models/nvidia-mnist/fetch-model.sh new file mode 100755 index 0000000000..6b298153f6 --- /dev/null +++ b/examples/models/nvidia-mnist/fetch-model.sh @@ -0,0 +1 @@ +wget -O mnist_tensorrt_model/1/model.plan http://seldon-public.s3.amazonaws.com/nvidia-mnist-model/model.plan diff --git a/examples/models/nvidia-mnist/nvidia-mnist/Chart.yaml b/examples/models/nvidia-mnist/nvidia-mnist/Chart.yaml new file mode 100644 index 0000000000..cc00617700 --- /dev/null +++ b/examples/models/nvidia-mnist/nvidia-mnist/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +description: Seldon MNIST Nvidia Inference Server Example +name: nvidia-mnist +sources: +- https://github.com/SeldonIO/seldon-core +version: 0.1 diff --git a/examples/models/nvidia-mnist/nvidia-mnist/templates/mnist_nvidia_deployment.json b/examples/models/nvidia-mnist/nvidia-mnist/templates/mnist_nvidia_deployment.json new file mode 100644 index 0000000000..97ce30c6fe --- /dev/null +++ b/examples/models/nvidia-mnist/nvidia-mnist/templates/mnist_nvidia_deployment.json @@ -0,0 +1,135 @@ +{ + "apiVersion": "machinelearning.seldon.io/v1alpha2", + "kind": "SeldonDeployment", + "metadata": { + "labels": { + "app": "seldon" + }, + "name": "nvidia-mnist", + "namespace": "{{ .Release.Namespace }}" + }, + "spec": { + "name": "caffe2-mnist", + "predictors": [ + { + "componentSpecs": [{ + "spec": { + "containers": [ + { + "image": "seldonio/mnist-caffe2-transformer:0.1", + "name": "mnist-transformer" + }, + { + "image": "seldonio/nvidia-inference-server-proxy:0.1", + "name": "nvidia-proxy" + }, + { + "args": [ + "--model-store={{ .Values.nvidia.model_store }}" + ], + "command": [ + "inference_server" + ], + "image": "nvcr.io/nvidia/inferenceserver:18.08.1-py2", + "livenessProbe": { + "failureThreshold": 3, + "handler":{ + "httpGet": { + "path": "/api/health/live", + "port": {{ .Values.nvidia.port }}, + "scheme": "HTTP" + } + }, + "initialDelaySeconds": 5, + "periodSeconds": 5, + "successThreshold": 1, + "timeoutSeconds": 1 + }, + "name": "inference-server", + "ports": [ + { + "containerPort": {{ .Values.nvidia.port }}, + "protocol": "TCP" + }, + { + "containerPort": 8001, + "protocol": "TCP" + }, + { + "containerPort": 8002, + "protocol": "TCP" + } + ], + "readinessProbe": { + "failureThreshold": 3, + "handler":{ + "httpGet": { + "path": "/api/health/ready", + "port": {{ .Values.nvidia.port }}, + "scheme": "HTTP" + } + }, + "initialDelaySeconds": 5, + "periodSeconds": 5, + "successThreshold": 1, + "timeoutSeconds": 1 + }, + "resources": { + "limits": { + "nvidia.com/gpu": "1" + }, + "requests": { + "cpu": "100m", + "nvidia.com/gpu": "1" + } + }, + "securityContext": { + "runAsUser": 1000 + } + } + ], + "terminationGracePeriodSeconds": 1, + "imagePullSecrets": [ + { + "name": "ngc" + } + ] + } + }], + "graph": { + "name": "mnist-transformer", + "endpoint": { "type" : "REST" }, + "type": "TRANSFORMER", + "children": [ + { + "name": "nvidia-proxy", + 
"endpoint": { "type" : "REST" }, + "type": "MODEL", + "children": [], + "parameters": + [ + { + "name":"url", + "type":"STRING", + "value":"127.0.0.1:{{ .Values.nvidia.port }}" + }, + { + "name":"model_name", + "type":"STRING", + "value":"tensorrt_mnist" + }, + { + "name":"protocol", + "type":"STRING", + "value":"HTTP" + } + ] + } + ] + }, + "name": "mnist-nvidia", + "replicas": 1 + } + ] + } +} diff --git a/examples/models/nvidia-mnist/nvidia-mnist/values.yaml b/examples/models/nvidia-mnist/nvidia-mnist/values.yaml new file mode 100644 index 0000000000..adaff98e15 --- /dev/null +++ b/examples/models/nvidia-mnist/nvidia-mnist/values.yaml @@ -0,0 +1,4 @@ +nvidia: + model_store: gs://seldon-inference-server-model-store + port: 8000 + diff --git a/examples/models/nvidia-mnist/nvidia_mnist.ipynb b/examples/models/nvidia-mnist/nvidia_mnist.ipynb new file mode 100644 index 0000000000..c20b80d5f8 --- /dev/null +++ b/examples/models/nvidia-mnist/nvidia_mnist.ipynb @@ -0,0 +1,1361 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Seldon and NVIDIA Inference Server MNIST Example\n", + "\n", + "This example shows how you can combine Seldon with the NVIDIA Inference Server. We will use a Seldon TensorRT proxy model image that will forward Seldon internal microservice prediction calls out to an external TensorRT Inference Server.\n", + "\n", + "The example will use the MNIST digit classification task with a pre-trained CAFFE2 model.\n", + "\n", + "A Seldon transformer will transform the inputs before sending to the Proxy which will forward the request to the Nvidia Inference Server.\n", + "\n", + "This example will:\n", + "\n", + " * Show the packaging of the components using S2I and a step by step local testing of these via Docker\n", + " * Show running the example in Seldon Core on GCP with an embedded Nvidia Inference Server\n", + " " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Setup" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/clive/anaconda3/lib/python3.6/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. 
In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", + " from ._conv import register_converters as _register_converters\n" + ] + } + ], + "source": [ + "%matplotlib inline\n", + "import requests\n", + "from random import randint,random\n", + "import json\n", + "from matplotlib import pyplot as plt\n", + "import numpy as np\n", + "from tensorflow.examples.tutorials.mnist import input_data\n", + "import sys\n", + "sys.path.append(\"../../../notebooks\")\n", + "from visualizer import get_graph" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "def gen_image(arr):\n", + " two_d = (np.reshape(arr, (28, 28)) * 255).astype(np.uint8)\n", + " plt.imshow(two_d,cmap=plt.cm.gray_r, interpolation='nearest')\n", + " return plt\n", + "\n", + "def download_mnist():\n", + " return input_data.read_data_sets(\"MNIST_data/\", one_hot = True)\n", + "\n", + "def rest_predict_request(endpoint,data):\n", + " request = {\"data\":{\"ndarray\":data.tolist()}}\n", + " response = requests.post(\n", + " \"http://\"+endpoint+\"/predict\",\n", + " data={\"json\":json.dumps(request),\"isDefault\":True})\n", + " return response.json() \n", + "\n", + "def rest_transform_input_request(endpoint,data):\n", + " request = {\"data\":{\"ndarray\":data.tolist()}}\n", + " response = requests.post(\n", + " \"http://\"+endpoint+\"/transform-input\",\n", + " data={\"json\":json.dumps(request),\"isDefault\":True})\n", + " return response.json() \n", + "\n", + "def rest_transform_output_request(endpoint,data):\n", + " request = {\"data\":{\"ndarray\":data.tolist()}}\n", + " response = requests.post(\n", + " \"http://\"+endpoint+\"/transform-output\",\n", + " data={\"json\":json.dumps(request),\"isDefault\":True})\n", + " return response.json() \n", + "\n", + "def rest_request_ambassador(deploymentName,endpoint=\"localhost:8003\",arr=None):\n", + " payload = {\"data\":{\"names\":[\"a\",\"b\"],\"tensor\":{\"shape\":[1,784],\"values\":arr.tolist()}}}\n", + " response = requests.post(\n", + " \"http://\"+endpoint+\"/seldon/\"+deploymentName+\"/api/v0.1/predictions\",\n", + " json=payload)\n", + " print(response.status_code)\n", + " print(response.text)\n", + "\n", + "\n", + "def gen_mnist_data(mnist):\n", + " batch_xs, batch_ys = mnist.train.next_batch(1)\n", + " chosen=0\n", + " gen_image(batch_xs[chosen]).show()\n", + " data = batch_xs[chosen].reshape((1,784))\n", + " return data\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:From :7: read_data_sets (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:260: maybe_download (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please write your own downloading logic.\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/base.py:252: _internal_retry..wrap..wrapped_fn (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n", + 
"Instructions for updating:\n", + "Please use urllib or similar directly.\n", + "Successfully downloaded train-images-idx3-ubyte.gz 9912422 bytes.\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:262: extract_images (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please use tf.data to implement this functionality.\n", + "Extracting MNIST_data/train-images-idx3-ubyte.gz\n", + "Successfully downloaded train-labels-idx1-ubyte.gz 28881 bytes.\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:267: extract_labels (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please use tf.data to implement this functionality.\n", + "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:110: dense_to_one_hot (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please use tf.one_hot on tensors.\n", + "Successfully downloaded t10k-images-idx3-ubyte.gz 1648877 bytes.\n", + "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n", + "Successfully downloaded t10k-labels-idx1-ubyte.gz 4542 bytes.\n", + "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:290: DataSet.__init__ (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n" + ] + } + ], + "source": [ + "mnist = download_mnist()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Create an Nvidia Model Repository" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Fetch pretrained MNIST model ready for serving and place in model repository" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2018-09-20 14:18:52-- http://seldon-public.s3.amazonaws.com/nvidia-mnist-model/model.plan\n", + "Resolving seldon-public.s3.amazonaws.com (seldon-public.s3.amazonaws.com)... 52.218.48.249\n", + "Connecting to seldon-public.s3.amazonaws.com (seldon-public.s3.amazonaws.com)|52.218.48.249|:80... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 1731864 (1.7M) [binary/octet-stream]\n", + "Saving to: ‘tensorrt_mnist/1/model.plan’\n", + "\n", + "tensorrt_mnist/1/mo 100%[===================>] 1.65M 148KB/s in 12s \n", + "\n", + "2018-09-20 14:19:05 (136 KB/s) - ‘tensorrt_mnist/1/model.plan’ saved [1731864/1731864]\n", + "\n" + ] + } + ], + "source": [ + "!wget -O tensorrt_mnist/1/model.plan http://seldon-public.s3.amazonaws.com/nvidia-mnist-model/model.plan" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To run your Mvidia Inference Server you will need to upload to a google bucket the model repository in mnsit_tensorrt_model. 
Follow the steps below:" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "env: MODEL_REPOSITORY_BUCKET=gs://seldon-inference-server-model-store\n" + ] + } + ], + "source": [ + "# CHANGE THIS TO YOUR OWN CHOSEN GOOGLE BUCKET NAME\n", + "%env MODEL_REPOSITORY_BUCKET=gs://seldon-inference-server-model-store" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Creating gs://seldon-inference-server-model-store/...\n", + "ServiceException: 409 Bucket seldon-inference-server-model-store already exists.\n" + ] + } + ], + "source": [ + "!gsutil mb ${MODEL_REPOSITORY_BUCKET}" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Copying file://tensorrt_mnist/config.pbtxt [Content-Type=application/octet-stream]...\n", + "Copying file://tensorrt_mnist/config.pbtxt~ [Content-Type=application/octet-stream]...\n", + "Copying file://tensorrt_mnist/mnist_labels.txt [Content-Type=text/plain]... \n", + "Copying file://tensorrt_mnist/1/model.plan [Content-Type=application/octet-stream]...\n", + "\\ [4 files][ 1.7 MiB/ 1.7 MiB] \n", + "Operation completed over 4 objects/1.7 MiB. \n" + ] + } + ], + "source": [ + "!gsutil cp -r tensorrt_mnist ${MODEL_REPOSITORY_BUCKET}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Test Locally From Docker with Standalone NVIDIA Inference Server" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create your Nvidia Inference Server\n", + "\n", + "For example:\n", + "\n", + " * Follow the steps in the [Kubeflow guide](https://github.com/kubeflow/kubeflow/tree/master/kubeflow/nvidia-inference-server) to create your Nvidia Inference Server \n", + " * You will need to use the Google Bucket location" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Package and run a Transformer and Nvidia Proxy" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We will use a Seldon Transformer to remove the training-set mean values from the input features and reshape the output as the prediction comes back." 
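As a quick illustration of the preprocessing just described, the sketch below mirrors the steps in MnistTransformer.preProcessMNIST (scale the 0-1 pixel values up to 0-255, invert them, subtract the per-pixel training means and move to the channel-first layout the Caffe2 model expects). It is a minimal sketch only: the MEANS vector here is a stand-in for the full 784-value array defined in MnistTransformer.py.

```python
import numpy as np

# Stand-in for the 784 per-pixel training means defined in MnistTransformer.py
MEANS = np.full(784, 255.0)

def preprocess(X):
    """Mirror MnistTransformer.preProcessMNIST: scale, invert, de-mean, reshape."""
    X = X * 255                                      # 0-1 range -> 0-255
    X = 255 - X                                      # invert the pixel values
    X = (X.reshape(784) - MEANS).reshape(28, 28, 1)  # subtract training means, back to HWC
    return np.transpose(X, (2, 0, 1))                # HWC -> CHW, i.e. (1, 28, 28)

print(preprocess(np.random.rand(1, 784)).shape)      # (1, 28, 28)
```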
+ ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[34mimport\u001b[39;49;00m \u001b[04m\u001b[36mnumpy\u001b[39;49;00m \u001b[34mas\u001b[39;49;00m \u001b[04m\u001b[36mnp\u001b[39;49;00m\r\n", + "\r\n", + "MEANS=np.array([\u001b[34m255.0\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m248\u001b[39;49;00m,\u001b[34m245\u001b[39;49;00m,\u001b[34m243\u001b[39;49;00m,\u001b[34m242\u001b[39;49;00m,\u001b[34m242\u001b[39;49;00m,\u001b[34m243\u001b[39;49;00m,\u001b[34m246\u001b[39;49;00m,\u001b[34m248\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001
b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m250\u001b[39;49;00m,\u001b[34m247\u001b[39;49;00m,\u001b[34m242\u001b[39;49;00m,\u001b[34m235\u001b[39;49;00m,\u001b[34m228\u001b[39;49;00m,\u001b[34m220\u001b[39;49;00m,\u001b[34m213\u001b[39;49;00m,\u001b[34m210\u001b[39;49;00m,\u001b[34m211\u001b[39;49;00m,\u001b[34m216\u001b[39;49;00m,\u001b[34m224\u001b[39;49;00m,\u001b[34m232\u001b[39;49;00m,\u001b[34m240\u001b[39;49;00m,\u001b[34m246\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m248\u001b[39;49;00m,\u001b[34m242\u001b[39;49;00m,\u001b[34m234\u001b[39;49;00m,\u001b[34m223\u001b[39;49;00m,\u001b[34m211\u001b[39;49;00m,\u001b[34m196\u001b[39;49;00m,\u001b[34m181\u001b[39;49;00m,\u001b[34m170\u001b[39;49;00m,\u001b[34m164\u001b[39;49;00m,\u001b[34m166\u001b[39;49;00m,\u001b[34m175\u001b[39;49;00m,\u001b[34m189\u001b[39;49;00m,\u001b[34m205\u001b[39;49;00m,\u001b[34m221\u001b[39;49;00m,\u001b[34m233\u001b[39;49;00m,\u001b[34m243\u001b[39;49;00m,\u001b[34m248\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m248\u001b[39;49;00m,\u001b[34m241\u001b[39;49;00m,\u001b[34m231\u001b[39;49;00m,\u001b[34m217\u001b[39;49;00m,\u001b[34m202\u001b[39;49;00m,\u001b[34m184\u001b[39;49;00m,\u001b[34m166\u001b[39;49;00m,\u001b[34m149\u001b[39;49;00m,\u001b[34m136\u001b[39;49;00m,\u001b[34m131\u001b[39;49;00m,\u001b[34m134\u001b[39;49;00m,\u001b[34m143\u001b[39;49;00m,\u001b[34m159\u001b[39;49;00m,\u001b[34m180\u001b[39;49;00m,\u001b[34m201\u001b[39;49;00m,\u001b[34m220\u001b[39;49;00m,\u001b[34m234\u001b[39;49;00m,\u001b[34m243\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m243\u001b[39;49;00m,\u001b[34m233\u001b[39;49;00m,\u001b[34m219\u001b[39;49;00m,\u001b[34m201\u001b[39;49;00m,\u001b[34m181\u001b[39;49;00m,\u001b[34m161\u001b[39;49;00m,\u001b[34m143\u001b[39;49;00m,\u001b[34m130\u001b[39;49;00m,\u001b[34m122\u001b[39;49;00m,\u001b[34m120\u001b[39;49;00m,\u001b[34m122\u001b[39;49;00m,\u001b[34m129\u001b[39;49;00m,\u001b[34m141\u001b[39;49;00m,\u001b[34m161\u001b[39;49;00m,\u001b[34m185\u001b[39;49;00m,\u001b[34m208\u001b[39;49;00m,\u001b[34m227\u001b[39;49;00m,\u001b[34m240\u001b[39;49;00m,\u001b[34m248\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[
39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m246\u001b[39;49;00m,\u001b[34m238\u001b[39;49;00m,\u001b[34m226\u001b[39;49;00m,\u001b[34m208\u001b[39;49;00m,\u001b[34m187\u001b[39;49;00m,\u001b[34m164\u001b[39;49;00m,\u001b[34m146\u001b[39;49;00m,\u001b[34m135\u001b[39;49;00m,\u001b[34m131\u001b[39;49;00m,\u001b[34m132\u001b[39;49;00m,\u001b[34m133\u001b[39;49;00m,\u001b[34m132\u001b[39;49;00m,\u001b[34m133\u001b[39;49;00m,\u001b[34m139\u001b[39;49;00m,\u001b[34m154\u001b[39;49;00m,\u001b[34m178\u001b[39;49;00m,\u001b[34m202\u001b[39;49;00m,\u001b[34m223\u001b[39;49;00m,\u001b[34m239\u001b[39;49;00m,\u001b[34m248\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m245\u001b[39;49;00m,\u001b[34m236\u001b[39;49;00m,\u001b[34m221\u001b[39;49;00m,\u001b[34m200\u001b[39;49;00m,\u001b[34m177\u001b[39;49;00m,\u001b[34m156\u001b[39;49;00m,\u001b[34m144\u001b[39;49;00m,\u001b[34m144\u001b[39;49;00m,\u001b[34m150\u001b[39;49;00m,\u001b[34m156\u001b[39;49;00m,\u001b[34m156\u001b[39;49;00m,\u001b[34m151\u001b[39;49;00m,\u001b[34m144\u001b[39;49;00m,\u001b[34m144\u001b[39;49;00m,\u001b[34m156\u001b[39;49;00m,\u001b[34m178\u001b[39;49;00m,\u001b[34m202\u001b[39;49;00m,\u001b[34m224\u001b[39;49;00m,\u001b[34m240\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m245\u001b[39;49;00m,\u001b[34m235\u001b[39;49;00m,\u001b[34m218\u001b[39;49;00m,\u001b[34m195\u001b[39;49;00m,\u001b[34m172\u001b[39;49;00m,\u001b[34m155\u001b[39;49;00m,\u001b[34m152\u001b[39;49;00m,\u001b[34m161\u001b[39;49;00m,\u001b[34m172\u001b[39;49;00m,\u001b[34m176\u001b[39;49;00m,\u001b[34m170\u001b[39;49;00m,\u001b[34m161\u001b[39;49;00m,\u001b[34m150\u001b[39;49;00m,\u001b[34m149\u001b[39;49;00m,\u001b[34m161\u001b[39;49;00m,\u001b[34m183\u001b[39;49;00m,\u001b[34m207\u001b[39;49;00m,\u001b[34m227\u001b[39;49;00m,\u001b[34m242\u001b[39;49;00m,\u001b[34m250\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m246\u001b[39;49;00m,\u001b[34m234\u001b[39;49;00m,\u001b[34m215\u001b[39;49;00m,\u001b[34m191\u001b[39;49;00m,\u001b[34m168\u001b[39;49;00m,\u001b[34m156\u001b[39;49;00m,\u001b[34m160\u001b[39;49;00m,\u001b[34m173\u001b[39;49;00m,\u001b[34m182\u001b[39;49;00m,\u001b[34m179\u001b[39;49;00m,\u001b[34m169\u001b[39;49;00m,\u001b[34m157\u001b[39;49;00m,\u001b[34m147\u001b[39;49;00m,\u001b[34m149\u001b[39;49;00m,\u001b[34m166\u001b[39;49;00m,\u001b[34m190\u001b[39;49;00m,\u001b[34m213\u001b[39;49;00m,\u001b[34m230\u001b[39;49;00m,\u001b[34m243\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m246\u001b[39;49;00m,\u001b[34m233\u001b[39;49;00m,\u001b[34m212\u001b[39;49;00m,\u001
b[34m186\u001b[39;49;00m,\u001b[34m165\u001b[39;49;00m,\u001b[34m157\u001b[39;49;00m,\u001b[34m164\u001b[39;49;00m,\u001b[34m175\u001b[39;49;00m,\u001b[34m176\u001b[39;49;00m,\u001b[34m165\u001b[39;49;00m,\u001b[34m153\u001b[39;49;00m,\u001b[34m142\u001b[39;49;00m,\u001b[34m137\u001b[39;49;00m,\u001b[34m147\u001b[39;49;00m,\u001b[34m170\u001b[39;49;00m,\u001b[34m196\u001b[39;49;00m,\u001b[34m217\u001b[39;49;00m,\u001b[34m231\u001b[39;49;00m,\u001b[34m242\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m245\u001b[39;49;00m,\u001b[34m230\u001b[39;49;00m,\u001b[34m207\u001b[39;49;00m,\u001b[34m182\u001b[39;49;00m,\u001b[34m163\u001b[39;49;00m,\u001b[34m158\u001b[39;49;00m,\u001b[34m164\u001b[39;49;00m,\u001b[34m168\u001b[39;49;00m,\u001b[34m158\u001b[39;49;00m,\u001b[34m143\u001b[39;49;00m,\u001b[34m131\u001b[39;49;00m,\u001b[34m125\u001b[39;49;00m,\u001b[34m128\u001b[39;49;00m,\u001b[34m146\u001b[39;49;00m,\u001b[34m174\u001b[39;49;00m,\u001b[34m200\u001b[39;49;00m,\u001b[34m218\u001b[39;49;00m,\u001b[34m231\u001b[39;49;00m,\u001b[34m241\u001b[39;49;00m,\u001b[34m250\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m243\u001b[39;49;00m,\u001b[34m227\u001b[39;49;00m,\u001b[34m205\u001b[39;49;00m,\u001b[34m181\u001b[39;49;00m,\u001b[34m164\u001b[39;49;00m,\u001b[34m159\u001b[39;49;00m,\u001b[34m161\u001b[39;49;00m,\u001b[34m157\u001b[39;49;00m,\u001b[34m139\u001b[39;49;00m,\u001b[34m124\u001b[39;49;00m,\u001b[34m115\u001b[39;49;00m,\u001b[34m118\u001b[39;49;00m,\u001b[34m127\u001b[39;49;00m,\u001b[34m148\u001b[39;49;00m,\u001b[34m176\u001b[39;49;00m,\u001b[34m199\u001b[39;49;00m,\u001b[34m216\u001b[39;49;00m,\u001b[34m230\u001b[39;49;00m,\u001b[34m240\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m241\u001b[39;49;00m,\u001b[34m224\u001b[39;49;00m,\u001b[34m204\u001b[39;49;00m,\u001b[34m184\u001b[39;49;00m,\u001b[34m169\u001b[39;49;00m,\u001b[34m163\u001b[39;49;00m,\u001b[34m160\u001b[39;49;00m,\u001b[34m150\u001b[39;49;00m,\u001b[34m132\u001b[39;49;00m,\u001b[34m119\u001b[39;49;00m,\u001b[34m116\u001b[39;49;00m,\u001b[34m123\u001b[39;49;00m,\u001b[34m133\u001b[39;49;00m,\u001b[34m153\u001b[39;49;00m,\u001b[34m177\u001b[39;49;00m,\u001b[34m197\u001b[39;49;00m,\u001b[34m214\u001b[39;49;00m,\u001b[34m228\u001b[39;49;00m,\u001b[34m240\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m239\u001b[39;49;00m,\u001b[34m222\u001b[39;49;00m,\u001b[34m205\u001b[39;49;00m,\u001b[34m189\u001b[39;49;00m,\u001b[34m177\u001b[39;49;00m,\u001b[34m171\u001b[39;49;00m,\u001b[34m166\u001b[39;49;00m,\u001b[34m154\u001b[39;49;00m,\u001b[34m139\u001b[39;49;00m,\u001b[34m129\u001b[
39;49;00m,\u001b[34m128\u001b[39;49;00m,\u001b[34m134\u001b[39;49;00m,\u001b[34m144\u001b[39;49;00m,\u001b[34m159\u001b[39;49;00m,\u001b[34m177\u001b[39;49;00m,\u001b[34m195\u001b[39;49;00m,\u001b[34m213\u001b[39;49;00m,\u001b[34m228\u001b[39;49;00m,\u001b[34m241\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m237\u001b[39;49;00m,\u001b[34m222\u001b[39;49;00m,\u001b[34m207\u001b[39;49;00m,\u001b[34m195\u001b[39;49;00m,\u001b[34m186\u001b[39;49;00m,\u001b[34m180\u001b[39;49;00m,\u001b[34m175\u001b[39;49;00m,\u001b[34m166\u001b[39;49;00m,\u001b[34m153\u001b[39;49;00m,\u001b[34m143\u001b[39;49;00m,\u001b[34m140\u001b[39;49;00m,\u001b[34m142\u001b[39;49;00m,\u001b[34m150\u001b[39;49;00m,\u001b[34m162\u001b[39;49;00m,\u001b[34m178\u001b[39;49;00m,\u001b[34m195\u001b[39;49;00m,\u001b[34m214\u001b[39;49;00m,\u001b[34m230\u001b[39;49;00m,\u001b[34m242\u001b[39;49;00m,\u001b[34m250\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m247\u001b[39;49;00m,\u001b[34m235\u001b[39;49;00m,\u001b[34m220\u001b[39;49;00m,\u001b[34m207\u001b[39;49;00m,\u001b[34m197\u001b[39;49;00m,\u001b[34m189\u001b[39;49;00m,\u001b[34m183\u001b[39;49;00m,\u001b[34m179\u001b[39;49;00m,\u001b[34m172\u001b[39;49;00m,\u001b[34m160\u001b[39;49;00m,\u001b[34m148\u001b[39;49;00m,\u001b[34m142\u001b[39;49;00m,\u001b[34m143\u001b[39;49;00m,\u001b[34m150\u001b[39;49;00m,\u001b[34m161\u001b[39;49;00m,\u001b[34m178\u001b[39;49;00m,\u001b[34m198\u001b[39;49;00m,\u001b[34m217\u001b[39;49;00m,\u001b[34m233\u001b[39;49;00m,\u001b[34m244\u001b[39;49;00m,\u001b[34m250\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m246\u001b[39;49;00m,\u001b[34m233\u001b[39;49;00m,\u001b[34m218\u001b[39;49;00m,\u001b[34m204\u001b[39;49;00m,\u001b[34m192\u001b[39;49;00m,\u001b[34m184\u001b[39;49;00m,\u001b[34m177\u001b[39;49;00m,\u001b[34m172\u001b[39;49;00m,\u001b[34m165\u001b[39;49;00m,\u001b[34m153\u001b[39;49;00m,\u001b[34m142\u001b[39;49;00m,\u001b[34m137\u001b[39;49;00m,\u001b[34m139\u001b[39;49;00m,\u001b[34m148\u001b[39;49;00m,\u001b[34m163\u001b[39;49;00m,\u001b[34m183\u001b[39;49;00m,\u001b[34m204\u001b[39;49;00m,\u001b[34m222\u001b[39;49;00m,\u001b[34m236\u001b[39;49;00m,\u001b[34m246\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m247\u001b[39;49;00m,\u001b[34m234\u001b[39;49;00m,\u001b[34m218\u001b[39;49;00m,\u001b[34m201\u001b[39;49;00m,\u001b[34m186\u001b[39;49;00m,\u001b[34m174\u001b[39;49;00m,\u001b[34m165\u001b[39;49;00m,\u001b[34m157\u001b[39;49;00m,\u001b[34m148\u001b[39;49;00m,\u001b[34m137\u001b[39;49;00m,\u001b[34m130\u001b[39;49;00m,\u001b[34m129\u001b[39;49;00m,\u001b[34m137\u001b[39;49;00m,\u001b[34m151\u001b[39;49;00m,\u001b[34m171\u001b[39;49;00m,\u001b[34m194\u001b[39;49;00m,\u001b[34m214\u001b[39;49;00m,\u001
b[34m230\u001b[39;49;00m,\u001b[34m242\u001b[39;49;00m,\u001b[34m248\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m238\u001b[39;49;00m,\u001b[34m222\u001b[39;49;00m,\u001b[34m203\u001b[39;49;00m,\u001b[34m184\u001b[39;49;00m,\u001b[34m168\u001b[39;49;00m,\u001b[34m154\u001b[39;49;00m,\u001b[34m143\u001b[39;49;00m,\u001b[34m132\u001b[39;49;00m,\u001b[34m124\u001b[39;49;00m,\u001b[34m123\u001b[39;49;00m,\u001b[34m130\u001b[39;49;00m,\u001b[34m145\u001b[39;49;00m,\u001b[34m165\u001b[39;49;00m,\u001b[34m188\u001b[39;49;00m,\u001b[34m209\u001b[39;49;00m,\u001b[34m227\u001b[39;49;00m,\u001b[34m239\u001b[39;49;00m,\u001b[34m247\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m244\u001b[39;49;00m,\u001b[34m232\u001b[39;49;00m,\u001b[34m214\u001b[39;49;00m,\u001b[34m194\u001b[39;49;00m,\u001b[34m174\u001b[39;49;00m,\u001b[34m156\u001b[39;49;00m,\u001b[34m142\u001b[39;49;00m,\u001b[34m132\u001b[39;49;00m,\u001b[34m130\u001b[39;49;00m,\u001b[34m134\u001b[39;49;00m,\u001b[34m148\u001b[39;49;00m,\u001b[34m167\u001b[39;49;00m,\u001b[34m189\u001b[39;49;00m,\u001b[34m210\u001b[39;49;00m,\u001b[34m226\u001b[39;49;00m,\u001b[34m238\u001b[39;49;00m,\u001b[34m246\u001b[39;49;00m,\u001b[34m250\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m250\u001b[39;49;00m,\u001b[34m243\u001b[39;49;00m,\u001b[34m231\u001b[39;49;00m,\u001b[34m215\u001b[39;49;00m,\u001b[34m196\u001b[39;49;00m,\u001b[34m178\u001b[39;49;00m,\u001b[34m163\u001b[39;49;00m,\u001b[34m155\u001b[39;49;00m,\u001b[34m156\u001b[39;49;00m,\u001b[34m164\u001b[39;49;00m,\u001b[34m179\u001b[39;49;00m,\u001b[34m197\u001b[39;49;00m,\u001b[34m215\u001b[39;49;00m,\u001b[34m230\u001b[39;49;00m,\u001b[34m240\u001b[39;49;00m,\u001b[34m247\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m246\u001b[39;49;00m,\u001b[34m238\u001b[39;49;00m,\u001b[34m228\u001b[39;49;00m,\u001b[34m217\u001b[39;49;00m,\u001b[34m208\u001b[39;49;00m,\u001b[34m203\u001b[39;49;00m,\u001b[34m204\u001b[39;49;00m,\u001b[34m210\u001b[39;49;00m,\u001b[34m218\u001b[39;49;00m,\u001b[34m228\u001b[39;49;00m,\u001b[34m236\u001b[39;49;00m,\u001b[34m243\u001b[39;49;00m,\u001b[34m248\u001b[39;49;00m,\u001b[34m251\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[
39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m245\u001b[39;49;00m,\u001b[34m241\u001b[39;49;00m,\u001b[34m238\u001b[39;49;00m,\u001b[34m237\u001b[39;49;00m,\u001b[34m237\u001b[39;49;00m,\u001b[34m239\u001b[39;49;00m,\u001b[34m242\u001b[39;49;00m,\u001b[34m245\u001b[39;49;00m,\u001b[34m247\u001b[39;49;00m,\u001b[34m250\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m250\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m248\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m249\u001b[39;49;00m,\u001b[34m250\u001b[39;49;00m,\u001b[34m252\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m253\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m254\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m,\u001b[34m255\u001b[39;49;00m])\r\n", + "\r\n", + "\r\n", + "\u001b[34mclass\u001b[39;49;00m \u001b[04m\u001b[32mMnistTransformer\u001b[39;49;00m(\u001b[36mobject\u001b[39;49;00m):\r\n", + "\r\n", + " \u001b[34mdef\u001b[39;49;00m \u001b[32m__init__\u001b[39;49;00m(\u001b[36mself\u001b[39;49;00m):\r\n", + " \u001b[34mprint\u001b[39;49;00m(\u001b[33m\"\u001b[39;49;00m\u001b[33minit\u001b[39;49;00m\u001b[33m\"\u001b[39;49;00m);\r\n", + " \r\n", + " \u001b[34mdef\u001b[39;49;00m \u001b[32mpreProcessMNIST\u001b[39;49;00m(\u001b[36mself\u001b[39;49;00m,X):\r\n", + " \u001b[33m'''\u001b[39;49;00m\r\n", + "\u001b[33m Convert values assumed to be in 0-1 range to a value in 0-255.\u001b[39;49;00m\r\n", + "\u001b[33m The remove the training mean needed by the Caffe2 model.\u001b[39;49;00m\r\n", + "\u001b[33m Finally reshape the output to that expected by the model\u001b[39;49;00m\r\n", + "\u001b[33m '''\u001b[39;49;00m\r\n", + " X = X * \u001b[34m255\u001b[39;49;00m\r\n", + " X = \u001b[34m255\u001b[39;49;00m - X\r\n", + " X = 
(X.reshape(\u001b[34m784\u001b[39;49;00m) - MEANS).reshape(\u001b[34m28\u001b[39;49;00m,\u001b[34m28\u001b[39;49;00m,\u001b[34m1\u001b[39;49;00m)\r\n", + " X = np.transpose(X, (\u001b[34m2\u001b[39;49;00m, \u001b[34m0\u001b[39;49;00m, \u001b[34m1\u001b[39;49;00m))\r\n", + " \u001b[34mreturn\u001b[39;49;00m X\r\n", + "\r\n", + " \u001b[34mdef\u001b[39;49;00m \u001b[32mtransform_input\u001b[39;49;00m(\u001b[36mself\u001b[39;49;00m,X,names):\r\n", + " \u001b[34mreturn\u001b[39;49;00m \u001b[36mself\u001b[39;49;00m.preProcessMNIST(X)\r\n", + "\r\n", + " \u001b[34mdef\u001b[39;49;00m \u001b[32mtransform_output\u001b[39;49;00m(\u001b[36mself\u001b[39;49;00m,X,names):\r\n", + " \u001b[34mreturn\u001b[39;49;00m X.reshape(\u001b[34m1\u001b[39;49;00m,\u001b[34m10\u001b[39;49;00m)\r\n" + ] + } + ], + "source": [ + "!pygmentize MnistTransformer.py" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "---> Installing application source...\n", + "Build completed successfully\n" + ] + } + ], + "source": [ + "!s2i build . seldonio/seldon-core-s2i-python3:0.2 mnist-caffe2-transformer:0.1" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "f2ce59df93344847d070d4218d6bccb5a7d1dab48120967a2e213435a69dce9c\r\n" + ] + } + ], + "source": [ + "!docker run --name \"mnist-transformer\" -d --rm -p 5000:5000 mnist-caffe2-transformer:0.1" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Run the Seldon Nvidia Inference Server Proxy Model.\n", + "\n", + "** CHANGE THE IP ADDRESS BELOW TO THAT OF YOUR RUNNING NVIDIA SERVER **" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "3bcca7450fadcbb8d54f90af0b40c32f26b875938b742451a4365ced0c5d3ccf\r\n" + ] + } + ], + "source": [ + "!docker run --name \"tensorrt-proxy\" -d --rm -p 5001:5001 \\\n", + " -e PREDICTIVE_UNIT_SERVICE_PORT=5001 \\\n", + " -e PREDICTIVE_UNIT_PARAMETERS='[{\"name\":\"url\",\"type\":\"STRING\",\"value\":\"35.204.115.6:8000\"},{\"name\":\"model_name\",\"type\":\"STRING\",\"value\":\"tensorrt_mnist\"},{\"name\":\"protocol\",\"type\":\"STRING\",\"value\":\"HTTP\"}]' \\\n", + " seldonio/nvidia-inference-server-proxy:0.1" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [], + "source": [ + "TRANSFORMER_URL=\"localhost:5000\"\n", + "PREDICTOR_URL=\"localhost:5001\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the next few steps we illustrate each step by step process and test that out on our running Docker containers." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAC7dJREFUeJzt3V2IXIUZxvHnqdUb9ULJaBeNXRukKEKTMATBUiwS0SLEDxQDKRGC8UJRwYtKEBKQgpSq9aIKsVncgB8V1JoLaRUNWKGE3Yj40dgqstFtQjLBinolJm8v9kTWuDM7O3M+Jn3/P5CZOefszMvgf8/MnMkeR4QA5PODpgcA0AziB5IifiAp4geSIn4gKeIHkiJ+ICniB5IifiCpH9b5YMuWLYvx8fE6HxJIZWZmRkeOHHE/2w4Vv+2rJT0q6RRJf4qIB3ttPz4+runp6WEeEkAP7Xa7720Hftlv+xRJf5R0jaRLJK23fcmg9wegXsO8518j6aOI+Dgivpb0rKR15YwFoGrDxH+epE/n3Z4tln2H7c22p21PdzqdIR4OQJmGiX+hDxW+9++DI2J7RLQjot1qtYZ4OABlGib+WUnL590+X9KB4cYBUJdh4p+SdJHtC22fJukWSbvKGQtA1QY+1BcR39i+U9LfNHeobyIi3i9tMgCVGuo4f0S8LOnlkmYBUCO+3gskRfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyQ11Fl6bc9I+lLSUUnfRES7jKFQn1tvvbXn+snJyZ7rH3jggZ7r77///qWOhJoMFX/hlxFxpIT7AVAjXvYDSQ0bf0h6xfZe25vLGAhAPYZ92X95RBywfY6kV21/EBFvzN+g+KWwWZIuuOCCIR8OQFmG2vNHxIHi8rCkFyWtWWCb7RHRjoh2q9Ua5uEAlGjg+G2fbvvM49clXSXpvbIGA1CtYV72nyvpRdvH7+fpiPhrKVMBqNzA8UfEx5J+VuIsGEHFL/euduzY0XP9pk2buq4bGxsbaCaUg0N9QFLEDyRF/EBSxA8kRfxAUsQPJFXGv+pDYvv37++5/vPPP++6jkN9zWLPDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJLXo3+23PSHpWkmHI+LSYtnZkv4saVzSjKSbI+K/1Y2Jqqxatarn+p07dw51/3v37u267uKLLx7qvjGcfvb8T0q6+oRl90l6LSIukvRacRvASWTR+CPiDUmfnbB4naTJ4vqkpOtKngtAxQZ9z39uRByUpOLynPJGAlCHyj/ws73Z9rTt6U6nU/XDAejToPEfsj0mScXl4W4bRsT2iGhHRLvVag34cADKNmj8uyRtLK5vlPRSOeMAqMui8dt+RtI/JP3U9qztTZIelLTW9oeS1ha3AZxEFj3OHxHru6y6suRZ0IC1a9dWev+vv/5613UbNmyo9LHRG9/wA5IifiAp4geSIn4gKeIHkiJ+ICniB5IifiAp4geSIn4gKeIHkiJ+ICniB5IifiAp4geSIn4gKeIHkiJ+ICniB5IifiAp4geSIn4gqUX/dDf+v83OzlZ6/zfccEOl94/BsecHkiJ+ICniB5IifiAp4geSIn4gKeIHklr0OL/tCUnXSjocEZcWy7ZJuk1Sp9hsS0S8XNWQqM6ePXsqvf8VK1ZUev8YXD97/iclXb3A8kciYmXxH+EDJ5lF44+INyR9VsMsAGo0zHv+O22/Y3vC9lmlTQSgFoPG/7ikFZJWSjoo6aFuG9rebHva9nSn0+m2GYCaDRR/RByKiKMRcUzSE5LW9Nh2e0S0I6LdarUGnRNAyQaK3/bYvJvXS3qvnHEA1KWfQ33PSLpC0jLbs5K2SrrC9kpJIWlG0u0VzgigAovGHxHrF1i8o4JZ0IAbb7yx5/qtW7fWNAnqxjf8gKSIH0iK+IGkiB9IiviBpIgfSIo/3Z3c1NRU0yOgIez5gaSIH0iK+IGkiB9IiviBpIgfSIr4gaQ4zp/c7t27mx4BDWHPDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSxA8ktWj8tpfb3m17n+33bd9dLD/b9qu2Pywuz6p+XABl6WfP/42keyPiYkmXSbrD9iWS7pP0WkRcJOm14jaAk8Si8UfEwYh4q7j+paR9ks6TtE7SZLHZpKTrqhoSQPmW9J7f9rikVZL2SDo3Ig5Kc78gJJ1T9nAAqtN3/LbPkPS8pHsi4osl/Nxm29O2pzudziAzAqhAX/HbPlVz4T8VES8Uiw/ZHivWj0k6vNDPRsT2iGhHRLvVapUxM4AS9PNpvyXtkLQvIh6et2qXpI3F9Y2SXip/PABV6edPd18u6deS3rX9drFsi6QHJT1ne5OkTyTdVM2IGGWXXXZZz/XLly+vaRIs1aLxR8Sbktxl9ZXljgOgLnzDD0iK+IGkiB9IiviBpIgfSIr4gaQ4RTeGcvTo0Z7rP/jgg67r2u122eNgCdjzA0kRP5AU8QNJET+QFPEDSRE/kBTxA0lxnB9DmZqa6rn+scce67puYmKi7HGwBOz5gaSIH0iK+IGkiB9IiviBpIgfSIr4gaQ4zp/cXXfd1XP9zp07e67fsGFDz/Xbtm1b6kioCXt+ICniB5IifiAp4geSIn4gKeIHkiJ+IKlFj/PbXi5pp6QfSTomaXtEPGp7m6TbJHWKTbdExMtVDYpqrF69uuf6Y8eO1TQJ6tbPl3y+kXRvRLxl+0xJe22/Wqx7JCJ+X914AKqyaPwRcVDSweL6l7b3STqv6sEAVGtJ7/ltj0taJWlPsehO2+/YnrB9Vpef2Wx72vZ0p9NZaBMADeg7fttnSHpe0j0R8YWkxyWtkLRSc68MHlro5yJie0S0I6LdarVKGBlAGfqK3/apmgv/qYh4QZIi4lBEHI2IY5KekLSmujEBlG3R+G1b0g5J+yLi4XnLx+Ztdr2k98ofD0BV+vm0/3JJv5b0ru23i2VbJK23vVJSSJqRdHslEwKoRD+f9r8pyQus4pg+cBLjG35AUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJOWIqO/B7I6k/fMWLZN0pLYBlmZUZxvVuSRmG1SZs/04Ivr6e3m1xv+9B7enI6Ld2AA9jOpsozqXxGyDamo2XvYDSRE/kFTT8W9v+PF7GdXZRnUuidkG1chsjb7nB9Ccpvf8ABrSSPy2r7b9L9sf2b6vi
Rm6sT1j+13bb9uebniWCduHbb83b9nZtl+1/WFxueBp0hqabZvt/xTP3du2f9XQbMtt77a9z/b7tu8uljf63PWYq5HnrfaX/bZPkfRvSWslzUqakrQ+Iv5Z6yBd2J6R1I6Ixo8J2/6FpK8k7YyIS4tlv5P0WUQ8WPziPCsifjMis22T9FXTZ24uTigzNv/M0pKuk3SrGnzuesx1sxp43prY86+R9FFEfBwRX0t6VtK6BuYYeRHxhqTPTli8TtJkcX1Sc//z1K7LbCMhIg5GxFvF9S8lHT+zdKPPXY+5GtFE/OdJ+nTe7VmN1im/Q9Irtvfa3tz0MAs4tzht+vHTp5/T8DwnWvTMzXU64czSI/PcDXLG67I1Ef9CZ/8ZpUMOl0fEaknXSLqjeHmL/vR15ua6LHBm6ZEw6Bmvy9ZE/LOSls+7fb6kAw3MsaCIOFBcHpb0okbv7MOHjp8ktbg83PA83xqlMzcvdGZpjcBzN0pnvG4i/ilJF9m+0PZpkm6RtKuBOb7H9unFBzGyfbqkqzR6Zx/eJWljcX2jpJcanOU7RuXMzd3OLK2Gn7tRO+N1I1/yKQ5l/EHSKZImIuK3tQ+xANs/0dzeXpo7ienTTc5m+xlJV2juX30dkrRV0l8kPSfpAkmfSLopImr/4K3LbFdo7qXrt2duPv4eu+bZfi7p75LelXSsWLxFc++vG3vuesy1Xg08b3zDD0iKb/gBSRE/kBTxA0kRP5AU8QNJET+QFPEDSRE/kNT/AHPCTzzFq9UKAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(1, 28, 28)\n" + ] + } + ], + "source": [ + "data = gen_mnist_data(mnist)\n", + "response = rest_transform_input_request(TRANSFORMER_URL,data)\n", + "transformed = np.array(response['data']['ndarray'])\n", + "print(transformed.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[[[6.36274109e-08]]\n", + "\n", + " [[9.99982357e-01]]\n", + "\n", + " [[1.35594092e-07]]\n", + "\n", + " [[2.00526298e-08]]\n", + "\n", + " [[1.17960089e-05]]\n", + "\n", + " [[8.07224509e-08]]\n", + "\n", + " [[4.73712625e-08]]\n", + "\n", + " [[4.78241873e-06]]\n", + "\n", + " [[6.21992911e-07]]\n", + "\n", + " [[9.71163061e-08]]]]\n" + ] + } + ], + "source": [ + "response = rest_predict_request(PREDICTOR_URL,transformed)\n", + "predictions = np.array(response[\"data\"][\"ndarray\"])\n", + "print(predictions)" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'data': {'names': None, 'ndarray': [[6.362741089560586e-08, 0.9999823570251465, 1.3559409239860543e-07, 2.005262977888833e-08, 1.1796008948294912e-05, 8.072245094581376e-08, 4.737126246823209e-08, 4.782418727700133e-06, 6.219929105100164e-07, 9.711630610809152e-08]]}}\n" + ] + } + ], + "source": [ + "response = rest_transform_output_request(TRANSFORMER_URL,predictions)\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "mnist-transformer\n", + "tensorrt-proxy\n" + ] + } + ], + "source": [ + "!docker rm -f mnist-transformer\n", + "!docker rm -f tensorrt-proxy" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Test From GCP Cluster" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Set up GCP Kubernetes Cluster\n", + "\n", + "To run the steps below you will need to:\n", + "\n", + " 1. Create a GCP Cluster with a GPU node pool with Nvidia V100 GPUs\n", + " 2. Enable CUDA on the GPU nodes\n", + " 3. Add an Image Pull Secret so you can download the Nvidia Inference Server \n", + " \n", + " ### Create a GCP Cluster\n", + " This can be done from the Google console or via the command line as shown below. 
Change the cluster name and zones as appropriate for your setup.\n", + "\n", + "```\n", + " gcloud container clusters create myinferenceserver --num-nodes=2 --cluster-version=1.10.6-gke.2\n", + " gcloud config set container/cluster myinferenceserver\n", + " gcloud container node-pools create gpu-pool --num-nodes=1 --machine-type=n1-standard-8 --accelerator type=nvidia-tesla-v100,count=1\n", + " gcloud container clusters get-credentials myinferenceserver\n", + "```\n", + "\n", + "### Enable CUDA on GPU Nodes\n", + "\n", + "To enable the CUDA drivers on your GPU nodes run:\n", + "\n", + "```\n", + "kubectl apply -f https://raw.githubusercontent.com/GoogleCloudPlatform/container-engine-accelerators/stable/nvidia-driver-installer/cos/daemonset-preloaded.yaml\n", + "```\n", + "\n", + "### Create Image Pull Secret for the Nvidia Repository\n", + "\n", + " * [Sign up to the NVIDIA GPU Cloud and get an API Key](https://ngc.nvidia.com/)\n", + " * Create a kubernetes secret\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "namespace \"seldon\" created\r\n" + ] + } + ], + "source": [ + "!kubectl create namespace seldon" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%env NVIDIA_API_KEY=\n", + "%env NVIDIA_CLOUD_EMAIL=" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!kubectl config set-context $(kubectl config current-context) --namespace=seldon" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!kubectl create secret docker-registry ngc \\\n", + " --docker-server=nvcr.io \\\n", + " --docker-username=\\$oauthtoken \\\n", + " --docker-password=${NVIDIA_API_KEY} --docker-email=${NVIDIA_CLOUD_EMAIL}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Run MNIST Inference Graph" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "serviceaccount \"tiller\" created\n", + "clusterrolebinding \"tiller\" created\n", + "$HELM_HOME has been configured at /home/clive/.helm.\n", + "\n", + "Tiller (the Helm server-side component) has been installed into your Kubernetes Cluster.\n", + "\n", + "Please note: by default, Tiller is deployed with an insecure 'allow unauthenticated users' policy.\n", + "To prevent this, run `helm init` with the --tiller-tls-verify flag.\n", + "For more information on securing your installation see: https://docs.helm.sh/using_helm/#securing-your-helm-installation\n", + "Happy Helming!\n" + ] + } + ], + "source": [ + "!kubectl -n kube-system create sa tiller\n", + "!kubectl create clusterrolebinding tiller --clusterrole cluster-admin --serviceaccount=kube-system:tiller\n", + "!helm init --service-account tiller" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Waiting for rollout to finish: 0 of 1 updated replicas are available...\n", + "deployment \"tiller-deploy\" successfully rolled out\n" + ] + } + ], + "source": [ + "!kubectl rollout status deploy/tiller-deploy -n kube-system" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + 
"NAME: seldon-core-crd\n", + "LAST DEPLOYED: Wed Sep 26 16:52:13 2018\n", + "NAMESPACE: seldon\n", + "STATUS: DEPLOYED\n", + "\n", + "RESOURCES:\n", + "==> v1beta1/ClusterRoleBinding\n", + "NAME AGE\n", + "seldon-spartakus-volunteer 1s\n", + "\n", + "==> v1/ConfigMap\n", + "NAME DATA AGE\n", + "seldon-spartakus-config 3 1s\n", + "\n", + "==> v1beta1/CustomResourceDefinition\n", + "NAME AGE\n", + "seldondeployments.machinelearning.seldon.io 1s\n", + "\n", + "==> v1beta1/Deployment\n", + "NAME DESIRED CURRENT UP-TO-DATE AVAILABLE AGE\n", + "seldon-spartakus-volunteer 1 0 0 0 1s\n", + "\n", + "==> v1/ServiceAccount\n", + "NAME SECRETS AGE\n", + "seldon-spartakus-volunteer 1 1s\n", + "\n", + "==> v1beta1/ClusterRole\n", + "NAME AGE\n", + "seldon-spartakus-volunteer 1s\n", + "\n", + "\n", + "NOTES:\n", + "NOTES: TODO\n", + "\n", + "\n" + ] + } + ], + "source": [ + "!helm install ../../../helm-charts/seldon-core-crd --name seldon-core-crd \\\n", + " --set usage_metrics.enabled=true" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Context \"gke_kubeflow-testing-213813_europe-west4-a_cluster-4\" modified.\r\n" + ] + } + ], + "source": [ + "!kubectl config set-context $(kubectl config current-context) --namespace=seldon" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME: seldon-core\n", + "LAST DEPLOYED: Wed Sep 26 16:52:15 2018\n", + "NAMESPACE: seldon\n", + "STATUS: DEPLOYED\n", + "\n", + "RESOURCES:\n", + "==> v1/RoleBinding\n", + "NAME AGE\n", + "seldon 0s\n", + "\n", + "==> v1beta1/RoleBinding\n", + "ambassador 0s\n", + "\n", + "==> v1beta1/Deployment\n", + "NAME DESIRED CURRENT UP-TO-DATE AVAILABLE AGE\n", + "seldon-core-ambassador 1 1 1 0 0s\n", + "seldon-core-seldon-apiserver 1 1 1 0 0s\n", + "seldon-core-seldon-cluster-manager 1 1 1 0 0s\n", + "seldon-core-redis 1 1 1 0 0s\n", + "\n", + "==> v1/Pod(related)\n", + "NAME READY STATUS RESTARTS AGE\n", + "seldon-core-ambassador-778c58bf5d-swkjd 0/2 ContainerCreating 0 0s\n", + "seldon-core-seldon-apiserver-6b8dbc978b-cktjb 0/1 ContainerCreating 0 0s\n", + "seldon-core-seldon-cluster-manager-596d4674fd-dqbvq 0/1 ContainerCreating 0 0s\n", + "seldon-core-redis-8668565565-nkt7q 0/1 ContainerCreating 0 0s\n", + "\n", + "==> v1/ServiceAccount\n", + "NAME SECRETS AGE\n", + "seldon 1 0s\n", + "\n", + "==> v1/ClusterRoleBinding\n", + "NAME AGE\n", + "seldon 0s\n", + "\n", + "==> v1beta1/Role\n", + "NAME AGE\n", + "ambassador 0s\n", + "seldon-local 0s\n", + "\n", + "==> v1beta1/ClusterRole\n", + "seldon-crd 0s\n", + "\n", + "==> v1/Service\n", + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "seldon-core-ambassador-admin NodePort 10.43.245.152 8877:31931/TCP 0s\n", + "seldon-core-ambassador NodePort 10.43.254.206 8080:32597/TCP 0s\n", + "seldon-core-seldon-apiserver NodePort 10.43.243.99 8080:30835/TCP,5000:31146/TCP 0s\n", + "seldon-core-redis ClusterIP 10.43.240.11 6379/TCP 0s\n", + "\n", + "\n", + "NOTES:\n", + "Thank you for installing Seldon Core.\n", + "\n", + "Documentation can be found at https://github.com/SeldonIO/seldon-core\n", + "\n", + "\n", + "\n", + "\n" + ] + } + ], + "source": [ + "!helm install ../../../helm-charts/seldon-core --name seldon-core \\\n", + " --namespace seldon \\\n", + " --set ambassador.enabled=true" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": 
[ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Waiting for rollout to finish: 0 of 1 updated replicas are available...\n", + "deployment \"seldon-core-seldon-cluster-manager\" successfully rolled out\n", + "deployment \"seldon-core-seldon-apiserver\" successfully rolled out\n" + ] + } + ], + "source": [ + "!kubectl rollout status deploy/seldon-core-seldon-cluster-manager -n seldon\n", + "!kubectl rollout status deploy/seldon-core-seldon-apiserver -n seldon" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME: running-wolf\n", + "LAST DEPLOYED: Wed Sep 26 17:08:44 2018\n", + "NAMESPACE: seldon\n", + "STATUS: DEPLOYED\n", + "\n", + "RESOURCES:\n", + "==> v1alpha2/SeldonDeployment\n", + "NAME AGE\n", + "nvidia-mnist 1s\n", + "\n", + "\n" + ] + } + ], + "source": [ + "!helm install nvidia-mnist --namespace seldon --set tfserving.model_base_path=${MODEL_REPOSITORY_BUCKET}" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "!helm template nvidia-mnist --name nvidia-mnist --namespace seldon --set tfserving.model_base_path=${MODEL_REPOSITORY_BUCKET} > mnist.json" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "!sed '1,2d' mnist.json > tmp.json" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "%3\n", + "\n", + "cluster_0\n", + "\n", + "predictor-0\n", + "\n", + "\n", + "mnist-transformer0\n", + "\n", + "mnist-transformer\n", + "\n", + "\n", + "mnist-transformer0endpoint\n", + "\n", + "REST\n", + "\n", + "\n", + "mnist-transformer0->mnist-transformer0endpoint\n", + "\n", + "\n", + "\n", + "\n", + "nvidia-proxy\n", + "\n", + "nvidia-proxy\n", + "\n", + "\n", + "mnist-transformer0->nvidia-proxy\n", + "\n", + "\n", + "\n", + "\n", + "nvidia-proxyendpoint\n", + "\n", + "REST\n", + "\n", + "\n", + "nvidia-proxy->nvidia-proxyendpoint\n", + "\n", + "\n", + "\n", + "\n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "get_graph(\"tmp.json\")" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[04m\u001b[31;01m-\u001b[39;49;00m\u001b[04m\u001b[31;01m-\u001b[39;49;00m\u001b[04m\u001b[31;01m-\u001b[39;49;00m\r\n", + "\u001b[04m\u001b[31;01m#\u001b[39;49;00m \u001b[04m\u001b[31;01mS\u001b[39;49;00m\u001b[04m\u001b[31;01mo\u001b[39;49;00m\u001b[04m\u001b[31;01mu\u001b[39;49;00m\u001b[04m\u001b[31;01mr\u001b[39;49;00m\u001b[04m\u001b[31;01mc\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01m:\u001b[39;49;00m 
\u001b[04m\u001b[31;01mn\u001b[39;49;00m\u001b[04m\u001b[31;01mv\u001b[39;49;00m\u001b[04m\u001b[31;01mi\u001b[39;49;00m\u001b[04m\u001b[31;01md\u001b[39;49;00m\u001b[04m\u001b[31;01mi\u001b[39;49;00m\u001b[04m\u001b[31;01ma\u001b[39;49;00m\u001b[04m\u001b[31;01m-\u001b[39;49;00m\u001b[04m\u001b[31;01mm\u001b[39;49;00m\u001b[04m\u001b[31;01mn\u001b[39;49;00m\u001b[04m\u001b[31;01mi\u001b[39;49;00m\u001b[04m\u001b[31;01ms\u001b[39;49;00m\u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01m/\u001b[39;49;00m\u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01mm\u001b[39;49;00m\u001b[04m\u001b[31;01mp\u001b[39;49;00m\u001b[04m\u001b[31;01ml\u001b[39;49;00m\u001b[04m\u001b[31;01ma\u001b[39;49;00m\u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01ms\u001b[39;49;00m\u001b[04m\u001b[31;01m/\u001b[39;49;00m\u001b[04m\u001b[31;01mm\u001b[39;49;00m\u001b[04m\u001b[31;01mn\u001b[39;49;00m\u001b[04m\u001b[31;01mi\u001b[39;49;00m\u001b[04m\u001b[31;01ms\u001b[39;49;00m\u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01m_\u001b[39;49;00m\u001b[04m\u001b[31;01mn\u001b[39;49;00m\u001b[04m\u001b[31;01mv\u001b[39;49;00m\u001b[04m\u001b[31;01mi\u001b[39;49;00m\u001b[04m\u001b[31;01md\u001b[39;49;00m\u001b[04m\u001b[31;01mi\u001b[39;49;00m\u001b[04m\u001b[31;01ma\u001b[39;49;00m\u001b[04m\u001b[31;01m_\u001b[39;49;00m\u001b[04m\u001b[31;01md\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01mp\u001b[39;49;00m\u001b[04m\u001b[31;01ml\u001b[39;49;00m\u001b[04m\u001b[31;01mo\u001b[39;49;00m\u001b[04m\u001b[31;01my\u001b[39;49;00m\u001b[04m\u001b[31;01mm\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01mn\u001b[39;49;00m\u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01m.\u001b[39;49;00m\u001b[04m\u001b[31;01mj\u001b[39;49;00m\u001b[04m\u001b[31;01ms\u001b[39;49;00m\u001b[04m\u001b[31;01mo\u001b[39;49;00m\u001b[04m\u001b[31;01mn\u001b[39;49;00m\r\n", + "{\r\n", + " \u001b[34;01m\"apiVersion\"\u001b[39;49;00m: \u001b[33m\"machinelearning.seldon.io/v1alpha2\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"kind\"\u001b[39;49;00m: \u001b[33m\"SeldonDeployment\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"metadata\"\u001b[39;49;00m: {\r\n", + " \u001b[34;01m\"labels\"\u001b[39;49;00m: {\r\n", + " \u001b[34;01m\"app\"\u001b[39;49;00m: \u001b[33m\"seldon\"\u001b[39;49;00m\r\n", + " },\r\n", + " \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"nvidia-mnist\"\u001b[39;49;00m,\r\n", + "\t\u001b[34;01m\"namespace\"\u001b[39;49;00m: \u001b[33m\"seldon\"\u001b[39;49;00m\r\n", + " },\r\n", + " \u001b[34;01m\"spec\"\u001b[39;49;00m: {\r\n", + " \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"caffe2-mnist\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"predictors\"\u001b[39;49;00m: [\r\n", + " {\r\n", + " \u001b[34;01m\"componentSpecs\"\u001b[39;49;00m: [{\r\n", + " \u001b[34;01m\"spec\"\u001b[39;49;00m: {\r\n", + " \u001b[34;01m\"containers\"\u001b[39;49;00m: [\r\n", + " {\r\n", + " \u001b[34;01m\"image\"\u001b[39;49;00m: \u001b[33m\"seldonio/mnist-caffe2-transformer:0.1\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"mnist-transformer\"\u001b[39;49;00m\r\n", + " },\r\n", + " {\r\n", + " \u001b[34;01m\"image\"\u001b[39;49;00m: \u001b[33m\"seldonio/nvidia-inference-server-proxy:0.1\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"nvidia-proxy\"\u001b[39;49;00m\r\n", + " },\r\n", + "\t\t\t 
{\r\n", + "\t\t\t\t\u001b[34;01m\"args\"\u001b[39;49;00m: [\r\n", + "\t\t\t\t \u001b[33m\"--model-store=gs://seldon-inference-server-model-store\"\u001b[39;49;00m\r\n", + "\t\t\t\t],\r\n", + "\t\t\t\t\u001b[34;01m\"command\"\u001b[39;49;00m: [\r\n", + "\t\t\t\t \u001b[33m\"inference_server\"\u001b[39;49;00m\r\n", + "\t\t\t\t],\r\n", + "\t\t\t\t\u001b[34;01m\"image\"\u001b[39;49;00m: \u001b[33m\"nvcr.io/nvidia/inferenceserver:18.08.1-py2\"\u001b[39;49;00m,\r\n", + "\t\t\t\t\u001b[34;01m\"livenessProbe\"\u001b[39;49;00m: {\r\n", + "\t\t\t\t \u001b[34;01m\"failureThreshold\"\u001b[39;49;00m: \u001b[34m3\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"handler\"\u001b[39;49;00m:{\r\n", + "\t\t\t\t\t\u001b[34;01m\"httpGet\"\u001b[39;49;00m: {\r\n", + "\t\t\t\t\t \u001b[34;01m\"path\"\u001b[39;49;00m: \u001b[33m\"/api/health/live\"\u001b[39;49;00m,\r\n", + "\t\t\t\t\t \u001b[34;01m\"port\"\u001b[39;49;00m: \u001b[34m8000\u001b[39;49;00m,\r\n", + "\t\t\t\t\t \u001b[34;01m\"scheme\"\u001b[39;49;00m: \u001b[33m\"HTTP\"\u001b[39;49;00m\r\n", + "\t\t\t\t\t}\r\n", + "\t\t\t\t },\r\n", + "\t\t\t\t \u001b[34;01m\"initialDelaySeconds\"\u001b[39;49;00m: \u001b[34m5\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"periodSeconds\"\u001b[39;49;00m: \u001b[34m5\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"successThreshold\"\u001b[39;49;00m: \u001b[34m1\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"timeoutSeconds\"\u001b[39;49;00m: \u001b[34m1\u001b[39;49;00m\r\n", + "\t\t\t\t},\r\n", + "\t\t\t\t\u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"inference-server\"\u001b[39;49;00m,\r\n", + "\t\t\t\t\u001b[34;01m\"ports\"\u001b[39;49;00m: [\r\n", + "\t\t\t\t {\r\n", + "\t\t\t\t\t\u001b[34;01m\"containerPort\"\u001b[39;49;00m: \u001b[34m8000\u001b[39;49;00m,\r\n", + "\t\t\t\t\t\u001b[34;01m\"protocol\"\u001b[39;49;00m: \u001b[33m\"TCP\"\u001b[39;49;00m\r\n", + "\t\t\t\t },\r\n", + "\t\t\t\t {\r\n", + "\t\t\t\t\t\u001b[34;01m\"containerPort\"\u001b[39;49;00m: \u001b[34m8001\u001b[39;49;00m,\r\n", + "\t\t\t\t\t\u001b[34;01m\"protocol\"\u001b[39;49;00m: \u001b[33m\"TCP\"\u001b[39;49;00m\r\n", + "\t\t\t\t },\r\n", + "\t\t\t\t {\r\n", + "\t\t\t\t\t\u001b[34;01m\"containerPort\"\u001b[39;49;00m: \u001b[34m8002\u001b[39;49;00m,\r\n", + "\t\t\t\t\t\u001b[34;01m\"protocol\"\u001b[39;49;00m: \u001b[33m\"TCP\"\u001b[39;49;00m\r\n", + "\t\t\t\t }\r\n", + "\t\t\t\t],\r\n", + "\t\t\t\t\u001b[34;01m\"readinessProbe\"\u001b[39;49;00m: {\r\n", + "\t\t\t\t \u001b[34;01m\"failureThreshold\"\u001b[39;49;00m: \u001b[34m3\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"handler\"\u001b[39;49;00m:{\r\n", + "\t\t\t\t\t\u001b[34;01m\"httpGet\"\u001b[39;49;00m: {\r\n", + "\t\t\t\t\t \u001b[34;01m\"path\"\u001b[39;49;00m: \u001b[33m\"/api/health/ready\"\u001b[39;49;00m,\r\n", + "\t\t\t\t\t \u001b[34;01m\"port\"\u001b[39;49;00m: \u001b[34m8000\u001b[39;49;00m,\r\n", + "\t\t\t\t\t \u001b[34;01m\"scheme\"\u001b[39;49;00m: \u001b[33m\"HTTP\"\u001b[39;49;00m\r\n", + "\t\t\t\t\t}\r\n", + "\t\t\t\t },\r\n", + "\t\t\t\t \u001b[34;01m\"initialDelaySeconds\"\u001b[39;49;00m: \u001b[34m5\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"periodSeconds\"\u001b[39;49;00m: \u001b[34m5\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"successThreshold\"\u001b[39;49;00m: \u001b[34m1\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"timeoutSeconds\"\u001b[39;49;00m: \u001b[34m1\u001b[39;49;00m\r\n", + "\t\t\t\t},\r\n", + "\t\t\t\t\u001b[34;01m\"resources\"\u001b[39;49;00m: {\r\n", + "\t\t\t\t \u001b[34;01m\"limits\"\u001b[39;49;00m: {\r\n", + 
"\t\t\t\t\t\u001b[34;01m\"nvidia.com/gpu\"\u001b[39;49;00m: \u001b[33m\"1\"\u001b[39;49;00m\r\n", + "\t\t\t\t },\r\n", + "\t\t\t\t \u001b[34;01m\"requests\"\u001b[39;49;00m: {\r\n", + "\t\t\t\t\t\u001b[34;01m\"cpu\"\u001b[39;49;00m: \u001b[33m\"100m\"\u001b[39;49;00m,\r\n", + "\t\t\t\t\t\u001b[34;01m\"nvidia.com/gpu\"\u001b[39;49;00m: \u001b[33m\"1\"\u001b[39;49;00m\r\n", + "\t\t\t\t }\r\n", + "\t\t\t\t},\r\n", + "\t\t\t\t\u001b[34;01m\"securityContext\"\u001b[39;49;00m: {\r\n", + "\t\t\t\t \u001b[34;01m\"runAsUser\"\u001b[39;49;00m: \u001b[34m1000\u001b[39;49;00m\r\n", + "\t\t\t\t}\r\n", + "\t\t\t }\r\n", + "\t\t\t],\r\n", + "\t\t\t\u001b[34;01m\"terminationGracePeriodSeconds\"\u001b[39;49;00m: \u001b[34m1\u001b[39;49;00m,\r\n", + "\t\t\t\u001b[34;01m\"imagePullSecrets\"\u001b[39;49;00m: [\r\n", + "\t\t\t {\r\n", + "\t\t\t\t\u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"ngc\"\u001b[39;49;00m\r\n", + "\t\t\t }\r\n", + "\t\t\t]\r\n", + "\t\t }\r\n", + "\t\t}],\r\n", + " \u001b[34;01m\"graph\"\u001b[39;49;00m: {\r\n", + " \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"mnist-transformer\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"endpoint\"\u001b[39;49;00m: { \u001b[34;01m\"type\"\u001b[39;49;00m : \u001b[33m\"REST\"\u001b[39;49;00m },\r\n", + " \u001b[34;01m\"type\"\u001b[39;49;00m: \u001b[33m\"TRANSFORMER\"\u001b[39;49;00m,\r\n", + "\t\t \u001b[34;01m\"children\"\u001b[39;49;00m: [\r\n", + "\t\t\t{\r\n", + "\t\t\t \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"nvidia-proxy\"\u001b[39;49;00m,\r\n", + "\t\t\t \u001b[34;01m\"endpoint\"\u001b[39;49;00m: { \u001b[34;01m\"type\"\u001b[39;49;00m : \u001b[33m\"REST\"\u001b[39;49;00m },\r\n", + "\t\t\t \u001b[34;01m\"type\"\u001b[39;49;00m: \u001b[33m\"MODEL\"\u001b[39;49;00m,\r\n", + "\t\t\t \u001b[34;01m\"children\"\u001b[39;49;00m: [],\r\n", + "\t\t\t \u001b[34;01m\"parameters\"\u001b[39;49;00m:\r\n", + "\t\t\t [\r\n", + "\t\t\t\t{\r\n", + "\t\t\t\t \u001b[34;01m\"name\"\u001b[39;49;00m:\u001b[33m\"url\"\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"type\"\u001b[39;49;00m:\u001b[33m\"STRING\"\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"value\"\u001b[39;49;00m:\u001b[33m\"127.0.0.1:8000\"\u001b[39;49;00m\r\n", + "\t\t\t\t},\r\n", + "\t\t\t\t{\r\n", + "\t\t\t\t \u001b[34;01m\"name\"\u001b[39;49;00m:\u001b[33m\"model_name\"\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"type\"\u001b[39;49;00m:\u001b[33m\"STRING\"\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"value\"\u001b[39;49;00m:\u001b[33m\"tensorrt_mnist\"\u001b[39;49;00m\r\n", + "\t\t\t\t},\r\n", + "\t\t\t\t{\r\n", + "\t\t\t\t \u001b[34;01m\"name\"\u001b[39;49;00m:\u001b[33m\"protocol\"\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"type\"\u001b[39;49;00m:\u001b[33m\"STRING\"\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[34;01m\"value\"\u001b[39;49;00m:\u001b[33m\"HTTP\"\u001b[39;49;00m\r\n", + "\t\t\t\t}\r\n", + "\t\t\t ]\r\n", + "\t\t\t}\r\n", + "\t\t ]\r\n", + " },\r\n", + " \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"mnist-nvidia\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"replicas\"\u001b[39;49;00m: \u001b[34m1\u001b[39;49;00m\r\n", + " }\r\n", + " ]\r\n", + " }\r\n", + "}\r\n" + ] + } + ], + "source": [ + "!pygmentize mnist.json" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Port forward Ambassador**\n", + "\n", + "```\n", + "kubectl port-forward $(kubectl get pods -n seldon -l service=ambassador -o jsonpath='{.items[0].metadata.name}') -n seldon 8003:8080\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 
24, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAC2NJREFUeJzt3U+InPUdx/HPp1Ev6iGSSQgx6VoJpSI0liEUUuoGUWIpRA8Wc5AUpOtBQcFDJZdsDoVQqraHIsQaTME/CGrNIbSGEEmFIq4iJmnaRmSraUJ2Qg7Gk0S/PewTWZOdP5l5/kz8vl8gM/PM7M6XwXeemXlm9ueIEIB8vtP0AACaQfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJHVVnXe2bNmymJiYqPMugVRmZ2d15swZD3LbkeK3vUnSHyQtkfSniNjZ6/YTExOamZkZ5S4B9NButwe+7dBP+20vkfRHSXdLukXSFtu3DPv7ANRrlNf86yV9FBEfR8QXkl6WtLmcsQBUbZT4V0n6dMHlE8W2b7A9ZXvG9kyn0xnh7gCUaZT4F3tT4ZLvB0fErohoR0S71WqNcHcAyjRK/CckrV5w+UZJJ0cbB0BdRon/XUlrbd9k+xpJ90vaW85YAKo29KG+iDhv+xFJf9P8ob7dEXG0tMkAVGqk4/wRsU/SvpJmAVAjPt4LJEX8QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyRV6xLdwOWwB1ppuquDBw92vW5ycnKk3/1twJ4fSIr4gaSIH0iK+IGkiB9IiviBpIgfSGqk4/y2ZyWdk/SlpPMR0S5jKOSwcePGSn//W2+91fU6jvOX8yGfjRFxpoTfA6BGPO0Hkho1/pD0pu33bE+VMRCAeoz6tH9DRJy0vVzSftv/iohDC29Q/KMwJUlr1qwZ8e4AlGWkPX9EnCxO5yS9Lmn9IrfZFRHtiGi3Wq1R7g5AiYaO3/a1tq+/cF7SXZKOlDUYgGqN8rR/haTXi69dXiXpxYj4aylTAajc0PFHxMeSfljiLEim13H4MnAsvzcO9QFJET+QFPEDSRE/kBTxA0kRP5AUf7ob31oc6uuNPT+QFPEDSRE/kBTxA0kRP5AU8QNJET+QFMf5B9Trz0yP+tXUiBjp54FhsOcHkiJ+ICniB5IifiAp4geSIn4gKeIHkuI4/4Cq/jPTQN3Y8wNJET+QFPEDSRE/kBTxA0kRP5AU8QNJ9Y3f9m7bc7aPLNh2g+39to8Xp0urHRNA2QbZ8z8vadNF256QdCAi1ko6UFwGcAXpG39EHJJ09qLNmyXtKc7vkXRPyXMBqNiwr/lXRMQpSSpOl5c3EoA6VP6Gn+0p2zO2ZzqdTtV3B2BAw8Z/2vZKSSpO57rdMCJ2RUQ7ItqtVmvIuwNQtmHj3ytpa3F+q6Q3yhkHQF0GOdT3kqR/SPq+7RO2H5S0U9Kdto9LurO4DOAK0vf7/BGxpctVd5Q8C4Aa8Qk/ICniB5IifiAp4geSIn4gKeIHkiJ+ICniB5IifiAp4geSIn4gKeIHkiJ+ICniB5JiiW5UqsmlzXvd9+TkZG1zjCv2/EBSxA8kRfxAUsQPJEX8QFLEDyRF/EBSHOcvNHk8euPGjT2vv/3222uapHw7duxo7L45zt8be34gKeIHkiJ+ICniB5IifiAp4geSIn4gqb7H+W3vlvRzSXMRcWuxbVrSryR1iptti4h9VQ1Zh3H93vkg1wPDGGTP/7ykTYtsfzoi1hX/XdHhAxn1jT8iDkk6W8MsAGo0ymv+R2x/aHu37aWlTQSgFsPG/4ykmyWtk3RK0pPdbmh7yvaM7ZlOp9PtZgBqNlT8EXE6Ir6MiK8kPStpfY/b7oqIdkS0W63WsHMCKNlQ8dteueDivZKOlDMOgLoMcqjvJUmTkpbZPiFpu6RJ2+skhaRZSQ9VOCOACvSNPyK2LLL5uQpmaVS/73c3+b10DIfv7PfGJ/yApIgfSIr4gaSIH0iK+IGkiB9Iij/dXeh3WGj79u1D/+5+hwn73Xe/P909PT19mRPVp9dsVR8+5VBfb+z5gaSIH0iK+IGkiB9IiviBpIgfSIr4gaQ4zj+gUY6lj/NxeOTFnh9IiviBpIgfSIr4gaSIH0iK+IGkiB9IiuP8uGKN8jcWwJ4fSIv4gaSIH0iK+IGkiB9IiviBpIgfSKpv/LZX2z5o+5jto7YfLbbfYHu/7ePF6dLqxwVQlkH2/OclPR4RP5D0Y0kP275F0hOSDkTEWkkHissArhB944+IUxHxfnH+nKRjklZJ2ixpT3GzPZLuqWpIAOW7rNf8tick3SbpHUkrIuKUNP8PhKTlZQ8HoDoDx2/7OkmvSnosIj67jJ+bsj1je6bT6QwzI4AKDBS/7as1H/4LEfFasfm07ZXF9SslzS32sxGxKyLaEdFutVplzAygBIO8229Jz0k6FhFPLbhqr6Stxfmtkt4ofzwAVRnkK70bJD0g6bDtD4pt2yTtlPSK7QclfSLpvmpGBFCFvvFHxNuS3OXqO8odB0Bd+IQfkBTxA0kRP5AU8QNJET+QFPEDSRE/kBTxA0kRP5AU8QNJET+QFPEDSRE/kBTxA0mxRDcqNTk52fW6HTt2jPS7p6enR/r57NjzA0kRP5AU8QNJET+QFPEDSRE/kBTxA0lxnB+V6nWcPyLqGwSXYM8PJEX8QFLEDyRF/EBSxA8kRfxAUsQPJNU3fturbR+0fcz2UduPFtunbf/P9gfFfz+rflwAZRnkQz7nJT0eEe/bvl7Se7b3F9c9HRG/q248AFXpG39EnJJ0qjh/zvYxSauqHgxAtS7rNb/tCUm3SXqn2PSI7Q9t77a9tMvPTNmesT3T6XRGGhZAeQaO3/Z1kl6V9FhEfCbpGUk3S1qn+WcGTy72cxGxKyLaEdFutVoljAygDAPFb/tqzYf/QkS8JkkRcToivoyIryQ9K2l9dWMCKNsg7/Zb0nOSjkXEUwu2r1xws3slHSl/PABVGeTd/g2SHpB02PYHxbZtkrbYXicpJM1KeqiSCQFUYpB3+9+W5EWu2lf+OADqwif8gKSIH0iK+IGkiB9IiviBpIgfSIr4gaSIH0iK+IGkiB9IiviBpIgfSIr4gaSIH0jKdS6TbLsj6b8LNi2TdKa2AS7PuM42rnNJzDasMmf7bkQM9Pfyao3/kju3ZyKi3dgAPYzrbOM6l8Rsw2pqNp72A0kRP5BU0/Hvavj+exnX2cZ1LonZhtXIbI2+5gfQnKb3/AAa0kj8tjfZ/rftj2w/0cQM3dietX24WHl4puFZdtues31kwbYbbO+3fbw4XXSZtIZmG4uVm3usLN3oYzduK17X/rTf9hJJ/5F0p6QTkt6VtCUi/lnrIF3YnpXUjojGjwnb/qmkzyX9OSJuLbb9VtLZiNhZ/M
O5NCJ+PSazTUv6vOmVm4sFZVYuXFla0j2SfqkGH7sec/1CDTxuTez510v6KCI+jogvJL0saXMDc4y9iDgk6exFmzdL2lOc36P5/3lq12W2sRARpyLi/eL8OUkXVpZu9LHrMVcjmoh/laRPF1w+ofFa8jskvWn7PdtTTQ+ziBXFsukXlk9f3vA8F+u7cnOdLlpZemweu2FWvC5bE/EvtvrPOB1y2BARP5J0t6SHi6e3GMxAKzfXZZGVpcfCsCtel62J+E9IWr3g8o2STjYwx6Ii4mRxOifpdY3f6sOnLyySWpzONTzP18Zp5ebFVpbWGDx247TidRPxvytpre2bbF8j6X5JexuY4xK2ry3eiJHtayXdpfFbfXivpK3F+a2S3mhwlm8Yl5Wbu60srYYfu3Fb8bqRD/kUhzJ+L2mJpN0R8Zvah1iE7e9pfm8vzS9i+mKTs9l+SdKk5r/1dVrSdkl/kfSKpDWSPpF0X0TU/sZbl9kmNf/U9euVmy+8xq55tp9I+rukw5K+KjZv0/zr68Yeux5zbVEDjxuf8AOS4hN+QFLEDyRF/EBSxA8kRfxAUsQPJEX8QFLEDyT1f8SFR6l1JU/LAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "200\n", + "{\n", + " \"meta\": {\n", + " \"puid\": \"rnaaqs28vus3vsi3t0n19q3240\",\n", + " \"tags\": {\n", + " },\n", + " \"routing\": {\n", + " \"mnist-transformer\": -1\n", + " }\n", + " },\n", + " \"data\": {\n", + " \"names\": [\"t:0\", \"t:1\", \"t:2\", \"t:3\", \"t:4\", \"t:5\", \"t:6\", \"t:7\", \"t:8\", \"t:9\"],\n", + " \"tensor\": {\n", + " \"shape\": [1, 10, 1, 1],\n", + " \"values\": [3.5578280637360224E-14, 6.920142844174526E-12, 4.660422221747229E-12, 4.3726320849736544E-14, 1.0, 3.065522011583277E-12, 4.4698250499129386E-14, 3.898779521449569E-9, 9.478232001342013E-13, 3.0998212330324293E-10]\n", + " }\n", + " }\n", + "}\n" + ] + } + ], + "source": [ + "data = gen_mnist_data(mnist)\n", + "data = data.reshape((784))\n", + "rest_request_ambassador(\"nvidia-mnist\",endpoint=\"localhost:8003\",arr=data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Analytics and Load Test" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME: seldon-core-analytics\n", + "LAST DEPLOYED: Mon Sep 24 14:40:46 2018\n", + "NAMESPACE: seldon\n", + "STATUS: DEPLOYED\n", + "\n", + "RESOURCES:\n", + "==> v1beta1/ClusterRoleBinding\n", + "NAME AGE\n", + "prometheus 0s\n", + "\n", + "==> v1/Job\n", + "NAME DESIRED SUCCESSFUL AGE\n", + "grafana-prom-import-dashboards 1 0 0s\n", + "\n", + "==> v1/Service\n", + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "alertmanager ClusterIP 10.39.243.208 80/TCP 0s\n", + "grafana-prom NodePort 10.39.246.121 80:30588/TCP 0s\n", + "prometheus-node-exporter ClusterIP None 9100/TCP 0s\n", + "prometheus-seldon ClusterIP 10.39.253.10 80/TCP 0s\n", + "\n", + "==> v1beta1/DaemonSet\n", + "NAME DESIRED CURRENT READY UP-TO-DATE AVAILABLE NODE SELECTOR AGE\n", + "prometheus-node-exporter 1 1 0 1 0 0s\n", + "\n", + "==> v1/ConfigMap\n", + "NAME DATA AGE\n", + "alertmanager-server-conf 1 0s\n", + "grafana-import-dashboards 7 0s\n", + "prometheus-rules 4 0s\n", + "prometheus-server-conf 1 0s\n", + "\n", + "==> v1beta1/ClusterRole\n", + "NAME AGE\n", + "prometheus 0s\n", + "\n", + "==> v1beta1/Deployment\n", + "NAME DESIRED CURRENT UP-TO-DATE AVAILABLE AGE\n", + "alertmanager-deployment 1 1 1 0 0s\n", + "grafana-prom-deployment 1 1 1 0 0s\n", + "prometheus-deployment 1 1 1 0 0s\n", + "\n", + "==> v1/Pod(related)\n", + "NAME READY STATUS RESTARTS AGE\n", + "grafana-prom-import-dashboards-wrtnb 0/1 ContainerCreating 0 0s\n", + "alertmanager-deployment-557b99ccf8-ztdnk 0/1 ContainerCreating 0 0s\n", + "grafana-prom-deployment-dd84b7788-zwwls 0/1 ContainerCreating 0 0s\n", + "prometheus-node-exporter-zg9cg 0/1 ContainerCreating 0 0s\n", + "prometheus-deployment-78dd89b44f-8ntcw 0/1 
Pending 0 0s\n", + "\n", + "==> v1/Secret\n", + "NAME TYPE DATA AGE\n", + "grafana-prom-secret Opaque 1 0s\n", + "\n", + "==> v1/ServiceAccount\n", + "NAME SECRETS AGE\n", + "prometheus 1 0s\n", + "\n", + "\n", + "NOTES:\n", + "NOTES: TODO\n", + "\n", + "\n" + ] + } + ], + "source": [ + "!helm install ../../../helm-charts/seldon-core-analytics --name seldon-core-analytics \\\n", + " --set grafana_prom_admin_password=password \\\n", + " --set persistence.enabled=false \\\n", + " --namespace seldon" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "node \"gke-cluster-1-default-pool-5e22f4e3-djd7\" labeled\r\n" + ] + } + ], + "source": [ + "!kubectl label nodes $(kubectl get nodes -o jsonpath='{.items[0].metadata.name}') role=locust" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME: loadtest\n", + "LAST DEPLOYED: Mon Sep 24 14:40:53 2018\n", + "NAMESPACE: seldon\n", + "STATUS: DEPLOYED\n", + "\n", + "RESOURCES:\n", + "==> v1/ReplicationController\n", + "NAME DESIRED CURRENT READY AGE\n", + "locust-slave-1 1 1 0 0s\n", + "locust-master-1 1 1 0 0s\n", + "\n", + "==> v1/Service\n", + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "locust-master-1 NodePort 10.39.254.17 5557:31880/TCP,5558:30185/TCP,8089:32570/TCP 0s\n", + "\n", + "==> v1/Pod(related)\n", + "NAME READY STATUS RESTARTS AGE\n", + "locust-slave-1-p5n6f 0/1 ContainerCreating 0 0s\n", + "locust-master-1-ksk7g 0/1 ContainerCreating 0 0s\n", + "\n", + "\n" + ] + } + ], + "source": [ + "!helm install seldon-core-loadtesting --name loadtest \\\n", + " --namespace seldon \\\n", + " --repo https://storage.googleapis.com/seldon-charts \\\n", + " --set locust.script=mnist_rest_locust.py \\\n", + " --set locust.host=http://caffe2-mnist:8000 \\\n", + " --set oauth.enabled=false \\\n", + " --set locust.hatchRate=1 \\\n", + " --set locust.clients=1 \\\n", + " --set loadtest.sendFeedback=1 \\\n", + " --set locust.minWait=0 \\\n", + " --set locust.maxWait=0 \\\n", + " --set replicaCount=1 \\\n", + " --set data.size=784\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You should port-foward the grafana dashboard\n", + "\n", + "```\n", + "kubectl port-forward $(kubectl get pods -n seldon -l app=grafana-prom-server -o jsonpath='{.items[0].metadata.name}') -n seldon 3000:3000\n", + "```\n", + "\n", + "You can then view an analytics dashboard inside the cluster at http://localhost:3000/dashboard/db/prediction-analytics?refresh=5s&orgId=1. Your IP address may be different. get it via minikube ip. 
Login with:\n", + "\n", + " Username : admin\n", + "\n", + " password : password (as set when starting seldon-core-analytics above)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/models/nvidia-mnist/tensorrt_mnist/config.pbtxt b/examples/models/nvidia-mnist/tensorrt_mnist/config.pbtxt new file mode 100644 index 0000000000..57032e9d08 --- /dev/null +++ b/examples/models/nvidia-mnist/tensorrt_mnist/config.pbtxt @@ -0,0 +1,19 @@ +name: "tensorrt_mnist" +platform: "tensorrt_plan" +max_batch_size: 1 +input [ + { + name: "data" + data_type: TYPE_FP32 + format: FORMAT_NCHW + dims: [ 1, 28, 28 ] + } +] +output [ + { + name: "prob" + data_type: TYPE_FP32 + dims: [ 10, 1, 1 ] + label_filename: "mnist_labels.txt" + } +] diff --git a/examples/models/nvidia-mnist/tensorrt_mnist/mnist_labels.txt b/examples/models/nvidia-mnist/tensorrt_mnist/mnist_labels.txt new file mode 100644 index 0000000000..8b1acc12b6 --- /dev/null +++ b/examples/models/nvidia-mnist/tensorrt_mnist/mnist_labels.txt @@ -0,0 +1,10 @@ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 diff --git a/examples/models/tfserving-mnist/Makefile b/examples/models/tfserving-mnist/Makefile new file mode 100644 index 0000000000..7241b4f57b --- /dev/null +++ b/examples/models/tfserving-mnist/Makefile @@ -0,0 +1,8 @@ + + +clean: + rm -rf MNIST_data + rm -rf mnist-model/1 + rm -rf serving + rm -f tmp.json + rm -f mnist.json diff --git a/examples/models/tfserving-mnist/mnist_tfserving_deployment.json.template b/examples/models/tfserving-mnist/mnist_tfserving_deployment.json.template new file mode 100644 index 0000000000..73693ef039 --- /dev/null +++ b/examples/models/tfserving-mnist/mnist_tfserving_deployment.json.template @@ -0,0 +1,93 @@ +{ + "apiVersion": "machinelearning.seldon.io/v1alpha2", + "kind": "SeldonDeployment", + "metadata": { + "labels": { + "app": "seldon" + }, + "name": "tfserving-mnist" + }, + "spec": { + "name": "tf-mnist", + "predictors": [ + { + "componentSpecs": [{ + "spec": { + "containers": [ + { + "image": "seldonio/tfserving-proxy:0.1", + "name": "tfserving-proxy" + }, + { + "args": [ + "/usr/bin/tensorflow_model_server", + "--port=8000", + "--model_name=mnist-model", + "--model_base_path=gs://seldon-tfserving-store/mnist-model" + ], + "image": "gcr.io/kubeflow-images-public/tensorflow-serving-1.7:v20180604-0da89b8a", + "name": "mnist-model", + "ports": [ + { + "containerPort": 8000, + "protocol": "TCP" + } + ], + "resources": { + "limits": { + "cpu": "4", + "memory": "4Gi" + }, + "requests": { + "cpu": "1", + "memory": "1Gi" + } + }, + "securityContext": { + "runAsUser": 1000 + } + } + ], + "terminationGracePeriodSeconds": 1 + } + }], + "graph": { + "name": "tfserving-proxy", + "endpoint": { "type" : "REST" }, + "type": "MODEL", + "children": [], + "parameters": + [ + { + "name":"grpc_endpoint", + "type":"STRING", + "value":"localhost:8000" + }, + { + "name":"model_name", + "type":"STRING", + "value":"mnist-model" + }, + { + "name":"model_output", + "type":"STRING", + "value":"scores" + }, + { + "name":"model_input", + "type":"STRING", + 
"value":"images" + }, + { + "name":"signature_name", + "type":"STRING", + "value":"predict_images" + } + ] + }, + "name": "mnist-tfserving", + "replicas": 1 + } + ] + } +} diff --git a/examples/models/tfserving-mnist/tfserving-mnist.ipynb b/examples/models/tfserving-mnist/tfserving-mnist.ipynb new file mode 100644 index 0000000000..68f1eab8ad --- /dev/null +++ b/examples/models/tfserving-mnist/tfserving-mnist.ipynb @@ -0,0 +1,960 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Seldon and TensorFlow Serving MNIST Example\n", + "\n", + "This example shows how you can combine Seldon with Tensorflo Serving. We will use a Seldon Tensorflow Serving proxy model image that will forward Seldon internal microservice prediction calls out to a Tensorflow serving server.\n", + "\n", + "The example will use the MNIST digit classification task with the example MNIST model.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Setup" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/clive/anaconda3/lib/python3.6/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", + " from ._conv import register_converters as _register_converters\n" + ] + } + ], + "source": [ + "%matplotlib inline\n", + "import requests\n", + "from random import randint,random\n", + "import json\n", + "from matplotlib import pyplot as plt\n", + "import numpy as np\n", + "from tensorflow.examples.tutorials.mnist import input_data\n", + "import sys\n", + "sys.path.append(\"../../../notebooks\")\n", + "from visualizer import get_graph" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "def gen_image(arr):\n", + " two_d = (np.reshape(arr, (28, 28)) * 255).astype(np.uint8)\n", + " plt.imshow(two_d,cmap=plt.cm.gray_r, interpolation='nearest')\n", + " return plt\n", + "\n", + "def download_mnist():\n", + " return input_data.read_data_sets(\"MNIST_data/\", one_hot = True)\n", + "\n", + "def rest_predict_request(endpoint,data):\n", + " request = {\"data\":{\"ndarray\":data.tolist()}}\n", + " response = requests.post(\n", + " \"http://\"+endpoint+\"/predict\",\n", + " data={\"json\":json.dumps(request),\"isDefault\":True})\n", + " return response.json() \n", + "\n", + "def rest_transform_input_request(endpoint,data):\n", + " request = {\"data\":{\"ndarray\":data.tolist()}}\n", + " response = requests.post(\n", + " \"http://\"+endpoint+\"/transform-input\",\n", + " data={\"json\":json.dumps(request),\"isDefault\":True})\n", + " return response.json() \n", + "\n", + "def rest_transform_output_request(endpoint,data):\n", + " request = {\"data\":{\"ndarray\":data.tolist()}}\n", + " response = requests.post(\n", + " \"http://\"+endpoint+\"/transform-output\",\n", + " data={\"json\":json.dumps(request),\"isDefault\":True})\n", + " return response.json() \n", + "\n", + "def rest_request_ambassador(deploymentName,endpoint=\"localhost:8003\",arr=None):\n", + " payload = {\"data\":{\"names\":[\"a\",\"b\"],\"tensor\":{\"shape\":[1,784],\"values\":arr.tolist()}}}\n", + " response = requests.post(\n", + " \"http://\"+endpoint+\"/seldon/\"+deploymentName+\"/api/v0.1/predictions\",\n", + " json=payload)\n", + " print(response.status_code)\n", + " 
print(response.text)\n", + "\n", + "\n", + "def gen_mnist_data(mnist):\n", + " batch_xs, batch_ys = mnist.train.next_batch(1)\n", + " chosen=0\n", + " gen_image(batch_xs[chosen]).show()\n", + " data = batch_xs[chosen].reshape((1,784))\n", + " return data\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:From :7: read_data_sets (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:260: maybe_download (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please write your own downloading logic.\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/base.py:252: _internal_retry..wrap..wrapped_fn (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please use urllib or similar directly.\n", + "Successfully downloaded train-images-idx3-ubyte.gz 9912422 bytes.\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:262: extract_images (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please use tf.data to implement this functionality.\n", + "Extracting MNIST_data/train-images-idx3-ubyte.gz\n", + "Successfully downloaded train-labels-idx1-ubyte.gz 28881 bytes.\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:267: extract_labels (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please use tf.data to implement this functionality.\n", + "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:110: dense_to_one_hot (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please use tf.one_hot on tensors.\n", + "Successfully downloaded t10k-images-idx3-ubyte.gz 1648877 bytes.\n", + "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n", + "Successfully downloaded t10k-labels-idx1-ubyte.gz 4542 bytes.\n", + "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n", + "WARNING:tensorflow:From /home/clive/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:290: DataSet.__init__ (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n" + ] + } + ], + "source": [ + "mnist = download_mnist()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Create MNIST 
Model Repository\n", + "You will need tensorflow installed to run these steps." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Train Tensorflow MNIST example model" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Cloning into 'serving'...\n", + "remote: Enumerating objects: 75, done.\u001b[K\n", + "remote: Counting objects: 100% (75/75), done.\u001b[K\n", + "remote: Compressing objects: 100% (43/43), done.\u001b[K\n", + "remote: Total 11251 (delta 53), reused 49 (delta 32), pack-reused 11176\u001b[K\n", + "Receiving objects: 100% (11251/11251), 3.62 MiB | 3.02 MiB/s, done.\n", + "Resolving deltas: 100% (8109/8109), done.\n", + "Checking connectivity... done.\n" + ] + } + ], + "source": [ + "!git clone https://github.com/tensorflow/serving.git" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/home/clive/anaconda3/lib/python3.6/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", + " from ._conv import register_converters as _register_converters\n", + "Training model...\n", + "Successfully downloaded train-images-idx3-ubyte.gz 9912422 bytes.\n", + "Extracting /tmp/train-images-idx3-ubyte.gz\n", + "Successfully downloaded train-labels-idx1-ubyte.gz 28881 bytes.\n", + "Extracting /tmp/train-labels-idx1-ubyte.gz\n", + "Successfully downloaded t10k-images-idx3-ubyte.gz 1648877 bytes.\n", + "Extracting /tmp/t10k-images-idx3-ubyte.gz\n", + "Successfully downloaded t10k-labels-idx1-ubyte.gz 4542 bytes.\n", + "Extracting /tmp/t10k-labels-idx1-ubyte.gz\n", + "2018-09-26 15:48:35.512962: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA\n", + "training accuracy 0.9162\n", + "Done training!\n", + "Exporting trained model to b'../../../mnist-model/1'\n", + "Done exporting!\n" + ] + } + ], + "source": [ + "!cd serving/tensorflow_serving/example && python mnist_saved_model.py --training_iteration=100000 ../../../mnist-model" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Copy Model to Google Bucket" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "env: MODEL_REPOSITORY_BUCKET=gs://seldon-tfserving-store\n" + ] + } + ], + "source": [ + "# CHANGE THIS TO YOUR OWN CHOSEN GOOGLE BUCKET NAME\n", + "%env MODEL_REPOSITORY_BUCKET=gs://seldon-tfserving-store" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Creating gs://seldon-tfserving-store/...\n", + "ServiceException: 409 Bucket seldon-tfserving-store already exists.\n" + ] + } + ], + "source": [ + "!gsutil mb ${MODEL_REPOSITORY_BUCKET}" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Copying file://mnist-model/1/saved_model.pb [Content-Type=application/octet-stream]...\n", + "Copying file://mnist-model/1/variables/variables.data-00000-of-00001 [Content-Type=application/octet-stream]...\n", + "Copying 
file://mnist-model/1/variables/variables.index [Content-Type=application/octet-stream]...\n", + "- [3 files][ 48.6 KiB/ 48.6 KiB] \n", + "Operation completed over 3 objects/48.6 KiB. \n" + ] + } + ], + "source": [ + "!gsutil cp -r mnist-model ${MODEL_REPOSITORY_BUCKET}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Test From GCP Cluster" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Run MNIST Inference Graph" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "serviceaccount \"tiller\" created\n", + "clusterrolebinding \"tiller\" created\n", + "$HELM_HOME has been configured at /home/clive/.helm.\n", + "\n", + "Tiller (the Helm server-side component) has been installed into your Kubernetes Cluster.\n", + "\n", + "Please note: by default, Tiller is deployed with an insecure 'allow unauthenticated users' policy.\n", + "To prevent this, run `helm init` with the --tiller-tls-verify flag.\n", + "For more information on securing your installation see: https://docs.helm.sh/using_helm/#securing-your-helm-installation\n", + "Happy Helming!\n" + ] + } + ], + "source": [ + "!kubectl -n kube-system create sa tiller\n", + "!kubectl create clusterrolebinding tiller --clusterrole cluster-admin --serviceaccount=kube-system:tiller\n", + "!helm init --service-account tiller" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "deployment \"tiller-deploy\" successfully rolled out\r\n" + ] + } + ], + "source": [ + "!kubectl rollout status deploy/tiller-deploy -n kube-system" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Error: a release named seldon-core-crd already exists.\r\n", + "Run: helm ls --all seldon-core-crd; to check the status of the release\r\n", + "Or run: helm del --purge seldon-core-crd; to delete it\r\n" + ] + } + ], + "source": [ + "!helm install ../../../helm-charts/seldon-core-crd --name seldon-core-crd \\\n", + " --set usage_metrics.enabled=true" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "namespace \"seldon\" created\r\n" + ] + } + ], + "source": [ + "!kubectl create namespace seldon" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Context \"gke_kubeflow-testing-213813_europe-west4-a_cluster-2\" modified.\r\n" + ] + } + ], + "source": [ + "!kubectl config set-context $(kubectl config current-context) --namespace=seldon" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME: seldon-core\n", + "LAST DEPLOYED: Wed Sep 26 15:53:10 2018\n", + "NAMESPACE: seldon\n", + "STATUS: DEPLOYED\n", + "\n", + "RESOURCES:\n", + "==> v1/ClusterRoleBinding\n", + "NAME AGE\n", + "seldon 1s\n", + "\n", + "==> v1beta1/RoleBinding\n", + "NAME AGE\n", + "ambassador 1s\n", + "\n", + "==> v1beta1/Deployment\n", + "NAME DESIRED CURRENT UP-TO-DATE AVAILABLE AGE\n", + "seldon-core-ambassador 1 1 1 0 0s\n", + "seldon-core-seldon-apiserver 1 1 1 0 0s\n", + "seldon-core-seldon-cluster-manager 1 1 1 0 
0s\n", + "seldon-core-redis 1 1 1 0 0s\n", + "\n", + "==> v1/ServiceAccount\n", + "NAME SECRETS AGE\n", + "seldon 1 1s\n", + "\n", + "==> v1beta1/ClusterRole\n", + "NAME AGE\n", + "seldon-crd 1s\n", + "\n", + "==> v1beta1/Role\n", + "ambassador 1s\n", + "seldon-local 1s\n", + "\n", + "==> v1/RoleBinding\n", + "NAME AGE\n", + "seldon 1s\n", + "\n", + "==> v1/Service\n", + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "seldon-core-ambassador-admin NodePort 10.19.242.55 8877:32623/TCP 1s\n", + "seldon-core-ambassador NodePort 10.19.252.46 8080:32423/TCP 1s\n", + "seldon-core-seldon-apiserver NodePort 10.19.240.170 8080:31470/TCP,5000:30245/TCP 1s\n", + "seldon-core-redis ClusterIP 10.19.248.179 6379/TCP 1s\n", + "\n", + "==> v1/Pod(related)\n", + "NAME READY STATUS RESTARTS AGE\n", + "seldon-core-ambassador-778c58bf5d-tk9hz 0/2 ContainerCreating 0 0s\n", + "seldon-core-seldon-apiserver-6b8dbc978b-56std 0/1 ContainerCreating 0 0s\n", + "seldon-core-seldon-cluster-manager-596d4674fd-swqs6 0/1 ContainerCreating 0 0s\n", + "seldon-core-redis-8668565565-5g98j 0/1 ContainerCreating 0 0s\n", + "\n", + "\n", + "NOTES:\n", + "Thank you for installing Seldon Core.\n", + "\n", + "Documentation can be found at https://github.com/SeldonIO/seldon-core\n", + "\n", + "\n", + "\n", + "\n" + ] + } + ], + "source": [ + "!helm install ../../../helm-charts/seldon-core --name seldon-core \\\n", + " --namespace seldon \\\n", + " --set ambassador.enabled=true" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Waiting for rollout to finish: 0 of 1 updated replicas are available...\n", + "deployment \"seldon-core-seldon-cluster-manager\" successfully rolled out\n", + "Waiting for rollout to finish: 0 of 1 updated replicas are available...\n", + "deployment \"seldon-core-seldon-apiserver\" successfully rolled out\n" + ] + } + ], + "source": [ + "!kubectl rollout status deploy/seldon-core-seldon-cluster-manager -n seldon\n", + "!kubectl rollout status deploy/seldon-core-seldon-apiserver -n seldon" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME: tfserving-mnist\n", + "LAST DEPLOYED: Wed Sep 26 15:56:41 2018\n", + "NAMESPACE: seldon\n", + "STATUS: DEPLOYED\n", + "\n", + "RESOURCES:\n", + "==> v1alpha2/SeldonDeployment\n", + "NAME AGE\n", + "tfserving-mnist 0s\n", + "\n", + "\n" + ] + } + ], + "source": [ + "!helm install tfserving-mnist --name tfserving-mnist --namespace seldon --set tfserving.model_base_path=${MODEL_REPOSITORY_BUCKET}/mnist-model " + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "!helm template tfserving-mnist --name tfserving-mnist --namespace seldon --set tfserving.model_base_path=${MODEL_REPOSITORY_BUCKET}/mnist-model > mnist.json" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "!sed '1,2d' mnist.json > tmp.json" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "%3\n", + "\n", + "cluster_0\n", + "\n", + "predictor-0\n", + "\n", + "\n", + "tfserving-proxy0\n", + "\n", + "tfserving-proxy\n", + "\n", + "\n", + "tfserving-proxy0endpoint\n", + "\n", + "REST\n", + "\n", + "\n", + 
"tfserving-proxy0->tfserving-proxy0endpoint\n", + "\n", + "\n", + "\n", + "\n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "get_graph(\"tmp.json\")" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[04m\u001b[31;01m-\u001b[39;49;00m\u001b[04m\u001b[31;01m-\u001b[39;49;00m\u001b[04m\u001b[31;01m-\u001b[39;49;00m\r\n", + "\u001b[04m\u001b[31;01m#\u001b[39;49;00m \u001b[04m\u001b[31;01mS\u001b[39;49;00m\u001b[04m\u001b[31;01mo\u001b[39;49;00m\u001b[04m\u001b[31;01mu\u001b[39;49;00m\u001b[04m\u001b[31;01mr\u001b[39;49;00m\u001b[04m\u001b[31;01mc\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01m:\u001b[39;49;00m \u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01mf\u001b[39;49;00m\u001b[04m\u001b[31;01ms\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01mr\u001b[39;49;00m\u001b[04m\u001b[31;01mv\u001b[39;49;00m\u001b[04m\u001b[31;01mi\u001b[39;49;00m\u001b[04m\u001b[31;01mn\u001b[39;49;00m\u001b[04m\u001b[31;01mg\u001b[39;49;00m\u001b[04m\u001b[31;01m-\u001b[39;49;00m\u001b[04m\u001b[31;01mm\u001b[39;49;00m\u001b[04m\u001b[31;01mn\u001b[39;49;00m\u001b[04m\u001b[31;01mi\u001b[39;49;00m\u001b[04m\u001b[31;01ms\u001b[39;49;00m\u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01m/\u001b[39;49;00m\u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01mm\u001b[39;49;00m\u001b[04m\u001b[31;01mp\u001b[39;49;00m\u001b[04m\u001b[31;01ml\u001b[39;49;00m\u001b[04m\u001b[31;01ma\u001b[39;49;00m\u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01ms\u001b[39;49;00m\u001b[04m\u001b[31;01m/\u001b[39;49;00m\u001b[04m\u001b[31;01mm\u001b[39;49;00m\u001b[04m\u001b[31;01mn\u001b[39;49;00m\u001b[04m\u001b[31;01mi\u001b[39;49;00m\u001b[04m\u001b[31;01ms\u001b[39;49;00m\u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01m_\u001b[39;49;00m\u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01mf\u001b[39;49;00m\u001b[04m\u001b[31;01ms\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01mr\u001b[39;49;00m\u001b[04m\u001b[31;01mv\u001b[39;49;00m\u001b[04m\u001b[31;01mi\u001b[39;49;00m\u001b[04m\u001b[31;01mn\u001b[39;49;00m\u001b[04m\u001b[31;01mg\u001b[39;49;00m\u001b[04m\u001b[31;01m_\u001b[39;49;00m\u001b[04m\u001b[31;01md\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01mp\u001b[39;49;00m\u001b[04m\u001b[31;01ml\u001b[39;49;00m\u001b[04m\u001b[31;01mo\u001b[39;49;00m\u001b[04m\u001b[31;01my\u001b[39;49;00m\u001b[04m\u001b[31;01mm\u001b[39;49;00m\u001b[04m\u001b[31;01me\u001b[39;49;00m\u001b[04m\u001b[31;01mn\u001b[39;49;00m\u001b[04m\u001b[31;01mt\u001b[39;49;00m\u001b[04m\u001b[31;01m.\u001b[39;49;00m\u001b[04m\u001b[31;01mj\u001b[39;49;00m\u001b[04m\u001b[31;01ms\u001b[39;49;00m\u001b[04m\u001b[31;01mo\u001b[39;49;00m\u001b[04m\u001b[31;01mn\u001b[39;49;00m\r\n", + "{\r\n", + " \u001b[34;01m\"apiVersion\"\u001b[39;49;00m: \u001b[33m\"machinelearning.seldon.io/v1alpha2\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"kind\"\u001b[39;49;00m: \u001b[33m\"SeldonDeployment\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"metadata\"\u001b[39;49;00m: {\r\n", + " \u001b[34;01m\"labels\"\u001b[39;49;00m: {\r\n", + " \u001b[34;01m\"app\"\u001b[39;49;00m: 
\u001b[33m\"seldon\"\u001b[39;49;00m\r\n", + " },\r\n", + " \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"tfserving-mnist\"\u001b[39;49;00m,\r\n", + "\t\u001b[34;01m\"namespace\"\u001b[39;49;00m: \u001b[33m\"seldon\"\u001b[39;49;00m\t\r\n", + " },\r\n", + " \u001b[34;01m\"spec\"\u001b[39;49;00m: {\r\n", + " \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"tf-mnist\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"predictors\"\u001b[39;49;00m: [\r\n", + " {\r\n", + " \u001b[34;01m\"componentSpecs\"\u001b[39;49;00m: [{\r\n", + " \u001b[34;01m\"spec\"\u001b[39;49;00m: {\r\n", + " \u001b[34;01m\"containers\"\u001b[39;49;00m: [\r\n", + " {\r\n", + " \u001b[34;01m\"image\"\u001b[39;49;00m: \u001b[33m\"seldonio/tfserving-proxy:0.1\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"tfserving-proxy\"\u001b[39;49;00m\r\n", + " },\r\n", + "\t\t\t {\r\n", + "\t\t\t\t\u001b[34;01m\"args\"\u001b[39;49;00m: [\r\n", + "\t\t\t\t \u001b[33m\"/usr/bin/tensorflow_model_server\"\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[33m\"--port=8000\"\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[33m\"--model_name=mnist-model\"\u001b[39;49;00m,\r\n", + "\t\t\t\t \u001b[33m\"--model_base_path=gs://seldon-tfserving-store/mnist-model\"\u001b[39;49;00m\r\n", + "\t\t\t\t],\r\n", + "\t\t\t\t\u001b[34;01m\"image\"\u001b[39;49;00m: \u001b[33m\"gcr.io/kubeflow-images-public/tensorflow-serving-1.7:v20180604-0da89b8a\"\u001b[39;49;00m,\r\n", + "\t\t\t\t\u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"mnist-model\"\u001b[39;49;00m,\r\n", + "\t\t\t\t\u001b[34;01m\"ports\"\u001b[39;49;00m: [\r\n", + "\t\t\t\t {\r\n", + "\t\t\t\t\t\u001b[34;01m\"containerPort\"\u001b[39;49;00m: \u001b[34m8000\u001b[39;49;00m,\r\n", + "\t\t\t\t\t\u001b[34;01m\"protocol\"\u001b[39;49;00m: \u001b[33m\"TCP\"\u001b[39;49;00m\r\n", + "\t\t\t\t }\r\n", + "\t\t\t\t],\r\n", + "\t\t\t\t\u001b[34;01m\"resources\"\u001b[39;49;00m: {\r\n", + "\t\t\t\t \u001b[34;01m\"limits\"\u001b[39;49;00m: {\r\n", + "\t\t\t\t\t\u001b[34;01m\"cpu\"\u001b[39;49;00m: \u001b[33m\"4\"\u001b[39;49;00m,\r\n", + "\t\t\t\t\t\u001b[34;01m\"memory\"\u001b[39;49;00m: \u001b[33m\"4Gi\"\u001b[39;49;00m\r\n", + "\t\t\t\t },\r\n", + "\t\t\t\t \u001b[34;01m\"requests\"\u001b[39;49;00m: {\r\n", + "\t\t\t\t\t\u001b[34;01m\"cpu\"\u001b[39;49;00m: \u001b[33m\"1\"\u001b[39;49;00m,\r\n", + "\t\t\t\t\t\u001b[34;01m\"memory\"\u001b[39;49;00m: \u001b[33m\"1Gi\"\u001b[39;49;00m\r\n", + "\t\t\t\t }\r\n", + "\t\t\t\t},\r\n", + "\t\t\t\t\u001b[34;01m\"securityContext\"\u001b[39;49;00m: {\r\n", + "\t\t\t\t \u001b[34;01m\"runAsUser\"\u001b[39;49;00m: \u001b[34m1000\u001b[39;49;00m\r\n", + "\t\t\t\t}\r\n", + "\t\t\t }\r\n", + "\t\t\t],\r\n", + "\t\t\t\u001b[34;01m\"terminationGracePeriodSeconds\"\u001b[39;49;00m: \u001b[34m1\u001b[39;49;00m\r\n", + "\t\t }\r\n", + "\t\t}],\r\n", + " \u001b[34;01m\"graph\"\u001b[39;49;00m: {\r\n", + "\t\t \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"tfserving-proxy\"\u001b[39;49;00m,\r\n", + "\t\t \u001b[34;01m\"endpoint\"\u001b[39;49;00m: { \u001b[34;01m\"type\"\u001b[39;49;00m : \u001b[33m\"REST\"\u001b[39;49;00m },\r\n", + "\t\t \u001b[34;01m\"type\"\u001b[39;49;00m: \u001b[33m\"MODEL\"\u001b[39;49;00m,\r\n", + "\t\t \u001b[34;01m\"children\"\u001b[39;49;00m: [],\r\n", + "\t\t \u001b[34;01m\"parameters\"\u001b[39;49;00m:\r\n", + "\t\t [\r\n", + "\t\t\t{\r\n", + "\t\t\t \u001b[34;01m\"name\"\u001b[39;49;00m:\u001b[33m\"grpc_endpoint\"\u001b[39;49;00m,\r\n", + "\t\t\t 
\u001b[34;01m\"type\"\u001b[39;49;00m:\u001b[33m\"STRING\"\u001b[39;49;00m,\r\n", + "\t\t\t \u001b[34;01m\"value\"\u001b[39;49;00m:\u001b[33m\"localhost:8000\"\u001b[39;49;00m\r\n", + "\t\t\t},\r\n", + "\t\t\t{\r\n", + "\t\t\t \u001b[34;01m\"name\"\u001b[39;49;00m:\u001b[33m\"model_name\"\u001b[39;49;00m,\r\n", + "\t\t\t \u001b[34;01m\"type\"\u001b[39;49;00m:\u001b[33m\"STRING\"\u001b[39;49;00m,\r\n", + "\t\t\t \u001b[34;01m\"value\"\u001b[39;49;00m:\u001b[33m\"mnist-model\"\u001b[39;49;00m\r\n", + "\t\t\t},\r\n", + "\t\t\t{\r\n", + "\t\t\t \u001b[34;01m\"name\"\u001b[39;49;00m:\u001b[33m\"model_output\"\u001b[39;49;00m,\r\n", + "\t\t\t \u001b[34;01m\"type\"\u001b[39;49;00m:\u001b[33m\"STRING\"\u001b[39;49;00m,\r\n", + "\t\t\t \u001b[34;01m\"value\"\u001b[39;49;00m:\u001b[33m\"scores\"\u001b[39;49;00m\r\n", + "\t\t\t},\r\n", + "\t\t\t{\r\n", + "\t\t\t \u001b[34;01m\"name\"\u001b[39;49;00m:\u001b[33m\"model_input\"\u001b[39;49;00m,\r\n", + "\t\t\t \u001b[34;01m\"type\"\u001b[39;49;00m:\u001b[33m\"STRING\"\u001b[39;49;00m,\r\n", + "\t\t\t \u001b[34;01m\"value\"\u001b[39;49;00m:\u001b[33m\"images\"\u001b[39;49;00m\r\n", + "\t\t\t},\r\n", + "\t\t\t{\r\n", + "\t\t\t \u001b[34;01m\"name\"\u001b[39;49;00m:\u001b[33m\"signature_name\"\u001b[39;49;00m,\r\n", + "\t\t\t \u001b[34;01m\"type\"\u001b[39;49;00m:\u001b[33m\"STRING\"\u001b[39;49;00m,\r\n", + "\t\t\t \u001b[34;01m\"value\"\u001b[39;49;00m:\u001b[33m\"predict_images\"\u001b[39;49;00m\r\n", + "\t\t\t}\r\n", + "\t\t ]\r\n", + "\t\t},\r\n", + " \u001b[34;01m\"name\"\u001b[39;49;00m: \u001b[33m\"mnist-tfserving\"\u001b[39;49;00m,\r\n", + " \u001b[34;01m\"replicas\"\u001b[39;49;00m: \u001b[34m1\u001b[39;49;00m\r\n", + " }\r\n", + " ]\r\n", + " }\r\n", + "}\r\n" + ] + } + ], + "source": [ + "!pygmentize mnist.json" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Port forward Ambassador**\n", + "\n", + "```\n", + "kubectl port-forward $(kubectl get pods -n seldon -l service=ambassador -o jsonpath='{.items[0].metadata.name}') -n seldon 8003:8080\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAADfRJREFUeJzt3X+I1Pedx/HX+7w2MdsSNK6ppObWNNLcEji9TOTAcNGUlHgIWoxSIXUPits/DJxJkQQhmj+yEI60vfxxGLYXqSs2bdeaiwGTawgHuYZSnIRgont3BtlYT9ldSUmtEUri+/7Y75aN2fnMON/vzHfW9/MBMjPf9/fHm0le+52Zz3fmY+4uAPH8RdkNACgH4QeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoAg/ENRftvNgCxYs8J6ennYeEghldHRU58+ft0bWzRV+M3tA0rOS5kj6N3d/OrV+T0+PqtVqnkMCSKhUKg2v2/TLfjObI+lfJa2R1Ctps5n1Nrs/AO2V5z3/Cknvu/spd/+TpJ9JWldMWwBaLU/4b5H0u2mPz2TLPsPM+s2sambViYmJHIcDUKQ84Z/pQ4XPfT/Y3QfdveLule7u7hyHA1CkPOE/I2nxtMdflXQ2XzsA2iVP+I9KWmpmS8zsi5K+LelwMW0BaLWmh/rc/RMze1jSf2hyqG+vux8vrDMALZVrnN/dj0g6UlAvANqIy3uBoAg/EBThB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoAg/EBThB4Ii/EBQhB8IKtcsvWY2KumCpE8lfeLulSKawmedOnUqWX/uuedq1kZHR5PbHjx4MFm/++67k/W77rorWT906FDN2ty5c5PbfvDBB8l6Pe5es7Z169bktoODg7mOPRvkCn9mtbufL2A/ANqIl/1AUHnD75J+ZWZvmVl/EQ0BaI+8L/tXuvtZM1so6TUz+293f2P6CtkfhX5JuvXWW3MeDkBRcp353f1sdjsu6UVJK2ZYZ9DdK+5e6e7uznM4AAVqOvxm1mVmX566L+mbkt4rqjEArZXnZf/Nkl40s6n9/NTdXy2kKwAt13T43f2UpL8psJewxsfHk/Xbb789Wc/+ADel3rbVajVZP3r0aK79t2rbeur1HQFDfUBQhB8IivADQRF+ICjCDwRF+IGgivhWH3J66qmnym4hnCVLlpTdQuk48wNBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIzzd4Dh4eFc28+fP79mbdu2bcltN2zYkOvYzzzzTLI+MjJSs9bb25vr2CdOnEjWK5XavyQ/MDCQ69jXAs78QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4/zXgNSYdX9/a6dQ3LdvX7J+8eLFmrWurq5cx07tu4j9X+s48wNBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUHXH+c1sr6S1ksbd/c5s2XxJP5fUI2lU0iZ3/33r2pzd9u/fn6yPjY0l6+6erN9www1X3VO7tHKsnXH8fBo58/9E0gNXLHtc0uvuvlTS69ljALNI3fC7+xuSPrxi8TpJU5d27ZO0vuC+ALRYs+/5b3b3c5KU3S4sriUA7dDyD/zMrN/MqmZWnZiYaPXhADSo2fCPmdkiScpux2ut6O6D7l5x90p3d3eThwNQtGbDf1hSX3a/T9JLxbQDoF3qht/MXpD0G0lfN7MzZvZdSU9Lut/MTkq6P3sMYBapO87v7ptrlL5RcC9hmVmu7fksBc3gCj8gKMIPBEX4gaAIPxAU4QeCIvxAUPx09zVgaGioZu2RRx5pYyeYTTjzA0ERfiAowg8ERfiBoAg/EBThB4Ii/EBQjPO3wdq1a5P1m266KVk/f/58sn7y5MmateXLlye3zevRRx9N1leuXFmzdttttxXdDq4CZ34gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIpx/jaYN29esj5nzpxc+7906VLN2rFjx3Ltu56+vr5kfenSpTVrAwMDyW0ffPDBpnpCYzjzA0ERfiAowg8ERfiBoAg/EBThB4Ii/EBQdcf5zWyvpLWSxt39zmzZk5K2SpqaG3qnux9pVZOz3fDwcLJe7/v67l5kO4Wq11vqtwZ27NiR3Db1WwCStGjRomQdaY2c+X8i6YEZlv/I3Zdl/wg+MMvUDb+7vyHpwzb0AqCN8rznf9jMjpnZXjNLX78KoOM0G/49kr4maZmkc5J+UGtFM+s3s6qZVScmJmqtBqDNmgq/u4+5+6fuflnSjyWtSKw76O4Vd690d3c32yeAgjUVfjOb/jHrtyS9V0w7ANqlkaG+FyStkrTAzM5I2i1plZktk+SSRiV9r4U9AmiBuuF3980zLH6+Bb1cszZu3Jisv/LKK8n6gQMHkvXe3t6atQ0bNiS37enpSdbr2bJlS7JuZjVrp0+fTm47NDSUrD/22GPJOtK4wg8IivADQRF+ICjCDwRF+IGgCD8QFD/d3QF27dqVrNf7eex77723yHauyssvv5ysHzx4sOl9j4yMNL0t6uPMDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBMc7fAep9rTbv125b6YknnkjW84zznzhxIlm/ePFist7V1dX0sSPgzA8ERfiBoAg/EBThB4Ii/EBQhB8IivADQTHOj1zyjOPXk/pJcolx/Lw48wNBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUHXH+c1ssaQhSV+RdFnSoLs/a2bzJf1cUo+kUUmb3P33rWt19rp06VKy/tFHH+Xa/4033lizNnfu3Fz7rufIkSPJurvXrF133XXJbe+7776mekJjGjnzfyLp++7+15L+TtI2M+uV9Lik1919qaTXs8cAZom64Xf3c+7+dnb/gqQRSbdIWidpX7baPknrW9UkgOJd1Xt+M+uRtFzSbyXd7O7npMk/EJIWFt0cgNZpOPxm9iVJv5S03d3/cBXb9ZtZ1cyqExMTzfQIoAUaCr+ZfUGTwT/g7oeyxWNmtiirL5I0PtO27j7o7hV3r3R3dxfRM4AC1A2/mZmk5yWNuPsPp5UOS5qaPrZP0kvFtwegVRr5Su9KSd+R9K6ZvZMt2ynpaUm/MLPvSjotaWNrWpz99uzZk6zv2LEjWU8Nl0nSPffcU7O2evXq5LZ33HFHsv7qq68m69VqNVmfPHfMbNWqVcltt2zZkqwjn7rhd/dfS6r1X/AbxbYDoF24wg8IivADQRF+ICjCDwRF+IGgCD8QFD/d3Qb79+9v6f7ffPPNpmpl6+SpxyPgzA8ERfiBoAg/EBThB4Ii/EBQhB8IivADQTHO3wa7d+9O1h966KFk/eOPPy6ynbYaGBioWdu2bVsbO8GVOPMDQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCM87fB+vXpOUyHh4eT9U2bNiXru3btqlnr6+urWZOk48ePJ+v1fre/3hTg27dvr1m7/vrrk9uitTjzA0ERfiAowg8ERf
iBoAg/EBThB4Ii/EBQdcf5zWyxpCFJX5F0WdKguz9rZk9K2ippIlt1p7sfaVWj17I1a9Yk6xcuXGjZsRcuXJisr169umXHRrkaucjnE0nfd/e3zezLkt4ys9ey2o/c/ZnWtQegVeqG393PSTqX3b9gZiOSbml1YwBa66re85tZj6Tlkn6bLXrYzI6Z2V4zm1djm34zq5pZdWJiYqZVAJSg4fCb2Zck/VLSdnf/g6Q9kr4maZkmXxn8YKbt3H3Q3SvuXunu7i6gZQBFaCj8ZvYFTQb/gLsfkiR3H3P3T939sqQfS1rRujYBFK1u+M3MJD0vacTdfzht+aJpq31L0nvFtwegVRr5tH+lpO9IetfM3smW7ZS02cyWSXJJo5K+15IOAbREI5/2/1qSzVBiTB+YxbjCDwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoAg/EJS5e/sOZjYh6YNpixZIOt+2Bq5Op/bWqX1J9NasInv7K3dv6Pfy2hr+zx3crOruldIaSOjU3jq1L4nemlVWb7zsB4Ii/EBQZYd/sOTjp3Rqb53al0RvzSqlt1Lf8wMoT9lnfgAlKSX8ZvaAmf2Pmb1vZo+X0UMtZjZqZu+a2TtmVi25l71mNm5m701bNt/MXjOzk9ntjNOkldTbk2b2f9lz946Z/UNJvS02s/80sxEzO25m/5QtL/W5S/RVyvPW9pf9ZjZH0v9Kul/SGUlHJW129xNtbaQGMxuVVHH30seEzezvJf1R0pC735kt+2dJH7r709kfznnu/liH9PakpD+WPXNzNqHMoukzS0taL+kfVeJzl+hrk0p43so486+Q9L67n3L3P0n6maR1JfTR8dz9DUkfXrF4naR92f19mvyfp+1q9NYR3P2cu7+d3b8gaWpm6VKfu0RfpSgj/LdI+t20x2fUWVN+u6RfmdlbZtZfdjMzuDmbNn1q+vSFJfdzpbozN7fTFTNLd8xz18yM10UrI/wzzf7TSUMOK939byWtkbQte3mLxjQ0c3O7zDCzdEdodsbropUR/jOSFk97/FVJZ0voY0bufja7HZf0ojpv9uGxqUlSs9vxkvv5s06auXmmmaXVAc9dJ814XUb4j0paamZLzOyLkr4t6XAJfXyOmXVlH8TIzLokfVOdN/vwYUl92f0+SS+V2MtndMrMzbVmllbJz12nzXhdykU+2VDGv0iaI2mvuw+0vYkZmNltmjzbS5OTmP60zN7M7AVJqzT5ra8xSbsl/bukX0i6VdJpSRvdve0fvNXobZUmX7r+eebmqffYbe7tHkn/JeldSZezxTs1+f66tOcu0ddmlfC8cYUfEBRX+AFBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCOr/AYMJ4yvRMtjWAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "200\n", + "{\n", + " \"meta\": {\n", + " \"puid\": \"faun2l1ftmp3r90cm6s5vnjo8a\",\n", + " \"tags\": {\n", + " },\n", + " \"routing\": {\n", + " }\n", + " },\n", + " \"data\": {\n", + " \"names\": [\"t:0\", \"t:1\", \"t:2\", \"t:3\", \"t:4\", \"t:5\", \"t:6\", \"t:7\", \"t:8\", \"t:9\"],\n", + " \"tensor\": {\n", + " \"shape\": [1, 10],\n", + " \"values\": [1.124886839534156E-4, 9.059235132891086E-12, 0.0028196186758577824, 0.005111460108309984, 2.414313166809734E-5, 0.6415375471115112, 1.74979015810095E-6, 1.2785285052885342E-9, 0.34967318177223206, 7.198737002909184E-4]\n", + " }\n", + " }\n", + "}\n" + ] + } + ], + "source": [ + "data = gen_mnist_data(mnist)\n", + "data = data.reshape((784))\n", + "rest_request_ambassador(\"tfserving-mnist\",endpoint=\"localhost:8003\",arr=data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Analytics and Load Test" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME: seldon-core-analytics\n", + "LAST DEPLOYED: Mon Sep 24 15:52:34 2018\n", + "NAMESPACE: seldon\n", + "STATUS: DEPLOYED\n", + "\n", + "RESOURCES:\n", + "==> v1/Service\n", + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "alertmanager ClusterIP 10.19.253.159 80/TCP 1s\n", + "grafana-prom NodePort 10.19.251.82 80:30690/TCP 1s\n", + "prometheus-node-exporter ClusterIP None 9100/TCP 1s\n", + "prometheus-seldon ClusterIP 10.19.245.167 80/TCP 1s\n", + "\n", + "==> v1/Secret\n", + "NAME TYPE DATA AGE\n", + "grafana-prom-secret Opaque 1 2s\n", + "\n", + "==> v1/ServiceAccount\n", + "NAME SECRETS AGE\n", + "prometheus 1 2s\n", + "\n", + "==> v1/Job\n", + "NAME DESIRED SUCCESSFUL AGE\n", + "grafana-prom-import-dashboards 1 0 1s\n", + "\n", + "==> v1beta1/Deployment\n", + "NAME DESIRED CURRENT UP-TO-DATE AVAILABLE AGE\n", + "alertmanager-deployment 1 1 1 0 1s\n", + "grafana-prom-deployment 1 1 1 0 
1s\n", + "prometheus-deployment 1 1 1 0 1s\n", + "\n", + "==> v1beta1/DaemonSet\n", + "NAME DESIRED CURRENT READY UP-TO-DATE AVAILABLE NODE SELECTOR AGE\n", + "prometheus-node-exporter 1 1 0 1 0 1s\n", + "\n", + "==> v1/Pod(related)\n", + "NAME READY STATUS RESTARTS AGE\n", + "grafana-prom-import-dashboards-wzl55 0/1 ContainerCreating 0 1s\n", + "alertmanager-deployment-557b99ccf8-m27mm 0/1 ContainerCreating 0 1s\n", + "grafana-prom-deployment-dd84b7788-gmdkc 0/1 ContainerCreating 0 1s\n", + "prometheus-node-exporter-pnv9k 0/1 ContainerCreating 0 1s\n", + "prometheus-deployment-78dd89b44f-7t62q 0/1 ContainerCreating 0 1s\n", + "\n", + "==> v1/ConfigMap\n", + "NAME DATA AGE\n", + "alertmanager-server-conf 1 2s\n", + "grafana-import-dashboards 7 2s\n", + "prometheus-rules 4 2s\n", + "prometheus-server-conf 1 2s\n", + "\n", + "==> v1beta1/ClusterRole\n", + "NAME AGE\n", + "prometheus 2s\n", + "\n", + "==> v1beta1/ClusterRoleBinding\n", + "NAME AGE\n", + "prometheus 2s\n", + "\n", + "\n", + "NOTES:\n", + "NOTES: TODO\n", + "\n", + "\n" + ] + } + ], + "source": [ + "!helm install ../../../helm-charts/seldon-core-analytics --name seldon-core-analytics \\\n", + " --set grafana_prom_admin_password=password \\\n", + " --set persistence.enabled=false \\\n", + " --namespace seldon" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "node \"gke-cluster-2-default-pool-54fbe785-wmcb\" labeled\r\n" + ] + } + ], + "source": [ + "!kubectl label nodes $(kubectl get nodes -o jsonpath='{.items[0].metadata.name}') role=locust" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME: loadtest\n", + "LAST DEPLOYED: Mon Sep 24 15:52:43 2018\n", + "NAMESPACE: seldon\n", + "STATUS: DEPLOYED\n", + "\n", + "RESOURCES:\n", + "==> v1/ReplicationController\n", + "NAME DESIRED CURRENT READY AGE\n", + "locust-slave-1 1 1 0 0s\n", + "locust-master-1 1 1 0 0s\n", + "\n", + "==> v1/Service\n", + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "locust-master-1 NodePort 10.19.255.204 5557:32342/TCP,5558:30493/TCP,8089:32034/TCP 0s\n", + "\n", + "==> v1/Pod(related)\n", + "NAME READY STATUS RESTARTS AGE\n", + "locust-slave-1-zncg4 0/1 ContainerCreating 0 0s\n", + "locust-master-1-9z8gm 0/1 ContainerCreating 0 0s\n", + "\n", + "\n" + ] + } + ], + "source": [ + "!helm install seldon-core-loadtesting --name loadtest \\\n", + " --namespace seldon \\\n", + " --repo https://storage.googleapis.com/seldon-charts \\\n", + " --set locust.script=mnist_rest_locust.py \\\n", + " --set locust.host=http://tf-mnist:8000 \\\n", + " --set oauth.enabled=false \\\n", + " --set locust.hatchRate=1 \\\n", + " --set locust.clients=1 \\\n", + " --set loadtest.sendFeedback=1 \\\n", + " --set locust.minWait=0 \\\n", + " --set locust.maxWait=0 \\\n", + " --set replicaCount=1 \\\n", + " --set data.size=784\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You should port-foward the grafana dashboard\n", + "\n", + "```\n", + "kubectl port-forward $(kubectl get pods -n seldon -l app=grafana-prom-server -o jsonpath='{.items[0].metadata.name}') -n seldon 3000:3000\n", + "```\n", + "\n", + "You can then view an analytics dashboard inside the cluster at http://localhost:3000/dashboard/db/prediction-analytics?refresh=5s&orgId=1. Your IP address may be different. get it via minikube ip. 
Login with:\n", + "\n", + " Username : admin\n", + "\n", + " password : password (as set when starting seldon-core-analytics above)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/models/tfserving-mnist/tfserving-mnist/Chart.yaml b/examples/models/tfserving-mnist/tfserving-mnist/Chart.yaml new file mode 100644 index 0000000000..e746d40542 --- /dev/null +++ b/examples/models/tfserving-mnist/tfserving-mnist/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +description: Seldon MNIST TFServing Example +name: tfserving-mnist +sources: +- https://github.com/SeldonIO/seldon-core +version: 0.1 diff --git a/examples/models/tfserving-mnist/tfserving-mnist/templates/mnist_tfserving_deployment.json b/examples/models/tfserving-mnist/tfserving-mnist/templates/mnist_tfserving_deployment.json new file mode 100644 index 0000000000..6e4637c3ff --- /dev/null +++ b/examples/models/tfserving-mnist/tfserving-mnist/templates/mnist_tfserving_deployment.json @@ -0,0 +1,94 @@ +{ + "apiVersion": "machinelearning.seldon.io/v1alpha2", + "kind": "SeldonDeployment", + "metadata": { + "labels": { + "app": "seldon" + }, + "name": "tfserving-mnist", + "namespace": "{{ .Release.Namespace }}" + }, + "spec": { + "name": "tf-mnist", + "predictors": [ + { + "componentSpecs": [{ + "spec": { + "containers": [ + { + "image": "seldonio/tfserving-proxy:0.1", + "name": "tfserving-proxy" + }, + { + "args": [ + "/usr/bin/tensorflow_model_server", + "--port={{ .Values.tfserving.port }}", + "--model_name=mnist-model", + "--model_base_path={{ .Values.tfserving.model_base_path }}" + ], + "image": "gcr.io/kubeflow-images-public/tensorflow-serving-1.7:v20180604-0da89b8a", + "name": "mnist-model", + "ports": [ + { + "containerPort": {{ .Values.tfserving.port }}, + "protocol": "TCP" + } + ], + "resources": { + "limits": { + "cpu": "4", + "memory": "4Gi" + }, + "requests": { + "cpu": "1", + "memory": "1Gi" + } + }, + "securityContext": { + "runAsUser": 1000 + } + } + ], + "terminationGracePeriodSeconds": 1 + } + }], + "graph": { + "name": "tfserving-proxy", + "endpoint": { "type" : "REST" }, + "type": "MODEL", + "children": [], + "parameters": + [ + { + "name":"grpc_endpoint", + "type":"STRING", + "value":"localhost:{{ .Values.tfserving.port }}" + }, + { + "name":"model_name", + "type":"STRING", + "value":"mnist-model" + }, + { + "name":"model_output", + "type":"STRING", + "value":"scores" + }, + { + "name":"model_input", + "type":"STRING", + "value":"images" + }, + { + "name":"signature_name", + "type":"STRING", + "value":"predict_images" + } + ] + }, + "name": "mnist-tfserving", + "replicas": 1 + } + ] + } +} diff --git a/examples/models/tfserving-mnist/tfserving-mnist/values.yaml b/examples/models/tfserving-mnist/tfserving-mnist/values.yaml new file mode 100644 index 0000000000..2049eb89ec --- /dev/null +++ b/examples/models/tfserving-mnist/tfserving-mnist/values.yaml @@ -0,0 +1,4 @@ +tfserving: + model_base_path: gs://seldon-tfserving-store/mnist-model + port: 8000 + diff --git a/examples/routers/epsilon_greedy/EpsilonGreedy.py 
b/examples/routers/epsilon_greedy/EpsilonGreedy.py index b1d25bc893..6cafd763bb 100644 --- a/examples/routers/epsilon_greedy/EpsilonGreedy.py +++ b/examples/routers/epsilon_greedy/EpsilonGreedy.py @@ -1,7 +1,7 @@ import random import numpy as np -__version__ = "v1.1" +__version__ = "v1.2" def n_success_failures(features,reward): n_predictions = features.shape[0] @@ -10,9 +10,9 @@ def n_success_failures(features,reward): return n_success, n_failures class EpsilonGreedy(object): - + def __init__(self,n_branches=None,epsilon=0.1,verbose=False): - print "Starting Epsilon Greedy Microservice, version {}".format(__version__) + print("Starting Epsilon Greedy Microservice, version {}".format(__version__)) if n_branches is None: raise Exception("n_branches parameter must be given") self.verbose = verbose @@ -22,29 +22,29 @@ def __init__(self,n_branches=None,epsilon=0.1,verbose=False): self.branches_tries = [0 for _ in range(n_branches)] self.n_branches = n_branches if self.verbose: - print "Router initialised" - print "# branches:",self.n_branches - print "Epsilon:",self.epsilon - print - + print("Router initialised") + print("# branches:",self.n_branches) + print("Epsilon:",self.epsilon) + print() + def route(self,features,feature_names): x = random.random() best_branch = self.best_branch other_branches = [i for i in range(self.n_branches) if i!=best_branch] selected_branch = best_branch if x>self.epsilon else random.choice(other_branches) if self.verbose: - print "Routing" - print "Current best branch:",best_branch - print "Selected branch:",selected_branch - print + print("Routing") + print("Current best branch:",best_branch) + print("Selected branch:",selected_branch) + print() return selected_branch - + def send_feedback(self,features,feature_names,routing,reward,truth): if self.verbose: - print "Training" - print "Prev success #", self.branches_success - print "Prev tries #", self.branches_tries - print "Prev best branch:", self.best_branch + print("Training") + print("Prev success #", self.branches_success) + print("Prev tries #", self.branches_tries) + print("Prev best branch:", self.best_branch) n_success, n_failures = n_success_failures(features,reward) self.branches_success[routing] += n_success self.branches_tries[routing] += n_success + n_failures @@ -55,8 +55,7 @@ def send_feedback(self,features,feature_names,routing,reward,truth): ] self.best_branch = np.argmax(perfs) if self.verbose: - print "New success #", self.branches_success - print "New tries #", self.branches_tries - print "New best branch:",self.best_branch - print - + print("New success #", self.branches_success) + print("New tries #", self.branches_tries) + print("New best branch:",self.best_branch) + print() diff --git a/examples/routers/epsilon_greedy/README.md b/examples/routers/epsilon_greedy/README.md index 3478efd6fd..549ac73f17 100644 --- a/examples/routers/epsilon_greedy/README.md +++ b/examples/routers/epsilon_greedy/README.md @@ -1,10 +1,22 @@ # Epsilon Greedy Router +## Description + +An epsilon-greedy router implements a [multi-armed bandit strategy](https://en.wikipedia.org/wiki/Multi-armed_bandit#Semi-uniform_strategies) in which, when presented with *n* models to make predictions, the currently +best performing model is selected with probability *1-e* while a random model is selected with probability *e*. +This strategy ensures sending traffic to the best performing model most of the time (exploitation) while allowing for +some evaluation of other models (exploration). 
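As a minimal illustration of this selection rule (a standalone sketch mirroring the `route` logic above; the `select_branch` helper is illustrative only, not part of the router class):

```python
import random

def select_branch(best_branch, n_branches, epsilon=0.1):
    # Exploit: keep the currently best branch with probability 1-epsilon;
    # explore: otherwise pick one of the other branches uniformly at random.
    others = [i for i in range(n_branches) if i != best_branch]
    if random.random() > epsilon:
        return best_branch
    return random.choice(others)

# e.g. route a request among 3 models where model 2 currently performs best
print(select_branch(best_branch=2, n_branches=3))
```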
A typical parameter value could be *e=0.1*, but this will depend on the +desired trade-off between exploration and exploitation. + +Note that in this implementation the parameter value *e* is static, but a related strategy called *epsilon-decreasing* +would see the value of *e* decrease as the number of predictions increases, resulting in a highly explorative behaviour +at the start and increasingly exploitative behaviour as time goes on. + ## Wrap using s2i ```bash -s2i build . seldonio/seldon-core-s2i-python2 egreedy-router +s2i build . seldonio/seldon-core-s2i-python3 egreedy-router ``` ## Smoke Test @@ -20,4 +32,8 @@ Send a data request. ```bash data='{"data":{"names":["a","b"],"ndarray":[[1.0,2.0]]}}' curl -d "json=${data}" http://0.0.0.0:5000/route -``` \ No newline at end of file +``` + +## Running on Seldon +An end-to-end example deploying an epsilon-greedy router to route traffic to 3 models in parallel is available [here]( +https://github.com/SeldonIO/seldon-core/blob/master/notebooks/epsilon_greedy_gcp.ipynb). diff --git a/examples/routers/epsilon_greedy/requirements.txt b/examples/routers/epsilon_greedy/requirements.txt index 0193419ad2..678d0e5d52 100644 --- a/examples/routers/epsilon_greedy/requirements.txt +++ b/examples/routers/epsilon_greedy/requirements.txt @@ -1 +1 @@ -numpy==1.11.2 +numpy==1.14.3 diff --git a/integrations/nvidia-inference-server/.s2i/environment b/integrations/nvidia-inference-server/.s2i/environment new file mode 100644 index 0000000000..00aba0e6f0 --- /dev/null +++ b/integrations/nvidia-inference-server/.s2i/environment @@ -0,0 +1,4 @@ +MODEL_NAME=TRTProxy +API_TYPE=REST +SERVICE_TYPE=MODEL +PERSISTENCE=0 diff --git a/integrations/nvidia-inference-server/Makefile b/integrations/nvidia-inference-server/Makefile new file mode 100644 index 0000000000..cac653c292 --- /dev/null +++ b/integrations/nvidia-inference-server/Makefile @@ -0,0 +1,12 @@ +IMAGE_VERSION=0.1 +IMAGE_NAME = docker.io/seldonio/nvidia-inference-server-proxy + +SELDON_CORE_DIR=../../.. + +.PHONY: build +build: + s2i build . seldonio/seldon-core-s2i-python3-tensorrt:0.1 $(IMAGE_NAME):$(IMAGE_VERSION) + +push_to_dockerhub: + docker push $(IMAGE_NAME):$(IMAGE_VERSION) + diff --git a/integrations/nvidia-inference-server/README.md b/integrations/nvidia-inference-server/README.md new file mode 100644 index 0000000000..2ab796bb45 --- /dev/null +++ b/integrations/nvidia-inference-server/README.md @@ -0,0 +1,167 @@ +# NVIDIA Inference Server Proxy + +The NVIDIA Inference Server Proxy provides a proxy to forward Seldon prediction requests to a running [NVIDIA Inference Server](https://docs.nvidia.com/deeplearning/sdk/inference-user-guide/index.html). + +## Configuration + +The Nvidia Proxy takes several parameters: + + | Parameter | Type | Value | Example | + |-----------|------|-------|---------| + | url | STRING | URL to Nvidia Inference Server endpoint | 127.0.0.1:8000 | + | model_name | STRING | model name | tensorrt_mnist | + | protocol | STRING | API protocol to use: HTTP or GRPC | HTTP | + + +An example SeldonDeployment Kubernetes resource taken from the MNIST demo is shown below to illustrate how these parameters are set. The graph consists of three containers + + 1. A Seldon transformer to do feature transformations on the raw input. + 1. A NVIDIA Inference Server Model Proxy. + 1. The NVIDIA Inference Server loaded with a model. 
+ +![MNIST Example](./mnist-graph.png) + + +``` +{ + "apiVersion": "machinelearning.seldon.io/v1alpha2", + "kind": "SeldonDeployment", + "metadata": { + "labels": { + "app": "seldon" + }, + "name": "nvidia-mnist", + "namespace": "kubeflow" + }, + "spec": { + "name": "caffe2-mnist", + "predictors": [ + { + "componentSpecs": [{ + "spec": { + "containers": [ + { + "image": "seldonio/mnist-caffe2-transformer:0.1", + "name": "mnist-transformer" + }, + { + "image": "seldonio/nvidia-inference-server-proxy:0.1", + "name": "nvidia-proxy" + }, + { + "args": [ + "--model-store=gs://seldon-inference-server-model-store" + ], + "command": [ + "inference_server" + ], + "image": "nvcr.io/nvidia/inferenceserver:18.08.1-py2", + "livenessProbe": { + "failureThreshold": 3, + "handler":{ + "httpGet": { + "path": "/api/health/live", + "port": 8000, + "scheme": "HTTP" + } + }, + "initialDelaySeconds": 5, + "periodSeconds": 5, + "successThreshold": 1, + "timeoutSeconds": 1 + }, + "name": "inference-server", + "ports": [ + { + "containerPort": 8000, + "protocol": "TCP" + }, + { + "containerPort": 8001, + "protocol": "TCP" + }, + { + "containerPort": 8002, + "protocol": "TCP" + } + ], + "readinessProbe": { + "failureThreshold": 3, + "handler":{ + "httpGet": { + "path": "/api/health/ready", + "port": 8000, + "scheme": "HTTP" + } + }, + "initialDelaySeconds": 5, + "periodSeconds": 5, + "successThreshold": 1, + "timeoutSeconds": 1 + }, + "resources": { + "limits": { + "nvidia.com/gpu": "1" + }, + "requests": { + "cpu": "100m", + "nvidia.com/gpu": "1" + } + }, + "securityContext": { + "runAsUser": 1000 + } + } + ], + "terminationGracePeriodSeconds": 1, + "imagePullSecrets": [ + { + "name": "ngc" + } + ] + } + }], + "graph": { + "name": "mnist-transformer", + "endpoint": { "type" : "REST" }, + "type": "TRANSFORMER", + "children": [ + { + "name": "nvidia-proxy", + "endpoint": { "type" : "REST" }, + "type": "MODEL", + "children": [], + "parameters": + [ + { + "name":"url", + "type":"STRING", + "value":"127.0.0.1:8000" + }, + { + "name":"model_name", + "type":"STRING", + "value":"tensorrt_mnist" + }, + { + "name":"protocol", + "type":"STRING", + "value":"HTTP" + } + ] + } + ] + }, + "name": "mnist-nvidia", + "replicas": 1 + } + ] + } +} + + +``` + +Examples: + + * [MNIST with Nvidia Inference Server](../../examples/models/nvidia-mnist/nvidia_mnist.ipynb). 
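For a quick sanity check of the inference server itself, the health endpoints referenced by the liveness and readiness probes above can be queried directly. A minimal sketch, assuming the inference server's HTTP port (8000) has been port-forwarded to localhost:

```python
import requests

# Health endpoints used by the probes in the SeldonDeployment above.
# Assumes something like `kubectl port-forward <inference-server-pod> 8000:8000` is running.
base = "http://localhost:8000"
for path in ("/api/health/live", "/api/health/ready"):
    resp = requests.get(base + path, timeout=5)
    print(path, resp.status_code)
```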
diff --git a/integrations/nvidia-inference-server/TRTProxy.py b/integrations/nvidia-inference-server/TRTProxy.py new file mode 100644 index 0000000000..3b68c4f70c --- /dev/null +++ b/integrations/nvidia-inference-server/TRTProxy.py @@ -0,0 +1,83 @@ +from inference_server.api import * +import inference_server.api.model_config_pb2 as model_config +import grpc +import numpy as np + + +def model_dtype_to_np(model_dtype): + ''' + Helper function from https://github.com/NVIDIA/dl-inference-server/blob/18.08/src/clients/python/image_client.py + ''' + if model_dtype == model_config.TYPE_BOOL: + return np.bool + elif model_dtype == model_config.TYPE_INT8: + return np.int8 + elif model_dtype == model_config.TYPE_INT16: + return np.int16 + elif model_dtype == model_config.TYPE_INT32: + return np.int32 + elif model_dtype == model_config.TYPE_INT64: + return np.int64 + elif model_dtype == model_config.TYPE_UINT8: + return np.uint8 + elif model_dtype == model_config.TYPE_UINT16: + return np.uint16 + elif model_dtype == model_config.TYPE_FP16: + return np.float16 + elif model_dtype == model_config.TYPE_FP32: + return np.float32 + elif model_dtype == model_config.TYPE_FP64: + return np.float64 + return None + +def parse_model(url, protocol, model_name, verbose=False): + ctx = ServerStatusContext(url, protocol, model_name, verbose) + server_status = ctx.get_server_status() + + if model_name not in server_status.model_status: + raise Exception("unable to find model:"+model_name) + + status = server_status.model_status[model_name] + config = status.config + + input = config.input[0] + output = config.output[0] + + return (input.name, output.name, model_dtype_to_np(input.data_type), input.dims) + + +''' +A basic NVIDIA Inference Server proxy +''' +class TRTProxy(object): + + def __init__(self,url=None,protocol="HTTP",model_name=None,model_version=1): + print("URL:",url) + self.url = url + self.protocol_id = ProtocolType.from_str(protocol) + self.model_version = model_version + if protocol == "GRPC": + self.grpc = True + channel = grpc.insecure_channel(url) + #self.stub = prediction_service_pb2_grpc.PredictionServiceStub(channel) + else: + self.grpc = False + self.model_name = model_name + self.input_name, self.output_name, self.dtype, self.input_dims = parse_model(url, self.protocol_id, model_name, False) + self.ctx = InferContext(self.url, self.protocol_id,self.model_name, self.model_version, False) + + + + def predict(self,X,features_names): + X = X.astype(self.dtype) + if self.grpc: + print("not implemented") + else: + if len(X.shape) == len(self.input_dims): + X = [X] + results = self.ctx.run( + { self.input_name : X }, + { self.output_name : InferContext.ResultFormat.RAW }, + 1) + return results[self.output_name] + return [] diff --git a/integrations/nvidia-inference-server/mnist-graph.png b/integrations/nvidia-inference-server/mnist-graph.png new file mode 100644 index 0000000000..d78dab25fe Binary files /dev/null and b/integrations/nvidia-inference-server/mnist-graph.png differ diff --git a/integrations/tfserving/.s2i/environment b/integrations/tfserving/.s2i/environment new file mode 100644 index 0000000000..faf6e43bf8 --- /dev/null +++ b/integrations/tfserving/.s2i/environment @@ -0,0 +1,4 @@ +MODEL_NAME=TfServingProxy +API_TYPE=REST +SERVICE_TYPE=MODEL +PERSISTENCE=0 diff --git a/integrations/tfserving/Makefile b/integrations/tfserving/Makefile new file mode 100644 index 0000000000..3565a1b4f6 --- /dev/null +++ b/integrations/tfserving/Makefile @@ -0,0 +1,12 @@ +IMAGE_VERSION=0.1 +IMAGE_NAME = docker.io/seldonio/tfserving-proxy +
+SELDON_CORE_DIR=../../.. + +.PHONY: build +build: + s2i build . seldonio/seldon-core-s2i-python3:0.2 $(IMAGE_NAME):$(IMAGE_VERSION) + +push_to_dockerhub: + docker push $(IMAGE_NAME):$(IMAGE_VERSION) + diff --git a/integrations/tfserving/README.md b/integrations/tfserving/README.md new file mode 100644 index 0000000000..11a82395a2 --- /dev/null +++ b/integrations/tfserving/README.md @@ -0,0 +1,122 @@ +# TensorFlow Serving Proxy + +The TensorFlow Serving Proxy provides a proxy to forward Seldon prediction requests to a running [TenserFlow Serving](https://www.tensorflow.org/serving/) server. + +## Configuration + +The tensorflow proxy takes several parameters: + + | Parameter | Type | Value | Example | + |-----------|------|-------|---------| + | rest_endpoint | STRING | URL of server HTTP endpoint | http://0.0.0.0:8000 | + | grpc_endpoint | STRING | host and port of gRPC endpoint | 0.0.0.0:8001 | + | model_name | STRING | model name | mnist-model | + | signature_name | STRING | model signature name | predict_images | + | model_input | STRING | model input name | images | + | model_output | STRING | model output name | scores | + +An example resource with the proxy and a Tensorflow Serving server is shown below. + + +``` +{ + "apiVersion": "machinelearning.seldon.io/v1alpha2", + "kind": "SeldonDeployment", + "metadata": { + "labels": { + "app": "seldon" + }, + "name": "tfserving-mnist", + "namespace": "seldon" + }, + "spec": { + "name": "tf-mnist", + "predictors": [ + { + "componentSpecs": [{ + "spec": { + "containers": [ + { + "image": "seldonio/tfserving-proxy:0.1", + "name": "tfserving-proxy" + }, + { + "args": [ + "/usr/bin/tensorflow_model_server", + "--port=8000", + "--model_name=mnist-model", + "--model_base_path=gs://seldon-tfserving-store/mnist-model" + ], + "image": "gcr.io/kubeflow-images-public/tensorflow-serving-1.7:v20180604-0da89b8a", + "name": "mnist-model", + "ports": [ + { + "containerPort": 8000, + "protocol": "TCP" + } + ], + "resources": { + "limits": { + "cpu": "4", + "memory": "4Gi" + }, + "requests": { + "cpu": "1", + "memory": "1Gi" + } + }, + "securityContext": { + "runAsUser": 1000 + } + } + ], + "terminationGracePeriodSeconds": 1 + } + }], + "graph": { + "name": "tfserving-proxy", + "endpoint": { "type" : "REST" }, + "type": "MODEL", + "children": [], + "parameters": + [ + { + "name":"grpc_endpoint", + "type":"STRING", + "value":"localhost:8000" + }, + { + "name":"model_name", + "type":"STRING", + "value":"mnist-model" + }, + { + "name":"model_output", + "type":"STRING", + "value":"scores" + }, + { + "name":"model_input", + "type":"STRING", + "value":"images" + }, + { + "name":"signature_name", + "type":"STRING", + "value":"predict_images" + } + ] + }, + "name": "mnist-tfserving", + "replicas": 1 + } + ] + } +} + + +``` + +Examples: + + * [MNIST with TensorFlow Serving Proxy](../../examples/models/tfserving-mnist/tfserving-mnist.ipynb). 
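Once the example deployment above is running, a prediction can be sent through the Seldon external API. A hedged sketch, assuming Ambassador has been port-forwarded to localhost:8003 (as in the notebook earlier in this change) and that the Seldon Core 0.2.x REST path `/seldon/<deployment-name>/api/v0.1/predictions` applies to your installation:

```python
import json
import random

import requests

# Illustrative client call only; the endpoint path and port-forward are assumptions.
payload = {
    "data": {
        "names": [],
        "tensor": {"shape": [1, 784], "values": [random.random() for _ in range(784)]},
    }
}
resp = requests.post(
    "http://localhost:8003/seldon/tfserving-mnist/api/v0.1/predictions",
    json=payload,
)
print(resp.status_code)
print(json.dumps(resp.json(), indent=2))
```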
diff --git a/integrations/tfserving/TfServingProxy.py b/integrations/tfserving/TfServingProxy.py new file mode 100644 index 0000000000..15e88a252f --- /dev/null +++ b/integrations/tfserving/TfServingProxy.py @@ -0,0 +1,69 @@ +import grpc +import numpy +import tensorflow as tf + +from tensorflow.python.saved_model import signature_constants +from tensorflow_serving.apis import predict_pb2 +from tensorflow_serving.apis import prediction_service_pb2_grpc + +import requests +import json + +class TensorflowServerError(Exception): + + def __init__(self, message): + self.message = message + +''' +A basic tensorflow serving proxy +''' +class TfServingProxy(object): + + def __init__(self,rest_endpoint=None,grpc_endpoint=None,model_name=None,signature_name=None,model_input=None,model_output=None): + print("rest_endpoint:",rest_endpoint) + print("grpc_endpoint:",grpc_endpoint) + if not grpc_endpoint is None: + self.grpc = True + channel = grpc.insecure_channel(grpc_endpoint) + self.stub = prediction_service_pb2_grpc.PredictionServiceStub(channel) + else: + self.grpc = False + self.rest_endpoint = rest_endpoint + self.model_name = model_name + if signature_name is None: + self.signature_name = signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY + else: + self.signature_name = signature_name + self.model_input = model_input + self.model_output = model_output + + def predict(self,X,features_names): + if self.grpc: + request = predict_pb2.PredictRequest() + request.model_spec.name = self.model_name + request.model_spec.signature_name = self.signature_name + request.inputs[self.model_input].CopyFrom(tf.contrib.util.make_tensor_proto(X.tolist(), shape=X.shape)) + print(request) + result = self.stub.Predict(request) + print(result) + response = numpy.array(result.outputs[self.model_output].float_val) + if len(response.shape) == 1: + response = numpy.expand_dims(response, axis=0) + return response + else: + print(self.rest_endpoint) + data = {"instances":X.tolist()} + if not self.signature_name is None: + data["signature_name"] = self.signature_name + print(data) + response = requests.post( + self.rest_endpoint, + data = json.dumps(data)) + if response.status_code == 200: + result = numpy.array(response.json()["predictions"]) + if len(result.shape) == 1: + result = numpy.expand_dims(result, axis=0) + return result + else: + print("Error from server:",response) + raise TensorflowServerError(response.json()) diff --git a/integrations/tfserving/requirements.txt b/integrations/tfserving/requirements.txt new file mode 100644 index 0000000000..78b3517253 --- /dev/null +++ b/integrations/tfserving/requirements.txt @@ -0,0 +1,3 @@ +tensorflow==1.10.1 +tensorflow-serving-api==1.10.1 +requests diff --git a/notebooks/kubectl_demo_minikube_rbac.ipynb b/notebooks/kubectl_demo_minikube_rbac.ipynb index 84fef8574e..a2381ff3fc 100644 --- a/notebooks/kubectl_demo_minikube_rbac.ipynb +++ b/notebooks/kubectl_demo_minikube_rbac.ipynb @@ -34,6 +34,12 @@ "Your start command would then look like:\n", "```\n", "minikube start --vm-driver kvm2 --memory 4096 --feature-gates=CustomResourceValidation=true --extra-config=apiserver.Authorization.Mode=RBAC\n", + "```\n", + "**2018-09-24** : Alternatively, you can try this command with a new formatting of --extra-config parameter otherwise your start command will hang forever (see https://github.com/kubernetes/minikube/issues/2798#issuecomment-420402313 )\n", + "\n", + "Your start command would then look like:\n", + "```\n", + "minikube start --vm-driver kvm2 --memory 4096 
--feature-gates=CustomResourceValidation=true --extra-config=apiserver.authorization-mode=RBAC\n", "```" ] }, diff --git a/notebooks/visualizer.py b/notebooks/visualizer.py index 405bbb2750..9792a98ab1 100644 --- a/notebooks/visualizer.py +++ b/notebooks/visualizer.py @@ -21,16 +21,11 @@ def get_graph(filename,predictor=0): deployment = json.load(open(filename,'r')) predictors = deployment.get("spec").get("predictors") dot = graphviz.Digraph() - - with dot.subgraph(name="cluster_0") as pdot: - graph = predictors[0].get("graph") - _populate_graph(pdot, graph, suffix='0') - pdot.attr(label="predictor") - - if len(predictors)>1: - with dot.subgraph(name="cluster_1") as cdot: - graph = predictors[1].get("graph") - _populate_graph(cdot, graph, suffix='1') - cdot.attr(label="canary") - + + for idx in range(len(predictors)): + with dot.subgraph(name="cluster_"+str(idx)) as pdot: + graph = predictors[idx].get("graph") + _populate_graph(pdot, graph, suffix=str(idx)) + pdot.attr(label="predictor-"+str(idx)) return dot + diff --git a/readme.md b/readme.md index 386c5c9cdc..e0d06c4410 100644 --- a/readme.md +++ b/readme.md @@ -114,7 +114,8 @@ Seldon-core allows various types of components to be built and plugged into the * [Train and deploy a PyTorch MNIST classififer using FfDL and Seldon.](https://github.com/IBM/FfDL/blob/master/community/FfDL-Seldon/pytorch-model/README.md) * [Istio and Seldon](./docs/istio.md) * [Canary deployemts using Istio and Seldon.](examples/istio/canary_update/canary.ipynb). - + * [NVIDIA TensorRT and DL Inference Server](./integrations/nvidia-inference-server/README.md) + * [Tensorflow Serving](./integrations/tfserving/README.md) ## Install @@ -190,7 +191,8 @@ Three steps: | [Seldon Java Runtime Wrapper for S2I](docs/wrappers/java.md) | [seldonio/seldon-core-s2i-java-runtime](https://hub.docker.com/r/seldonio/seldon-core-s2i-java-runtime/tags/) | 0.1 | | | [Seldon R Wrapper for S2I](docs/wrappers/r.md) | [seldonio/seldon-core-s2i-r](https://hub.docker.com/r/seldonio/seldon-core-s2i-r/tags/) | 0.1 | | | [Seldon NodeJS Wrapper for S2I](docs/wrappers/nodejs.md) | [seldonio/seldon-core-s2i-nodejs](https://hub.docker.com/r/seldonio/seldon-core-s2i-nodejs/tags/) | 0.1 | | - +| [Seldon Tensorflow Serving proxy](integrations/tfserving/README.md) | [seldonio/tfserving-proxy](https://hub.docker.com/r/seldonio/tfserving-proxy/tags/) | 0.1 | +| [Seldon NVIDIA inference server proxy](integrations/nvidia-inference-server/README.md) | [seldonio/nvidia-inference-server-proxy](https://hub.docker.com/r/seldonio/nvidia-inference-server-proxy/tags/) | 0.1 | #### Java Packages | Description | Package | Version | diff --git a/seldon-core/seldon-core/core.libsonnet b/seldon-core/seldon-core/core.libsonnet index 767c574d98..552ff6bc87 100644 --- a/seldon-core/seldon-core/core.libsonnet +++ b/seldon-core/seldon-core/core.libsonnet @@ -96,6 +96,7 @@ local getEnvNotRedis(x) = x.name != "SELDON_CLUSTER_MANAGER_REDIS_HOST"; { name: "JAVA_OPTS", value: javaOpts }, { name: "SPRING_OPTS", value: springOpts }, { name: "ENGINE_CONTAINER_IMAGE_AND_VERSION", value: engineImage }, + { name: "ENGINE_CONTAINER_IMAGE_PULL_POLICY", value: "IfNotPresent" }, { name: "SELDON_CLUSTER_MANAGER_REDIS_HOST", value: name+"-redis" }, { name: "SELDON_CLUSTER_MANAGER_POD_NAMESPACE", valueFrom: {fieldRef:{apiVersion: "v1",fieldPath: "metadata.namespace"}}}, ]; diff --git a/seldon-core/seldon-core/json/template.json b/seldon-core/seldon-core/json/template.json index 38e3614b2d..7f4f67b4fc 100644 --- 
a/seldon-core/seldon-core/json/template.json +++ b/seldon-core/seldon-core/json/template.json @@ -8,16 +8,16 @@ "apiVersion": "v1", "metadata": { "name": "seldon", - "namespace": "seldon", + "namespace": "default", "creationTimestamp": null } }, { "kind": "Role", - "apiVersion": "rbac.authorization.k8s.io/v1beta1", + "apiVersion": "rbac.authorization.k8s.io/v1", "metadata": { "name": "seldon-local", - "namespace": "seldon", + "namespace": "default", "creationTimestamp": null }, "rules": [ @@ -48,7 +48,7 @@ }, { "kind": "ClusterRole", - "apiVersion": "rbac.authorization.k8s.io/v1beta1", + "apiVersion": "rbac.authorization.k8s.io/v1", "metadata": { "name": "seldon-crd", "creationTimestamp": null @@ -72,14 +72,14 @@ "apiVersion": "rbac.authorization.k8s.io/v1", "metadata": { "name": "seldon", - "namespace": "seldon", + "namespace": "default", "creationTimestamp": null }, "subjects": [ { "kind": "ServiceAccount", "name": "seldon", - "namespace": "seldon" + "namespace": "default" } ], "roleRef": { @@ -99,7 +99,7 @@ { "kind": "ServiceAccount", "name": "seldon", - "namespace": "seldon" + "namespace": "default" } ], "roleRef": { @@ -110,7 +110,7 @@ }, { "kind": "Role", - "apiVersion": "rbac.authorization.k8s.io/v1beta1", + "apiVersion": "rbac.authorization.k8s.io/v1", "metadata": { "name": "ambassador", "creationTimestamp": null @@ -162,7 +162,7 @@ }, { "kind": "RoleBinding", - "apiVersion": "rbac.authorization.k8s.io/v1beta1", + "apiVersion": "rbac.authorization.k8s.io/v1", "metadata": { "name": "ambassador", "creationTimestamp": null @@ -171,7 +171,7 @@ { "kind": "ServiceAccount", "name": "seldon", - "namespace": "seldon" + "namespace": "default" } ], "roleRef": { @@ -245,7 +245,7 @@ }, { "kind": "Deployment", - "apiVersion": "extensions/v1beta1", + "apiVersion": "apps/v1", "metadata": { "name": "RELEASE-NAME-ambassador", "creationTimestamp": null, @@ -384,16 +384,16 @@ }, { "kind": "Deployment", - "apiVersion": "apps/v1beta1", + "apiVersion": "apps/v1", "metadata": { "name": "RELEASE-NAME-seldon-apiserver", - "namespace": "seldon", + "namespace": "default", "creationTimestamp": null, "labels": { "app": "seldon-apiserver-container-app", "app.kubernetes.io/component": "seldon-core-apiserver", "app.kubernetes.io/name": "RELEASE-NAME", - "chart": "seldon-core-0.2.3-SNAPSHOT", + "chart": "seldon-core-0.2.4-SNAPSHOT", "component": "seldon-core", "heritage": "Tiller", "release": "RELEASE-NAME" @@ -406,7 +406,7 @@ "app": "seldon-apiserver-container-app", "app.kubernetes.io/component": "seldon-core-apiserver", "app.kubernetes.io/name": "RELEASE-NAME", - "chart": "seldon-core-0.2.3-SNAPSHOT", + "chart": "seldon-core-0.2.4-SNAPSHOT", "component": "seldon-core", "heritage": "Tiller", "release": "RELEASE-NAME" @@ -419,7 +419,7 @@ "app": "seldon-apiserver-container-app", "app.kubernetes.io/component": "seldon-core-apiserver", "app.kubernetes.io/name": "RELEASE-NAME", - "chart": "seldon-core-0.2.3-SNAPSHOT", + "chart": "seldon-core-0.2.4-SNAPSHOT", "component": "seldon-core", "heritage": "Tiller", "release": "RELEASE-NAME" @@ -451,7 +451,7 @@ "containers": [ { "name": "seldon-apiserver-container", - "image": "seldonio/apife:0.2.3-SNAPSHOT", + "image": "seldonio/apife:0.2.4-SNAPSHOT", "ports": [ { "containerPort": 8080, @@ -557,16 +557,16 @@ }, { "kind": "Deployment", - "apiVersion": "extensions/v1beta1", + "apiVersion": "apps/v1", "metadata": { "name": "RELEASE-NAME-seldon-cluster-manager", - "namespace": "seldon", + "namespace": "default", "creationTimestamp": null, "labels": { "app": 
"seldon-cluster-manager-server", "app.kubernetes.io/component": "seldon-core-operator", "app.kubernetes.io/name": "RELEASE-NAME", - "chart": "seldon-core-0.2.3-SNAPSHOT", + "chart": "seldon-core-0.2.4-SNAPSHOT", "component": "seldon-core", "heritage": "Tiller", "release": "RELEASE-NAME" @@ -579,7 +579,7 @@ "app": "seldon-cluster-manager-server", "app.kubernetes.io/component": "seldon-core-operator", "app.kubernetes.io/name": "RELEASE-NAME", - "chart": "seldon-core-0.2.3-SNAPSHOT", + "chart": "seldon-core-0.2.4-SNAPSHOT", "component": "seldon-core", "heritage": "Tiller", "release": "RELEASE-NAME" @@ -592,7 +592,7 @@ "app": "seldon-cluster-manager-server", "app.kubernetes.io/component": "seldon-core-operator", "app.kubernetes.io/name": "RELEASE-NAME", - "chart": "seldon-core-0.2.3-SNAPSHOT", + "chart": "seldon-core-0.2.4-SNAPSHOT", "component": "seldon-core", "heritage": "Tiller", "release": "RELEASE-NAME" @@ -602,7 +602,7 @@ "containers": [ { "name": "seldon-cluster-manager-container", - "image": "seldonio/cluster-manager:0.2.3-SNAPSHOT", + "image": "seldonio/cluster-manager:0.2.4-SNAPSHOT", "ports": [ { "containerPort": 8080, @@ -622,7 +622,11 @@ }, { "name": "ENGINE_CONTAINER_IMAGE_AND_VERSION", - "value": "seldonio/engine:0.2.3-SNAPSHOT" + "value": "seldonio/engine:0.2.4-SNAPSHOT" + }, + { + "name": "ENGINE_CONTAINER_IMAGE_PULL_POLICY", + "value": "IfNotPresent" }, { "name": "SELDON_CLUSTER_MANAGER_POD_NAMESPACE", @@ -644,7 +648,7 @@ } ], "restartPolicy": "Always", - "terminationGracePeriodSeconds": 30, + "terminationGracePeriodSeconds": 1, "dnsPolicy": "ClusterFirst", "serviceAccountName": "seldon", "serviceAccount": "seldon", @@ -664,16 +668,16 @@ }, { "kind": "Deployment", - "apiVersion": "extensions/v1beta1", + "apiVersion": "apps/v1", "metadata": { "name": "RELEASE-NAME-redis", - "namespace": "seldon", + "namespace": "default", "creationTimestamp": null, "labels": { "app": "RELEASE-NAME-redis-app", "app.kubernetes.io/component": "seldon-core-redis", "app.kubernetes.io/name": "RELEASE-NAME", - "chart": "seldon-core-0.2.3-SNAPSHOT", + "chart": "seldon-core-0.2.4-SNAPSHOT", "component": "seldon-core", "heritage": "Tiller", "release": "RELEASE-NAME" @@ -686,7 +690,7 @@ "app": "RELEASE-NAME-redis-app", "app.kubernetes.io/component": "seldon-core-redis", "app.kubernetes.io/name": "RELEASE-NAME", - "chart": "seldon-core-0.2.3-SNAPSHOT", + "chart": "seldon-core-0.2.4-SNAPSHOT", "component": "seldon-core", "heritage": "Tiller", "release": "RELEASE-NAME" @@ -699,7 +703,7 @@ "app": "RELEASE-NAME-redis-app", "app.kubernetes.io/component": "seldon-core-redis", "app.kubernetes.io/name": "RELEASE-NAME", - "chart": "seldon-core-0.2.3-SNAPSHOT", + "chart": "seldon-core-0.2.4-SNAPSHOT", "component": "seldon-core", "heritage": "Tiller", "release": "RELEASE-NAME" diff --git a/wrappers/s2i/nodejs/Dockerfile b/wrappers/s2i/nodejs/Dockerfile index 440f304593..7f895e4a8f 100644 --- a/wrappers/s2i/nodejs/Dockerfile +++ b/wrappers/s2i/nodejs/Dockerfile @@ -5,9 +5,13 @@ LABEL io.openshift.s2i.scripts-url="image:///s2i/bin" RUN mkdir microservice WORKDIR /microservice -COPY microservice.js /microservice +COPY *microservice.js /microservice/ -COPY package.json /microservice +COPY package.json /microservice/ + +COPY prediction_grpc_pb.js /microservice/ + +COPY prediction_pb.js /microservice/ RUN npm install diff --git a/wrappers/s2i/nodejs/Makefile b/wrappers/s2i/nodejs/Makefile index 618cd4f9ed..32cbcfbb91 100644 --- a/wrappers/s2i/nodejs/Makefile +++ b/wrappers/s2i/nodejs/Makefile @@ -1,4 +1,4 @@ -VERSION=0.1 
+VERSION=0.2-SNAPSHOT IMAGE_NAME = docker.io/seldonio/seldon-core-s2i-nodejs:${VERSION} SELDON_CORE_DIR=../../.. diff --git a/wrappers/s2i/nodejs/README.md b/wrappers/s2i/nodejs/README.md index ac7c19dc85..97d2a10f58 100644 --- a/wrappers/s2i/nodejs/README.md +++ b/wrappers/s2i/nodejs/README.md @@ -57,7 +57,7 @@ This builds the base wrapper image needed for any nodejs model to be deployed on ### Building s2i nodejs model Image ``` -s2i build test/model-template-app seldonio/seldon-core-s2i-nodejs seldon-core-template-model +s2i build -E ./test/model-template-app/.s2i/environment test/model-template-app seldonio/seldon-core-s2i-nodejs seldon-core-template-model ``` This creates the actual nodejs model image as a seldon component @@ -77,3 +77,22 @@ Make sure the current user can run npm commands. ``` make test ``` + +### GRPC code-generated Proto JS Files + +This code is pre-generated using protoc and the Node gRPC protoc plugin; the generated code can be found in the various `*_pb.js` files. +The creation of the gRPC service assumes these files are present. + +``` +cd ../../../proto/ +npm install -g grpc-tools +grpc_tools_node_protoc --js_out=import_style=commonjs,binary:../wrappers/s2i/nodejs/ --grpc_out=../wrappers/s2i/nodejs --plugin=protoc-gen-grpc=`which grpc_tools_node_protoc_plugin` prediction.proto +cd ../wrappers/s2i/nodejs/ ``` + +### Test using GRPC client + +``` +npm i +node grpc_client.js +``` diff --git a/wrappers/s2i/nodejs/grpc_client.js b/wrappers/s2i/nodejs/grpc_client.js new file mode 100644 index 0000000000..757b751afe --- /dev/null +++ b/wrappers/s2i/nodejs/grpc_client.js @@ -0,0 +1,61 @@ +/* + * + * Copyright 2015 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +var messages = require("./prediction_pb"); +var services = require("./prediction_grpc_pb"); + +var grpc = require("grpc"); + +function main() { + var client = new services.ModelClient( + "localhost:5000", + grpc.credentials.createInsecure() + ); + var tensorData = new messages.Tensor(); + tensorData.setShapeList([1, 10]); + tensorData.setValuesList([0, 0, 1, 1, 5, 6, 7, 8, 4, 3]); + + var defdata = new messages.DefaultData(); + defdata.setTensor(tensorData); + defdata.setNamesList([]); + + var request = new messages.SeldonMessage(); + request.setData(defdata); + client.predict(request, function(err, response) { + if (err) { + console.log(err); + } else { + console.log( + "Seldon Message => \n\nNames: ", + response.getData().getNamesList(), + "\n\nShape: ", + response + .getData() + .getTensor() + .getShapeList(), + "\n\nValues: ", + response + .getData() + .getTensor() + .getValuesList() + ); + } + }); +} + +main(); diff --git a/wrappers/s2i/nodejs/microservice.js b/wrappers/s2i/nodejs/microservice.js index 64d1237120..72b668f352 100644 --- a/wrappers/s2i/nodejs/microservice.js +++ b/wrappers/s2i/nodejs/microservice.js @@ -1,23 +1,9 @@ const argparse = require("argparse"); -const express = require("express"); -const app = express(); -const bodyParser = require("body-parser"); const nj = require("numjs"); -app.use(bodyParser.urlencoded({ extended: true })); -let predict = null; +const grpc_messages = require("./prediction_pb"); +const process = require("process"); const port = process.env.PREDICTIVE_UNIT_SERVICE_PORT || 5000; -const loadModel = async function(model) { - model = "./model/" + model; - try { - const MyModel = require(model); - console.log("Loading Model", model); - let x = new MyModel(); - await x.init(); - return x.predict.bind(x); - } catch (msg) { - return msg; - } -}; +let user_model = null; const get_predict_classNames = function(size) { let className = []; @@ -30,6 +16,7 @@ const get_predict_classNames = function(size) { }; const rest_data_to_array = function(data) { + let features = null; if (data["tensor"]) { features = nj .array(data["tensor"]["values"]) @@ -58,6 +45,33 @@ const array_to_rest_data = function(array, original_datadef) { return data; }; +const array_to_grpc_data = function(array, original_datadef) { + array = nj.array(array); + + var defdata = new grpc_messages.DefaultData(); + defdata.setNamesList(get_predict_classNames(array.shape[1])); + if (original_datadef["tensor"]) { + var tensorData = new grpc_messages.Tensor(); + tensorData.setShapeList(array.shape.length > 1 ? 
array.shape : []); + tensorData.setValuesList(array.flatten().tolist()); + defdata.setTensor(tensorData); + } else if (original_datadef["ndarray"]) { + defdata.setNdarray(array.tolist()); + } else { + defdata.setNdarray(array.tolist()); + } + var data = new grpc_messages.SeldonMessage(); + data.setData(defdata); + return data; +}; + +const dataFunctions = [ + rest_data_to_array, + array_to_rest_data, + array_to_grpc_data, + get_predict_classNames +]; + const parser = new argparse.ArgumentParser({ description: "Seldon-core nodejs microservice builder", addHelp: true }); @@ -81,34 +95,53 @@ parser.addArgument("--persistence", { }); const args = parser.parseArgs(); -console.log(args.model, args.api, args.service, args.persistence); +const loadModel = async function(model) { + model = "./model/" + model; + try { + const MyModel = require(model); + console.log("Loading Model", model); + let x = new MyModel(); + await x.init(); + return x; + } catch (msg) { + return msg; + } +}; -if (args.service === "MODEL" && args.api === "REST") { - app.post("/predict", async (req, res) => { - try { - body = JSON.parse(req.body.json); - body = body.data; - } catch (msg) { - console.log(msg); - res.status(500).send("Cannot parse predict input json " + req.body); - } - if (predict && typeof predict === "function") { - result = predict(rest_data_to_array(body), body.names); - result = { data: array_to_rest_data(result, body) }; - res.status(200).send(result); - } else { - console.log("Predict function not Found"); - res.status(500).send(predict); - } - }); -} +const createServer = () => { + if (args.service === "MODEL") { + require("./model_microservice.js")( + user_model, + args.api, + port, + ...dataFunctions + ); + } + if (args.service === "TRANSFORMER") { + require("./transformer_microservice.js")( + user_model, + args.api, + port, + ...dataFunctions + ); + } +}; -app.listen(port, async () => { - predict = await loadModel( +const getModelFunction = async () => { + user_model = await loadModel( args.model, args.api, args.service, args.persistence ); - console.log(`NodeJs Microservice listening on port ${port}!`); -}); + if (user_model) { + console.log("Model Class loaded successfully"); + createServer(); + } else { + console.log("Model Class could not be loaded ", user_model); + process.exit(1); + } +}; + +getModelFunction(); +console.log(args.model, args.api, args.service, args.persistence); diff --git a/wrappers/s2i/nodejs/model_microservice.js b/wrappers/s2i/nodejs/model_microservice.js new file mode 100644 index 0000000000..b4409c998c --- /dev/null +++ b/wrappers/s2i/nodejs/model_microservice.js @@ -0,0 +1,155 @@ +const express = require("express"); +const app = express(); +const bodyParser = require("body-parser"); +const grpc = require("grpc"); +const grpc_services = require("./prediction_grpc_pb"); + +module.exports = ( + user_model, + api, + port, + rest_data_to_array, + array_to_rest_data, + array_to_grpc_data +) => { + if (user_model.predict && typeof user_model.predict === "function") { + console.log("Predict function loaded successfully"); + } else if ( + user_model.send_feedback && + typeof user_model.send_feedback === "function" + ) { + console.log("Send feedback function loaded successfully"); + } else { + console.log(user_model); + process.exit(1); + } + let predict = user_model.predict ? user_model.predict.bind(user_model) : null; + let send_feedback = user_model.send_feedback + ? 
user_model.send_feedback.bind(user_model) + : null; + + if (api === "REST") { + app.use(bodyParser.urlencoded({ extended: true })); + app.post("/predict", (req, res) => { + try { + body = JSON.parse(req.body.json); + body = body.data; + } catch (msg) { + console.log(msg); + res.status(500).send("Cannot parse predict input json " + req.body); + } + if (predict && typeof predict === "function") { + result = predict(rest_data_to_array(body), body.names); + result = { data: array_to_rest_data(result, body) }; + res.status(200).send(result); + } else { + console.log("Predict function not Found"); + res.status(500).send(null); + } + }); + app.post("/send-feedback", (req, res) => { + try { + body = JSON.parse(req.body.json); + request = body.request; + body = request.data; + } catch (msg) { + console.log(msg); + res + .status(500) + .send("Cannot parse Send feedback input json " + req.body); + } + if (send_feedback && typeof send_feedback === "function") { + result = send_feedback( + rest_data_to_array(body), + body.names, + rest_data_to_array(request.truth), + request.reward + ); + // result = { data: array_to_rest_data(result, body) }; + res.status(200).send({}); + } else { + console.log("Send feedback function not Found"); + res.status(500).send(null); + } + }); + var server = app.listen(port, () => { + console.log(`NodeJs REST Microservice listening on port ${port}!`); + }); + function stopServer(code) { + server.close(); + console.log(`About to exit with code: ${code}`); + } + process.on("SIGINT", stopServer.bind(this)); + process.on("SIGTERM", stopServer.bind(this)); + } + + if (api === "GRPC") { + function predictEndpoint(call, callback) { + let data = call.request.getData(); + let body = { names: data.getNamesList() }; + + if (data.hasTensor()) { + data = data.getTensor(); + body["tensor"] = { + shape: data.getShapeList(), + values: data.getValuesList() + }; + } else { + body["ndarray"] = data.getNdarray(); + } + result = predict(rest_data_to_array(body), body.names); + callback(null, array_to_grpc_data(result, body)); + } + function feedbackEndpoint(call, callback) { + let request = call.request.getRequest(); + let data = request.getData(); + let body = { names: data.getNamesList() }; + + if (data.hasTensor()) { + data = data.getTensor(); + body["tensor"] = { + shape: data.getShapeList(), + values: data.getValuesList() + }; + } else { + body["ndarray"] = data.getNdarray(); + } + + let truth = call.request.getTruth(); + let truth_data = truth.getData(); + let truth_body = { names: truth_data.getNamesList() }; + + if (truth_data.hasTensor()) { + truth_data = truth_data.getTensor(); + truth_body["tensor"] = { + shape: truth_data.getShapeList(), + values: truth_data.getValuesList() + }; + } else { + truth_body["ndarray"] = truth_data.getNdarray(); + } + result = send_feedback( + rest_data_to_array(body), + body.names, + rest_data_to_array(truth_body), + call.request.getReward() + ); + callback(null, {}); + } + var server = new grpc.Server(); + server.addService(grpc_services.ModelService, { predict: predictEndpoint }); + // server.addService(grpc_services.SeldonService, { + // predict: predictEndpoint, + // sendFeedback: feedbackEndpoint + // }); + server.bind("0.0.0.0:" + port, grpc.ServerCredentials.createInsecure()); + server.start(); + console.log(`NodeJs GRPC Microservice listening on port ${port}!`); + function stopServer(code) { + server.forceShutdown(); + console.log(`About to exit with code: ${code}`); + } + process.on("SIGINT", stopServer.bind(this)); + process.on("SIGTERM", 
stopServer.bind(this)); + } +}; diff --git a/wrappers/s2i/nodejs/package.json b/wrappers/s2i/nodejs/package.json index fd688cc3f6..8cd63e3093 100644 --- a/wrappers/s2i/nodejs/package.json +++ b/wrappers/s2i/nodejs/package.json @@ -15,6 +15,8 @@ "argparse": "1.0.10", "body-parser": "1.18.3", "express": "4.16.3", + "google-protobuf": "3.6.1", + "grpc": "1.15.1", "numjs": "0.16.0" } } diff --git a/wrappers/s2i/nodejs/prediction_grpc_pb.js b/wrappers/s2i/nodejs/prediction_grpc_pb.js new file mode 100644 index 0000000000..afa057cf6f --- /dev/null +++ b/wrappers/s2i/nodejs/prediction_grpc_pb.js @@ -0,0 +1,216 @@ +// GENERATED CODE -- DO NOT EDIT! + +'use strict'; +var grpc = require('grpc'); +var prediction_pb = require('./prediction_pb.js'); +var google_protobuf_struct_pb = require('google-protobuf/google/protobuf/struct_pb.js'); + +function serialize_seldon_protos_Feedback(arg) { + if (!(arg instanceof prediction_pb.Feedback)) { + throw new Error('Expected argument of type seldon.protos.Feedback'); + } + return new Buffer(arg.serializeBinary()); +} + +function deserialize_seldon_protos_Feedback(buffer_arg) { + return prediction_pb.Feedback.deserializeBinary(new Uint8Array(buffer_arg)); +} + +function serialize_seldon_protos_SeldonMessage(arg) { + if (!(arg instanceof prediction_pb.SeldonMessage)) { + throw new Error('Expected argument of type seldon.protos.SeldonMessage'); + } + return new Buffer(arg.serializeBinary()); +} + +function deserialize_seldon_protos_SeldonMessage(buffer_arg) { + return prediction_pb.SeldonMessage.deserializeBinary(new Uint8Array(buffer_arg)); +} + +function serialize_seldon_protos_SeldonMessageList(arg) { + if (!(arg instanceof prediction_pb.SeldonMessageList)) { + throw new Error('Expected argument of type seldon.protos.SeldonMessageList'); + } + return new Buffer(arg.serializeBinary()); +} + +function deserialize_seldon_protos_SeldonMessageList(buffer_arg) { + return prediction_pb.SeldonMessageList.deserializeBinary(new Uint8Array(buffer_arg)); +} + + +// [END Messages] +// +// [START Services] +// +var GenericService = exports.GenericService = { + transformInput: { + path: '/seldon.protos.Generic/TransformInput', + requestStream: false, + responseStream: false, + requestType: prediction_pb.SeldonMessage, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_SeldonMessage, + requestDeserialize: deserialize_seldon_protos_SeldonMessage, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, + transformOutput: { + path: '/seldon.protos.Generic/TransformOutput', + requestStream: false, + responseStream: false, + requestType: prediction_pb.SeldonMessage, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_SeldonMessage, + requestDeserialize: deserialize_seldon_protos_SeldonMessage, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, + route: { + path: '/seldon.protos.Generic/Route', + requestStream: false, + responseStream: false, + requestType: prediction_pb.SeldonMessage, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_SeldonMessage, + requestDeserialize: deserialize_seldon_protos_SeldonMessage, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, + aggregate: { + path: '/seldon.protos.Generic/Aggregate', + requestStream: false, + 
responseStream: false, + requestType: prediction_pb.SeldonMessageList, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_SeldonMessageList, + requestDeserialize: deserialize_seldon_protos_SeldonMessageList, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, + sendFeedback: { + path: '/seldon.protos.Generic/SendFeedback', + requestStream: false, + responseStream: false, + requestType: prediction_pb.Feedback, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_Feedback, + requestDeserialize: deserialize_seldon_protos_Feedback, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, +}; + +exports.GenericClient = grpc.makeGenericClientConstructor(GenericService); +var ModelService = exports.ModelService = { + predict: { + path: '/seldon.protos.Model/Predict', + requestStream: false, + responseStream: false, + requestType: prediction_pb.SeldonMessage, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_SeldonMessage, + requestDeserialize: deserialize_seldon_protos_SeldonMessage, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, +}; + +exports.ModelClient = grpc.makeGenericClientConstructor(ModelService); +var RouterService = exports.RouterService = { + route: { + path: '/seldon.protos.Router/Route', + requestStream: false, + responseStream: false, + requestType: prediction_pb.SeldonMessage, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_SeldonMessage, + requestDeserialize: deserialize_seldon_protos_SeldonMessage, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, + sendFeedback: { + path: '/seldon.protos.Router/SendFeedback', + requestStream: false, + responseStream: false, + requestType: prediction_pb.Feedback, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_Feedback, + requestDeserialize: deserialize_seldon_protos_Feedback, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, +}; + +exports.RouterClient = grpc.makeGenericClientConstructor(RouterService); +var TransformerService = exports.TransformerService = { + transformInput: { + path: '/seldon.protos.Transformer/TransformInput', + requestStream: false, + responseStream: false, + requestType: prediction_pb.SeldonMessage, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_SeldonMessage, + requestDeserialize: deserialize_seldon_protos_SeldonMessage, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, +}; + +exports.TransformerClient = grpc.makeGenericClientConstructor(TransformerService); +var OutputTransformerService = exports.OutputTransformerService = { + transformOutput: { + path: '/seldon.protos.OutputTransformer/TransformOutput', + requestStream: false, + responseStream: false, + requestType: prediction_pb.SeldonMessage, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_SeldonMessage, + requestDeserialize: deserialize_seldon_protos_SeldonMessage, + responseSerialize: serialize_seldon_protos_SeldonMessage, + 
responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, +}; + +exports.OutputTransformerClient = grpc.makeGenericClientConstructor(OutputTransformerService); +var CombinerService = exports.CombinerService = { + aggregate: { + path: '/seldon.protos.Combiner/Aggregate', + requestStream: false, + responseStream: false, + requestType: prediction_pb.SeldonMessageList, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_SeldonMessageList, + requestDeserialize: deserialize_seldon_protos_SeldonMessageList, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, +}; + +exports.CombinerClient = grpc.makeGenericClientConstructor(CombinerService); +var SeldonService = exports.SeldonService = { + predict: { + path: '/seldon.protos.Seldon/Predict', + requestStream: false, + responseStream: false, + requestType: prediction_pb.SeldonMessage, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_SeldonMessage, + requestDeserialize: deserialize_seldon_protos_SeldonMessage, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, + sendFeedback: { + path: '/seldon.protos.Seldon/SendFeedback', + requestStream: false, + responseStream: false, + requestType: prediction_pb.Feedback, + responseType: prediction_pb.SeldonMessage, + requestSerialize: serialize_seldon_protos_Feedback, + requestDeserialize: deserialize_seldon_protos_Feedback, + responseSerialize: serialize_seldon_protos_SeldonMessage, + responseDeserialize: deserialize_seldon_protos_SeldonMessage, + }, +}; + +exports.SeldonClient = grpc.makeGenericClientConstructor(SeldonService); diff --git a/wrappers/s2i/nodejs/prediction_pb.js b/wrappers/s2i/nodejs/prediction_pb.js new file mode 100644 index 0000000000..40bfa71853 --- /dev/null +++ b/wrappers/s2i/nodejs/prediction_pb.js @@ -0,0 +1,1961 @@ +/** + * @fileoverview + * @enhanceable + * @suppress {messageConventions} JS Compiler reports an error if a variable or + * field starts with 'MSG_' and isn't a translatable message. + * @public + */ +// GENERATED CODE -- DO NOT EDIT! + +var jspb = require('google-protobuf'); +var goog = jspb; +var global = Function('return this')(); + +var google_protobuf_struct_pb = require('google-protobuf/google/protobuf/struct_pb.js'); +goog.exportSymbol('proto.seldon.protos.DefaultData', null, global); +goog.exportSymbol('proto.seldon.protos.Feedback', null, global); +goog.exportSymbol('proto.seldon.protos.Meta', null, global); +goog.exportSymbol('proto.seldon.protos.RequestResponse', null, global); +goog.exportSymbol('proto.seldon.protos.SeldonMessage', null, global); +goog.exportSymbol('proto.seldon.protos.SeldonMessageList', null, global); +goog.exportSymbol('proto.seldon.protos.Status', null, global); +goog.exportSymbol('proto.seldon.protos.Status.StatusFlag', null, global); +goog.exportSymbol('proto.seldon.protos.Tensor', null, global); + +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. 
+ * @extends {jspb.Message} + * @constructor + */ +proto.seldon.protos.SeldonMessage = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, null, proto.seldon.protos.SeldonMessage.oneofGroups_); +}; +goog.inherits(proto.seldon.protos.SeldonMessage, jspb.Message); +if (goog.DEBUG && !COMPILED) { + proto.seldon.protos.SeldonMessage.displayName = 'proto.seldon.protos.SeldonMessage'; +} +/** + * Oneof group definitions for this message. Each group defines the field + * numbers belonging to that group. When of these fields' value is set, all + * other fields in the group are cleared. During deserialization, if multiple + * fields are encountered for a group, only the last value seen will be kept. + * @private {!Array>} + * @const + */ +proto.seldon.protos.SeldonMessage.oneofGroups_ = [[3,4,5]]; + +/** + * @enum {number} + */ +proto.seldon.protos.SeldonMessage.DataOneofCase = { + DATA_ONEOF_NOT_SET: 0, + DATA: 3, + BINDATA: 4, + STRDATA: 5 +}; + +/** + * @return {proto.seldon.protos.SeldonMessage.DataOneofCase} + */ +proto.seldon.protos.SeldonMessage.prototype.getDataOneofCase = function() { + return /** @type {proto.seldon.protos.SeldonMessage.DataOneofCase} */(jspb.Message.computeOneofCase(this, proto.seldon.protos.SeldonMessage.oneofGroups_[0])); +}; + + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto suitable for use in Soy templates. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * com.google.apps.jspb.JsClassTemplate.JS_RESERVED_WORDS. + * @param {boolean=} opt_includeInstance Whether to include the JSPB instance + * for transitional soy proto support: http://goto/soy-param-migration + * @return {!Object} + */ +proto.seldon.protos.SeldonMessage.prototype.toObject = function(opt_includeInstance) { + return proto.seldon.protos.SeldonMessage.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Whether to include the JSPB + * instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.seldon.protos.SeldonMessage} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.SeldonMessage.toObject = function(includeInstance, msg) { + var f, obj = { + status: (f = msg.getStatus()) && proto.seldon.protos.Status.toObject(includeInstance, f), + meta: (f = msg.getMeta()) && proto.seldon.protos.Meta.toObject(includeInstance, f), + data: (f = msg.getData()) && proto.seldon.protos.DefaultData.toObject(includeInstance, f), + bindata: msg.getBindata_asB64(), + strdata: jspb.Message.getFieldWithDefault(msg, 5, "") + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.seldon.protos.SeldonMessage} + */ +proto.seldon.protos.SeldonMessage.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.seldon.protos.SeldonMessage; + return proto.seldon.protos.SeldonMessage.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. 
+ * @param {!proto.seldon.protos.SeldonMessage} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.seldon.protos.SeldonMessage} + */ +proto.seldon.protos.SeldonMessage.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = new proto.seldon.protos.Status; + reader.readMessage(value,proto.seldon.protos.Status.deserializeBinaryFromReader); + msg.setStatus(value); + break; + case 2: + var value = new proto.seldon.protos.Meta; + reader.readMessage(value,proto.seldon.protos.Meta.deserializeBinaryFromReader); + msg.setMeta(value); + break; + case 3: + var value = new proto.seldon.protos.DefaultData; + reader.readMessage(value,proto.seldon.protos.DefaultData.deserializeBinaryFromReader); + msg.setData(value); + break; + case 4: + var value = /** @type {!Uint8Array} */ (reader.readBytes()); + msg.setBindata(value); + break; + case 5: + var value = /** @type {string} */ (reader.readString()); + msg.setStrdata(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.seldon.protos.SeldonMessage.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.seldon.protos.SeldonMessage.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. + * @param {!proto.seldon.protos.SeldonMessage} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.SeldonMessage.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getStatus(); + if (f != null) { + writer.writeMessage( + 1, + f, + proto.seldon.protos.Status.serializeBinaryToWriter + ); + } + f = message.getMeta(); + if (f != null) { + writer.writeMessage( + 2, + f, + proto.seldon.protos.Meta.serializeBinaryToWriter + ); + } + f = message.getData(); + if (f != null) { + writer.writeMessage( + 3, + f, + proto.seldon.protos.DefaultData.serializeBinaryToWriter + ); + } + f = /** @type {!(string|Uint8Array)} */ (jspb.Message.getField(message, 4)); + if (f != null) { + writer.writeBytes( + 4, + f + ); + } + f = /** @type {string} */ (jspb.Message.getField(message, 5)); + if (f != null) { + writer.writeString( + 5, + f + ); + } +}; + + +/** + * optional Status status = 1; + * @return {?proto.seldon.protos.Status} + */ +proto.seldon.protos.SeldonMessage.prototype.getStatus = function() { + return /** @type{?proto.seldon.protos.Status} */ ( + jspb.Message.getWrapperField(this, proto.seldon.protos.Status, 1)); +}; + + +/** @param {?proto.seldon.protos.Status|undefined} value */ +proto.seldon.protos.SeldonMessage.prototype.setStatus = function(value) { + jspb.Message.setWrapperField(this, 1, value); +}; + + +proto.seldon.protos.SeldonMessage.prototype.clearStatus = function() { + this.setStatus(undefined); +}; + + +/** + * Returns whether this field is set. 
+ * @return {!boolean} + */ +proto.seldon.protos.SeldonMessage.prototype.hasStatus = function() { + return jspb.Message.getField(this, 1) != null; +}; + + +/** + * optional Meta meta = 2; + * @return {?proto.seldon.protos.Meta} + */ +proto.seldon.protos.SeldonMessage.prototype.getMeta = function() { + return /** @type{?proto.seldon.protos.Meta} */ ( + jspb.Message.getWrapperField(this, proto.seldon.protos.Meta, 2)); +}; + + +/** @param {?proto.seldon.protos.Meta|undefined} value */ +proto.seldon.protos.SeldonMessage.prototype.setMeta = function(value) { + jspb.Message.setWrapperField(this, 2, value); +}; + + +proto.seldon.protos.SeldonMessage.prototype.clearMeta = function() { + this.setMeta(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {!boolean} + */ +proto.seldon.protos.SeldonMessage.prototype.hasMeta = function() { + return jspb.Message.getField(this, 2) != null; +}; + + +/** + * optional DefaultData data = 3; + * @return {?proto.seldon.protos.DefaultData} + */ +proto.seldon.protos.SeldonMessage.prototype.getData = function() { + return /** @type{?proto.seldon.protos.DefaultData} */ ( + jspb.Message.getWrapperField(this, proto.seldon.protos.DefaultData, 3)); +}; + + +/** @param {?proto.seldon.protos.DefaultData|undefined} value */ +proto.seldon.protos.SeldonMessage.prototype.setData = function(value) { + jspb.Message.setOneofWrapperField(this, 3, proto.seldon.protos.SeldonMessage.oneofGroups_[0], value); +}; + + +proto.seldon.protos.SeldonMessage.prototype.clearData = function() { + this.setData(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {!boolean} + */ +proto.seldon.protos.SeldonMessage.prototype.hasData = function() { + return jspb.Message.getField(this, 3) != null; +}; + + +/** + * optional bytes binData = 4; + * @return {!(string|Uint8Array)} + */ +proto.seldon.protos.SeldonMessage.prototype.getBindata = function() { + return /** @type {!(string|Uint8Array)} */ (jspb.Message.getFieldWithDefault(this, 4, "")); +}; + + +/** + * optional bytes binData = 4; + * This is a type-conversion wrapper around `getBindata()` + * @return {string} + */ +proto.seldon.protos.SeldonMessage.prototype.getBindata_asB64 = function() { + return /** @type {string} */ (jspb.Message.bytesAsB64( + this.getBindata())); +}; + + +/** + * optional bytes binData = 4; + * Note that Uint8Array is not supported on all browsers. + * @see http://caniuse.com/Uint8Array + * This is a type-conversion wrapper around `getBindata()` + * @return {!Uint8Array} + */ +proto.seldon.protos.SeldonMessage.prototype.getBindata_asU8 = function() { + return /** @type {!Uint8Array} */ (jspb.Message.bytesAsU8( + this.getBindata())); +}; + + +/** @param {!(string|Uint8Array)} value */ +proto.seldon.protos.SeldonMessage.prototype.setBindata = function(value) { + jspb.Message.setOneofField(this, 4, proto.seldon.protos.SeldonMessage.oneofGroups_[0], value); +}; + + +proto.seldon.protos.SeldonMessage.prototype.clearBindata = function() { + jspb.Message.setOneofField(this, 4, proto.seldon.protos.SeldonMessage.oneofGroups_[0], undefined); +}; + + +/** + * Returns whether this field is set. 
+ * @return {!boolean} + */ +proto.seldon.protos.SeldonMessage.prototype.hasBindata = function() { + return jspb.Message.getField(this, 4) != null; +}; + + +/** + * optional string strData = 5; + * @return {string} + */ +proto.seldon.protos.SeldonMessage.prototype.getStrdata = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 5, "")); +}; + + +/** @param {string} value */ +proto.seldon.protos.SeldonMessage.prototype.setStrdata = function(value) { + jspb.Message.setOneofField(this, 5, proto.seldon.protos.SeldonMessage.oneofGroups_[0], value); +}; + + +proto.seldon.protos.SeldonMessage.prototype.clearStrdata = function() { + jspb.Message.setOneofField(this, 5, proto.seldon.protos.SeldonMessage.oneofGroups_[0], undefined); +}; + + +/** + * Returns whether this field is set. + * @return {!boolean} + */ +proto.seldon.protos.SeldonMessage.prototype.hasStrdata = function() { + return jspb.Message.getField(this, 5) != null; +}; + + + +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.seldon.protos.DefaultData = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, proto.seldon.protos.DefaultData.repeatedFields_, proto.seldon.protos.DefaultData.oneofGroups_); +}; +goog.inherits(proto.seldon.protos.DefaultData, jspb.Message); +if (goog.DEBUG && !COMPILED) { + proto.seldon.protos.DefaultData.displayName = 'proto.seldon.protos.DefaultData'; +} +/** + * List of repeated fields within this message type. + * @private {!Array} + * @const + */ +proto.seldon.protos.DefaultData.repeatedFields_ = [1]; + +/** + * Oneof group definitions for this message. Each group defines the field + * numbers belonging to that group. When of these fields' value is set, all + * other fields in the group are cleared. During deserialization, if multiple + * fields are encountered for a group, only the last value seen will be kept. + * @private {!Array>} + * @const + */ +proto.seldon.protos.DefaultData.oneofGroups_ = [[2,3]]; + +/** + * @enum {number} + */ +proto.seldon.protos.DefaultData.DataOneofCase = { + DATA_ONEOF_NOT_SET: 0, + TENSOR: 2, + NDARRAY: 3 +}; + +/** + * @return {proto.seldon.protos.DefaultData.DataOneofCase} + */ +proto.seldon.protos.DefaultData.prototype.getDataOneofCase = function() { + return /** @type {proto.seldon.protos.DefaultData.DataOneofCase} */(jspb.Message.computeOneofCase(this, proto.seldon.protos.DefaultData.oneofGroups_[0])); +}; + + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto suitable for use in Soy templates. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * com.google.apps.jspb.JsClassTemplate.JS_RESERVED_WORDS. 
+ * @param {boolean=} opt_includeInstance Whether to include the JSPB instance + * for transitional soy proto support: http://goto/soy-param-migration + * @return {!Object} + */ +proto.seldon.protos.DefaultData.prototype.toObject = function(opt_includeInstance) { + return proto.seldon.protos.DefaultData.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Whether to include the JSPB + * instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.seldon.protos.DefaultData} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.DefaultData.toObject = function(includeInstance, msg) { + var f, obj = { + namesList: jspb.Message.getRepeatedField(msg, 1), + tensor: (f = msg.getTensor()) && proto.seldon.protos.Tensor.toObject(includeInstance, f), + ndarray: (f = msg.getNdarray()) && google_protobuf_struct_pb.ListValue.toObject(includeInstance, f) + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.seldon.protos.DefaultData} + */ +proto.seldon.protos.DefaultData.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.seldon.protos.DefaultData; + return proto.seldon.protos.DefaultData.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.seldon.protos.DefaultData} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.seldon.protos.DefaultData} + */ +proto.seldon.protos.DefaultData.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = /** @type {string} */ (reader.readString()); + msg.addNames(value); + break; + case 2: + var value = new proto.seldon.protos.Tensor; + reader.readMessage(value,proto.seldon.protos.Tensor.deserializeBinaryFromReader); + msg.setTensor(value); + break; + case 3: + var value = new google_protobuf_struct_pb.ListValue; + reader.readMessage(value,google_protobuf_struct_pb.ListValue.deserializeBinaryFromReader); + msg.setNdarray(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.seldon.protos.DefaultData.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.seldon.protos.DefaultData.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. 
+ * @param {!proto.seldon.protos.DefaultData} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.DefaultData.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getNamesList(); + if (f.length > 0) { + writer.writeRepeatedString( + 1, + f + ); + } + f = message.getTensor(); + if (f != null) { + writer.writeMessage( + 2, + f, + proto.seldon.protos.Tensor.serializeBinaryToWriter + ); + } + f = message.getNdarray(); + if (f != null) { + writer.writeMessage( + 3, + f, + google_protobuf_struct_pb.ListValue.serializeBinaryToWriter + ); + } +}; + + +/** + * repeated string names = 1; + * @return {!Array.} + */ +proto.seldon.protos.DefaultData.prototype.getNamesList = function() { + return /** @type {!Array.} */ (jspb.Message.getRepeatedField(this, 1)); +}; + + +/** @param {!Array.} value */ +proto.seldon.protos.DefaultData.prototype.setNamesList = function(value) { + jspb.Message.setField(this, 1, value || []); +}; + + +/** + * @param {!string} value + * @param {number=} opt_index + */ +proto.seldon.protos.DefaultData.prototype.addNames = function(value, opt_index) { + jspb.Message.addToRepeatedField(this, 1, value, opt_index); +}; + + +proto.seldon.protos.DefaultData.prototype.clearNamesList = function() { + this.setNamesList([]); +}; + + +/** + * optional Tensor tensor = 2; + * @return {?proto.seldon.protos.Tensor} + */ +proto.seldon.protos.DefaultData.prototype.getTensor = function() { + return /** @type{?proto.seldon.protos.Tensor} */ ( + jspb.Message.getWrapperField(this, proto.seldon.protos.Tensor, 2)); +}; + + +/** @param {?proto.seldon.protos.Tensor|undefined} value */ +proto.seldon.protos.DefaultData.prototype.setTensor = function(value) { + jspb.Message.setOneofWrapperField(this, 2, proto.seldon.protos.DefaultData.oneofGroups_[0], value); +}; + + +proto.seldon.protos.DefaultData.prototype.clearTensor = function() { + this.setTensor(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {!boolean} + */ +proto.seldon.protos.DefaultData.prototype.hasTensor = function() { + return jspb.Message.getField(this, 2) != null; +}; + + +/** + * optional google.protobuf.ListValue ndarray = 3; + * @return {?proto.google.protobuf.ListValue} + */ +proto.seldon.protos.DefaultData.prototype.getNdarray = function() { + return /** @type{?proto.google.protobuf.ListValue} */ ( + jspb.Message.getWrapperField(this, google_protobuf_struct_pb.ListValue, 3)); +}; + + +/** @param {?proto.google.protobuf.ListValue|undefined} value */ +proto.seldon.protos.DefaultData.prototype.setNdarray = function(value) { + jspb.Message.setOneofWrapperField(this, 3, proto.seldon.protos.DefaultData.oneofGroups_[0], value); +}; + + +proto.seldon.protos.DefaultData.prototype.clearNdarray = function() { + this.setNdarray(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {!boolean} + */ +proto.seldon.protos.DefaultData.prototype.hasNdarray = function() { + return jspb.Message.getField(this, 3) != null; +}; + + + +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. 
+ * @extends {jspb.Message} + * @constructor + */ +proto.seldon.protos.Tensor = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, proto.seldon.protos.Tensor.repeatedFields_, null); +}; +goog.inherits(proto.seldon.protos.Tensor, jspb.Message); +if (goog.DEBUG && !COMPILED) { + proto.seldon.protos.Tensor.displayName = 'proto.seldon.protos.Tensor'; +} +/** + * List of repeated fields within this message type. + * @private {!Array} + * @const + */ +proto.seldon.protos.Tensor.repeatedFields_ = [1,2]; + + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto suitable for use in Soy templates. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * com.google.apps.jspb.JsClassTemplate.JS_RESERVED_WORDS. + * @param {boolean=} opt_includeInstance Whether to include the JSPB instance + * for transitional soy proto support: http://goto/soy-param-migration + * @return {!Object} + */ +proto.seldon.protos.Tensor.prototype.toObject = function(opt_includeInstance) { + return proto.seldon.protos.Tensor.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Whether to include the JSPB + * instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.seldon.protos.Tensor} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.Tensor.toObject = function(includeInstance, msg) { + var f, obj = { + shapeList: jspb.Message.getRepeatedField(msg, 1), + valuesList: jspb.Message.getRepeatedFloatingPointField(msg, 2) + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.seldon.protos.Tensor} + */ +proto.seldon.protos.Tensor.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.seldon.protos.Tensor; + return proto.seldon.protos.Tensor.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.seldon.protos.Tensor} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.seldon.protos.Tensor} + */ +proto.seldon.protos.Tensor.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = /** @type {!Array.} */ (reader.readPackedInt32()); + msg.setShapeList(value); + break; + case 2: + var value = /** @type {!Array.} */ (reader.readPackedDouble()); + msg.setValuesList(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). 
+ * @return {!Uint8Array} + */ +proto.seldon.protos.Tensor.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.seldon.protos.Tensor.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. + * @param {!proto.seldon.protos.Tensor} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.Tensor.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getShapeList(); + if (f.length > 0) { + writer.writePackedInt32( + 1, + f + ); + } + f = message.getValuesList(); + if (f.length > 0) { + writer.writePackedDouble( + 2, + f + ); + } +}; + + +/** + * repeated int32 shape = 1; + * @return {!Array.} + */ +proto.seldon.protos.Tensor.prototype.getShapeList = function() { + return /** @type {!Array.} */ (jspb.Message.getRepeatedField(this, 1)); +}; + + +/** @param {!Array.} value */ +proto.seldon.protos.Tensor.prototype.setShapeList = function(value) { + jspb.Message.setField(this, 1, value || []); +}; + + +/** + * @param {!number} value + * @param {number=} opt_index + */ +proto.seldon.protos.Tensor.prototype.addShape = function(value, opt_index) { + jspb.Message.addToRepeatedField(this, 1, value, opt_index); +}; + + +proto.seldon.protos.Tensor.prototype.clearShapeList = function() { + this.setShapeList([]); +}; + + +/** + * repeated double values = 2; + * @return {!Array.} + */ +proto.seldon.protos.Tensor.prototype.getValuesList = function() { + return /** @type {!Array.} */ (jspb.Message.getRepeatedFloatingPointField(this, 2)); +}; + + +/** @param {!Array.} value */ +proto.seldon.protos.Tensor.prototype.setValuesList = function(value) { + jspb.Message.setField(this, 2, value || []); +}; + + +/** + * @param {!number} value + * @param {number=} opt_index + */ +proto.seldon.protos.Tensor.prototype.addValues = function(value, opt_index) { + jspb.Message.addToRepeatedField(this, 2, value, opt_index); +}; + + +proto.seldon.protos.Tensor.prototype.clearValuesList = function() { + this.setValuesList([]); +}; + + + +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.seldon.protos.Meta = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, null, null); +}; +goog.inherits(proto.seldon.protos.Meta, jspb.Message); +if (goog.DEBUG && !COMPILED) { + proto.seldon.protos.Meta.displayName = 'proto.seldon.protos.Meta'; +} + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto suitable for use in Soy templates. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * com.google.apps.jspb.JsClassTemplate.JS_RESERVED_WORDS. 
+ * @param {boolean=} opt_includeInstance Whether to include the JSPB instance + * for transitional soy proto support: http://goto/soy-param-migration + * @return {!Object} + */ +proto.seldon.protos.Meta.prototype.toObject = function(opt_includeInstance) { + return proto.seldon.protos.Meta.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Whether to include the JSPB + * instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.seldon.protos.Meta} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.Meta.toObject = function(includeInstance, msg) { + var f, obj = { + puid: jspb.Message.getFieldWithDefault(msg, 1, ""), + tagsMap: (f = msg.getTagsMap()) ? f.toObject(includeInstance, proto.google.protobuf.Value.toObject) : [], + routingMap: (f = msg.getRoutingMap()) ? f.toObject(includeInstance, undefined) : [] + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.seldon.protos.Meta} + */ +proto.seldon.protos.Meta.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.seldon.protos.Meta; + return proto.seldon.protos.Meta.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.seldon.protos.Meta} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.seldon.protos.Meta} + */ +proto.seldon.protos.Meta.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = /** @type {string} */ (reader.readString()); + msg.setPuid(value); + break; + case 2: + var value = msg.getTagsMap(); + reader.readMessage(value, function(message, reader) { + jspb.Map.deserializeBinary(message, reader, jspb.BinaryReader.prototype.readString, jspb.BinaryReader.prototype.readMessage, proto.google.protobuf.Value.deserializeBinaryFromReader); + }); + break; + case 3: + var value = msg.getRoutingMap(); + reader.readMessage(value, function(message, reader) { + jspb.Map.deserializeBinary(message, reader, jspb.BinaryReader.prototype.readString, jspb.BinaryReader.prototype.readInt32); + }); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.seldon.protos.Meta.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.seldon.protos.Meta.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. 
+ * @param {!proto.seldon.protos.Meta} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.Meta.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getPuid(); + if (f.length > 0) { + writer.writeString( + 1, + f + ); + } + f = message.getTagsMap(true); + if (f && f.getLength() > 0) { + f.serializeBinary(2, writer, jspb.BinaryWriter.prototype.writeString, jspb.BinaryWriter.prototype.writeMessage, proto.google.protobuf.Value.serializeBinaryToWriter); + } + f = message.getRoutingMap(true); + if (f && f.getLength() > 0) { + f.serializeBinary(3, writer, jspb.BinaryWriter.prototype.writeString, jspb.BinaryWriter.prototype.writeInt32); + } +}; + + +/** + * optional string puid = 1; + * @return {string} + */ +proto.seldon.protos.Meta.prototype.getPuid = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, "")); +}; + + +/** @param {string} value */ +proto.seldon.protos.Meta.prototype.setPuid = function(value) { + jspb.Message.setField(this, 1, value); +}; + + +/** + * map tags = 2; + * @param {boolean=} opt_noLazyCreate Do not create the map if + * empty, instead returning `undefined` + * @return {!jspb.Map} + */ +proto.seldon.protos.Meta.prototype.getTagsMap = function(opt_noLazyCreate) { + return /** @type {!jspb.Map} */ ( + jspb.Message.getMapField(this, 2, opt_noLazyCreate, + proto.google.protobuf.Value)); +}; + + +proto.seldon.protos.Meta.prototype.clearTagsMap = function() { + this.getTagsMap().clear(); +}; + + +/** + * map routing = 3; + * @param {boolean=} opt_noLazyCreate Do not create the map if + * empty, instead returning `undefined` + * @return {!jspb.Map} + */ +proto.seldon.protos.Meta.prototype.getRoutingMap = function(opt_noLazyCreate) { + return /** @type {!jspb.Map} */ ( + jspb.Message.getMapField(this, 3, opt_noLazyCreate, + null)); +}; + + +proto.seldon.protos.Meta.prototype.clearRoutingMap = function() { + this.getRoutingMap().clear(); +}; + + + +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.seldon.protos.SeldonMessageList = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, proto.seldon.protos.SeldonMessageList.repeatedFields_, null); +}; +goog.inherits(proto.seldon.protos.SeldonMessageList, jspb.Message); +if (goog.DEBUG && !COMPILED) { + proto.seldon.protos.SeldonMessageList.displayName = 'proto.seldon.protos.SeldonMessageList'; +} +/** + * List of repeated fields within this message type. + * @private {!Array} + * @const + */ +proto.seldon.protos.SeldonMessageList.repeatedFields_ = [1]; + + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto suitable for use in Soy templates. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * com.google.apps.jspb.JsClassTemplate.JS_RESERVED_WORDS. 
+ * @param {boolean=} opt_includeInstance Whether to include the JSPB instance + * for transitional soy proto support: http://goto/soy-param-migration + * @return {!Object} + */ +proto.seldon.protos.SeldonMessageList.prototype.toObject = function(opt_includeInstance) { + return proto.seldon.protos.SeldonMessageList.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Whether to include the JSPB + * instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.seldon.protos.SeldonMessageList} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.SeldonMessageList.toObject = function(includeInstance, msg) { + var f, obj = { + seldonmessagesList: jspb.Message.toObjectList(msg.getSeldonmessagesList(), + proto.seldon.protos.SeldonMessage.toObject, includeInstance) + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.seldon.protos.SeldonMessageList} + */ +proto.seldon.protos.SeldonMessageList.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.seldon.protos.SeldonMessageList; + return proto.seldon.protos.SeldonMessageList.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.seldon.protos.SeldonMessageList} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.seldon.protos.SeldonMessageList} + */ +proto.seldon.protos.SeldonMessageList.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = new proto.seldon.protos.SeldonMessage; + reader.readMessage(value,proto.seldon.protos.SeldonMessage.deserializeBinaryFromReader); + msg.addSeldonmessages(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.seldon.protos.SeldonMessageList.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.seldon.protos.SeldonMessageList.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. 
+ * @param {!proto.seldon.protos.SeldonMessageList} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.SeldonMessageList.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getSeldonmessagesList(); + if (f.length > 0) { + writer.writeRepeatedMessage( + 1, + f, + proto.seldon.protos.SeldonMessage.serializeBinaryToWriter + ); + } +}; + + +/** + * repeated SeldonMessage seldonMessages = 1; + * @return {!Array.} + */ +proto.seldon.protos.SeldonMessageList.prototype.getSeldonmessagesList = function() { + return /** @type{!Array.} */ ( + jspb.Message.getRepeatedWrapperField(this, proto.seldon.protos.SeldonMessage, 1)); +}; + + +/** @param {!Array.} value */ +proto.seldon.protos.SeldonMessageList.prototype.setSeldonmessagesList = function(value) { + jspb.Message.setRepeatedWrapperField(this, 1, value); +}; + + +/** + * @param {!proto.seldon.protos.SeldonMessage=} opt_value + * @param {number=} opt_index + * @return {!proto.seldon.protos.SeldonMessage} + */ +proto.seldon.protos.SeldonMessageList.prototype.addSeldonmessages = function(opt_value, opt_index) { + return jspb.Message.addToRepeatedWrapperField(this, 1, opt_value, proto.seldon.protos.SeldonMessage, opt_index); +}; + + +proto.seldon.protos.SeldonMessageList.prototype.clearSeldonmessagesList = function() { + this.setSeldonmessagesList([]); +}; + + + +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.seldon.protos.Status = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, null, null); +}; +goog.inherits(proto.seldon.protos.Status, jspb.Message); +if (goog.DEBUG && !COMPILED) { + proto.seldon.protos.Status.displayName = 'proto.seldon.protos.Status'; +} + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto suitable for use in Soy templates. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * com.google.apps.jspb.JsClassTemplate.JS_RESERVED_WORDS. + * @param {boolean=} opt_includeInstance Whether to include the JSPB instance + * for transitional soy proto support: http://goto/soy-param-migration + * @return {!Object} + */ +proto.seldon.protos.Status.prototype.toObject = function(opt_includeInstance) { + return proto.seldon.protos.Status.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Whether to include the JSPB + * instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.seldon.protos.Status} msg The msg instance to transform. 
+ * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.Status.toObject = function(includeInstance, msg) { + var f, obj = { + code: jspb.Message.getFieldWithDefault(msg, 1, 0), + info: jspb.Message.getFieldWithDefault(msg, 2, ""), + reason: jspb.Message.getFieldWithDefault(msg, 3, ""), + status: jspb.Message.getFieldWithDefault(msg, 4, 0) + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.seldon.protos.Status} + */ +proto.seldon.protos.Status.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.seldon.protos.Status; + return proto.seldon.protos.Status.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.seldon.protos.Status} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.seldon.protos.Status} + */ +proto.seldon.protos.Status.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = /** @type {number} */ (reader.readInt32()); + msg.setCode(value); + break; + case 2: + var value = /** @type {string} */ (reader.readString()); + msg.setInfo(value); + break; + case 3: + var value = /** @type {string} */ (reader.readString()); + msg.setReason(value); + break; + case 4: + var value = /** @type {!proto.seldon.protos.Status.StatusFlag} */ (reader.readEnum()); + msg.setStatus(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.seldon.protos.Status.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.seldon.protos.Status.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. 
+ * @param {!proto.seldon.protos.Status} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.Status.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getCode(); + if (f !== 0) { + writer.writeInt32( + 1, + f + ); + } + f = message.getInfo(); + if (f.length > 0) { + writer.writeString( + 2, + f + ); + } + f = message.getReason(); + if (f.length > 0) { + writer.writeString( + 3, + f + ); + } + f = message.getStatus(); + if (f !== 0.0) { + writer.writeEnum( + 4, + f + ); + } +}; + + +/** + * @enum {number} + */ +proto.seldon.protos.Status.StatusFlag = { + SUCCESS: 0, + FAILURE: 1 +}; + +/** + * optional int32 code = 1; + * @return {number} + */ +proto.seldon.protos.Status.prototype.getCode = function() { + return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 1, 0)); +}; + + +/** @param {number} value */ +proto.seldon.protos.Status.prototype.setCode = function(value) { + jspb.Message.setField(this, 1, value); +}; + + +/** + * optional string info = 2; + * @return {string} + */ +proto.seldon.protos.Status.prototype.getInfo = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, "")); +}; + + +/** @param {string} value */ +proto.seldon.protos.Status.prototype.setInfo = function(value) { + jspb.Message.setField(this, 2, value); +}; + + +/** + * optional string reason = 3; + * @return {string} + */ +proto.seldon.protos.Status.prototype.getReason = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 3, "")); +}; + + +/** @param {string} value */ +proto.seldon.protos.Status.prototype.setReason = function(value) { + jspb.Message.setField(this, 3, value); +}; + + +/** + * optional StatusFlag status = 4; + * @return {!proto.seldon.protos.Status.StatusFlag} + */ +proto.seldon.protos.Status.prototype.getStatus = function() { + return /** @type {!proto.seldon.protos.Status.StatusFlag} */ (jspb.Message.getFieldWithDefault(this, 4, 0)); +}; + + +/** @param {!proto.seldon.protos.Status.StatusFlag} value */ +proto.seldon.protos.Status.prototype.setStatus = function(value) { + jspb.Message.setField(this, 4, value); +}; + + + +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.seldon.protos.Feedback = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, null, null); +}; +goog.inherits(proto.seldon.protos.Feedback, jspb.Message); +if (goog.DEBUG && !COMPILED) { + proto.seldon.protos.Feedback.displayName = 'proto.seldon.protos.Feedback'; +} + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto suitable for use in Soy templates. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * com.google.apps.jspb.JsClassTemplate.JS_RESERVED_WORDS. 
+ * @param {boolean=} opt_includeInstance Whether to include the JSPB instance + * for transitional soy proto support: http://goto/soy-param-migration + * @return {!Object} + */ +proto.seldon.protos.Feedback.prototype.toObject = function(opt_includeInstance) { + return proto.seldon.protos.Feedback.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Whether to include the JSPB + * instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.seldon.protos.Feedback} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.Feedback.toObject = function(includeInstance, msg) { + var f, obj = { + request: (f = msg.getRequest()) && proto.seldon.protos.SeldonMessage.toObject(includeInstance, f), + response: (f = msg.getResponse()) && proto.seldon.protos.SeldonMessage.toObject(includeInstance, f), + reward: +jspb.Message.getFieldWithDefault(msg, 3, 0.0), + truth: (f = msg.getTruth()) && proto.seldon.protos.SeldonMessage.toObject(includeInstance, f) + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.seldon.protos.Feedback} + */ +proto.seldon.protos.Feedback.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.seldon.protos.Feedback; + return proto.seldon.protos.Feedback.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.seldon.protos.Feedback} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.seldon.protos.Feedback} + */ +proto.seldon.protos.Feedback.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = new proto.seldon.protos.SeldonMessage; + reader.readMessage(value,proto.seldon.protos.SeldonMessage.deserializeBinaryFromReader); + msg.setRequest(value); + break; + case 2: + var value = new proto.seldon.protos.SeldonMessage; + reader.readMessage(value,proto.seldon.protos.SeldonMessage.deserializeBinaryFromReader); + msg.setResponse(value); + break; + case 3: + var value = /** @type {number} */ (reader.readFloat()); + msg.setReward(value); + break; + case 4: + var value = new proto.seldon.protos.SeldonMessage; + reader.readMessage(value,proto.seldon.protos.SeldonMessage.deserializeBinaryFromReader); + msg.setTruth(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.seldon.protos.Feedback.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.seldon.protos.Feedback.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. 
+ * @param {!proto.seldon.protos.Feedback} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.Feedback.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getRequest(); + if (f != null) { + writer.writeMessage( + 1, + f, + proto.seldon.protos.SeldonMessage.serializeBinaryToWriter + ); + } + f = message.getResponse(); + if (f != null) { + writer.writeMessage( + 2, + f, + proto.seldon.protos.SeldonMessage.serializeBinaryToWriter + ); + } + f = message.getReward(); + if (f !== 0.0) { + writer.writeFloat( + 3, + f + ); + } + f = message.getTruth(); + if (f != null) { + writer.writeMessage( + 4, + f, + proto.seldon.protos.SeldonMessage.serializeBinaryToWriter + ); + } +}; + + +/** + * optional SeldonMessage request = 1; + * @return {?proto.seldon.protos.SeldonMessage} + */ +proto.seldon.protos.Feedback.prototype.getRequest = function() { + return /** @type{?proto.seldon.protos.SeldonMessage} */ ( + jspb.Message.getWrapperField(this, proto.seldon.protos.SeldonMessage, 1)); +}; + + +/** @param {?proto.seldon.protos.SeldonMessage|undefined} value */ +proto.seldon.protos.Feedback.prototype.setRequest = function(value) { + jspb.Message.setWrapperField(this, 1, value); +}; + + +proto.seldon.protos.Feedback.prototype.clearRequest = function() { + this.setRequest(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {!boolean} + */ +proto.seldon.protos.Feedback.prototype.hasRequest = function() { + return jspb.Message.getField(this, 1) != null; +}; + + +/** + * optional SeldonMessage response = 2; + * @return {?proto.seldon.protos.SeldonMessage} + */ +proto.seldon.protos.Feedback.prototype.getResponse = function() { + return /** @type{?proto.seldon.protos.SeldonMessage} */ ( + jspb.Message.getWrapperField(this, proto.seldon.protos.SeldonMessage, 2)); +}; + + +/** @param {?proto.seldon.protos.SeldonMessage|undefined} value */ +proto.seldon.protos.Feedback.prototype.setResponse = function(value) { + jspb.Message.setWrapperField(this, 2, value); +}; + + +proto.seldon.protos.Feedback.prototype.clearResponse = function() { + this.setResponse(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {!boolean} + */ +proto.seldon.protos.Feedback.prototype.hasResponse = function() { + return jspb.Message.getField(this, 2) != null; +}; + + +/** + * optional float reward = 3; + * @return {number} + */ +proto.seldon.protos.Feedback.prototype.getReward = function() { + return /** @type {number} */ (+jspb.Message.getFieldWithDefault(this, 3, 0.0)); +}; + + +/** @param {number} value */ +proto.seldon.protos.Feedback.prototype.setReward = function(value) { + jspb.Message.setField(this, 3, value); +}; + + +/** + * optional SeldonMessage truth = 4; + * @return {?proto.seldon.protos.SeldonMessage} + */ +proto.seldon.protos.Feedback.prototype.getTruth = function() { + return /** @type{?proto.seldon.protos.SeldonMessage} */ ( + jspb.Message.getWrapperField(this, proto.seldon.protos.SeldonMessage, 4)); +}; + + +/** @param {?proto.seldon.protos.SeldonMessage|undefined} value */ +proto.seldon.protos.Feedback.prototype.setTruth = function(value) { + jspb.Message.setWrapperField(this, 4, value); +}; + + +proto.seldon.protos.Feedback.prototype.clearTruth = function() { + this.setTruth(undefined); +}; + + +/** + * Returns whether this field is set. 
+ * @return {!boolean} + */ +proto.seldon.protos.Feedback.prototype.hasTruth = function() { + return jspb.Message.getField(this, 4) != null; +}; + + + +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.seldon.protos.RequestResponse = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, null, null); +}; +goog.inherits(proto.seldon.protos.RequestResponse, jspb.Message); +if (goog.DEBUG && !COMPILED) { + proto.seldon.protos.RequestResponse.displayName = 'proto.seldon.protos.RequestResponse'; +} + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto suitable for use in Soy templates. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * com.google.apps.jspb.JsClassTemplate.JS_RESERVED_WORDS. + * @param {boolean=} opt_includeInstance Whether to include the JSPB instance + * for transitional soy proto support: http://goto/soy-param-migration + * @return {!Object} + */ +proto.seldon.protos.RequestResponse.prototype.toObject = function(opt_includeInstance) { + return proto.seldon.protos.RequestResponse.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Whether to include the JSPB + * instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.seldon.protos.RequestResponse} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.RequestResponse.toObject = function(includeInstance, msg) { + var f, obj = { + request: (f = msg.getRequest()) && proto.seldon.protos.SeldonMessage.toObject(includeInstance, f), + response: (f = msg.getResponse()) && proto.seldon.protos.SeldonMessage.toObject(includeInstance, f) + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.seldon.protos.RequestResponse} + */ +proto.seldon.protos.RequestResponse.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.seldon.protos.RequestResponse; + return proto.seldon.protos.RequestResponse.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.seldon.protos.RequestResponse} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. 
+ * @return {!proto.seldon.protos.RequestResponse} + */ +proto.seldon.protos.RequestResponse.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = new proto.seldon.protos.SeldonMessage; + reader.readMessage(value,proto.seldon.protos.SeldonMessage.deserializeBinaryFromReader); + msg.setRequest(value); + break; + case 2: + var value = new proto.seldon.protos.SeldonMessage; + reader.readMessage(value,proto.seldon.protos.SeldonMessage.deserializeBinaryFromReader); + msg.setResponse(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.seldon.protos.RequestResponse.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.seldon.protos.RequestResponse.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. + * @param {!proto.seldon.protos.RequestResponse} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.seldon.protos.RequestResponse.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getRequest(); + if (f != null) { + writer.writeMessage( + 1, + f, + proto.seldon.protos.SeldonMessage.serializeBinaryToWriter + ); + } + f = message.getResponse(); + if (f != null) { + writer.writeMessage( + 2, + f, + proto.seldon.protos.SeldonMessage.serializeBinaryToWriter + ); + } +}; + + +/** + * optional SeldonMessage request = 1; + * @return {?proto.seldon.protos.SeldonMessage} + */ +proto.seldon.protos.RequestResponse.prototype.getRequest = function() { + return /** @type{?proto.seldon.protos.SeldonMessage} */ ( + jspb.Message.getWrapperField(this, proto.seldon.protos.SeldonMessage, 1)); +}; + + +/** @param {?proto.seldon.protos.SeldonMessage|undefined} value */ +proto.seldon.protos.RequestResponse.prototype.setRequest = function(value) { + jspb.Message.setWrapperField(this, 1, value); +}; + + +proto.seldon.protos.RequestResponse.prototype.clearRequest = function() { + this.setRequest(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {!boolean} + */ +proto.seldon.protos.RequestResponse.prototype.hasRequest = function() { + return jspb.Message.getField(this, 1) != null; +}; + + +/** + * optional SeldonMessage response = 2; + * @return {?proto.seldon.protos.SeldonMessage} + */ +proto.seldon.protos.RequestResponse.prototype.getResponse = function() { + return /** @type{?proto.seldon.protos.SeldonMessage} */ ( + jspb.Message.getWrapperField(this, proto.seldon.protos.SeldonMessage, 2)); +}; + + +/** @param {?proto.seldon.protos.SeldonMessage|undefined} value */ +proto.seldon.protos.RequestResponse.prototype.setResponse = function(value) { + jspb.Message.setWrapperField(this, 2, value); +}; + + +proto.seldon.protos.RequestResponse.prototype.clearResponse = function() { + this.setResponse(undefined); +}; + + +/** + * Returns whether this field is set. 
+ * @return {!boolean} + */ +proto.seldon.protos.RequestResponse.prototype.hasResponse = function() { + return jspb.Message.getField(this, 2) != null; +}; + + +goog.object.extend(exports, proto.seldon.protos); diff --git a/wrappers/s2i/nodejs/test/model-template-app/.s2i/environment b/wrappers/s2i/nodejs/test/model-template-app/.s2i/environment index afcae69f8c..aab3ea3f7f 100644 --- a/wrappers/s2i/nodejs/test/model-template-app/.s2i/environment +++ b/wrappers/s2i/nodejs/test/model-template-app/.s2i/environment @@ -1,4 +1,4 @@ MODEL_NAME=MyModel.js API_TYPE=REST SERVICE_TYPE=MODEL -PERSISTENCE=0 +PERSISTENCE=0 \ No newline at end of file diff --git a/wrappers/s2i/nodejs/test/model-template-app/README.md b/wrappers/s2i/nodejs/test/model-template-app/README.md index d4033b8e73..9758a68243 100644 --- a/wrappers/s2i/nodejs/test/model-template-app/README.md +++ b/wrappers/s2i/nodejs/test/model-template-app/README.md @@ -1,4 +1,4 @@ -## NodeJs tensorflow example +## NodeJs model example This model example takes an input of 10 different features and predicts a out for the same. For the training part it uses a random normally distributed input set of 100 rows i.e a data set of [100,10] and trains it for another random normally distributed data set of size [100,1]. diff --git a/wrappers/s2i/nodejs/test/model-template-app/package.json b/wrappers/s2i/nodejs/test/model-template-app/package.json index eb9fe2383e..ae1e3493cf 100644 --- a/wrappers/s2i/nodejs/test/model-template-app/package.json +++ b/wrappers/s2i/nodejs/test/model-template-app/package.json @@ -1,5 +1,5 @@ { - "name": "node_tensorflow", + "name": "model-template-app", "version": "1.0.0", "description": "", "main": "predict.js", diff --git a/wrappers/s2i/nodejs/test/run b/wrappers/s2i/nodejs/test/run index c69bcd4f9c..ac95bc98c4 100755 --- a/wrappers/s2i/nodejs/test/run +++ b/wrappers/s2i/nodejs/test/run @@ -171,7 +171,7 @@ test_feedback() { # Build the application image twice to ensure the 'save-artifacts' and # 'restore-artifacts' scripts are working properly -array=( 'model' ) +array=( 'model' 'transformer' ) for i in "${array[@]}" do cid_file=$($MKTEMP_EXEC -u --suffix=.cid) diff --git a/wrappers/s2i/nodejs/test/transformer-template-app/.s2i/environment b/wrappers/s2i/nodejs/test/transformer-template-app/.s2i/environment new file mode 100644 index 0000000000..6bb7e6ac4b --- /dev/null +++ b/wrappers/s2i/nodejs/test/transformer-template-app/.s2i/environment @@ -0,0 +1,4 @@ +MODEL_NAME=MyTransformer.js +API_TYPE=REST +SERVICE_TYPE=TRANSFORMER +PERSISTENCE=0 \ No newline at end of file diff --git a/wrappers/s2i/nodejs/test/transformer-template-app/MyTransformer.js b/wrappers/s2i/nodejs/test/transformer-template-app/MyTransformer.js new file mode 100644 index 0000000000..2fae2f4d3b --- /dev/null +++ b/wrappers/s2i/nodejs/test/transformer-template-app/MyTransformer.js @@ -0,0 +1,17 @@ +let MyTransformer = function() {}; + +MyTransformer.prototype.init = function() { + console.log("Initializing Transform ..."); +}; + +MyTransformer.prototype.transform_input = function(X, names) { + console.log("Identity Transform ..."); + return X; +}; + +MyTransformer.prototype.transform_output = function(X, names) { + console.log("Identity Transform ..."); + return X; +}; + +module.exports = MyTransformer; diff --git a/wrappers/s2i/nodejs/test/transformer-template-app/README.md b/wrappers/s2i/nodejs/test/transformer-template-app/README.md new file mode 100644 index 0000000000..3a86c39a55 --- /dev/null +++ 
b/wrappers/s2i/nodejs/test/transformer-template-app/README.md
@@ -0,0 +1,3 @@
+## NodeJs transformer example
+
+This is an example transformer with identity input and output transform functions.
diff --git a/wrappers/s2i/nodejs/test/transformer-template-app/package.json b/wrappers/s2i/nodejs/test/transformer-template-app/package.json
new file mode 100644
index 0000000000..fa9eb5bad7
--- /dev/null
+++ b/wrappers/s2i/nodejs/test/transformer-template-app/package.json
@@ -0,0 +1,10 @@
+{
+ "name": "transformer-template-app",
+ "version": "1.0.0",
+ "description": "",
+ "main": "MyTransformer.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "author": "Sachin.Varghese"
+}
diff --git a/wrappers/s2i/nodejs/transformer_microservice.js b/wrappers/s2i/nodejs/transformer_microservice.js
new file mode 100644
index 0000000000..1237da7ea7
--- /dev/null
+++ b/wrappers/s2i/nodejs/transformer_microservice.js
@@ -0,0 +1,133 @@
+const express = require("express");
+const app = express();
+const bodyParser = require("body-parser");
+const grpc = require("grpc");
+const grpc_services = require("./prediction_grpc_pb");
+
+module.exports = (
+ user_model,
+ api,
+ port,
+ rest_data_to_array,
+ array_to_rest_data,
+ array_to_grpc_data
+) => {
+ if (
+ user_model.transform_input &&
+ typeof user_model.transform_input === "function"
+ ) {
+ console.log("Transform function loaded successfully");
+ } else if (
+ user_model.transform_output &&
+ typeof user_model.transform_output === "function"
+ ) {
+ console.log("Transform function loaded successfully");
+ } else {
+ console.log(user_model);
+ process.exit(1);
+ }
+ let transform_input = user_model.transform_input
+ ? user_model.transform_input.bind(user_model)
+ : null;
+ let transform_output = user_model.transform_output
+ ? 
user_model.transform_output.bind(user_model) + : null; + + if (api === "REST") { + app.use(bodyParser.urlencoded({ extended: true })); + app.post("/transform-input", (req, res) => { + try { + body = JSON.parse(req.body.json); + body = body.data; + } catch (msg) { + console.log(msg); + res.status(500).send("Cannot parse transform input json " + req.body); + } + if (transform_input && typeof transform_input === "function") { + result = transform_input(rest_data_to_array(body), body.names); + result = { data: array_to_rest_data(result, body) }; + res.status(200).send(result); + } else { + console.log("Transform function not Found"); + res.status(500).send(null); + } + }); + app.post("/transform-output", (req, res) => { + try { + body = JSON.parse(req.body.json); + body = body.data; + } catch (msg) { + console.log(msg); + res.status(500).send("Cannot parse transform input json " + req.body); + } + if (transform_output && typeof transform_output === "function") { + result = transform_output(rest_data_to_array(body), body.names); + result = { data: array_to_rest_data(result, body) }; + res.status(200).send(result); + } else { + console.log("Transform function not Found"); + res.status(500).send(null); + } + }); + var server = app.listen(port, () => { + console.log(`NodeJs REST Microservice listening on port ${port}!`); + }); + function stopServer(code) { + server.close(); + console.log(`About to exit with code: ${code}`); + } + process.on("SIGINT", stopServer.bind(this)); + process.on("SIGTERM", stopServer.bind(this)); + } + + if (api === "GRPC") { + function inputEndpoint(call, callback) { + let data = call.request.getData(); + let body = { names: data.getNamesList() }; + + if (data.hasTensor()) { + data = data.getTensor(); + body["tensor"] = { + shape: data.getShapeList(), + values: data.getValuesList() + }; + } else { + body["ndarray"] = data.getNdarray(); + } + result = transform_input(rest_data_to_array(body), body.names); + callback(null, array_to_grpc_data(result, body)); + } + function outputEndpoint(call, callback) { + let data = call.request.getData(); + let body = { names: data.getNamesList() }; + + if (data.hasTensor()) { + data = data.getTensor(); + body["tensor"] = { + shape: data.getShapeList(), + values: data.getValuesList() + }; + } else { + body["ndarray"] = data.getNdarray(); + } + result = transform_output(rest_data_to_array(body), body.names); + callback(null, array_to_grpc_data(result, body)); + } + var server = new grpc.Server(); + server.addService(grpc_services.TransformerService, { + transformInput: inputEndpoint + }); + server.addService(grpc_services.OutputTransformerService, { + transformOutput: outputEndpoint + }); + server.bind("0.0.0.0:" + port, grpc.ServerCredentials.createInsecure()); + server.start(); + console.log(`NodeJs GRPC Microservice listening on port ${port}!`); + function stopServer(code) { + server.forceShutdown(); + console.log(`About to exit with code: ${code}`); + } + process.on("SIGINT", stopServer.bind(this)); + process.on("SIGTERM", stopServer.bind(this)); + } +}; diff --git a/wrappers/s2i/python-ngraph-onnx/Makefile b/wrappers/s2i/python-ngraph-onnx/Makefile index 4c5265a503..f9fa9abceb 100644 --- a/wrappers/s2i/python-ngraph-onnx/Makefile +++ b/wrappers/s2i/python-ngraph-onnx/Makefile @@ -3,22 +3,9 @@ IMAGE_NAME = docker.io/seldonio/seldon-core-s2i-python3-ngraph-onnx SELDON_CORE_DIR=../../.. - .PHONY: build build: docker build -t $(IMAGE_NAME):$(IMAGE_VERSION) . 
push_to_dockerhub: docker push $(IMAGE_NAME):$(IMAGE_VERSION) - - -.PHONY: test -test: - docker build -t $(IMAGE_NAME)-candidate . - IMAGE_NAME=$(IMAGE_NAME)-candidate test/run - -.PHONY: clean -clean: - rm -rf test/model-template-app/.git - rm -rf test/router-template-app/.git - rm -rf test/transformer-template-app/.git diff --git a/wrappers/s2i/python-tensorrt/Dockerfile b/wrappers/s2i/python-tensorrt/Dockerfile new file mode 100644 index 0000000000..16c4123cd5 --- /dev/null +++ b/wrappers/s2i/python-tensorrt/Dockerfile @@ -0,0 +1,21 @@ +FROM seldonio/seldon-core-s2i-python3:0.2 + +RUN apt-get update && apt-get install -y \ + build-essential libssl1.0-dev libopencv-dev libopencv-core-dev python-pil \ + software-properties-common autoconf automake libtool pkg-config + +WORKDIR /home + +RUN git clone --single-branch -b change_ld_flags https://github.com/cliveseldon/dl-inference-server.git + +RUN pip install --no-cache-dir --upgrade setuptools grpcio-tools + +RUN cd dl-inference-server && \ + make -j4 -f Makefile.clients all pip + +RUN pip install --no-cache-dir --upgrade dl-inference-server/build/dist/dist/inference_server-0.5.0-cp36-cp36m-linux_x86_64.whl + +RUN rm -rf dl-inference-server/build + +WORKDIR /microservice + diff --git a/wrappers/s2i/python-tensorrt/Makefile b/wrappers/s2i/python-tensorrt/Makefile new file mode 100644 index 0000000000..cb171a045b --- /dev/null +++ b/wrappers/s2i/python-tensorrt/Makefile @@ -0,0 +1,12 @@ +IMAGE_VERSION=0.1 +IMAGE_NAME = docker.io/seldonio/seldon-core-s2i-python3-tensorrt + +SELDON_CORE_DIR=../../.. + +.PHONY: build +build: + docker build -t $(IMAGE_NAME):$(IMAGE_VERSION) . + +push_to_dockerhub: + docker push $(IMAGE_NAME):$(IMAGE_VERSION) +
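
Note: as a quick sanity check of the identity transformer added in this diff, the class can be exercised directly in Node. The snippet below is a minimal sketch (not part of the diff); the require path assumes it is run from wrappers/s2i/nodejs/test/transformer-template-app/, next to MyTransformer.js.

// Minimal usage sketch; path and feature names are illustrative assumptions.
const MyTransformer = require("./MyTransformer");

const transformer = new MyTransformer();
transformer.init(); // logs "Initializing Transform ..."

// Both transforms are identity functions, so the data comes back unchanged.
const names = ["f0", "f1"];
const X = [[1.0, 2.0]];
console.log(transformer.transform_input(X, names));  // [ [ 1, 2 ] ]
console.log(transformer.transform_output(X, names)); // [ [ 1, 2 ] ]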