
Commit

Revert formatting
Eric Meadows committed Sep 2, 2020
1 parent 378a9b9 commit 3827b79
Showing 1 changed file with 8 additions and 24 deletions.
python/seldon_core/wrapper.py: 32 changes (8 additions, 24 deletions)
@@ -22,9 +22,7 @@
 logger = logging.getLogger(__name__)
 
 PRED_UNIT_ID = os.environ.get("PREDICTIVE_UNIT_ID", "0")
-METRICS_ENDPOINT = os.environ.get(
-    "PREDICTIVE_UNIT_METRICS_ENDPOINT", "/metrics"
-)
+METRICS_ENDPOINT = os.environ.get("PREDICTIVE_UNIT_METRICS_ENDPOINT", "/metrics")
 
 
 def get_rest_microservice(user_model, seldon_metrics):
@@ -132,9 +130,7 @@ def HealthPing():
     @app.route("/health/status", methods=["GET"])
     def HealthStatus():
         logger.debug("REST Health Status Request")
-        response = seldon_core.seldon_methods.health_status(
-            user_model, seldon_metrics
-        )
+        response = seldon_core.seldon_methods.health_status(user_model, seldon_metrics)
         logger.debug("REST Health Status Response: %s", response)
         return jsonify(response)
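
For context (not part of the commit): a minimal sketch of serving a model through the REST wrapper touched above. The DummyModel class, the SeldonMetrics construction, and the host/port values are illustrative assumptions, not taken from this diff.

from seldon_core.metrics import SeldonMetrics
from seldon_core.wrapper import get_rest_microservice


class DummyModel:
    # Hypothetical user model; the wrapper's REST endpoints dispatch to hooks like predict().
    def predict(self, X, features_names=None):
        return X


user_model = DummyModel()
seldon_metrics = SeldonMetrics()  # assumed metrics object the wrapper endpoints record into
app = get_rest_microservice(user_model, seldon_metrics)  # returns a Flask app
app.run(host="0.0.0.0", port=9000)  # port chosen for this sketch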

@@ -216,9 +212,7 @@ def __init__(self, user_model, seldon_metrics):
         self.user_model = user_model
         self.seldon_metrics = seldon_metrics
 
-        self.metadata_data = seldon_core.seldon_methods.init_metadata(
-            user_model
-        )
+        self.metadata_data = seldon_core.seldon_methods.init_metadata(user_model)
 
     def Predict(self, request_grpc, context):
         return seldon_core.seldon_methods.predict(
@@ -260,28 +254,20 @@ def ModelMetadata(self, request_grpc, context):
 
     def GraphMetadata(self, request_grpc, context):
         """GraphMetadata method of rpc Seldon service"""
-        raise NotImplementedError(
-            "GraphMetadata not available on the Model level."
-        )
+        raise NotImplementedError("GraphMetadata not available on the Model level.")
 
 
-def get_grpc_server(
-    user_model, seldon_metrics, annotations={}, trace_interceptor=None
-):
+def get_grpc_server(user_model, seldon_metrics, annotations={}, trace_interceptor=None):
     seldon_model = SeldonModelGRPC(user_model, seldon_metrics)
     options = []
     if ANNOTATION_GRPC_MAX_MSG_SIZE in annotations:
         max_msg = int(annotations[ANNOTATION_GRPC_MAX_MSG_SIZE])
-        logger.info(
-            "Setting grpc max message and receive length to %d", max_msg
-        )
+        logger.info("Setting grpc max message and receive length to %d", max_msg)
         options.append(("grpc.max_message_length", max_msg))
         options.append(("grpc.max_send_message_length", max_msg))
         options.append(("grpc.max_receive_message_length", max_msg))
 
-    server = grpc.server(
-        futures.ThreadPoolExecutor(max_workers=10), options=options
-    )
+    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10), options=options)
 
     if trace_interceptor:
         from grpc_opentracing.grpcext import intercept_server
@@ -291,9 +277,7 @@ def get_grpc_server(
     prediction_pb2_grpc.add_GenericServicer_to_server(seldon_model, server)
     prediction_pb2_grpc.add_ModelServicer_to_server(seldon_model, server)
     prediction_pb2_grpc.add_TransformerServicer_to_server(seldon_model, server)
-    prediction_pb2_grpc.add_OutputTransformerServicer_to_server(
-        seldon_model, server
-    )
+    prediction_pb2_grpc.add_OutputTransformerServicer_to_server(seldon_model, server)
     prediction_pb2_grpc.add_CombinerServicer_to_server(seldon_model, server)
     prediction_pb2_grpc.add_RouterServicer_to_server(seldon_model, server)
     prediction_pb2_grpc.add_SeldonServicer_to_server(seldon_model, server)
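
For completeness (also not part of the commit): a hedged sketch of standing up the gRPC server that get_grpc_server builds. The model class, annotation key/value, and port below are assumptions for illustration.

import time

from seldon_core.metrics import SeldonMetrics
from seldon_core.wrapper import get_grpc_server


class DummyModel:
    # Hypothetical user model; the SeldonModelGRPC servicer forwards Predict calls to it.
    def predict(self, X, features_names=None):
        return X


user_model = DummyModel()
seldon_metrics = SeldonMetrics()
# The key mirrors ANNOTATION_GRPC_MAX_MSG_SIZE handled in the diff above;
# the literal string and the 10 MB size are illustrative assumptions.
annotations = {"seldon.io/grpc-max-message-size": str(10 * 1024 * 1024)}
server = get_grpc_server(user_model, seldon_metrics, annotations=annotations)
server.add_insecure_port("0.0.0.0:5000")  # port chosen for this sketch
server.start()
try:
    while True:
        time.sleep(3600)  # grpc serves on background threads; keep the main thread alive
except KeyboardInterrupt:
    server.stop(0)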
