Commit 5095ba8
removing defaults
udaij12 committed Sep 23, 2024
1 parent 83ebb39 commit 5095ba8
Showing 2 changed files with 3 additions and 13 deletions.
4 changes: 1 addition & 3 deletions ModelConfig.java
@@ -11,8 +11,6 @@
 public class ModelConfig {
     private static final Logger logger = LoggerFactory.getLogger(ModelConfig.class);

-    public static final int defaultMinWorkers = 1;
-    public static final int defaultBatchSize = 1;
     public static final int defaultStartupTimeout = 120; // unit: sec
     public static final int defaultResponseTimeout = 120; // unit: sec

@@ -21,7 +19,7 @@ public class ModelConfig {
     /** the maximum number of workers of a model */
     private int maxWorkers;
     /** the batch size of a model */
-    private int batchSize = defaultBatchSize;
+    private int batchSize;
     /** the maximum delay in msec of a batch of a model */
     private int maxBatchDelay;
     /** the timeout in sec of a specific model's response. */
12 changes: 2 additions & 10 deletions frontend/server/src/main/java/org/pytorch/serve/wlm/Model.java
@@ -190,13 +190,10 @@ public JsonObject getModelState(boolean isDefaultVersion) {
     }

     public void setModelState(JsonObject modelInfo) {
-        minWorkers =
-                modelInfo.has(MIN_WORKERS) && !modelInfo.get(MIN_WORKERS).isJsonNull()
-                        ? modelInfo.get(MIN_WORKERS).getAsInt()
-                        : modelArchive.getModelConfig()
-                                .defaultMinWorkers; // default value for minWorkers
+        minWorkers = modelInfo.get(MIN_WORKERS).getAsInt();
         maxWorkers = modelInfo.get(MAX_WORKERS).getAsInt();
         maxBatchDelay = modelInfo.get(MAX_BATCH_DELAY).getAsInt();
+        batchSize = modelInfo.get(BATCH_SIZE).getAsInt();
         responseTimeout =
                 modelInfo.has(RESPONSE_TIMEOUT) && !modelInfo.get(RESPONSE_TIMEOUT).isJsonNull()
                         ? modelInfo.get(RESPONSE_TIMEOUT).getAsInt()
@@ -207,11 +204,6 @@ public void setModelState(JsonObject modelInfo) {
                         ? modelInfo.get(STARTUP_TIMEOUT).getAsInt()
                         : modelArchive.getModelConfig()
                                 .defaultStartupTimeout; // default value for startupTimeout
-        batchSize =
-                modelInfo.has(BATCH_SIZE) && !modelInfo.get(BATCH_SIZE).isJsonNull()
-                        ? modelInfo.get(BATCH_SIZE).getAsInt()
-                        : modelArchive.getModelConfig()
-                                .defaultBatchSize; // default value for batchSize

         JsonElement runtime = modelInfo.get(RUNTIME_TYPE);
         String runtime_str = Manifest.RuntimeType.PYTHON.getValue();
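Note (not part of the commit): a minimal, self-contained sketch of what the simplified setModelState now assumes, namely that a saved model-state JSON always carries the minWorkers and batchSize entries. The key names and values below are illustrative assumptions only; real snapshots are produced by getModelState().

import com.google.gson.JsonObject;

public class ModelStateSketch {
    public static void main(String[] args) {
        // Hypothetical snapshot entry; the key names assume the MIN_WORKERS and
        // BATCH_SIZE constants map to "minWorkers" and "batchSize".
        JsonObject modelInfo = new JsonObject();
        modelInfo.addProperty("minWorkers", 1);
        modelInfo.addProperty("batchSize", 4);

        // After this commit the reads are unconditional: a snapshot missing either
        // key would fail here (get() returns null) instead of falling back to
        // defaultMinWorkers / defaultBatchSize.
        int minWorkers = modelInfo.get("minWorkers").getAsInt();
        int batchSize = modelInfo.get("batchSize").getAsInt();
        System.out.println("minWorkers=" + minWorkers + " batchSize=" + batchSize);
    }
}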
