Add OPENAI_API_VERSION constant to routes (pytorch#1015)
vmpuri authored Aug 6, 2024
1 parent 67f678b commit a9d386e
Showing 2 changed files with 6 additions and 4 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -222,7 +222,7 @@ is plaintext and will not be formatted to the OpenAI API specification. If `stre
[skip default]: begin

```
-curl http://127.0.0.1:5000/chat \
+curl http://127.0.0.1:5000/v1/chat \
-H "Content-Type: application/json" \
-d '{
"model": "llama3.1",
8 changes: 5 additions & 3 deletions server.py
@@ -16,6 +16,8 @@
from flask import Flask, request, Response
from generate import GeneratorArgs

+OPENAI_API_VERSION = "v1"


def create_app(args):
"""
@@ -33,7 +35,7 @@ def _del_none(d: Union[Dict, List]) -> Union[Dict, List]:
return [_del_none(v) for v in d if v]
return d

@app.route("/chat", methods=["POST"])
@app.route(f"/{OPENAI_API_VERSION}/chat", methods=["POST"])
def chat_endpoint():
"""
Endpoint for the Chat API. This endpoint is used to generate a response to a user prompt.
@@ -75,11 +77,11 @@ def chunk_processor(chunked_completion_generator):

return json.dumps(_del_none(asdict(response)))

@app.route("/models", methods=["GET"])
@app.route(f"/{OPENAI_API_VERSION}/models", methods=["GET"])
def models_endpoint():
return json.dumps(asdict(get_model_info_list(args)))

@app.route("/models/<model_id>", methods=["GET"])
@app.route(f"/{OPENAI_API_VERSION}/models/<model_id>", methods=["GET"])
def models_retrieve_endpoint(model_id):
if response := retrieve_model_info(args, model_id):
return json.dumps(asdict(response))
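For reference, the pattern this commit introduces is simply an f-string route prefix built from a module-level version constant. The following minimal, standalone Flask sketch mirrors that pattern; it is not the torchchat server itself, and the `create_demo_app` name and placeholder handler bodies are illustrative, assuming only that Flask is installed.

```python
from flask import Flask, jsonify

OPENAI_API_VERSION = "v1"


def create_demo_app():
    app = Flask(__name__)

    # Routes are prefixed with the version constant so clients can call
    # OpenAI-style paths such as /v1/chat and /v1/models.
    @app.route(f"/{OPENAI_API_VERSION}/chat", methods=["POST"])
    def chat_endpoint():
        # Placeholder body; the real server generates a chat completion here.
        return jsonify({"status": "ok"})

    @app.route(f"/{OPENAI_API_VERSION}/models", methods=["GET"])
    def models_endpoint():
        # Placeholder body; the real server returns model info built from args.
        return jsonify({"object": "list", "data": []})

    return app


if __name__ == "__main__":
    create_demo_app().run(host="127.0.0.1", port=5000)
```

With the sketch running, `curl http://127.0.0.1:5000/v1/models` reaches the versioned route, matching the updated curl example in the README hunk above.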
