Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature bedrock cohere instrumentation #955

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions newrelic/hooks/external_botocore.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,9 +140,29 @@ def extract_bedrock_ai21_j2_model(request_body, response_body):
return message_list, chat_completion_summary_dict


def extract_bedrock_cohere_model(request_body, response_body):
    """Extract LLM chat-completion attributes from a Bedrock Cohere invocation.

    Parameters
    ----------
    request_body : str or bytes
        JSON-encoded request payload sent to the Cohere model.
    response_body : str or bytes
        JSON-encoded response payload returned by the model.

    Returns
    -------
    tuple
        ``(message_list, chat_completion_summary_dict)`` where
        ``message_list`` holds the user prompt followed by one assistant
        message per returned generation, and the summary dict carries the
        event attributes recorded for the completion.
    """
    request_body = json.loads(request_body)
    response_body = json.loads(response_body)

    generations = response_body.get("generations", [])

    # First message is always the user's prompt; each generation in the
    # response becomes an assistant message.
    message_list = [{"role": "user", "content": request_body.get("prompt", "")}]
    message_list.extend(
        {"role": "assistant", "content": generation.get("text", "")} for generation in generations
    )

    chat_completion_summary_dict = {
        "request.max_tokens": request_body.get("max_tokens", ""),
        "request.temperature": request_body.get("temperature", ""),
        # Guard against an empty/missing generations list instead of raising
        # IndexError/KeyError (the original indexed [0] unconditionally).
        "response.choices.finish_reason": generations[0].get("finish_reason", "") if generations else "",
        "response.number_of_messages": len(message_list),
        "response_id": str(response_body.get("id", "")),
    }
    return message_list, chat_completion_summary_dict


# Maps a Bedrock model-id prefix to the function that extracts LLM message
# and summary attributes from that model family's request/response bodies.
# NOTE(review): presumably the invoked model id is matched against these keys
# by prefix — confirm against the caller that performs the lookup.
MODEL_EXTRACTORS = {
    "amazon.titan": extract_bedrock_titan_model,
    "ai21.j2": extract_bedrock_ai21_j2_model,
    "cohere": extract_bedrock_cohere_model,
}


Expand Down
14 changes: 14 additions & 0 deletions tests/mlmodel_bedrock/_mock_external_bedrock_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,20 @@
],
},
],
"cohere.command-text-v14::What is 212 degrees Fahrenheit converted to Celsius?": [
{"content-type": "application/json", "x-amzn-requestid": "c5188fb5-dc58-4cbe-948d-af173c69ce0d"},
{
"generations": [
{
"finish_reason": "MAX_TOKENS",
"id": "0730f5c0-9a49-4f35-af94-cf8f77327740",
"text": " To convert 212 degrees Fahrenheit to Celsius, we can use the conversion factor that Celsius is equal to (Fahrenheit - 32) x 5/9. \\n\\nApplying this formula, we have:\\n212°F = (212°F - 32) x 5/9\\n= (180) x 5/9\\n= 100°C.\\n\\nTherefore, 212 degrees F",
}
],
"id": "a9cc8ce6-50b6-40b6-bf77-cf24561d8de7",
"prompt": "What is 212 degrees Fahrenheit converted to Celsius?",
},
],
"ai21.j2-mid-v1::What is 212 degrees Fahrenheit converted to Celsius?": [
{"content-type": "application/json", "x-amzn-requestid": "3bf1bb6b-b6f0-4901-85a1-2fa0e814440e"},
{
Expand Down
64 changes: 64 additions & 0 deletions tests/mlmodel_bedrock/_test_chat_completion.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# Maps a Bedrock model id to a %-style template used to build that model's
# JSON request body; the placeholders are filled, in order, with the prompt
# string (%s), the temperature (%f), and the max-token count (%d).
chat_completion_payload_templates = {
    "amazon.titan-text-express-v1": '{ "inputText": "%s", "textGenerationConfig": {"temperature": %f, "maxTokenCount": %d }}',
    "ai21.j2-mid-v1": '{"prompt": "%s", "temperature": %f, "maxTokens": %d}',
    "cohere.command-text-v14": '{"prompt": "%s", "temperature": %f, "max_tokens": %d}',
}

chat_completion_expected_events = {
Expand Down Expand Up @@ -132,4 +133,67 @@
},
),
],
"cohere.command-text-v14": [
(
{"type": "LlmChatCompletionSummary"},
{
"id": None, # UUID that varies with each run
"appName": "Python Agent Test (mlmodel_bedrock)",
"conversation_id": "my-awesome-id",
"transaction_id": None,
"span_id": "span-id",
"trace_id": "trace-id",
"request_id": "c5188fb5-dc58-4cbe-948d-af173c69ce0d",
"response_id": None, # UUID that varies with each run
"api_key_last_four_digits": "CRET",
"duration": None, # Response time varies each test run
"request.model": "cohere.command-text-v14",
"response.model": "cohere.command-text-v14",
"request.temperature": 0.7,
"request.max_tokens": 100,
"response.choices.finish_reason": "MAX_TOKENS",
"vendor": "bedrock",
"ingest_source": "Python",
"response.number_of_messages": 2,
},
),
(
{"type": "LlmChatCompletionMessage"},
{
"id": None, # UUID that varies with each run
"appName": "Python Agent Test (mlmodel_bedrock)",
"conversation_id": "my-awesome-id",
"request_id": "c5188fb5-dc58-4cbe-948d-af173c69ce0d",
"span_id": "span-id",
"trace_id": "trace-id",
"transaction_id": None,
"content": "What is 212 degrees Fahrenheit converted to Celsius?",
"role": "user",
"completion_id": None,
"sequence": 0,
"response.model": "cohere.command-text-v14",
"vendor": "bedrock",
"ingest_source": "Python",
},
),
(
{"type": "LlmChatCompletionMessage"},
{
"id": None, # UUID that varies with each run
"appName": "Python Agent Test (mlmodel_bedrock)",
"conversation_id": "my-awesome-id",
"request_id": "c5188fb5-dc58-4cbe-948d-af173c69ce0d",
"span_id": "span-id",
"trace_id": "trace-id",
"transaction_id": None,
"content": " To convert 212 degrees Fahrenheit to Celsius, we can use the conversion factor that Celsius is equal to (Fahrenheit - 32) x 5/9. \\n\\nApplying this formula, we have:\\n212°F = (212°F - 32) x 5/9\\n= (180) x 5/9\\n= 100°C.\\n\\nTherefore, 212 degrees F",
"role": "assistant",
"completion_id": None,
"sequence": 1,
"response.model": "cohere.command-text-v14",
"vendor": "bedrock",
"ingest_source": "Python",
},
),
],
}
2 changes: 1 addition & 1 deletion tests/mlmodel_bedrock/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def bedrock_server():
if not _environ_as_bool("NEW_RELIC_TESTING_RECORD_BEDROCK_RESPONSES", False):
# Use mocked Bedrock backend and prerecorded responses
with MockExternalBedrockServer() as server:
client = boto3.client(
client = boto3.client( # nosec
"bedrock-runtime",
"us-east-1",
endpoint_url="http://localhost:%d" % server.port,
Expand Down
2 changes: 1 addition & 1 deletion tests/mlmodel_bedrock/test_chat_completion.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ def is_file_payload(request):
"amazon.titan-text-express-v1",
"ai21.j2-mid-v1",
# ("anthropic.claude-instant-v1", '{"prompt": "Human: {prompt}\n\nAssistant:", "max_tokens_to_sample": {max_tokens:d}}'),
# ("cohere.command-text-v14", '{"prompt": "{prompt}", "max_tokens": {max_tokens:d}, "temperature": {temperature:f}}'),
"cohere.command-text-v14",
],
)
def model_id(request):
Expand Down
Loading