Add support for mistral bedrock model
hmstepanek committed May 15, 2024
1 parent 3e5be52 commit 292b1ff
Showing 5 changed files with 447 additions and 1 deletion.
38 changes: 38 additions & 0 deletions newrelic/hooks/external_botocore.py
@@ -189,6 +189,14 @@ def extract_bedrock_titan_text_model_request(request_body, bedrock_attrs):
    return bedrock_attrs


def extract_bedrock_mistral_text_model_request(request_body, bedrock_attrs):
    request_body = json.loads(request_body)
    bedrock_attrs["input_message_list"] = [{"role": "user", "content": request_body.get("prompt")}]
    bedrock_attrs["request.max_tokens"] = request_body.get("max_tokens")
    bedrock_attrs["request.temperature"] = request_body.get("temperature")
    return bedrock_attrs


def extract_bedrock_titan_text_model_response(response_body, bedrock_attrs):
    if response_body:
        response_body = json.loads(response_body)
@@ -203,6 +211,18 @@ def extract_bedrock_titan_text_model_response(response_body, bedrock_attrs):
    return bedrock_attrs


def extract_bedrock_mistral_text_model_response(response_body, bedrock_attrs):
    if response_body:
        response_body = json.loads(response_body)
        outputs = response_body.get("outputs")
        if outputs:
            bedrock_attrs["response.choices.finish_reason"] = outputs[0]["stop_reason"]
            bedrock_attrs["output_message_list"] = [
                {"role": "assistant", "content": result["text"]} for result in outputs
            ]
    return bedrock_attrs


def extract_bedrock_titan_text_model_streaming_response(response_body, bedrock_attrs):
    if response_body:
        if "outputText" in response_body:
@@ -214,6 +234,18 @@ def extract_bedrock_titan_text_model_streaming_response(response_body, bedrock_attrs):
    return bedrock_attrs


def extract_bedrock_mistral_text_model_streaming_response(response_body, bedrock_attrs):
    if response_body:
        outputs = response_body.get("outputs")
        if outputs:
            bedrock_attrs["output_message_list"] = bedrock_attrs.get(
                "output_message_list", [{"role": "assistant", "content": ""}]
            )
            bedrock_attrs["output_message_list"][0]["content"] += outputs[0].get("text", "")
            bedrock_attrs["response.choices.finish_reason"] = outputs[0].get("stop_reason", None)
    return bedrock_attrs


def extract_bedrock_titan_embedding_model_request(request_body, bedrock_attrs):
    request_body = json.loads(request_body)

@@ -407,6 +439,12 @@ def extract_bedrock_cohere_model_streaming_response(response_body, bedrock_attrs):
        extract_bedrock_llama_model_response,
        extract_bedrock_llama_model_streaming_response,
    ),
    (
        "mistral",
        extract_bedrock_mistral_text_model_request,
        extract_bedrock_mistral_text_model_response,
        extract_bedrock_mistral_text_model_streaming_response,
    ),
]
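
For reference, a minimal sketch of how the new extractors consume a Mistral text-model invocation. The request and response bodies are hypothetical samples shaped only by the fields the functions above read (prompt, max_tokens, temperature, and outputs[].text / outputs[].stop_reason); the import path simply points at the file changed in this commit.

import json

from newrelic.hooks.external_botocore import (
    extract_bedrock_mistral_text_model_request,
    extract_bedrock_mistral_text_model_response,
)

# Hypothetical Mistral invocation payloads, shaped by the keys the extractors read.
request_body = json.dumps(
    {
        "prompt": "<s>[INST] Summarize observability in one sentence. [/INST]",
        "max_tokens": 256,
        "temperature": 0.7,
    }
)
response_body = json.dumps(
    {"outputs": [{"text": "Observability is inferring internal state from outputs.", "stop_reason": "stop"}]}
)

bedrock_attrs = {}
extract_bedrock_mistral_text_model_request(request_body, bedrock_attrs)
extract_bedrock_mistral_text_model_response(response_body, bedrock_attrs)

# bedrock_attrs now contains:
#   input_message_list             -> [{"role": "user", "content": "<s>[INST] ... [/INST]"}]
#   request.max_tokens             -> 256
#   request.temperature            -> 0.7
#   output_message_list            -> [{"role": "assistant", "content": "Observability is ..."}]
#   response.choices.finish_reason -> "stop"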


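The streaming variant is called once per decoded chunk: it accumulates each chunk's text into a single assistant message and records stop_reason from the most recent chunk. A sketch under the same assumptions, with hypothetical chunk dictionaries:

from newrelic.hooks.external_botocore import extract_bedrock_mistral_text_model_streaming_response

# Hypothetical stream chunks; only the final chunk carries a stop_reason.
chunks = [
    {"outputs": [{"text": "Observability is inferring internal "}]},
    {"outputs": [{"text": "state from outputs.", "stop_reason": "stop"}]},
]

streaming_attrs = {}
for chunk in chunks:
    extract_bedrock_mistral_text_model_streaming_response(chunk, streaming_attrs)

# streaming_attrs["output_message_list"][0]["content"] is the concatenated text,
# and streaming_attrs["response.choices.finish_reason"] is "stop".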
