Skip to content

Commit

Permalink
fix: capture anthropic model name from response (#1124)
Browse files Browse the repository at this point in the history
  • Loading branch information
RogerHYang authored Nov 20, 2024
1 parent 2a6d57a commit 8e915f2
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 9 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
if TYPE_CHECKING:
from pydantic import BaseModel

from anthropic.types import Usage
from anthropic.types import Message, Usage


class _WithTracer(ABC):
Expand Down Expand Up @@ -92,7 +92,7 @@ def __call__(
attributes=dict(
chain(
get_attributes_from_context(),
_get_llm_model(arguments),
_get_llm_model_name_from_input(arguments),
_get_llm_provider(),
_get_llm_system(),
_get_llm_span_kind(),
Expand Down Expand Up @@ -145,7 +145,7 @@ async def __call__(
attributes=dict(
chain(
get_attributes_from_context(),
_get_llm_model(arguments),
_get_llm_model_name_from_input(arguments),
_get_llm_provider(),
_get_llm_system(),
_get_llm_span_kind(),
Expand Down Expand Up @@ -198,7 +198,7 @@ def __call__(
attributes=dict(
chain(
get_attributes_from_context(),
_get_llm_model(arguments),
_get_llm_model_name_from_input(arguments),
_get_llm_provider(),
_get_llm_system(),
_get_llm_span_kind(),
Expand All @@ -223,6 +223,7 @@ def __call__(
span.set_attributes(
dict(
chain(
_get_llm_model_name_from_response(response),
_get_output_messages(response),
_get_llm_token_counts(response.usage),
_get_outputs(response),
Expand Down Expand Up @@ -260,7 +261,7 @@ async def __call__(
get_attributes_from_context(),
_get_llm_provider(),
_get_llm_system(),
_get_llm_model(arguments),
_get_llm_model_name_from_input(arguments),
_get_llm_span_kind(),
_get_llm_input_messages(llm_input_messages),
_get_llm_invocation_parameters(invocation_parameters),
Expand All @@ -283,6 +284,7 @@ async def __call__(
span.set_attributes(
dict(
chain(
_get_llm_model_name_from_response(response),
_get_output_messages(response),
_get_llm_token_counts(response.usage),
_get_outputs(response),
Expand Down Expand Up @@ -326,11 +328,16 @@ def _get_llm_token_counts(usage: "Usage") -> Iterator[Tuple[str, Any]]:
yield LLM_TOKEN_COUNT_COMPLETION, usage.output_tokens


def _get_llm_model(arguments: Mapping[str, Any]) -> Iterator[Tuple[str, Any]]:
def _get_llm_model_name_from_input(arguments: Mapping[str, Any]) -> Iterator[Tuple[str, Any]]:
    """Yield the LLM model-name span attribute taken from the request arguments, if present."""
    model_name = arguments.get("model")
    if model_name:
        yield LLM_MODEL_NAME, model_name


def _get_llm_model_name_from_response(message: "Message") -> Iterator[Tuple[str, Any]]:
    """Yield the LLM model-name span attribute reported by the Anthropic response message.

    The response's ``model`` field is authoritative (e.g. resolves a ``-latest``
    alias to the concrete model actually used), so it is recorded in addition to
    the model name captured from the request.
    """
    # Use a None default: bare getattr raises AttributeError when the attribute
    # is missing, which would crash instrumentation instead of simply skipping.
    if model_name := getattr(message, "model", None):
        yield LLM_MODEL_NAME, model_name


def _get_llm_invocation_parameters(
invocation_parameters: Mapping[str, Any],
) -> Iterator[Tuple[str, Any]]:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
interactions:
- request:
body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": "What''s
the capital of France?"}], "model": "claude-3-opus-20240229"}'
the capital of France?"}], "model": "claude-3-opus-latest"}'
headers: {}
method: POST
uri: https://api.anthropic.com/v1/messages
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -279,7 +279,7 @@ def test_anthropic_instrumentation_messages(
client = Anthropic(api_key="fake")
input_message = "What's the capital of France?"

invocation_params = {"max_tokens": 1024, "model": "claude-3-opus-20240229"}
invocation_params = {"max_tokens": 1024, "model": "claude-3-opus-latest"}

client.messages.create(
max_tokens=1024,
Expand All @@ -289,7 +289,7 @@ def test_anthropic_instrumentation_messages(
"content": input_message,
}
],
model="claude-3-opus-20240229",
model="claude-3-opus-latest",
)

spans = in_memory_span_exporter.get_finished_spans()
Expand Down

0 comments on commit 8e915f2

Please sign in to comment.