Skip to content

Commit

Permalink
feat(anthropic): add llm provider and system attributes to anthropic …
Browse files Browse the repository at this point in the history
…instrumentation (#1084)
  • Loading branch information
axiomofjoy authored Oct 28, 2024
1 parent 5c0f1e5 commit 32756ed
Show file tree
Hide file tree
Showing 2 changed files with 50 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@
DocumentAttributes,
EmbeddingAttributes,
MessageAttributes,
OpenInferenceLLMProviderValues,
OpenInferenceLLMSystemValues,
OpenInferenceMimeTypeValues,
OpenInferenceSpanKindValues,
SpanAttributes,
Expand Down Expand Up @@ -88,6 +90,8 @@ def __call__(
span.set_attributes(
{
**dict(_get_llm_model(arguments)),
**dict(_get_llm_provider()),
**dict(_get_llm_system()),
OPENINFERENCE_SPAN_KIND: LLM,
LLM_PROMPTS: [llm_prompt],
INPUT_VALUE: safe_json_dumps(arguments),
Expand Down Expand Up @@ -146,6 +150,8 @@ async def __call__(
span.set_attributes(
{
**dict(_get_llm_model(arguments)),
**dict(_get_llm_provider()),
**dict(_get_llm_system()),
OPENINFERENCE_SPAN_KIND: LLM,
LLM_PROMPTS: [llm_prompt],
INPUT_VALUE: safe_json_dumps(arguments),
Expand Down Expand Up @@ -204,6 +210,8 @@ def __call__(
span.set_attributes(
{
**dict(_get_llm_model(arguments)),
**dict(_get_llm_provider()),
**dict(_get_llm_system()),
OPENINFERENCE_SPAN_KIND: LLM,
**dict(_get_input_messages(llm_input_messages)),
LLM_INVOCATION_PARAMETERS: safe_json_dumps(invocation_parameters),
Expand Down Expand Up @@ -263,6 +271,8 @@ async def __call__(

span.set_attributes(
{
**dict(_get_llm_provider()),
**dict(_get_llm_system()),
**dict(_get_llm_model(arguments)),
OPENINFERENCE_SPAN_KIND: LLM,
**dict(_get_input_messages(llm_input_messages)),
Expand Down Expand Up @@ -295,6 +305,14 @@ async def __call__(
return response


def _get_llm_provider() -> Iterator[Tuple[str, Any]]:
yield LLM_PROVIDER, LLM_PROVIDER_ANTHROPIC


def _get_llm_system() -> Iterator[Tuple[str, Any]]:
yield LLM_SYSTEM, LLM_SYSTEM_ANTHROPIC


def _get_llm_model(arguments: Mapping[str, Any]) -> Iterator[Tuple[str, Any]]:
if model_name := arguments.get("model"):
yield LLM_MODEL_NAME, model_name
Expand Down Expand Up @@ -450,3 +468,7 @@ def _validate_invocation_parameter(parameter: Any) -> bool:
# Module-level shorthand aliases for OpenInference span-attribute keys and the
# canonical Anthropic provider/system enum values used throughout this module.
TOOL_CALL_FUNCTION_ARGUMENTS_JSON = ToolCallAttributes.TOOL_CALL_FUNCTION_ARGUMENTS_JSON
TOOL_CALL_FUNCTION_NAME = ToolCallAttributes.TOOL_CALL_FUNCTION_NAME
USER_ID = SpanAttributes.USER_ID
LLM_PROVIDER = SpanAttributes.LLM_PROVIDER
LLM_SYSTEM = SpanAttributes.LLM_SYSTEM
LLM_PROVIDER_ANTHROPIC = OpenInferenceLLMProviderValues.ANTHROPIC.value
LLM_SYSTEM_ANTHROPIC = OpenInferenceLLMSystemValues.ANTHROPIC.value
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@
DocumentAttributes,
EmbeddingAttributes,
MessageAttributes,
OpenInferenceLLMProviderValues,
OpenInferenceLLMSystemValues,
OpenInferenceMimeTypeValues,
OpenInferenceSpanKindValues,
SpanAttributes,
Expand Down Expand Up @@ -147,6 +149,8 @@ def test_anthropic_instrumentation_completions_streaming(
print(attributes)

assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
assert attributes.pop(LLM_PROVIDER) == LLM_PROVIDER_ANTHROPIC
assert attributes.pop(LLM_SYSTEM) == LLM_SYSTEM_ANTHROPIC
assert isinstance(attributes.pop(INPUT_VALUE), str)
assert attributes.pop(INPUT_MIME_TYPE) == JSON
assert isinstance(attributes.pop(OUTPUT_VALUE), str)
Expand All @@ -159,6 +163,7 @@ def test_anthropic_instrumentation_completions_streaming(
invocation_params = {"model": "claude-2.1", "max_tokens_to_sample": 1000, "stream": True}
assert json.loads(inv_params) == invocation_params
assert attributes.pop(LLM_OUTPUT_MESSAGES) == " Light scatters blue."
assert not attributes


@pytest.mark.asyncio
Expand Down Expand Up @@ -196,6 +201,8 @@ async def test_anthropic_instrumentation_async_completions_streaming(
print(attributes)

assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
assert attributes.pop(LLM_PROVIDER) == LLM_PROVIDER_ANTHROPIC
assert attributes.pop(LLM_SYSTEM) == LLM_SYSTEM_ANTHROPIC
assert isinstance(attributes.pop(INPUT_VALUE), str)
assert attributes.pop(INPUT_MIME_TYPE) == JSON
assert isinstance(attributes.pop(OUTPUT_VALUE), str)
Expand All @@ -208,6 +215,7 @@ async def test_anthropic_instrumentation_async_completions_streaming(
invocation_params = {"model": "claude-2.1", "max_tokens_to_sample": 1000, "stream": True}
assert json.loads(inv_params) == invocation_params
assert attributes.pop(LLM_OUTPUT_MESSAGES) == " Light scatters blue."
assert not attributes


@pytest.mark.vcr(
Expand Down Expand Up @@ -242,6 +250,8 @@ def test_anthropic_instrumentation_completions(
attributes = dict(spans[0].attributes or {})

assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
assert attributes.pop(LLM_PROVIDER) == LLM_PROVIDER_ANTHROPIC
assert attributes.pop(LLM_SYSTEM) == LLM_SYSTEM_ANTHROPIC
assert isinstance(attributes.pop(INPUT_VALUE), str)
assert attributes.pop(INPUT_MIME_TYPE) == JSON
assert isinstance(attributes.pop(OUTPUT_VALUE), str)
Expand Down Expand Up @@ -286,6 +296,8 @@ def test_anthropic_instrumentation_messages(
attributes = dict(spans[0].attributes or {})

assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
assert attributes.pop(LLM_PROVIDER) == LLM_PROVIDER_ANTHROPIC
assert attributes.pop(LLM_SYSTEM) == LLM_SYSTEM_ANTHROPIC
assert attributes.pop(f"{LLM_INPUT_MESSAGES}.0.{MESSAGE_CONTENT}") == input_message
assert attributes.pop(f"{LLM_INPUT_MESSAGES}.0.{MESSAGE_ROLE}") == "user"
assert isinstance(
Expand Down Expand Up @@ -343,6 +355,8 @@ def test_anthropic_instrumentation_messages_streaming(
attributes = dict(spans[0].attributes or {})

assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
assert attributes.pop(LLM_PROVIDER) == LLM_PROVIDER_ANTHROPIC
assert attributes.pop(LLM_SYSTEM) == LLM_SYSTEM_ANTHROPIC
assert attributes.pop(f"{LLM_INPUT_MESSAGES}.0.{MESSAGE_CONTENT}") == input_message
assert attributes.pop(f"{LLM_INPUT_MESSAGES}.0.{MESSAGE_ROLE}") == "user"
assert isinstance(
Expand Down Expand Up @@ -405,6 +419,8 @@ async def test_anthropic_instrumentation_async_messages_streaming(
attributes = dict(spans[0].attributes or {})

assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
assert attributes.pop(LLM_PROVIDER) == LLM_PROVIDER_ANTHROPIC
assert attributes.pop(LLM_SYSTEM) == LLM_SYSTEM_ANTHROPIC
assert attributes.pop(f"{LLM_INPUT_MESSAGES}.0.{MESSAGE_CONTENT}") == input_message
assert attributes.pop(f"{LLM_INPUT_MESSAGES}.0.{MESSAGE_ROLE}") == "user"
assert isinstance(
Expand Down Expand Up @@ -462,6 +478,8 @@ async def test_anthropic_instrumentation_async_completions(
attributes = dict(spans[0].attributes or {})

assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
assert attributes.pop(LLM_PROVIDER) == LLM_PROVIDER_ANTHROPIC
assert attributes.pop(LLM_SYSTEM) == LLM_SYSTEM_ANTHROPIC
assert isinstance(attributes.pop(INPUT_VALUE), str)
assert attributes.pop(INPUT_MIME_TYPE) == JSON
assert isinstance(attributes.pop(OUTPUT_VALUE), str)
Expand Down Expand Up @@ -506,6 +524,8 @@ async def test_anthropic_instrumentation_async_messages(
attributes = dict(spans[0].attributes or {})

assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
assert attributes.pop(LLM_PROVIDER) == LLM_PROVIDER_ANTHROPIC
assert attributes.pop(LLM_SYSTEM) == LLM_SYSTEM_ANTHROPIC
assert attributes.pop(f"{LLM_INPUT_MESSAGES}.0.{MESSAGE_CONTENT}") == input_message
assert attributes.pop(f"{LLM_INPUT_MESSAGES}.0.{MESSAGE_ROLE}") == "user"
assert isinstance(
Expand Down Expand Up @@ -625,6 +645,8 @@ def test_anthropic_instrumentation_multiple_tool_calling(
assert isinstance(attributes.pop(OUTPUT_VALUE), str)
assert isinstance(attributes.pop(OUTPUT_MIME_TYPE), str)
assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
assert attributes.pop(LLM_PROVIDER) == LLM_PROVIDER_ANTHROPIC
assert attributes.pop(LLM_SYSTEM) == LLM_SYSTEM_ANTHROPIC
assert not attributes


Expand Down Expand Up @@ -728,6 +750,8 @@ def test_anthropic_instrumentation_multiple_tool_calling_streaming(
assert isinstance(attributes.pop(OUTPUT_VALUE), str)
assert attributes.pop(OUTPUT_MIME_TYPE) == "application/json"
assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
assert attributes.pop(LLM_PROVIDER) == LLM_PROVIDER_ANTHROPIC
assert attributes.pop(LLM_SYSTEM) == LLM_SYSTEM_ANTHROPIC
assert not attributes


Expand Down Expand Up @@ -993,3 +1017,7 @@ def test_oitracer(
# Shorthand aliases for OpenInference span-attribute keys and the canonical
# Anthropic provider/system enum values asserted on in the tests above.
LLM_PROMPT_TEMPLATE = SpanAttributes.LLM_PROMPT_TEMPLATE
LLM_PROMPT_TEMPLATE_VARIABLES = SpanAttributes.LLM_PROMPT_TEMPLATE_VARIABLES
USER_ID = SpanAttributes.USER_ID
LLM_PROVIDER = SpanAttributes.LLM_PROVIDER
LLM_SYSTEM = SpanAttributes.LLM_SYSTEM
LLM_PROVIDER_ANTHROPIC = OpenInferenceLLMProviderValues.ANTHROPIC.value
LLM_SYSTEM_ANTHROPIC = OpenInferenceLLMSystemValues.ANTHROPIC.value

0 comments on commit 32756ed

Please sign in to comment.