From a477083ada43d5854ab882dcca54c16a75dccd62 Mon Sep 17 00:00:00 2001 From: Tony Powell Date: Mon, 21 Oct 2024 14:33:16 -0400 Subject: [PATCH] fix: Remove system and provider attributes from embeddings spans --- .../src/instrumentation.ts | 2 - .../test/openai.test.ts | 218 ++++++++++-------- 2 files changed, 118 insertions(+), 102 deletions(-) diff --git a/js/packages/openinference-instrumentation-openai/src/instrumentation.ts b/js/packages/openinference-instrumentation-openai/src/instrumentation.ts index 0b215e08c..c566d0ae8 100644 --- a/js/packages/openinference-instrumentation-openai/src/instrumentation.ts +++ b/js/packages/openinference-instrumentation-openai/src/instrumentation.ts @@ -300,8 +300,6 @@ export class OpenAIInstrumentation extends InstrumentationBase { ? MimeType.TEXT : MimeType.JSON, - [SemanticConventions.LLM_SYSTEM]: LLMSystem.OPENAI, - [SemanticConventions.LLM_PROVIDER]: LLMProvider.OPENAI, ...getEmbeddingTextAttributes(body), }, }); diff --git a/js/packages/openinference-instrumentation-openai/test/openai.test.ts b/js/packages/openinference-instrumentation-openai/test/openai.test.ts index 0e8e996a2..4f636b49b 100644 --- a/js/packages/openinference-instrumentation-openai/test/openai.test.ts +++ b/js/packages/openinference-instrumentation-openai/test/openai.test.ts @@ -100,23 +100,25 @@ describe("OpenAIInstrumentation", () => { const span = spans[0]; expect(span.name).toBe("OpenAI Chat Completions"); expect(span.attributes).toMatchInlineSnapshot(` - { - "input.mime_type": "application/json", - "input.value": "{"messages":[{"role":"user","content":"Say this is a test"}],"model":"gpt-3.5-turbo"}", - "llm.input_messages.0.message.content": "Say this is a test", - "llm.input_messages.0.message.role": "user", - "llm.invocation_parameters": "{"model":"gpt-3.5-turbo"}", - "llm.model_name": "gpt-3.5-turbo-0613", - "llm.output_messages.0.message.content": "This is a test.", - "llm.output_messages.0.message.role": "assistant", - 
"llm.token_count.completion": 5, - "llm.token_count.prompt": 12, - "llm.token_count.total": 17, - "openinference.span.kind": "LLM", - "output.mime_type": "application/json", - "output.value": "{"id":"chatcmpl-8adq9JloOzNZ9TyuzrKyLpGXexh6p","object":"chat.completion","created":1703743645,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"message":{"role":"assistant","content":"This is a test."},"logprobs":null,"finish_reason":"stop"}],"usage":{"prompt_tokens":12,"completion_tokens":5,"total_tokens":17}}", - } - `); +{ + "input.mime_type": "application/json", + "input.value": "{"messages":[{"role":"user","content":"Say this is a test"}],"model":"gpt-3.5-turbo"}", + "llm.input_messages.0.message.content": "Say this is a test", + "llm.input_messages.0.message.role": "user", + "llm.invocation_parameters": "{"model":"gpt-3.5-turbo"}", + "llm.model_name": "gpt-3.5-turbo-0613", + "llm.output_messages.0.message.content": "This is a test.", + "llm.output_messages.0.message.role": "assistant", + "llm.provider": "openai", + "llm.system": "openai", + "llm.token_count.completion": 5, + "llm.token_count.prompt": 12, + "llm.token_count.total": 17, + "openinference.span.kind": "LLM", + "output.mime_type": "application/json", + "output.value": "{"id":"chatcmpl-8adq9JloOzNZ9TyuzrKyLpGXexh6p","object":"chat.completion","created":1703743645,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"message":{"role":"assistant","content":"This is a test."},"logprobs":null,"finish_reason":"stop"}],"usage":{"prompt_tokens":12,"completion_tokens":5,"total_tokens":17}}", +} +`); }); it("creates a span for completions", async () => { const response = { @@ -150,19 +152,21 @@ describe("OpenAIInstrumentation", () => { const span = spans[0]; expect(span.name).toBe("OpenAI Completions"); expect(span.attributes).toMatchInlineSnapshot(` - { - "input.mime_type": "text/plain", - "input.value": "Say this is a test", - "llm.invocation_parameters": "{"model":"gpt-3.5-turbo-instruct"}", - "llm.model_name": 
"gpt-3.5-turbo-instruct", - "llm.token_count.completion": 5, - "llm.token_count.prompt": 12, - "llm.token_count.total": 17, - "openinference.span.kind": "LLM", - "output.mime_type": "text/plain", - "output.value": "This is a test", - } - `); +{ + "input.mime_type": "text/plain", + "input.value": "Say this is a test", + "llm.invocation_parameters": "{"model":"gpt-3.5-turbo-instruct"}", + "llm.model_name": "gpt-3.5-turbo-instruct", + "llm.provider": "openai", + "llm.system": "openai", + "llm.token_count.completion": 5, + "llm.token_count.prompt": 12, + "llm.token_count.total": 17, + "openinference.span.kind": "LLM", + "output.mime_type": "text/plain", + "output.value": "This is a test", +} +`); }); it("creates a span for embedding create", async () => { const response = { @@ -231,20 +235,22 @@ describe("OpenAIInstrumentation", () => { const span = spans[0]; expect(span.name).toBe("OpenAI Chat Completions"); expect(span.attributes).toMatchInlineSnapshot(` - { - "input.mime_type": "application/json", - "input.value": "{"messages":[{"role":"user","content":"Say this is a test"}],"model":"gpt-3.5-turbo","stream":true}", - "llm.input_messages.0.message.content": "Say this is a test", - "llm.input_messages.0.message.role": "user", - "llm.invocation_parameters": "{"model":"gpt-3.5-turbo","stream":true}", - "llm.model_name": "gpt-3.5-turbo", - "llm.output_messages.0.message.content": "This is a test.", - "llm.output_messages.0.message.role": "assistant", - "openinference.span.kind": "LLM", - "output.mime_type": "text/plain", - "output.value": "This is a test.", - } - `); +{ + "input.mime_type": "application/json", + "input.value": "{"messages":[{"role":"user","content":"Say this is a test"}],"model":"gpt-3.5-turbo","stream":true}", + "llm.input_messages.0.message.content": "Say this is a test", + "llm.input_messages.0.message.role": "user", + "llm.invocation_parameters": "{"model":"gpt-3.5-turbo","stream":true}", + "llm.model_name": "gpt-3.5-turbo", + 
"llm.output_messages.0.message.content": "This is a test.", + "llm.output_messages.0.message.role": "assistant", + "llm.provider": "openai", + "llm.system": "openai", + "openinference.span.kind": "LLM", + "output.mime_type": "text/plain", + "output.value": "This is a test.", +} +`); }); it("should capture tool calls", async () => { // Mock out the embedding create endpoint @@ -382,26 +388,28 @@ describe("OpenAIInstrumentation", () => { const [span1, span2, span3] = spans; expect(span1.name).toBe("OpenAI Chat Completions"); expect(span1.attributes).toMatchInlineSnapshot(` - { - "input.mime_type": "application/json", - "input.value": "{"model":"gpt-3.5-turbo","messages":[{"role":"user","content":"How is the weather this week?"}],"tools":[{"type":"function","function":{"name":"getCurrentLocation","parameters":{"type":"object","properties":{}},"description":"Get the current location of the user."}},{"type":"function","function":{"name":"getWeather","parameters":{"type":"object","properties":{"location":{"type":"string"}}},"description":"Get the weather for a location."}}],"tool_choice":"auto","stream":false}", - "llm.input_messages.0.message.content": "How is the weather this week?", - "llm.input_messages.0.message.role": "user", - "llm.invocation_parameters": "{"model":"gpt-3.5-turbo","tools":[{"type":"function","function":{"name":"getCurrentLocation","parameters":{"type":"object","properties":{}},"description":"Get the current location of the user."}},{"type":"function","function":{"name":"getWeather","parameters":{"type":"object","properties":{"location":{"type":"string"}}},"description":"Get the weather for a location."}}],"tool_choice":"auto","stream":false}", - "llm.model_name": "gpt-3.5-turbo-0613", - "llm.output_messages.0.message.role": "assistant", - "llm.output_messages.0.message.tool_calls.0.tool_call.function.arguments": "{}", - "llm.output_messages.0.message.tool_calls.0.tool_call.function.name": "getCurrentLocation", - "llm.token_count.completion": 7, - 
"llm.token_count.prompt": 70, - "llm.token_count.total": 77, - "llm.tools.0.tool.json_schema": "{"type":"function","function":{"name":"getCurrentLocation","parameters":{"type":"object","properties":{}},"description":"Get the current location of the user."}}", - "llm.tools.1.tool.json_schema": "{"type":"function","function":{"name":"getWeather","parameters":{"type":"object","properties":{"location":{"type":"string"}}},"description":"Get the weather for a location."}}", - "openinference.span.kind": "LLM", - "output.mime_type": "application/json", - "output.value": "{"id":"chatcmpl-8hhqZDFTRD0vzExhqWnMLE7viVl7E","object":"chat.completion","created":1705427343,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"message":{"role":"assistant","content":null,"tool_calls":[{"id":"call_5ERYvu4iTGSvDlcDQjDP3g3J","type":"function","function":{"name":"getCurrentLocation","arguments":"{}"}}]},"logprobs":null,"finish_reason":"tool_calls"}],"usage":{"prompt_tokens":70,"completion_tokens":7,"total_tokens":77},"system_fingerprint":null}", - } - `); +{ + "input.mime_type": "application/json", + "input.value": "{"model":"gpt-3.5-turbo","messages":[{"role":"user","content":"How is the weather this week?"}],"tools":[{"type":"function","function":{"name":"getCurrentLocation","parameters":{"type":"object","properties":{}},"description":"Get the current location of the user."}},{"type":"function","function":{"name":"getWeather","parameters":{"type":"object","properties":{"location":{"type":"string"}}},"description":"Get the weather for a location."}}],"tool_choice":"auto","stream":false}", + "llm.input_messages.0.message.content": "How is the weather this week?", + "llm.input_messages.0.message.role": "user", + "llm.invocation_parameters": "{"model":"gpt-3.5-turbo","tools":[{"type":"function","function":{"name":"getCurrentLocation","parameters":{"type":"object","properties":{}},"description":"Get the current location of the 
user."}},{"type":"function","function":{"name":"getWeather","parameters":{"type":"object","properties":{"location":{"type":"string"}}},"description":"Get the weather for a location."}}],"tool_choice":"auto","stream":false}", + "llm.model_name": "gpt-3.5-turbo-0613", + "llm.output_messages.0.message.role": "assistant", + "llm.output_messages.0.message.tool_calls.0.tool_call.function.arguments": "{}", + "llm.output_messages.0.message.tool_calls.0.tool_call.function.name": "getCurrentLocation", + "llm.provider": "openai", + "llm.system": "openai", + "llm.token_count.completion": 7, + "llm.token_count.prompt": 70, + "llm.token_count.total": 77, + "llm.tools.0.tool.json_schema": "{"type":"function","function":{"name":"getCurrentLocation","parameters":{"type":"object","properties":{}},"description":"Get the current location of the user."}}", + "llm.tools.1.tool.json_schema": "{"type":"function","function":{"name":"getWeather","parameters":{"type":"object","properties":{"location":{"type":"string"}}},"description":"Get the weather for a location."}}", + "openinference.span.kind": "LLM", + "output.mime_type": "application/json", + "output.value": "{"id":"chatcmpl-8hhqZDFTRD0vzExhqWnMLE7viVl7E","object":"chat.completion","created":1705427343,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"message":{"role":"assistant","content":null,"tool_calls":[{"id":"call_5ERYvu4iTGSvDlcDQjDP3g3J","type":"function","function":{"name":"getCurrentLocation","arguments":"{}"}}]},"logprobs":null,"finish_reason":"tool_calls"}],"usage":{"prompt_tokens":70,"completion_tokens":7,"total_tokens":77},"system_fingerprint":null}", +} +`); expect(span2.name).toBe("OpenAI Chat Completions"); expect(span2.attributes).toMatchInlineSnapshot(` { @@ -421,6 +429,8 @@ describe("OpenAIInstrumentation", () => { "location": "Boston" }", "llm.output_messages.0.message.tool_calls.0.tool_call.function.name": "getWeather", + "llm.provider": "openai", + "llm.system": "openai", "llm.token_count.completion": 15, 
"llm.token_count.prompt": 86, "llm.token_count.total": 101, @@ -454,6 +464,8 @@ describe("OpenAIInstrumentation", () => { "llm.model_name": "gpt-3.5-turbo-0613", "llm.output_messages.0.message.content": "The weather in Boston this week is expected to be rainy with a temperature of 52 degrees.", "llm.output_messages.0.message.role": "assistant", + "llm.provider": "openai", + "llm.system": "openai", "llm.token_count.completion": 20, "llm.token_count.prompt": 121, "llm.token_count.total": 141, @@ -586,6 +598,8 @@ describe("OpenAIInstrumentation", () => { "llm.output_messages.0.message.role": "assistant", "llm.output_messages.0.message.tool_calls.0.tool_call.function.arguments": "{}", "llm.output_messages.0.message.tool_calls.0.tool_call.function.name": "getWeather", + "llm.provider": "openai", + "llm.system": "openai", "llm.tools.0.tool.json_schema": "{"type":"function","function":{"name":"getCurrentLocation","parameters":{"type":"object","properties":{}},"description":"Get the current location of the user."}}", "llm.tools.1.tool.json_schema": "{"type":"function","function":{"name":"getWeather","description":"Get the weather for a location.","parameters":{"type":"object","properties":{"location":{"type":"string"}}}}}", "openinference.span.kind": "LLM", @@ -689,22 +703,24 @@ describe("OpenAIInstrumentation", () => { const span = spans[0]; expect(span.name).toBe("OpenAI Chat Completions"); expect(span.attributes).toMatchInlineSnapshot(` - { - "input.mime_type": "application/json", - "input.value": "{"messages":[{"role":"user","content":"What's the weather today?"}],"model":"gpt-3.5-turbo","functions":[{"name":"getWeather","description":"Get the weather for a location.","parameters":{"type":"object","properties":{"location":{"type":"string"}}}},{"name":"getCurrentLocation","description":"Get the current location of the user.","parameters":{"type":"object","properties":{}}}],"stream":true}", - "llm.input_messages.0.message.content": "What's the weather today?", - 
"llm.input_messages.0.message.role": "user", - "llm.invocation_parameters": "{"model":"gpt-3.5-turbo","functions":[{"name":"getWeather","description":"Get the weather for a location.","parameters":{"type":"object","properties":{"location":{"type":"string"}}}},{"name":"getCurrentLocation","description":"Get the current location of the user.","parameters":{"type":"object","properties":{}}}],"stream":true}", - "llm.model_name": "gpt-3.5-turbo", - "llm.output_messages.0.message.content": "", - "llm.output_messages.0.message.function_call_arguments_json": "{}", - "llm.output_messages.0.message.function_call_name": "getWeather", - "llm.output_messages.0.message.role": "assistant", - "openinference.span.kind": "LLM", - "output.mime_type": "text/plain", - "output.value": "", - } - `); +{ + "input.mime_type": "application/json", + "input.value": "{"messages":[{"role":"user","content":"What's the weather today?"}],"model":"gpt-3.5-turbo","functions":[{"name":"getWeather","description":"Get the weather for a location.","parameters":{"type":"object","properties":{"location":{"type":"string"}}}},{"name":"getCurrentLocation","description":"Get the current location of the user.","parameters":{"type":"object","properties":{}}}],"stream":true}", + "llm.input_messages.0.message.content": "What's the weather today?", + "llm.input_messages.0.message.role": "user", + "llm.invocation_parameters": "{"model":"gpt-3.5-turbo","functions":[{"name":"getWeather","description":"Get the weather for a location.","parameters":{"type":"object","properties":{"location":{"type":"string"}}}},{"name":"getCurrentLocation","description":"Get the current location of the user.","parameters":{"type":"object","properties":{}}}],"stream":true}", + "llm.model_name": "gpt-3.5-turbo", + "llm.output_messages.0.message.content": "", + "llm.output_messages.0.message.function_call_arguments_json": "{}", + "llm.output_messages.0.message.function_call_name": "getWeather", + "llm.output_messages.0.message.role": 
"assistant", + "llm.provider": "openai", + "llm.system": "openai", + "openinference.span.kind": "LLM", + "output.mime_type": "text/plain", + "output.value": "", +} +`); }); it("should not emit a span if tracing is suppressed", async () => { const response = { @@ -801,25 +817,27 @@ describe("OpenAIInstrumentation", () => { const span = spans[0]; expect(span.name).toBe("OpenAI Chat Completions"); expect(span.attributes).toMatchInlineSnapshot(` - { - "input.mime_type": "application/json", - "input.value": "{"messages":[{"role":"user","content":[{"type":"text","text":"Say this is a test"},{"type":"image_url","image_url":{"url":"data:image/gif;base64,R0lGODlhAQABAIAAAP///wAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw=="}}]}],"model":"gpt-3.5-turbo"}", - "llm.input_messages.0.message.contents.0.message_content.text": "Say this is a test", - "llm.input_messages.0.message.contents.0.message_content.type": "text", - "llm.input_messages.0.message.contents.1.message_content.image": "data:image/gif;base64,R0lGODlhAQABAIAAAP///wAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw==", - "llm.input_messages.0.message.contents.1.message_content.type": "image", - "llm.input_messages.0.message.role": "user", - "llm.invocation_parameters": "{"model":"gpt-3.5-turbo"}", - "llm.model_name": "gpt-3.5-turbo-0613", - "llm.output_messages.0.message.content": "This is a test.", - "llm.output_messages.0.message.role": "assistant", - "llm.token_count.completion": 5, - "llm.token_count.prompt": 12, - "llm.token_count.total": 17, - "openinference.span.kind": "LLM", - "output.mime_type": "application/json", - "output.value": "{"id":"chatcmpl-8adq9JloOzNZ9TyuzrKyLpGXexh6p","object":"chat.completion","created":1703743645,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"message":{"role":"assistant","content":"This is a test."},"logprobs":null,"finish_reason":"stop"}],"usage":{"prompt_tokens":12,"completion_tokens":5,"total_tokens":17}}", - } - `); +{ + "input.mime_type": "application/json", + "input.value": 
"{"messages":[{"role":"user","content":[{"type":"text","text":"Say this is a test"},{"type":"image_url","image_url":{"url":"data:image/gif;base64,R0lGODlhAQABAIAAAP///wAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw=="}}]}],"model":"gpt-3.5-turbo"}", + "llm.input_messages.0.message.contents.0.message_content.text": "Say this is a test", + "llm.input_messages.0.message.contents.0.message_content.type": "text", + "llm.input_messages.0.message.contents.1.message_content.image": "data:image/gif;base64,R0lGODlhAQABAIAAAP///wAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw==", + "llm.input_messages.0.message.contents.1.message_content.type": "image", + "llm.input_messages.0.message.role": "user", + "llm.invocation_parameters": "{"model":"gpt-3.5-turbo"}", + "llm.model_name": "gpt-3.5-turbo-0613", + "llm.output_messages.0.message.content": "This is a test.", + "llm.output_messages.0.message.role": "assistant", + "llm.provider": "openai", + "llm.system": "openai", + "llm.token_count.completion": 5, + "llm.token_count.prompt": 12, + "llm.token_count.total": 17, + "openinference.span.kind": "LLM", + "output.mime_type": "application/json", + "output.value": "{"id":"chatcmpl-8adq9JloOzNZ9TyuzrKyLpGXexh6p","object":"chat.completion","created":1703743645,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"message":{"role":"assistant","content":"This is a test."},"logprobs":null,"finish_reason":"stop"}],"usage":{"prompt_tokens":12,"completion_tokens":5,"total_tokens":17}}", +} +`); }); });