Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(js): Add tool call id to semantic conventions and parse via instrumentation #1105

Merged
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions js/.changeset/cool-wasps-float.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
"@arizeai/openinference-instrumentation-openai": minor
"@arizeai/openinference-semantic-conventions": minor
"@arizeai/openinference-vercel": minor
---

Support tool_call_id and tool_call.id
Original file line number Diff line number Diff line change
Expand Up @@ -503,15 +503,21 @@ function getChatCompletionInputMessageAttributes(
case "assistant":
if (message.tool_calls) {
message.tool_calls.forEach((toolCall, index) => {
const toolCallIndexPrefix = `${SemanticConventions.MESSAGE_TOOL_CALLS}.${index}.`;

// Add the tool call id if it exists
if (toolCall.id) {
attributes[
`${toolCallIndexPrefix}${SemanticConventions.TOOL_CALL_ID}`
] = toolCall.id;
}
// Make sure the tool call has a function
if (toolCall.function) {
const toolCallIndexPrefix = `${SemanticConventions.MESSAGE_TOOL_CALLS}.${index}.`;
attributes[
toolCallIndexPrefix + SemanticConventions.TOOL_CALL_FUNCTION_NAME
`${toolCallIndexPrefix}${SemanticConventions.TOOL_CALL_FUNCTION_NAME}`
] = toolCall.function.name;
attributes[
toolCallIndexPrefix +
SemanticConventions.TOOL_CALL_FUNCTION_ARGUMENTS_JSON
`${toolCallIndexPrefix}${SemanticConventions.TOOL_CALL_FUNCTION_ARGUMENTS_JSON}`
] = toolCall.function.arguments;
}
});
Expand All @@ -521,8 +527,10 @@ function getChatCompletionInputMessageAttributes(
attributes[SemanticConventions.MESSAGE_FUNCTION_CALL_NAME] = message.name;
break;
case "tool":
// There's nothing to add for the tool. There is a tool_id, but there are no
// semantic conventions for it
if (message.tool_call_id) {
attributes[`${SemanticConventions.MESSAGE_TOOL_CALL_ID}`] =
message.tool_call_id;
}
break;
case "system":
// There's nothing to add for the system. Content is captured above
Expand Down Expand Up @@ -613,6 +621,12 @@ function getChatCompletionOutputMessageAttributes(
if (message.tool_calls) {
message.tool_calls.forEach((toolCall, index) => {
const toolCallIndexPrefix = `${SemanticConventions.MESSAGE_TOOL_CALLS}.${index}.`;
// Add the tool call id if it exists
if (toolCall.id) {
attributes[
`${toolCallIndexPrefix}${SemanticConventions.TOOL_CALL_ID}`
] = toolCall.id;
}
// Double check that the tool call has a function
// NB: OpenAI only supports tool calls with functions right now but this may change
if (toolCall.function) {
Expand Down Expand Up @@ -759,6 +773,12 @@ function getToolAndFunctionCallAttributesFromStreamChunk(
if (choice.delta.tool_calls) {
choice.delta.tool_calls.forEach((toolCall, index) => {
const toolCallIndexPrefix = `${SemanticConventions.MESSAGE_TOOL_CALLS}.${index}.`;
// Add the tool call id if it exists
if (toolCall.id) {
attributes[
`${toolCallIndexPrefix}${SemanticConventions.TOOL_CALL_ID}`
] = toolCall.id;
}
// Double check that the tool call has a function
// NB: OpenAI only supports tool calls with functions right now but this may change
if (toolCall.function) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -398,6 +398,7 @@ describe("OpenAIInstrumentation", () => {
"llm.output_messages.0.message.role": "assistant",
"llm.output_messages.0.message.tool_calls.0.tool_call.function.arguments": "{}",
"llm.output_messages.0.message.tool_calls.0.tool_call.function.name": "getCurrentLocation",
"llm.output_messages.0.message.tool_calls.0.tool_call.id": "call_5ERYvu4iTGSvDlcDQjDP3g3J",
"llm.provider": "openai",
"llm.system": "openai",
"llm.token_count.completion": 7,
Expand All @@ -420,15 +421,18 @@ describe("OpenAIInstrumentation", () => {
"llm.input_messages.1.message.role": "assistant",
"llm.input_messages.1.message.tool_calls.0.tool_call.function.arguments": "{}",
"llm.input_messages.1.message.tool_calls.0.tool_call.function.name": "getCurrentLocation",
"llm.input_messages.1.message.tool_calls.0.tool_call.id": "call_5ERYvu4iTGSvDlcDQjDP3g3J",
"llm.input_messages.2.message.content": "Boston",
"llm.input_messages.2.message.role": "tool",
"llm.input_messages.2.message.tool_call_id": "call_5ERYvu4iTGSvDlcDQjDP3g3J",
"llm.invocation_parameters": "{"model":"gpt-3.5-turbo","tools":[{"type":"function","function":{"name":"getCurrentLocation","parameters":{"type":"object","properties":{}},"description":"Get the current location of the user."}},{"type":"function","function":{"name":"getWeather","parameters":{"type":"object","properties":{"location":{"type":"string"}}},"description":"Get the weather for a location."}}],"tool_choice":"auto","stream":false}",
"llm.model_name": "gpt-3.5-turbo-0613",
"llm.output_messages.0.message.role": "assistant",
"llm.output_messages.0.message.tool_calls.0.tool_call.function.arguments": "{
"location": "Boston"
}",
"llm.output_messages.0.message.tool_calls.0.tool_call.function.name": "getWeather",
"llm.output_messages.0.message.tool_calls.0.tool_call.id": "call_0LCdYLkdRUt3rV3dawoIFHBf",
"llm.provider": "openai",
"llm.system": "openai",
"llm.token_count.completion": 15,
Expand All @@ -451,15 +455,19 @@ describe("OpenAIInstrumentation", () => {
"llm.input_messages.1.message.role": "assistant",
"llm.input_messages.1.message.tool_calls.0.tool_call.function.arguments": "{}",
"llm.input_messages.1.message.tool_calls.0.tool_call.function.name": "getCurrentLocation",
"llm.input_messages.1.message.tool_calls.0.tool_call.id": "call_5ERYvu4iTGSvDlcDQjDP3g3J",
"llm.input_messages.2.message.content": "Boston",
"llm.input_messages.2.message.role": "tool",
"llm.input_messages.2.message.tool_call_id": "call_5ERYvu4iTGSvDlcDQjDP3g3J",
"llm.input_messages.3.message.role": "assistant",
"llm.input_messages.3.message.tool_calls.0.tool_call.function.arguments": "{
"location": "Boston"
}",
"llm.input_messages.3.message.tool_calls.0.tool_call.function.name": "getWeather",
"llm.input_messages.3.message.tool_calls.0.tool_call.id": "call_0LCdYLkdRUt3rV3dawoIFHBf",
"llm.input_messages.4.message.content": "{"temperature":52,"precipitation":"rainy"}",
"llm.input_messages.4.message.role": "tool",
"llm.input_messages.4.message.tool_call_id": "call_0LCdYLkdRUt3rV3dawoIFHBf",
"llm.invocation_parameters": "{"model":"gpt-3.5-turbo","tools":[{"type":"function","function":{"name":"getCurrentLocation","parameters":{"type":"object","properties":{}},"description":"Get the current location of the user."}},{"type":"function","function":{"name":"getWeather","parameters":{"type":"object","properties":{"location":{"type":"string"}}},"description":"Get the weather for a location."}}],"tool_choice":"auto","stream":false}",
"llm.model_name": "gpt-3.5-turbo-0613",
"llm.output_messages.0.message.content": "The weather in Boston this week is expected to be rainy with a temperature of 52 degrees.",
Expand Down Expand Up @@ -598,6 +606,7 @@ describe("OpenAIInstrumentation", () => {
"llm.output_messages.0.message.role": "assistant",
"llm.output_messages.0.message.tool_calls.0.tool_call.function.arguments": "{}",
"llm.output_messages.0.message.tool_calls.0.tool_call.function.name": "getWeather",
"llm.output_messages.0.message.tool_calls.0.tool_call.id": "call_PGkcUg2u6vYrCpTn0e9ofykY",
"llm.provider": "openai",
"llm.system": "openai",
"llm.tools.0.tool.json_schema": "{"type":"function","function":{"name":"getCurrentLocation","parameters":{"type":"object","properties":{}},"description":"Get the current location of the user."}}",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ export const MessageAttributePostfixes = {
function_call_name: "function_call_name",
function_call_arguments_json: "function_call_arguments_json",
tool_calls: "tool_calls",
tool_call_id: "tool_call_id",
} as const;

export const MessageContentsAttributePostfixes = {
Expand All @@ -89,6 +90,7 @@ export const ImageAttributesPostfixes = {
/**
 * Postfixes joined to the `tool_call` prefix to form the attribute keys for a
 * single tool call: the called function's name, its JSON-serialized
 * arguments, and the tool call's id.
 */
export const ToolCallAttributePostfixes = {
  function_name: "function.name",
  function_arguments_json: "function.arguments",
  id: "id",
} as const;

export const DocumentAttributePostfixes = {
Expand Down Expand Up @@ -202,6 +204,12 @@ export const MESSAGE_NAME =
export const MESSAGE_TOOL_CALLS =
`${SemanticAttributePrefixes.message}.${MessageAttributePostfixes.tool_calls}` as const;

/**
 * The id of the tool call that a `tool` role message is responding to
 */
export const MESSAGE_TOOL_CALL_ID =
`${SemanticAttributePrefixes.message}.${MessageAttributePostfixes.tool_call_id}` as const;

/**
* tool_call.function.name
*/
Expand All @@ -214,6 +222,12 @@ export const TOOL_CALL_FUNCTION_NAME =
export const TOOL_CALL_FUNCTION_ARGUMENTS_JSON =
`${SemanticAttributePrefixes.tool_call}.${ToolCallAttributePostfixes.function_arguments_json}` as const;

/**
 * The id of a tool call within a message's list of tool calls,
 * e.g. `tool_call.id`. Pairs with `MESSAGE_TOOL_CALL_ID` on the
 * responding `tool` role message.
 */
export const TOOL_CALL_ID =
  `${SemanticAttributePrefixes.tool_call}.${ToolCallAttributePostfixes.id}` as const;

/**
* The LLM function call function name
*/
Expand Down Expand Up @@ -430,6 +444,8 @@ export const SemanticConventions = {
MESSAGE_ROLE,
MESSAGE_NAME,
MESSAGE_TOOL_CALLS,
MESSAGE_TOOL_CALL_ID,
TOOL_CALL_ID,
TOOL_CALL_FUNCTION_NAME,
TOOL_CALL_FUNCTION_ARGUMENTS_JSON,
MESSAGE_FUNCTION_CALL_NAME,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ const AIPromptPostfixes = {
} as const;

const AIToolCallPostfixes = {
id: "id",
name: "name",
args: "args",
result: "result",
Expand Down Expand Up @@ -68,6 +69,8 @@ const EMBEDDING_VECTOR = `${AI_PREFIX}.embedding` as const;
const EMBEDDING_TEXTS = `${AI_PREFIX}.values` as const;
const EMBEDDING_VECTORS = `${AI_PREFIX}.embeddings` as const;

// Vercel AI SDK attribute for the id of a tool call (`ai.toolCall.id`);
// mapped to the OpenInference tool_call id convention downstream.
const TOOL_CALL_ID =
  `${AI_PREFIX}.${AIPrefixes.toolCall}.${AIToolCallPostfixes.id}` as const;
// Vercel AI SDK attribute for the name of the tool being called (`ai.toolCall.name`).
const TOOL_CALL_NAME =
  `${AI_PREFIX}.${AIPrefixes.toolCall}.${AIToolCallPostfixes.name}` as const;
const TOOL_CALL_ARGS =
Expand All @@ -94,6 +97,7 @@ export const AISemanticConventions = {
EMBEDDING_VECTOR,
EMBEDDING_TEXTS,
EMBEDDING_VECTORS,
TOOL_CALL_ID,
TOOL_CALL_NAME,
TOOL_CALL_ARGS,
TOOL_CALL_RESULT,
Expand Down
1 change: 1 addition & 0 deletions js/packages/openinference-vercel/src/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ export const AISemConvToOISemConvMap: Record<
[AISemanticConventions.EMBEDDING_TEXTS]: SemanticConventions.EMBEDDING_TEXT,
[AISemanticConventions.EMBEDDING_VECTORS]:
SemanticConventions.EMBEDDING_VECTOR,
[AISemanticConventions.TOOL_CALL_ID]: SemanticConventions.TOOL_CALL_ID,
[AISemanticConventions.TOOL_CALL_NAME]: SemanticConventions.TOOL_NAME,
[AISemanticConventions.TOOL_CALL_ARGS]: SemanticConventions.TOOL_PARAMETERS,
[AISemanticConventions.TOOL_CALL_RESULT]: SemanticConventions.OUTPUT_VALUE,
Expand Down
5 changes: 5 additions & 0 deletions js/packages/openinference-vercel/src/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -400,6 +400,11 @@ const getOpenInferenceAttributes = (attributes: Attributes): Attributes => {
...openInferenceAttributes,
[openInferenceKey]: attributes[convention],
};
case AISemanticConventions.TOOL_CALL_ID:
return {
...openInferenceAttributes,
[openInferenceKey]: attributes[convention],
};
case AISemanticConventions.TOOL_CALL_NAME:
return {
...openInferenceAttributes,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -402,6 +402,22 @@ const generateVercelAttributeTestCases = (): SpanProcessorTestCase[] => {
},
]);
break;
case AISemanticConventions.TOOL_CALL_ID:
testCases.push([
`${vercelSemanticConvention} to ${SemanticConventions.TOOL_CALL_ID}`,
{
vercelFunctionName: "ai.toolCall",
vercelAttributes: {
[vercelSemanticConvention]: "test-tool-id",
},
addedOpenInferenceAttributes: {
[SemanticConventions.TOOL_CALL_ID]: "test-tool-id",
[SemanticConventions.OPENINFERENCE_SPAN_KIND]:
OpenInferenceSpanKind.TOOL,
},
},
]);
break;
case AISemanticConventions.TOOL_CALL_NAME:
testCases.push([
`${vercelSemanticConvention} to ${SemanticConventions.TOOL_NAME}`,
Expand Down
Loading