Changed 'result' to 'response' throughout package
luke-moehlenbrock committed Jan 16, 2025
1 parent 4a4a26f commit d944a82
Showing 5 changed files with 25 additions and 20 deletions.
5 changes: 5 additions & 0 deletions js/.changeset/little-sheep-watch.md
@@ -0,0 +1,5 @@
+---
+"@arizeai/openinference-vercel": patch
+---
+
+Updated the OpenInference semantic convention mapping to account for changes to the Vercel AI SDK semantic conventions
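To make the scope of this update concrete, here is a minimal illustrative mapping. The attribute strings are assumptions inferred from the convention names in the diff below, not values copied from this commit:

```ts
// Illustrative only: assumed attribute-key strings for the Vercel AI SDK -> OpenInference
// mapping this package performs. The SDK's older "ai.result.*" telemetry keys are now
// reported as "ai.response.*".
const exampleMapping: Record<string, string> = {
  "ai.response.text": "output.value",   // OUTPUT_VALUE
  "ai.response.object": "output.value", // OUTPUT_VALUE
  "ai.prompt": "input.value",           // INPUT_VALUE
};
```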
22 changes: 11 additions & 11 deletions js/packages/openinference-vercel/src/AISemanticConventions.ts
@@ -13,15 +13,15 @@ const AIPrefixes = {
telemetry: "telemetry",
prompt: "prompt",
toolCall: "toolCall",
-result: "response",
+response: "response",
} as const;

const AIUsagePostfixes = {
completionTokens: "completionTokens",
promptTokens: "promptTokens",
} as const;

-const AIResultPostfixes = {
+const AIResponsePostfixes = {
text: "text",
toolCalls: "toolCalls",
object: "object",
@@ -50,14 +50,14 @@ const TOKEN_COUNT_COMPLETION =
const TOKEN_COUNT_PROMPT =
`${AI_PREFIX}.${AIPrefixes.usage}.${AIUsagePostfixes.promptTokens}` as const;

-const RESULT_TEXT =
-`${AI_PREFIX}.${AIPrefixes.result}.${AIResultPostfixes.text}` as const;
+const RESPONSE_TEXT =
+`${AI_PREFIX}.${AIPrefixes.response}.${AIResponsePostfixes.text}` as const;

-const RESULT_TOOL_CALLS =
-`${AI_PREFIX}.${AIPrefixes.result}.${AIResultPostfixes.toolCalls}` as const;
+const RESPONSE_TOOL_CALLS =
+`${AI_PREFIX}.${AIPrefixes.response}.${AIResponsePostfixes.toolCalls}` as const;

-const RESULT_OBJECT =
-`${AI_PREFIX}.${AIPrefixes.result}.${AIResultPostfixes.object}` as const;
+const RESPONSE_OBJECT =
+`${AI_PREFIX}.${AIPrefixes.response}.${AIResponsePostfixes.object}` as const;

const PROMPT = `${AI_PREFIX}.${AIPrefixes.prompt}` as const;

@@ -88,9 +88,9 @@ export const AISemanticConventions = {
SETTINGS,
TOKEN_COUNT_COMPLETION,
TOKEN_COUNT_PROMPT,
-RESULT_TEXT,
-RESULT_TOOL_CALLS,
-RESULT_OBJECT,
+RESPONSE_TEXT,
+RESPONSE_TOOL_CALLS,
+RESPONSE_OBJECT,
PROMPT,
PROMPT_MESSAGES,
EMBEDDING_TEXT,
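For reference, a rough sketch of what the renamed constants resolve to, assuming AI_PREFIX is "ai" (its definition sits outside the hunks shown). Only the TypeScript identifiers change in this commit; the resolved attribute strings stay the same:

```ts
// Sketch under the assumption that AI_PREFIX === "ai" (not shown in this diff).
const AI_PREFIX = "ai";
const RESPONSE_TEXT = `${AI_PREFIX}.response.text` as const; // "ai.response.text"
const RESPONSE_TOOL_CALLS = `${AI_PREFIX}.response.toolCalls` as const; // "ai.response.toolCalls"
const RESPONSE_OBJECT = `${AI_PREFIX}.response.object` as const; // "ai.response.object"
```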
6 changes: 3 additions & 3 deletions js/packages/openinference-vercel/src/constants.ts
@@ -44,10 +44,10 @@ export const AISemConvToOISemConvMap: Record<
SemanticConventions.LLM_TOKEN_COUNT_COMPLETION,
[AISemanticConventions.TOKEN_COUNT_PROMPT]:
SemanticConventions.LLM_TOKEN_COUNT_PROMPT,
-[AISemanticConventions.RESULT_TEXT]: SemanticConventions.OUTPUT_VALUE,
-[AISemanticConventions.RESULT_TOOL_CALLS]:
+[AISemanticConventions.RESPONSE_TEXT]: SemanticConventions.OUTPUT_VALUE,
+[AISemanticConventions.RESPONSE_TOOL_CALLS]:
SemanticConventions.MESSAGE_TOOL_CALLS,
-[AISemanticConventions.RESULT_OBJECT]: SemanticConventions.OUTPUT_VALUE,
+[AISemanticConventions.RESPONSE_OBJECT]: SemanticConventions.OUTPUT_VALUE,
[AISemanticConventions.PROMPT]: SemanticConventions.INPUT_VALUE,
[AISemanticConventions.PROMPT_MESSAGES]:
SemanticConventions.LLM_INPUT_MESSAGES,
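As a hedged sketch of how a key-to-key table like AISemConvToOISemConvMap can be consumed, a hypothetical helper (not part of the package; the real logic in utils.ts also handles MIME types and nested message attributes) might look like this:

```ts
import type { Attributes } from "@opentelemetry/api";

// Hypothetical helper: rename span attribute keys via a lookup table,
// passing unmapped keys through unchanged.
const renameAttributeKeys = (
  attributes: Attributes,
  keyMap: Record<string, string>,
): Attributes =>
  Object.fromEntries(
    Object.entries(attributes).map(([key, value]) => [keyMap[key] ?? key, value]),
  );
```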
6 changes: 3 additions & 3 deletions js/packages/openinference-vercel/src/utils.ts
@@ -457,8 +457,8 @@ const getOpenInferenceAttributes = (attributes: Attributes): Attributes => {
...safelyGetInvocationParamAttributes(attributes),
};
case AISemanticConventions.PROMPT:
-case AISemanticConventions.RESULT_OBJECT:
-case AISemanticConventions.RESULT_TEXT: {
+case AISemanticConventions.RESPONSE_OBJECT:
+case AISemanticConventions.RESPONSE_TEXT: {
return {
...openInferenceAttributes,
...safelyGetIOValueAttributes({
@@ -468,7 +468,7 @@
}),
};
}
-case AISemanticConventions.RESULT_TOOL_CALLS:
+case AISemanticConventions.RESPONSE_TOOL_CALLS:
return {
...openInferenceAttributes,
...safelyGetToolCallMessageAttributes(attributes[convention]),
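As a rough illustration of what the renamed branches are expected to emit for a text response (the output keys and MIME type string are assumptions, not taken from this diff):

```ts
// Hypothetical before/after for one span attribute handled by the RESPONSE_TEXT
// branch; the actual conversion is done by safelyGetIOValueAttributes.
const vercelAttributes = { "ai.response.text": "Hello, world!" };
const expectedOpenInferenceAttributes = {
  "output.value": "Hello, world!",
  "output.mime_type": "text/plain",
};
```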
@@ -165,7 +165,7 @@ const generateVercelAttributeTestCases = (): SpanProcessorTestCase[] => {
],
);
break;
-case AISemanticConventions.RESULT_TEXT:
+case AISemanticConventions.RESPONSE_TEXT:
testCases.push([
`${vercelSemanticConvention} to ${SemanticConventions.OUTPUT_VALUE} with MIME type ${MimeType.TEXT}`,
{
@@ -182,7 +182,7 @@
},
]);
break;
-case AISemanticConventions.RESULT_OBJECT:
+case AISemanticConventions.RESPONSE_OBJECT:
testCases.push([
`${vercelSemanticConvention} to ${SemanticConventions.OUTPUT_VALUE} with MIME type ${MimeType.JSON}`,
{
@@ -201,7 +201,7 @@
},
]);
break;
-case AISemanticConventions.RESULT_TOOL_CALLS: {
+case AISemanticConventions.RESPONSE_TOOL_CALLS: {
const firstOutputMessageToolPrefix = `${SemanticConventions.LLM_OUTPUT_MESSAGES}.0.${SemanticConventions.MESSAGE_TOOL_CALLS}`;
testCases.push([
`${vercelSemanticConvention} to ${SemanticConventions.MESSAGE_TOOL_CALLS} on ${SemanticConventions.LLM_OUTPUT_MESSAGES}`,
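To show the shape of the cases this generator produces, a self-contained hypothetical example follows; the field names and exact strings are assumptions, and the real SpanProcessorTestCase type may differ:

```ts
// Hypothetical sketch of one generated test case:
// [description, { input attributes, expected mapped attributes }].
type HypotheticalTestCase = [
  description: string,
  data: { input: Record<string, string>; expected: Record<string, string> },
];

const responseObjectCase: HypotheticalTestCase = [
  "ai.response.object to output.value with MIME type application/json",
  {
    input: { "ai.response.object": '{"answer": 42}' },
    expected: {
      "output.value": '{"answer": 42}',
      "output.mime_type": "application/json",
    },
  },
];
```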
