From e63c78d6bc97791847084597a1a5e4751442ef40 Mon Sep 17 00:00:00 2001
From: Alexander Song
Date: Thu, 11 Jan 2024 13:31:21 -0800
Subject: [PATCH] rename function

---
 .../src/instrumentation.ts | 39 +++++++++----------
 1 file changed, 18 insertions(+), 21 deletions(-)

diff --git a/js/packages/openinference-instrumentation-openai/src/instrumentation.ts b/js/packages/openinference-instrumentation-openai/src/instrumentation.ts
index 1c0c9dc24..e7459ef36 100644
--- a/js/packages/openinference-instrumentation-openai/src/instrumentation.ts
+++ b/js/packages/openinference-instrumentation-openai/src/instrumentation.ts
@@ -82,7 +82,7 @@ export class OpenAIInstrumentation extends InstrumentationBase {
           ...args: Parameters
         ) {
           const body = args[0];
-          const { messages: _, ...invocationParameters } = body;
+          const { messages: _messages, ...invocationParameters } = body;
           const span = instrumentation.tracer.startSpan(
             `OpenAI Chat Completions`,
             {
@@ -163,23 +163,20 @@ export class OpenAIInstrumentation extends InstrumentationBase {
           ...args: Parameters
         ) {
           const body = args[0];
-          const { prompt: _, ...invocationParameters } = body;
-          const span = instrumentation.tracer.startSpan(
-            `OpenAI Completions`,
-            {
-              kind: SpanKind.INTERNAL,
-              attributes: {
-                [SemanticConventions.OPENINFERENCE_SPAN_KIND]:
-                  OpenInferenceSpanKind.LLM,
-                [SemanticConventions.LLM_MODEL_NAME]: body.model,
-                [SemanticConventions.INPUT_VALUE]: JSON.stringify(body),
-                [SemanticConventions.INPUT_MIME_TYPE]: MimeType.JSON,
-                [SemanticConventions.LLM_INVOCATION_PARAMETERS]:
-                  JSON.stringify(invocationParameters),
-                ...getLLMPromptAttributes(body),
-              },
+          const { prompt: _prompt, ...invocationParameters } = body;
+          const span = instrumentation.tracer.startSpan(`OpenAI Completions`, {
+            kind: SpanKind.INTERNAL,
+            attributes: {
+              [SemanticConventions.OPENINFERENCE_SPAN_KIND]:
+                OpenInferenceSpanKind.LLM,
+              [SemanticConventions.LLM_MODEL_NAME]: body.model,
+              [SemanticConventions.INPUT_VALUE]: JSON.stringify(body),
+              [SemanticConventions.INPUT_MIME_TYPE]: MimeType.JSON,
+              [SemanticConventions.LLM_INVOCATION_PARAMETERS]:
+                JSON.stringify(invocationParameters),
+              ...getLLMPromptAttributes(body),
             },
-          );
+          });
           const execContext = trace.setSpan(context.active(), span);
           const execPromise = safeExecuteInTheMiddle<
             ReturnType
@@ -324,7 +321,7 @@ function isCompletionResponse(
 /**
  * type-guard that checks if completion prompt attribute is an array of strings
  */
-function isStringArray(
+function isPromptStringArray(
   prompt: CompletionCreateParamsBase["prompt"],
 ): prompt is Array<string> {
   return (
@@ -358,7 +355,7 @@ function getLLMPromptAttributes(body: CompletionCreateParamsBase): Attributes {
     return {
       [SemanticConventions.LLM_PROMPTS]: [body.prompt],
     };
-  } else if (isStringArray(body.prompt)) {
+  } else if (isPromptStringArray(body.prompt)) {
     return {
       [SemanticConventions.LLM_PROMPTS]: body.prompt,
     };
@@ -419,10 +416,10 @@ function getCompletionLLMMessageAttributes(completion: Completion): Attributes {
   const indexPrefix = `${SemanticConventions.LLM_OUTPUT_MESSAGES}.0`;
   return {
     [`${indexPrefix}.${SemanticConventions.MESSAGE_CONTENT}`]: String(
-      choice.text
+      choice.text,
     ),
     [`${indexPrefix}.${SemanticConventions.MESSAGE_ROLE}`]: "assistant",
-  }
+  };
 }

 /**