diff --git a/package-lock.json b/package-lock.json
index b1fd6a55..4614125f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -12396,7 +12396,8 @@
     "node_modules/@types/qs": {
       "version": "6.9.14",
       "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.14.tgz",
-      "integrity": "sha512-5khscbd3SwWMhFqylJBLQ0zIu7c1K6Vz0uBIt915BI3zV0q1nfjRQD3RqSBcPaO6PHEF4ov/t9y89fSiyThlPA=="
+      "integrity": "sha512-5khscbd3SwWMhFqylJBLQ0zIu7c1K6Vz0uBIt915BI3zV0q1nfjRQD3RqSBcPaO6PHEF4ov/t9y89fSiyThlPA==",
+      "dev": true
     },
     "node_modules/@types/request": {
       "version": "2.48.12",
@@ -22779,19 +22780,18 @@
       }
     },
     "node_modules/openai": {
-      "version": "4.57.0",
-      "resolved": "https://registry.npmjs.org/openai/-/openai-4.57.0.tgz",
-      "integrity": "sha512-JnwBSIYqiZ3jYjB5f2in8hQ0PRA092c6m+/6dYB0MzK0BEbn+0dioxZsPLBm5idJbg9xzLNOiGVm2OSuhZ+BdQ==",
+      "version": "4.75.0",
+      "resolved": "https://registry.npmjs.org/openai/-/openai-4.75.0.tgz",
+      "integrity": "sha512-8cWaK3td0qLspaflKWD6AvpQnl0gynWFbHg7sMAgiu//F20I4GJlCCpllDrECO6WFSuY8HXJj8gji3urw2BGGg==",
+      "license": "Apache-2.0",
       "dependencies": {
         "@types/node": "^18.11.18",
         "@types/node-fetch": "^2.6.4",
-        "@types/qs": "^6.9.7",
         "abort-controller": "^3.0.0",
         "agentkeepalive": "^4.2.1",
         "form-data-encoder": "1.7.2",
         "formdata-node": "^4.3.2",
-        "node-fetch": "^2.6.7",
-        "qs": "^6.10.3"
+        "node-fetch": "^2.6.7"
       },
       "bin": {
         "openai": "bin/cli"
       },
@@ -27821,6 +27821,7 @@
       "version": "3.23.8",
       "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz",
       "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==",
+      "license": "MIT",
       "funding": {
         "url": "https://github.com/sponsors/colinhacks"
       }
@@ -29361,7 +29362,8 @@
         "cohere-ai": "^7.7.5",
         "langchain": "^0.2.12",
         "llamaindex": "^0.5.20",
-        "openai": "^4.57.0"
+        "openai": "^4.57.0",
+        "zod": "^3.23.8"
       },
       "engines": {
         "node": ">=14"
diff --git a/packages/instrumentation-openai/src/instrumentation.ts b/packages/instrumentation-openai/src/instrumentation.ts
index ee9d9b9f..ae62c1bd 100644
--- a/packages/instrumentation-openai/src/instrumentation.ts
+++ b/packages/instrumentation-openai/src/instrumentation.ts
@@ -14,14 +14,7 @@
  * limitations under the License.
 */
 import type * as openai from "openai";
-import {
-  context,
-  trace,
-  Span,
-  Attributes,
-  SpanKind,
-  SpanStatusCode,
-} from "@opentelemetry/api";
+import { context, trace, Span, Attributes, SpanKind } from "@opentelemetry/api";
 import {
   InstrumentationBase,
   InstrumentationModuleDefinition,
@@ -46,6 +39,7 @@ import type {
 import type { Stream } from "openai/streaming";
 import { version } from "../package.json";
 import { encodingForModel, TiktokenModel, Tiktoken } from "js-tiktoken";
+import { APIPromise } from "openai/core";

 export class OpenAIInstrumentation extends InstrumentationBase {
   protected declare _config: OpenAIInstrumentationConfig;
@@ -338,13 +332,13 @@
         span: Span;
         type: "chat";
         params: ChatCompletionCreateParamsStreaming;
-        promise: Promise<Stream<ChatCompletionChunk>>;
+        promise: APIPromise<Stream<ChatCompletionChunk>>;
       }
     | {
         span: Span;
         params: CompletionCreateParamsStreaming;
         type: "completion";
-        promise: Promise<Stream<Completion>>;
+        promise: APIPromise<Stream<Completion>>;
       }) {
     if (type === "chat") {
       const result: ChatCompletion = {
@@ -532,63 +526,49 @@
     type: "chat" | "completion",
     version: "v3" | "v4",
     span: Span,
-    promise: Promise<T>,
-  ): Promise<T> {
-    return promise
-      .then((result) => {
-        return new Promise<T>((resolve) => {
-          if (version === "v3") {
-            if (type === "chat") {
-              this._addLogProbsEvent(
-                span,
-                ((result as any).data as ChatCompletion).choices[0].logprobs,
-              );
-              this._endSpan({
-                type,
-                span,
-                result: (result as any).data as ChatCompletion,
-              });
-            } else {
-              this._addLogProbsEvent(
-                span,
-                ((result as any).data as Completion).choices[0].logprobs,
-              );
-              this._endSpan({
-                type,
-                span,
-                result: (result as any).data as Completion,
-              });
-            }
-          } else {
-            if (type === "chat") {
-              this._addLogProbsEvent(
-                span,
-                (result as ChatCompletion).choices[0].logprobs,
-              );
-              this._endSpan({ type, span, result: result as ChatCompletion });
-            } else {
-              this._addLogProbsEvent(
-                span,
-                (result as Completion).choices[0].logprobs,
-              );
-              this._endSpan({ type, span, result: result as Completion });
-            }
-          }
-          resolve(result);
-        });
-      })
-      .catch((error: Error) => {
-        return new Promise<T>((_, reject) => {
-          span.setStatus({
-            code: SpanStatusCode.ERROR,
-            message: error.message,
+    promise: APIPromise<T>,
+  ): APIPromise<T> {
+    return promise._thenUnwrap((result) => {
+      if (version === "v3") {
+        if (type === "chat") {
+          this._addLogProbsEvent(
+            span,
+            ((result as any).data as ChatCompletion).choices[0].logprobs,
+          );
+          this._endSpan({
+            type,
+            span,
+            result: (result as any).data as ChatCompletion,
+          });
+        } else {
+          this._addLogProbsEvent(
+            span,
+            ((result as any).data as Completion).choices[0].logprobs,
+          );
+          this._endSpan({
+            type,
+            span,
+            result: (result as any).data as Completion,
           });
-          span.recordException(error);
-          span.end();
+        }
+      } else {
+        if (type === "chat") {
+          this._addLogProbsEvent(
+            span,
+            (result as ChatCompletion).choices[0].logprobs,
+          );
+          this._endSpan({ type, span, result: result as ChatCompletion });
+        } else {
+          this._addLogProbsEvent(
+            span,
+            (result as Completion).choices[0].logprobs,
+          );
+          this._endSpan({ type, span, result: result as Completion });
+        }
+      }
-          reject(error);
-        });
-      });
+      return result;
+    });
   }

   private _endSpan({
diff --git a/packages/sample-app/package.json b/packages/sample-app/package.json
index 9222f8c0..1d9569dc 100644
--- a/packages/sample-app/package.json
+++ b/packages/sample-app/package.json
@@ -24,6 +24,7 @@
     "run:llamaindex": "npm run build && node dist/src/sample_llamaindex.js",
     "run:pinecone": "npm run build && node dist/src/sample_pinecone.js",
     "run:langchain": "npm run build && node dist/src/sample_langchain.js",
+    "run:sample_structured_output": "npm run build && node dist/src/sample_structured_output.js",
     "lint": "eslint . --ext .ts",
     "lint:fix": "eslint . --ext .ts --fix"
   },
@@ -46,7 +47,8 @@
     "cohere-ai": "^7.7.5",
     "langchain": "^0.2.12",
     "llamaindex": "^0.5.20",
-    "openai": "^4.57.0"
+    "openai": "^4.57.0",
+    "zod": "^3.23.8"
   },
   "private": true,
   "gitHead": "ef1e70d6037f7b5c061056ef2be16e3f55f02ed5"
diff --git a/packages/sample-app/src/sample_structured_output.ts b/packages/sample-app/src/sample_structured_output.ts
new file mode 100644
index 00000000..23c24ace
--- /dev/null
+++ b/packages/sample-app/src/sample_structured_output.ts
@@ -0,0 +1,37 @@
+import * as traceloop from "@traceloop/node-server-sdk";
+import OpenAI from "openai";
+import { zodResponseFormat } from "openai/helpers/zod";
+import { z } from "zod";
+
+traceloop.initialize({
+  appName: "sample_same_structured_output",
+  apiKey: process.env.TRACELOOP_API_KEY,
+  disableBatch: true,
+});
+const openai = new OpenAI();
+
+const CalendarEvent = z.object({
+  name: z.string(),
+  date: z.string(),
+  participants: z.array(z.string()),
+});
+
+async function create_event() {
+  const completion = await openai.beta.chat.completions.parse({
+    model: "gpt-4o",
+    messages: [
+      { role: "system", content: "Extract the event information." },
+      {
+        role: "user",
+        content: "Alice and Bob are going to a science fair on Friday.",
+      },
+    ],
+    response_format: zodResponseFormat(CalendarEvent, "event"),
+  });
+
+  console.log(completion.choices[0].message.parsed);
+
+  return completion.choices[0].message.parsed;
+}
+
+create_event();
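
Note (not part of the diff): the point of returning the SDK's APIPromise from _wrapPromise, instead of re-wrapping the result in a plain Promise, appears to be that helpers such as openai.beta.chat.completions.parse() in the new sample chain onto APIPromise-specific methods (the diff itself uses the internal _thenUnwrap, and callers can also use extras like withResponse()), which would be lost behind a bare Promise. The sketch below only illustrates that behaviour for a caller of an instrumented client; fetchHello, the model name, and the prompt are made-up examples, and it assumes OPENAI_API_KEY is set.

import OpenAI from "openai";

const openai = new OpenAI();

// Hypothetical caller: because the instrumented create() still returns an
// APIPromise, APIPromise-only helpers such as withResponse() keep working.
async function fetchHello() {
  const { data, response } = await openai.chat.completions
    .create({
      model: "gpt-4o-mini",
      messages: [{ role: "user", content: "Say hello" }],
    })
    .withResponse(); // unavailable if the wrapper returned a bare Promise

  console.log(response.status, data.choices[0].message.content);
}

fetchHello();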