From e06c931bca9d17d9367f60859df9e1070da125cb Mon Sep 17 00:00:00 2001
From: Mikyo King
Date: Wed, 11 Sep 2024 17:22:05 -0600
Subject: [PATCH] chore(openai-node): add manual instrumentation example
 (#1031)

---
 .../examples/chat.ts                   | 23 +++++++
 .../examples/instrumentation.ts        | 38 +++++++++++
 .../examples/manual-instrumentation.ts | 65 +++++++++++++++++++
 .../package.json                       |  3 +-
 js/pnpm-lock.yaml                      |  7 +-
 5 files changed, 133 insertions(+), 3 deletions(-)
 create mode 100644 js/packages/openinference-instrumentation-openai/examples/chat.ts
 create mode 100644 js/packages/openinference-instrumentation-openai/examples/instrumentation.ts
 create mode 100644 js/packages/openinference-instrumentation-openai/examples/manual-instrumentation.ts

diff --git a/js/packages/openinference-instrumentation-openai/examples/chat.ts b/js/packages/openinference-instrumentation-openai/examples/chat.ts
new file mode 100644
index 000000000..1622fb44f
--- /dev/null
+++ b/js/packages/openinference-instrumentation-openai/examples/chat.ts
@@ -0,0 +1,23 @@
+import "./instrumentation";
+import { isPatched } from "../src";
+import OpenAI from "openai";
+
+// Check if OpenAI has been patched
+if (!isPatched()) {
+  throw new Error("OpenAI instrumentation failed");
+}
+
+// Initialize OpenAI
+const openai = new OpenAI();
+
+openai.chat.completions
+  .create({
+    model: "gpt-3.5-turbo",
+    messages: [{ role: "system", content: "You are a helpful assistant." }],
+    max_tokens: 150,
+    temperature: 0.5,
+  })
+  .then((response) => {
+    // eslint-disable-next-line no-console
+    console.log(response.choices[0].message.content);
+  });
diff --git a/js/packages/openinference-instrumentation-openai/examples/instrumentation.ts b/js/packages/openinference-instrumentation-openai/examples/instrumentation.ts
new file mode 100644
index 000000000..b1b5cf5f1
--- /dev/null
+++ b/js/packages/openinference-instrumentation-openai/examples/instrumentation.ts
@@ -0,0 +1,38 @@
+import { OpenAIInstrumentation } from "../src/index";
+import { ConsoleSpanExporter } from "@opentelemetry/sdk-trace-base";
+import {
+  NodeTracerProvider,
+  SimpleSpanProcessor,
+} from "@opentelemetry/sdk-trace-node";
+import { Resource } from "@opentelemetry/resources";
+import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto";
+import { SEMRESATTRS_SERVICE_NAME } from "@opentelemetry/semantic-conventions";
+import { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
+import { registerInstrumentations } from "@opentelemetry/instrumentation";
+
+// For troubleshooting, set the log level to DiagLogLevel.DEBUG
+diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
+
+const provider = new NodeTracerProvider({
+  resource: new Resource({
+    [SEMRESATTRS_SERVICE_NAME]: "openai-service",
+  }),
+});
+
+provider.addSpanProcessor(new SimpleSpanProcessor(new ConsoleSpanExporter()));
+provider.addSpanProcessor(
+  new SimpleSpanProcessor(
+    new OTLPTraceExporter({
+      url: "http://localhost:6006/v1/traces",
+    }),
+  ),
+);
+
+registerInstrumentations({
+  instrumentations: [new OpenAIInstrumentation()],
+});
+
+provider.register();
+
+// eslint-disable-next-line no-console
+console.log("👀 OpenInference initialized");
diff --git a/js/packages/openinference-instrumentation-openai/examples/manual-instrumentation.ts b/js/packages/openinference-instrumentation-openai/examples/manual-instrumentation.ts
new file mode 100644
index 000000000..e84d9dd9d
--- /dev/null
+++ b/js/packages/openinference-instrumentation-openai/examples/manual-instrumentation.ts
@@ -0,0 +1,65 @@
+/**
+ * In some environments, such as Lambda functions or Vercel server actions, you cannot rely on automatic instrumentation.
+ * This file shows how to manually instrument the openai module after it has already been imported.
+ */
+
+import * as openai from "openai"; // Note that openai is imported before the instrumentation
+import { isPatched, OpenAIInstrumentation } from "../src";
+import {
+  NodeTracerProvider,
+  SimpleSpanProcessor,
+} from "@opentelemetry/sdk-trace-node";
+import { Resource } from "@opentelemetry/resources";
+import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto";
+import { SEMRESATTRS_SERVICE_NAME } from "@opentelemetry/semantic-conventions";
+import { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
+import { assert } from "console";
+
+// For troubleshooting, set the log level to DiagLogLevel.DEBUG
+diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
+
+const provider = new NodeTracerProvider({
+  resource: new Resource({
+    [SEMRESATTRS_SERVICE_NAME]: "openai-service",
+  }),
+});
+
+provider.addSpanProcessor(
+  new SimpleSpanProcessor(
+    new OTLPTraceExporter({
+      url: "http://localhost:6006/v1/traces",
+    }),
+  ),
+);
+
+provider.register();
+
+// Make sure that openai is not patched
+assert(isPatched() === false);
+// eslint-disable-next-line no-console
+console.log("OpenAI is not patched");
+
+const oaiInstrumentor = new OpenAIInstrumentation();
+
+oaiInstrumentor.manuallyInstrument(openai);
+
+// Make sure that openai is patched
+assert(isPatched() === true);
+// eslint-disable-next-line no-console
+console.log("OpenAI is patched");
+
+// Initialize OpenAI
+
+const client = new openai.OpenAI();
+
+client.chat.completions
+  .create({
+    model: "gpt-3.5-turbo",
+    messages: [{ role: "system", content: "You are a helpful assistant." }],
+    max_tokens: 150,
+    temperature: 0.5,
+  })
+  .then((response) => {
+    // eslint-disable-next-line no-console
+    console.log(response.choices[0].message.content);
+  });
diff --git a/js/packages/openinference-instrumentation-openai/package.json b/js/packages/openinference-instrumentation-openai/package.json
index c92e4e0e6..9d5fe3d10 100644
--- a/js/packages/openinference-instrumentation-openai/package.json
+++ b/js/packages/openinference-instrumentation-openai/package.json
@@ -19,8 +19,8 @@
     "test": "jest ."
   },
   "dependencies": {
-    "@arizeai/openinference-semantic-conventions": "workspace:*",
     "@arizeai/openinference-core": "workspace:*",
+    "@arizeai/openinference-semantic-conventions": "workspace:*",
     "@opentelemetry/api": "^1.9.0",
     "@opentelemetry/core": "^1.25.1",
     "@opentelemetry/instrumentation": "^0.46.0"
@@ -33,6 +33,7 @@
   "author": "oss-devs@arize.com",
   "license": "Apache-2.0",
   "devDependencies": {
+    "@opentelemetry/exporter-trace-otlp-proto": "^0.50.0",
     "@opentelemetry/resources": "^1.25.1",
     "@opentelemetry/sdk-trace-base": "^1.25.1",
     "@opentelemetry/sdk-trace-node": "^1.25.1",
diff --git a/js/pnpm-lock.yaml b/js/pnpm-lock.yaml
index 752d9b80c..4c78f7a96 100644
--- a/js/pnpm-lock.yaml
+++ b/js/pnpm-lock.yaml
@@ -37,7 +37,7 @@ importers:
         version: 5.0.10
       ts-jest:
         specifier: ^29.2.2
-        version: 29.2.4(@babel/core@7.24.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.9))(jest@29.7.0)(typescript@5.5.4)
+        version: 29.2.4(@babel/core@7.24.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.9))(jest@29.7.0(@types/node@20.14.11))(typescript@5.5.4)
       typescript:
         specifier: ^5.5.4
         version: 5.5.4
@@ -176,6 +176,9 @@ importers:
         specifier: ^0.46.0
         version: 0.46.0(@opentelemetry/api@1.9.0)
     devDependencies:
+      '@opentelemetry/exporter-trace-otlp-proto':
+        specifier: ^0.50.0
+        version: 0.50.0(@opentelemetry/api@1.9.0)
       '@opentelemetry/resources':
         specifier: ^1.25.1
         version: 1.25.1(@opentelemetry/api@1.9.0)
@@ -5921,7 +5924,7 @@ snapshots:
     dependencies:
       typescript: 5.5.4
 
-  ts-jest@29.2.4(@babel/core@7.24.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.9))(jest@29.7.0)(typescript@5.5.4):
+  ts-jest@29.2.4(@babel/core@7.24.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.9))(jest@29.7.0(@types/node@20.14.11))(typescript@5.5.4):
     dependencies:
       bs-logger: 0.2.6
       ejs: 3.1.10
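
For reference, here is the same manual-patching pattern as it would look outside this repo, importing from the published package instead of "../src". This is a minimal sketch and not part of the patch above: it assumes @arizeai/openinference-instrumentation-openai and openai are installed from npm, and it omits the tracer-provider setup shown in instrumentation.ts.

// Minimal sketch (assumes published packages are installed; not part of the patch)
import * as openai from "openai"; // openai must be imported before it is patched
import {
  isPatched,
  OpenAIInstrumentation,
} from "@arizeai/openinference-instrumentation-openai";

// Patch the already-imported module in place, e.g. inside a lambda handler
const instrumentation = new OpenAIInstrumentation();
if (!isPatched()) {
  instrumentation.manuallyInstrument(openai);
}

const client = new openai.OpenAI();
// client.chat.completions.create(...) calls are traced from this point on

Spans are exported by whatever tracer provider has been registered; the examples above register a NodeTracerProvider with an OTLP exporter pointed at http://localhost:6006/v1/traces.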