Skip to content

Commit

Permalink
chore(openai-node): add manual instrumentation example (#1031)
Browse files Browse the repository at this point in the history
  • Loading branch information
mikeldking authored Sep 11, 2024
1 parent 518167c commit e06c931
Show file tree
Hide file tree
Showing 5 changed files with 133 additions and 3 deletions.
23 changes: 23 additions & 0 deletions js/packages/openinference-instrumentation-openai/examples/chat.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import "./instrumentation";
import { isPatched } from "../src";
import OpenAI from "openai";

// Verify the side-effect import above actually patched the OpenAI module;
// fail fast so spans are not silently dropped.
if (!isPatched()) {
  throw new Error("OpenAI instrumentation failed");
}

// Initialize the OpenAI client
const openai = new OpenAI();

openai.chat.completions
  .create({
    model: "gpt-3.5-turbo",
    messages: [{ role: "system", content: "You are a helpful assistant." }],
    max_tokens: 150,
    temperature: 0.5,
  })
  .then((response) => {
    // eslint-disable-next-line no-console
    console.log(response.choices[0].message.content);
  })
  .catch((error: unknown) => {
    // Without this handler an API failure is an unhandled promise rejection.
    // eslint-disable-next-line no-console
    console.error(error);
    process.exitCode = 1;
  });
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
import { OpenAIInstrumentation } from "../src/index";
import { ConsoleSpanExporter } from "@opentelemetry/sdk-trace-base";
import {
  NodeTracerProvider,
  SimpleSpanProcessor,
} from "@opentelemetry/sdk-trace-node";
import { Resource } from "@opentelemetry/resources";
import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto";
import { SEMRESATTRS_SERVICE_NAME } from "@opentelemetry/semantic-conventions";
import { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
import { registerInstrumentations } from "@opentelemetry/instrumentation";

// For troubleshooting, set the log level to DiagLogLevel.DEBUG
diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);

// Tag every emitted span with a service name so traces are attributable.
const tracerProvider = new NodeTracerProvider({
  resource: new Resource({
    [SEMRESATTRS_SERVICE_NAME]: "openai-service",
  }),
});

// Spans go to two destinations: the console, and an OTLP endpoint at
// localhost:6006 (e.g. a locally running collector).
const consoleProcessor = new SimpleSpanProcessor(new ConsoleSpanExporter());
const otlpProcessor = new SimpleSpanProcessor(
  new OTLPTraceExporter({ url: "http://localhost:6006/v1/traces" }),
);
tracerProvider.addSpanProcessor(consoleProcessor);
tracerProvider.addSpanProcessor(otlpProcessor);

// Register the OpenAI auto-instrumentation before activating the provider.
registerInstrumentations({
  instrumentations: [new OpenAIInstrumentation()],
});

tracerProvider.register();

// eslint-disable-next-line no-console
console.log("👀 OpenInference initialized");
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
/**
 * There are some times such as in lambdas or in server actions inside of vercel where you cannot tap into the automatic instrumentation.
 * This file shows an example of how if openai is already imported, you can manually instrument it after it's been imported.
 */

import * as openai from "openai"; // Note that openai is imported before the instrumentation
import { isPatched, OpenAIInstrumentation } from "../src";
import {
  NodeTracerProvider,
  SimpleSpanProcessor,
} from "@opentelemetry/sdk-trace-node";
import { Resource } from "@opentelemetry/resources";
import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto";
import { SEMRESATTRS_SERVICE_NAME } from "@opentelemetry/semantic-conventions";
import { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
// Use node:assert, which throws on failure. The previous
// `import { assert } from "console"` bound console.assert, which only logs a
// warning and would let the example keep running in a broken state.
import assert from "node:assert";

// For troubleshooting, set the log level to DiagLogLevel.DEBUG
diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);

// Tag spans with a service name so traces are attributable.
const provider = new NodeTracerProvider({
  resource: new Resource({
    [SEMRESATTRS_SERVICE_NAME]: "openai-service",
  }),
});

// Export spans to an OTLP endpoint at localhost:6006.
provider.addSpanProcessor(
  new SimpleSpanProcessor(
    new OTLPTraceExporter({
      url: "http://localhost:6006/v1/traces",
    }),
  ),
);

provider.register();

// Make sure that openai is not patched yet (it was imported before the
// instrumentation was created).
assert(isPatched() === false);
// eslint-disable-next-line no-console
console.log("OpenAI is not patched");

const oaiInstrumentor = new OpenAIInstrumentation();

// Manually patch the already-imported openai module.
oaiInstrumentor.manuallyInstrument(openai);

// Make sure that openai is patched
assert(isPatched() === true);
// eslint-disable-next-line no-console
console.log("OpenAI is patched");

// Initialize OpenAI

const client = new openai.OpenAI();

client.chat.completions
  .create({
    model: "gpt-3.5-turbo",
    messages: [{ role: "system", content: "You are a helpful assistant." }],
    max_tokens: 150,
    temperature: 0.5,
  })
  .then((response) => {
    // eslint-disable-next-line no-console
    console.log(response.choices[0].message.content);
  })
  .catch((error: unknown) => {
    // Without this handler an API failure is an unhandled promise rejection.
    // eslint-disable-next-line no-console
    console.error(error);
    process.exitCode = 1;
  });
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@
"test": "jest ."
},
"dependencies": {
"@arizeai/openinference-semantic-conventions": "workspace:*",
"@arizeai/openinference-core": "workspace:*",
"@arizeai/openinference-semantic-conventions": "workspace:*",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/core": "^1.25.1",
"@opentelemetry/instrumentation": "^0.46.0"
Expand All @@ -33,6 +33,7 @@
"author": "[email protected]",
"license": "Apache-2.0",
"devDependencies": {
"@opentelemetry/exporter-trace-otlp-proto": "^0.50.0",
"@opentelemetry/resources": "^1.25.1",
"@opentelemetry/sdk-trace-base": "^1.25.1",
"@opentelemetry/sdk-trace-node": "^1.25.1",
Expand Down
7 changes: 5 additions & 2 deletions js/pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit e06c931

Please sign in to comment.