Skip to content

Commit ecb7105

Browse files
committed
feat: v1 of otel semantic conventions
1 parent 1586ae3 commit ecb7105

File tree

21 files changed

+156
-129
lines changed

21 files changed

+156
-129
lines changed

packages/ai-semantic-conventions/README

+1-1
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ const span = tracer
2020
.startSpan()
2121
.startSpan(spanName, spanOptions)
2222
.setAttributes({
23-
[SemanticAttributes.LLM_VENDOR]: "openai",
23+
[SemanticAttributes.LLM_SYSTEM]: "openai",
2424
});
2525
```
2626

packages/ai-semantic-conventions/src/SemanticAttributes.ts

+13-10
Original file line numberDiff line numberDiff line change
@@ -15,23 +15,26 @@
1515
*/
1616

1717
export const SpanAttributes = {
18-
LLM_VENDOR: "llm.vendor",
18+
LLM_SYSTEM: "gen_ai.system",
19+
LLM_REQUEST_MODEL: "gen_ai.request.model",
20+
LLM_REQUEST_MAX_TOKENS: "gen_ai.request.max_tokens",
21+
LLM_REQUEST_TEMPERATURE: "gen_ai.request.temperature",
22+
LLM_REQUEST_TOP_P: "gen_ai.request.top_p",
23+
LLM_PROMPTS: "gen_ai.prompt",
24+
LLM_COMPLETIONS: "gen_ai.completion",
25+
LLM_RESPONSE_MODEL: "gen_ai.response.model",
26+
LLM_USAGE_PROMPT_TOKENS: "gen_ai.usage.prompt_tokens",
27+
LLM_USAGE_COMPLETION_TOKENS: "gen_ai.usage.completion_tokens",
28+
29+
// LLM
1930
LLM_REQUEST_TYPE: "llm.request.type",
20-
LLM_REQUEST_MODEL: "llm.request.model",
21-
LLM_RESPONSE_MODEL: "llm.response.model",
22-
LLM_REQUEST_MAX_TOKENS: "llm.request.max_tokens",
2331
LLM_USAGE_TOTAL_TOKENS: "llm.usage.total_tokens",
24-
LLM_USAGE_COMPLETION_TOKENS: "llm.usage.completion_tokens",
25-
LLM_USAGE_PROMPT_TOKENS: "llm.usage.prompt_tokens",
26-
LLM_TEMPERATURE: "llm.temperature",
27-
LLM_TOP_P: "llm.top_p",
2832
LLM_TOP_K: "llm.top_k",
2933
LLM_FREQUENCY_PENALTY: "llm.frequency_penalty",
3034
LLM_PRESENCE_PENALTY: "llm.presence_penalty",
31-
LLM_PROMPTS: "llm.prompts",
32-
LLM_COMPLETIONS: "llm.completions",
3335
LLM_CHAT_STOP_SEQUENCES: "llm.chat.stop_sequences",
3436
LLM_REQUEST_FUNCTIONS: "llm.request.functions",
37+
3538
// Vector DB
3639
VECTOR_DB_VENDOR: "db.system",
3740
VECTOR_DB_QUERY_TOP_K: "db.vector.query.top_k",

packages/instrumentation-anthropic/src/instrumentation.ts

+3-3
Original file line numberDiff line numberDiff line change
@@ -189,14 +189,14 @@ export class AnthropicInstrumentation extends InstrumentationBase<any> {
189189
};
190190
}): Span {
191191
const attributes: Attributes = {
192-
[SpanAttributes.LLM_VENDOR]: "Anthropic",
192+
[SpanAttributes.LLM_SYSTEM]: "Anthropic",
193193
[SpanAttributes.LLM_REQUEST_TYPE]: type,
194194
};
195195

196196
try {
197197
attributes[SpanAttributes.LLM_REQUEST_MODEL] = params.model;
198-
attributes[SpanAttributes.LLM_TEMPERATURE] = params.temperature;
199-
attributes[SpanAttributes.LLM_TOP_P] = params.top_p;
198+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE] = params.temperature;
199+
attributes[SpanAttributes.LLM_REQUEST_TOP_P] = params.top_p;
200200
attributes[SpanAttributes.LLM_TOP_K] = params.top_k;
201201

202202
if (type === "completion") {

packages/instrumentation-azure/src/instrumentation.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -173,7 +173,7 @@ export class AzureOpenAIInstrumentation extends InstrumentationBase<any> {
173173
};
174174
}): Span {
175175
const attributes: Attributes = {
176-
[SpanAttributes.LLM_VENDOR]: "Azure OpenAI",
176+
[SpanAttributes.LLM_SYSTEM]: "Azure OpenAI",
177177
[SpanAttributes.LLM_REQUEST_TYPE]: type,
178178
};
179179

packages/instrumentation-bedrock/src/instrumentation.ts

+14-14
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,7 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
156156
: ["", ""];
157157

158158
attributes = {
159-
[SpanAttributes.LLM_VENDOR]: vendor,
159+
[SpanAttributes.LLM_SYSTEM]: vendor,
160160
[SpanAttributes.LLM_REQUEST_MODEL]: model,
161161
[SpanAttributes.LLM_RESPONSE_MODEL]: model,
162162
[SpanAttributes.LLM_REQUEST_TYPE]: LLMRequestTypeValues.COMPLETION,
@@ -197,7 +197,7 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
197197
? (span["attributes"] as Record<string, any>)
198198
: {};
199199

200-
if (SpanAttributes.LLM_VENDOR in attributes) {
200+
if (SpanAttributes.LLM_SYSTEM in attributes) {
201201
if (!(result.body instanceof Object.getPrototypeOf(Uint8Array))) {
202202
const rawRes = result.body as AsyncIterable<bedrock.ResponseStream>;
203203

@@ -234,7 +234,7 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
234234
}
235235

236236
let responseAttributes = this._setResponseAttributes(
237-
attributes[SpanAttributes.LLM_VENDOR],
237+
attributes[SpanAttributes.LLM_SYSTEM],
238238
parsedResponse,
239239
true,
240240
);
@@ -265,7 +265,7 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
265265
const parsedResponse = JSON.parse(jsonString);
266266

267267
const responseAttributes = this._setResponseAttributes(
268-
attributes[SpanAttributes.LLM_VENDOR],
268+
attributes[SpanAttributes.LLM_SYSTEM],
269269
parsedResponse,
270270
);
271271

@@ -289,8 +289,8 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
289289
switch (vendor) {
290290
case "ai21": {
291291
return {
292-
[SpanAttributes.LLM_TOP_P]: requestBody["topP"],
293-
[SpanAttributes.LLM_TEMPERATURE]: requestBody["temperature"],
292+
[SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["topP"],
293+
[SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"],
294294
[SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody["maxTokens"],
295295
[SpanAttributes.LLM_PRESENCE_PENALTY]:
296296
requestBody["presencePenalty"]["scale"],
@@ -309,9 +309,9 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
309309
}
310310
case "amazon": {
311311
return {
312-
[SpanAttributes.LLM_TOP_P]:
312+
[SpanAttributes.LLM_REQUEST_TOP_P]:
313313
requestBody["textGenerationConfig"]["topP"],
314-
[SpanAttributes.LLM_TEMPERATURE]:
314+
[SpanAttributes.LLM_REQUEST_TEMPERATURE]:
315315
requestBody["textGenerationConfig"]["temperature"],
316316
[SpanAttributes.LLM_REQUEST_MAX_TOKENS]:
317317
requestBody["textGenerationConfig"]["maxTokenCount"],
@@ -328,9 +328,9 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
328328
}
329329
case "anthropic": {
330330
return {
331-
[SpanAttributes.LLM_TOP_P]: requestBody["top_p"],
331+
[SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["top_p"],
332332
[SpanAttributes.LLM_TOP_K]: requestBody["top_k"],
333-
[SpanAttributes.LLM_TEMPERATURE]: requestBody["temperature"],
333+
[SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"],
334334
[SpanAttributes.LLM_REQUEST_MAX_TOKENS]:
335335
requestBody["max_tokens_to_sample"],
336336

@@ -350,9 +350,9 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
350350
}
351351
case "cohere": {
352352
return {
353-
[SpanAttributes.LLM_TOP_P]: requestBody["p"],
353+
[SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["p"],
354354
[SpanAttributes.LLM_TOP_K]: requestBody["k"],
355-
[SpanAttributes.LLM_TEMPERATURE]: requestBody["temperature"],
355+
[SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"],
356356
[SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody["max_tokens"],
357357

358358
// Prompt & Role
@@ -367,8 +367,8 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
367367
}
368368
case "meta": {
369369
return {
370-
[SpanAttributes.LLM_TOP_P]: requestBody["top_p"],
371-
[SpanAttributes.LLM_TEMPERATURE]: requestBody["temperature"],
370+
[SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["top_p"],
371+
[SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"],
372372
[SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody["max_gen_len"],
373373

374374
// Prompt & Role

packages/instrumentation-bedrock/tests/ai21.test.ts

+6-3
Original file line numberDiff line numberDiff line change
@@ -117,13 +117,16 @@ describe("Test Ai21 with AWS Bedrock Instrumentation", () => {
117117
const spans = memoryExporter.getFinishedSpans();
118118

119119
const attributes = spans[0].attributes;
120-
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
120+
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
121121
assert.strictEqual(
122122
attributes[SpanAttributes.LLM_REQUEST_TYPE],
123123
"completion",
124124
);
125125
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
126-
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.topP);
126+
assert.strictEqual(
127+
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
128+
params.topP,
129+
);
127130
assert.strictEqual(
128131
attributes[SpanAttributes.LLM_PRESENCE_PENALTY],
129132
params.presencePenalty.scale,
@@ -133,7 +136,7 @@ describe("Test Ai21 with AWS Bedrock Instrumentation", () => {
133136
params.frequencyPenalty.scale,
134137
);
135138
assert.strictEqual(
136-
attributes[SpanAttributes.LLM_TEMPERATURE],
139+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
137140
params.temperature,
138141
);
139142
assert.strictEqual(

packages/instrumentation-bedrock/tests/amazon.test.ts

+6-6
Original file line numberDiff line numberDiff line change
@@ -118,18 +118,18 @@ describe("Test Amazon Titan with AWS Bedrock Instrumentation", () => {
118118
const spans = memoryExporter.getFinishedSpans();
119119

120120
const attributes = spans[0].attributes;
121-
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
121+
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
122122
assert.strictEqual(
123123
attributes[SpanAttributes.LLM_REQUEST_TYPE],
124124
"completion",
125125
);
126126
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
127127
assert.strictEqual(
128-
attributes[SpanAttributes.LLM_TOP_P],
128+
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
129129
params.textGenerationConfig.topP,
130130
);
131131
assert.strictEqual(
132-
attributes[SpanAttributes.LLM_TEMPERATURE],
132+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
133133
params.textGenerationConfig.temperature,
134134
);
135135
assert.strictEqual(
@@ -203,18 +203,18 @@ describe("Test Amazon Titan with AWS Bedrock Instrumentation", () => {
203203

204204
const attributes = spans[0].attributes;
205205

206-
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
206+
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
207207
assert.strictEqual(
208208
attributes[SpanAttributes.LLM_REQUEST_TYPE],
209209
"completion",
210210
);
211211
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
212212
assert.strictEqual(
213-
attributes[SpanAttributes.LLM_TOP_P],
213+
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
214214
params.textGenerationConfig.topP,
215215
);
216216
assert.strictEqual(
217-
attributes[SpanAttributes.LLM_TEMPERATURE],
217+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
218218
params.textGenerationConfig.temperature,
219219
);
220220
assert.strictEqual(

packages/instrumentation-bedrock/tests/anthropic.test.ts

+12-6
Original file line numberDiff line numberDiff line change
@@ -116,16 +116,19 @@ describe("Test Anthropic with AWS Bedrock Instrumentation", () => {
116116
const spans = memoryExporter.getFinishedSpans();
117117

118118
const attributes = spans[0].attributes;
119-
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
119+
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
120120
assert.strictEqual(
121121
attributes[SpanAttributes.LLM_REQUEST_TYPE],
122122
"completion",
123123
);
124124
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
125-
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.top_p);
125+
assert.strictEqual(
126+
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
127+
params.top_p,
128+
);
126129
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_K], params.top_k);
127130
assert.strictEqual(
128-
attributes[SpanAttributes.LLM_TEMPERATURE],
131+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
129132
params.temperature,
130133
);
131134
assert.strictEqual(
@@ -187,16 +190,19 @@ describe("Test Anthropic with AWS Bedrock Instrumentation", () => {
187190

188191
const attributes = spans[0].attributes;
189192

190-
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
193+
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
191194
assert.strictEqual(
192195
attributes[SpanAttributes.LLM_REQUEST_TYPE],
193196
"completion",
194197
);
195198
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
196-
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.top_p);
199+
assert.strictEqual(
200+
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
201+
params.top_p,
202+
);
197203
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_K], params.top_k);
198204
assert.strictEqual(
199-
attributes[SpanAttributes.LLM_TEMPERATURE],
205+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
200206
params.temperature,
201207
);
202208
assert.strictEqual(

packages/instrumentation-bedrock/tests/cohere.test.ts

+9-6
Original file line numberDiff line numberDiff line change
@@ -116,16 +116,16 @@ describe("Test Cohere with AWS Bedrock Instrumentation", () => {
116116
const spans = memoryExporter.getFinishedSpans();
117117

118118
const attributes = spans[0].attributes;
119-
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
119+
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
120120
assert.strictEqual(
121121
attributes[SpanAttributes.LLM_REQUEST_TYPE],
122122
"completion",
123123
);
124124
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
125-
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.p);
125+
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_TOP_P], params.p);
126126
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_K], params.k);
127127
assert.strictEqual(
128-
attributes[SpanAttributes.LLM_TEMPERATURE],
128+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
129129
params.temperature,
130130
);
131131
assert.strictEqual(
@@ -184,16 +184,19 @@ describe("Test Cohere with AWS Bedrock Instrumentation", () => {
184184

185185
const attributes = spans[0].attributes;
186186

187-
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
187+
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
188188
assert.strictEqual(
189189
attributes[SpanAttributes.LLM_REQUEST_TYPE],
190190
"completion",
191191
);
192192
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
193-
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.p);
193+
assert.strictEqual(
194+
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
195+
params.p,
196+
);
194197
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_K], params.k);
195198
assert.strictEqual(
196-
attributes[SpanAttributes.LLM_TEMPERATURE],
199+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
197200
params.temperature,
198201
);
199202
assert.strictEqual(

packages/instrumentation-bedrock/tests/meta.test.ts

+12-6
Original file line numberDiff line numberDiff line change
@@ -115,15 +115,18 @@ describe("Test Meta with AWS Bedrock Instrumentation", () => {
115115
const spans = memoryExporter.getFinishedSpans();
116116

117117
const attributes = spans[0].attributes;
118-
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
118+
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
119119
assert.strictEqual(
120120
attributes[SpanAttributes.LLM_REQUEST_TYPE],
121121
"completion",
122122
);
123123
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
124-
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.top_p);
125124
assert.strictEqual(
126-
attributes[SpanAttributes.LLM_TEMPERATURE],
125+
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
126+
params.top_p,
127+
);
128+
assert.strictEqual(
129+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
127130
params.temperature,
128131
);
129132
assert.strictEqual(
@@ -194,15 +197,18 @@ describe("Test Meta with AWS Bedrock Instrumentation", () => {
194197

195198
const attributes = spans[0].attributes;
196199

197-
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
200+
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
198201
assert.strictEqual(
199202
attributes[SpanAttributes.LLM_REQUEST_TYPE],
200203
"completion",
201204
);
202205
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
203-
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.top_p);
204206
assert.strictEqual(
205-
attributes[SpanAttributes.LLM_TEMPERATURE],
207+
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
208+
params.top_p,
209+
);
210+
assert.strictEqual(
211+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
206212
params.temperature,
207213
);
208214
assert.strictEqual(

packages/instrumentation-cohere/src/instrumentation.ts

+3-3
Original file line numberDiff line numberDiff line change
@@ -216,7 +216,7 @@ export class CohereInstrumentation extends InstrumentationBase<any> {
216216
type: LLM_COMPLETION_TYPE;
217217
}): Span {
218218
const attributes: Attributes = {
219-
[SpanAttributes.LLM_VENDOR]: "Cohere",
219+
[SpanAttributes.LLM_SYSTEM]: "Cohere",
220220
[SpanAttributes.LLM_REQUEST_TYPE]: this._getLlmRequestTypeByMethod(type),
221221
};
222222

@@ -226,9 +226,9 @@ export class CohereInstrumentation extends InstrumentationBase<any> {
226226
attributes[SpanAttributes.LLM_REQUEST_MODEL] = model;
227227

228228
if (!("query" in params)) {
229-
attributes[SpanAttributes.LLM_TOP_P] = params.p;
229+
attributes[SpanAttributes.LLM_REQUEST_TOP_P] = params.p;
230230
attributes[SpanAttributes.LLM_TOP_K] = params.k;
231-
attributes[SpanAttributes.LLM_TEMPERATURE] = params.temperature;
231+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE] = params.temperature;
232232
attributes[SpanAttributes.LLM_FREQUENCY_PENALTY] =
233233
params.frequencyPenalty;
234234
attributes[SpanAttributes.LLM_PRESENCE_PENALTY] =

0 commit comments

Comments (0)