
Commit eefa7ba

Merge branch 'main' into Ollama_meter
2 parents: 5a895ad + 66f8373

60 files changed: +273 −188 lines changed


.cz.toml

+1-1
@@ -4,7 +4,7 @@ tag_format = "v$version"
 version_scheme = "pep440"
 major_version_zero = true
 update_changelog_on_bump = true
-version = "0.38.10"
+version = "0.38.12"
 version_files = [
     "packages/opentelemetry-instrumentation-groq/pyproject.toml:^version",
     "packages/opentelemetry-instrumentation-groq/opentelemetry/instrumentation/groq/version.py",

CHANGELOG.md

+12
@@ -1,3 +1,15 @@
+## v0.38.12 (2025-03-07)
+
+### Fix
+
+- **sdk**: client shouldn't be initialized if destination is not traceloop (#2754)
+
+## v0.38.11 (2025-03-06)
+
+### Fix
+
+- **sdk**: When tracing a task with no `name` provided, use the qualified name instead of the name (#2743)
+
 ## v0.38.10 (2025-03-05)

 ### Fix
@@ -1 +1 @@
-__version__ = "0.38.10"
+__version__ = "0.38.12"

packages/opentelemetry-instrumentation-alephalpha/pyproject.toml

+1-1
@@ -8,7 +8,7 @@ show_missing = true

 [tool.poetry]
 name = "opentelemetry-instrumentation-alephalpha"
-version = "0.38.10"
+version = "0.38.12"
 description = "OpenTelemetry Aleph Alpha instrumentation"
 authors = [
     "Gal Kleinman <[email protected]>",
@@ -1 +1 @@
-__version__ = "0.38.10"
+__version__ = "0.38.12"

packages/opentelemetry-instrumentation-anthropic/pyproject.toml

+1-1
@@ -8,7 +8,7 @@ show_missing = true

 [tool.poetry]
 name = "opentelemetry-instrumentation-anthropic"
-version = "0.38.10"
+version = "0.38.12"
 description = "OpenTelemetry Anthropic instrumentation"
 authors = [
     "Gal Kleinman <[email protected]>",

packages/opentelemetry-instrumentation-bedrock/opentelemetry/instrumentation/bedrock/__init__.py

+50-3
@@ -197,8 +197,13 @@ def _handle_stream_call(span, kwargs, response, metric_params):
     @dont_throw
     def stream_done(response_body):
         request_body = json.loads(kwargs.get("body"))
+        modelId = kwargs.get("modelId")

-        (vendor, model) = kwargs.get("modelId").split(".")
+        if modelId is not None and "." in modelId:
+            (vendor, model) = modelId.split(".")
+        else:
+            vendor = "imported_model"
+            model = kwargs.get("modelId")

         metric_params.vendor = vendor
         metric_params.model = model
@@ -233,6 +238,8 @@ def stream_done(response_body):
             _set_amazon_span_attributes(
                 span, request_body, response_body, metric_params
             )
+        elif vendor == "imported_model":
+            _set_imported_model_span_attributes(span, request_body, response_body, metric_params)

         span.end()

@@ -246,9 +253,13 @@ def _handle_call(span, kwargs, response, metric_params):
     )
     request_body = json.loads(kwargs.get("body"))
     response_body = json.loads(response.get("body").read())
+    modelId = kwargs.get("modelId")

-    (vendor, model) = kwargs.get("modelId").split(".")
-
+    if modelId is not None and "." in modelId:
+        (vendor, model) = modelId.split(".")
+    else:
+        vendor = "imported_model"
+        model = kwargs.get("modelId")
     metric_params.vendor = vendor
     metric_params.model = model
     metric_params.is_stream = False
@@ -278,6 +289,8 @@ def _handle_call(span, kwargs, response, metric_params):
         _set_llama_span_attributes(span, request_body, response_body, metric_params)
     elif vendor == "amazon":
         _set_amazon_span_attributes(span, request_body, response_body, metric_params)
+    elif vendor == "imported_model":
+        _set_imported_model_span_attributes(span, request_body, response_body, metric_params)


 def _record_usage_to_span(span, prompt_tokens, completion_tokens, metric_params):
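
The hunks above are the core of the change: standard Bedrock model identifiers follow a `vendor.model` pattern (for example `anthropic.claude-v2`), while a custom imported model is typically referenced by an identifier with no dot (such as an ARN), so the old unconditional `split(".")` would fail to unpack. A minimal, hedged sketch of the new branching, using purely illustrative identifiers not taken from this repository:

```python
def resolve_vendor_and_model(model_id):
    """Mirrors the fallback introduced in this commit: split vendor.model
    identifiers, and label everything else as an imported model."""
    if model_id is not None and "." in model_id:
        vendor, model = model_id.split(".")
        return vendor, model
    return "imported_model", model_id


# Illustrative identifiers only:
assert resolve_vendor_and_model("anthropic.claude-v2") == ("anthropic", "claude-v2")
assert resolve_vendor_and_model(
    "arn:aws:bedrock:us-east-1:111122223333:imported-model/abc123"
)[0] == "imported_model"
```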
@@ -636,6 +649,40 @@ def _set_amazon_span_attributes(span, request_body, response_body, metric_params):
     )


+def _set_imported_model_span_attributes(span, request_body, response_body, metric_params):
+    _set_span_attribute(
+        span, SpanAttributes.LLM_REQUEST_TYPE, LLMRequestTypeValues.COMPLETION.value
+    )
+    _set_span_attribute(
+        span, SpanAttributes.LLM_REQUEST_TOP_P, request_body.get("topP")
+    )
+    _set_span_attribute(
+        span, SpanAttributes.LLM_REQUEST_TEMPERATURE, request_body.get("temperature")
+    )
+    _set_span_attribute(
+        span, SpanAttributes.LLM_REQUEST_MAX_TOKENS, request_body.get("max_tokens")
+    )
+    prompt_tokens = (
+        response_body.get("usage", {}).get("prompt_tokens")
+        if response_body.get("usage", {}).get("prompt_tokens") is not None
+        else response_body.get("prompt_token_count")
+    )
+    completion_tokens = response_body.get("usage", {}).get(
+        "completion_tokens"
+    ) or response_body.get("generation_token_count")
+
+    _record_usage_to_span(span, prompt_tokens, completion_tokens, metric_params)
+
+    if should_send_prompts():
+        _set_span_attribute(
+            span, f"{SpanAttributes.LLM_PROMPTS}.0.content", request_body.get("prompt")
+        )
+        _set_span_attribute(
+            span, f"{SpanAttributes.LLM_COMPLETIONS}.0.content",
+            response_body.get("generation"),
+        )
+
+
 def _create_metrics(meter: Meter):
     token_histogram = meter.create_histogram(
         name=Meters.LLM_TOKEN_USAGE,
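
The new `_set_imported_model_span_attributes` also has to cover two response shapes for token usage: imported models that report an OpenAI-style `usage` object and those that report Bedrock-style `prompt_token_count` / `generation_token_count` fields. A small sketch of that fallback, exercised with invented sample payloads:

```python
def extract_imported_model_token_counts(response_body):
    """Same fallback order as _set_imported_model_span_attributes above:
    prefer an OpenAI-style `usage` block, otherwise fall back to the
    Bedrock-style *_token_count fields."""
    usage = response_body.get("usage", {})
    prompt_tokens = (
        usage.get("prompt_tokens")
        if usage.get("prompt_tokens") is not None
        else response_body.get("prompt_token_count")
    )
    completion_tokens = usage.get("completion_tokens") or response_body.get(
        "generation_token_count"
    )
    return prompt_tokens, completion_tokens


# Invented sample payloads, just to show both shapes:
assert extract_imported_model_token_counts(
    {"usage": {"prompt_tokens": 12, "completion_tokens": 34}}
) == (12, 34)
assert extract_imported_model_token_counts(
    {"prompt_token_count": 12, "generation_token_count": 34}
) == (12, 34)
```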
@@ -1 +1 @@
-__version__ = "0.38.10"
+__version__ = "0.38.12"
