Skip to content

Commit 6757cc7

Browse files
authored
Add obs for logs in scraper/scraperhelper (#12036)
<!--Ex. Fixing a bug - Describe the bug and how this fixes the issue. Ex. Adding a feature - Explain what this achieves.--> #### Description This PR adds observability for logs in scraper/scraperhelper and introduces new metrics for scraping logs. <!-- Issue number if applicable --> #### Link to tracking issue Relates to #11238 <!--Describe what testing was performed and which tests were added.--> #### Testing Added <!--Describe the documentation added.--> #### Documentation Added <!--Please delete paragraphs that you did not use before submitting.-->
1 parent 60b22d1 commit 6757cc7

File tree

7 files changed

+319
-1
lines changed

7 files changed

+319
-1
lines changed

.chloggen/add-obs-for-logs.yaml

+25
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
# Use this changelog template to create an entry for release notes.
2+
3+
# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
4+
change_type: enhancement
5+
6+
# The name of the component, or a single word describing the area of concern, (e.g. otlpreceiver)
7+
component: scraper/scraperhelper
8+
9+
# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
10+
note: Add obs_logs for scraper/scraperhelper
11+
12+
# One or more tracking issues or pull requests related to the change
13+
issues: [12036]
14+
15+
# (Optional) One or more lines of additional information to render under the primary note.
16+
# These lines will be padded with 2 spaces and then inserted directly into the document.
17+
# Use pipe (|) for multiline entries.
18+
subtext: This change adds observability for logs in scraper/scraperhelper and introduces new metrics for scraping logs.
19+
20+
# Optional: The change log or logs in which this entry should be included.
21+
# e.g. '[user]' or '[user, api]'
22+
# Include 'user' if the change is relevant to end users.
23+
# Include 'api' if there is a change to a library API.
24+
# Default: '[user]'
25+
change_logs: [api]

scraper/scraperhelper/documentation.md

+16
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,14 @@
66

77
The following telemetry is emitted by this component.
88

9+
### otelcol_scraper_errored_log_records
10+
11+
Number of log records that were unable to be scraped. [alpha]
12+
13+
| Unit | Metric Type | Value Type | Monotonic |
14+
| ---- | ----------- | ---------- | --------- |
15+
| {datapoints} | Sum | Int | true |
16+
917
### otelcol_scraper_errored_metric_points
1018

1119
Number of metric points that were unable to be scraped. [alpha]
@@ -14,6 +22,14 @@ Number of metric points that were unable to be scraped. [alpha]
1422
| ---- | ----------- | ---------- | --------- |
1523
| {datapoints} | Sum | Int | true |
1624

25+
### otelcol_scraper_scraped_log_records
26+
27+
Number of log records successfully scraped. [alpha]
28+
29+
| Unit | Metric Type | Value Type | Monotonic |
30+
| ---- | ----------- | ---------- | --------- |
31+
| {datapoints} | Sum | Int | true |
32+
1733
### otelcol_scraper_scraped_metric_points
1834

1935
Number of metric points successfully scraped. [alpha]

scraper/scraperhelper/internal/metadata/generated_telemetry.go

+14
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

scraper/scraperhelper/internal/metadatatest/generated_telemetrytest_test.go

+26
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

scraper/scraperhelper/metadata.yaml

+21-1
Original file line numberDiff line numberDiff line change
@@ -26,4 +26,24 @@ telemetry:
2626
unit: "{datapoints}"
2727
sum:
2828
value_type: int
29-
monotonic: true
29+
monotonic: true
30+
31+
scraper_scraped_log_records:
32+
enabled: true
33+
stability:
34+
level: alpha
35+
description: Number of log records successfully scraped.
36+
unit: "{datapoints}"
37+
sum:
38+
value_type: int
39+
monotonic: true
40+
41+
scraper_errored_log_records:
42+
enabled: true
43+
stability:
44+
level: alpha
45+
description: Number of log records that were unable to be scraped.
46+
unit: "{datapoints}"
47+
sum:
48+
value_type: int
49+
monotonic: true

scraper/scraperhelper/obs_logs.go

+81
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,81 @@
1+
// Copyright The OpenTelemetry Authors
2+
// SPDX-License-Identifier: Apache-2.0
3+
4+
package scraperhelper // import "go.opentelemetry.io/collector/scraper/scraperhelper"
5+
6+
import (
7+
"context"
8+
"errors"
9+
10+
"go.opentelemetry.io/otel/attribute"
11+
"go.opentelemetry.io/otel/codes"
12+
"go.opentelemetry.io/otel/metric"
13+
"go.uber.org/zap"
14+
15+
"go.opentelemetry.io/collector/component"
16+
"go.opentelemetry.io/collector/pdata/plog"
17+
"go.opentelemetry.io/collector/pipeline"
18+
"go.opentelemetry.io/collector/scraper"
19+
"go.opentelemetry.io/collector/scraper/scrapererror"
20+
"go.opentelemetry.io/collector/scraper/scraperhelper/internal/metadata"
21+
)
22+
23+
const (
24+
// scrapedLogRecordsKey used to identify log records scraped by the
25+
// Collector.
26+
scrapedLogRecordsKey = "scraped_log_records"
27+
// erroredLogRecordsKey used to identify log records errored (i.e.
28+
// unable to be scraped) by the Collector.
29+
erroredLogRecordsKey = "errored_log_records"
30+
)
31+
32+
func newObsLogs(delegate scraper.ScrapeLogsFunc, receiverID component.ID, scraperID component.ID, telSettings component.TelemetrySettings) (scraper.ScrapeLogsFunc, error) {
33+
telemetryBuilder, errBuilder := metadata.NewTelemetryBuilder(telSettings)
34+
if errBuilder != nil {
35+
return nil, errBuilder
36+
}
37+
38+
tracer := metadata.Tracer(telSettings)
39+
spanName := scraperKey + spanNameSep + scraperID.String() + spanNameSep + "ScrapeLogs"
40+
otelAttrs := metric.WithAttributeSet(attribute.NewSet(
41+
attribute.String(receiverKey, receiverID.String()),
42+
attribute.String(scraperKey, scraperID.String()),
43+
))
44+
45+
return func(ctx context.Context) (plog.Logs, error) {
46+
ctx, span := tracer.Start(ctx, spanName)
47+
defer span.End()
48+
49+
md, err := delegate(ctx)
50+
numScrapedLogs := 0
51+
numErroredLogs := 0
52+
if err != nil {
53+
telSettings.Logger.Error("Error scraping logs", zap.Error(err))
54+
var partialErr scrapererror.PartialScrapeError
55+
if errors.As(err, &partialErr) {
56+
numErroredLogs = partialErr.Failed
57+
numScrapedLogs = md.LogRecordCount()
58+
}
59+
} else {
60+
numScrapedLogs = md.LogRecordCount()
61+
}
62+
63+
telemetryBuilder.ScraperScrapedLogRecords.Add(ctx, int64(numScrapedLogs), otelAttrs)
64+
telemetryBuilder.ScraperErroredLogRecords.Add(ctx, int64(numErroredLogs), otelAttrs)
65+
66+
// end span according to errors
67+
if span.IsRecording() {
68+
span.SetAttributes(
69+
attribute.String(formatKey, pipeline.SignalMetrics.String()),
70+
attribute.Int64(scrapedLogRecordsKey, int64(numScrapedLogs)),
71+
attribute.Int64(erroredLogRecordsKey, int64(numErroredLogs)),
72+
)
73+
74+
if err != nil {
75+
span.SetStatus(codes.Error, err.Error())
76+
}
77+
}
78+
79+
return md, err
80+
}, nil
81+
}
+136
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,136 @@
1+
// Copyright The OpenTelemetry Authors
2+
// SPDX-License-Identifier: Apache-2.0
3+
4+
package scraperhelper
5+
6+
import (
7+
"context"
8+
"errors"
9+
"testing"
10+
11+
"github.com/stretchr/testify/assert"
12+
"github.com/stretchr/testify/require"
13+
"go.opentelemetry.io/otel/attribute"
14+
"go.opentelemetry.io/otel/codes"
15+
"go.opentelemetry.io/otel/sdk/metric/metricdata"
16+
"go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest"
17+
sdktrace "go.opentelemetry.io/otel/sdk/trace"
18+
"go.opentelemetry.io/otel/sdk/trace/tracetest"
19+
20+
"go.opentelemetry.io/collector/component"
21+
"go.opentelemetry.io/collector/pdata/plog"
22+
"go.opentelemetry.io/collector/pdata/testdata"
23+
"go.opentelemetry.io/collector/scraper/scraperhelper/internal/metadatatest"
24+
)
25+
26+
// TestScrapeLogsDataOp verifies that the obs wrapper produced by newObsLogs
// records the expected span name, span attributes, span status, and metric
// values for three scenarios: a partial scrape error, a total scrape error,
// and a successful scrape.
func TestScrapeLogsDataOp(t *testing.T) {
	tt := metadatatest.SetupTelemetry()
	t.Cleanup(func() { require.NoError(t, tt.Shutdown(context.Background())) })

	tel := tt.NewTelemetrySettings()
	// TODO: Add capability for tracing testing in metadatatest.
	spanRecorder := new(tracetest.SpanRecorder)
	tel.TracerProvider = sdktrace.NewTracerProvider(sdktrace.WithSpanProcessor(spanRecorder))

	// Parent span so each wrapped scrape produces a recorded child span.
	parentCtx, parentSpan := tel.TracerProvider.Tracer("test").Start(context.Background(), t.Name())
	defer parentSpan.End()

	// One scrape per scenario; errors come from shared test fixtures
	// (partialErrFake / errFake, defined elsewhere in this package).
	params := []testParams{
		{items: 23, err: partialErrFake},
		{items: 29, err: errFake},
		{items: 15, err: nil},
	}
	for i := range params {
		sf, err := newObsLogs(func(context.Context) (plog.Logs, error) {
			return testdata.GenerateLogs(params[i].items), params[i].err
		}, receiverID, scraperID, tel)
		require.NoError(t, err)
		_, err = sf.ScrapeLogs(parentCtx)
		require.ErrorIs(t, err, params[i].err)
	}

	// Exactly one ended span per scrape, in scrape order.
	spans := spanRecorder.Ended()
	require.Equal(t, len(params), len(spans))

	var scrapedLogRecords, erroredLogRecords int
	for i, span := range spans {
		assert.Equal(t, "scraper/"+scraperID.String()+"/ScrapeLogs", span.Name())
		switch {
		case params[i].err == nil:
			// Success: all items counted as scraped, none errored, span Unset.
			scrapedLogRecords += params[i].items
			require.Contains(t, span.Attributes(), attribute.Int64(scrapedLogRecordsKey, int64(params[i].items)))
			require.Contains(t, span.Attributes(), attribute.Int64(erroredLogRecordsKey, 0))
			assert.Equal(t, codes.Unset, span.Status().Code)
		case errors.Is(params[i].err, errFake):
			// Since we get an error, we cannot record any metrics because we don't know if the returned plog.Logs is valid instance.
			require.Contains(t, span.Attributes(), attribute.Int64(scrapedLogRecordsKey, 0))
			require.Contains(t, span.Attributes(), attribute.Int64(erroredLogRecordsKey, 0))
			assert.Equal(t, codes.Error, span.Status().Code)
			assert.Equal(t, params[i].err.Error(), span.Status().Description)
		case errors.Is(params[i].err, partialErrFake):
			// Partial error: scraped items still counted; the hardcoded 2
			// presumably matches partialErrFake.Failed in the shared
			// fixtures — verify against that definition if it changes.
			scrapedLogRecords += params[i].items
			erroredLogRecords += 2
			require.Contains(t, span.Attributes(), attribute.Int64(scrapedLogRecordsKey, int64(params[i].items)))
			require.Contains(t, span.Attributes(), attribute.Int64(erroredLogRecordsKey, 2))
			assert.Equal(t, codes.Error, span.Status().Code)
			assert.Equal(t, params[i].err.Error(), span.Status().Description)
		default:
			t.Fatalf("unexpected err param: %v", params[i].err)
		}
	}

	// Totals accumulated above must match the emitted metrics.
	checkScraperLogs(t, tt, receiverID, scraperID, int64(scrapedLogRecords), int64(erroredLogRecords))
}
84+
85+
func TestCheckScraperLogs(t *testing.T) {
86+
tt := metadatatest.SetupTelemetry()
87+
t.Cleanup(func() { require.NoError(t, tt.Shutdown(context.Background())) })
88+
89+
sf, err := newObsLogs(func(context.Context) (plog.Logs, error) {
90+
return testdata.GenerateLogs(7), nil
91+
}, receiverID, scraperID, tt.NewTelemetrySettings())
92+
require.NoError(t, err)
93+
_, err = sf.ScrapeLogs(context.Background())
94+
require.NoError(t, err)
95+
96+
checkScraperLogs(t, tt, receiverID, scraperID, 7, 0)
97+
}
98+
99+
func checkScraperLogs(t *testing.T, tt metadatatest.Telemetry, receiver component.ID, scraper component.ID, scrapedLogRecords int64, erroredLogRecords int64) {
100+
tt.AssertMetrics(t, []metricdata.Metrics{
101+
{
102+
Name: "otelcol_scraper_scraped_log_records",
103+
Description: "Number of log records successfully scraped. [alpha]",
104+
Unit: "{datapoints}",
105+
Data: metricdata.Sum[int64]{
106+
Temporality: metricdata.CumulativeTemporality,
107+
IsMonotonic: true,
108+
DataPoints: []metricdata.DataPoint[int64]{
109+
{
110+
Attributes: attribute.NewSet(
111+
attribute.String(receiverKey, receiver.String()),
112+
attribute.String(scraperKey, scraper.String())),
113+
Value: scrapedLogRecords,
114+
},
115+
},
116+
},
117+
},
118+
{
119+
Name: "otelcol_scraper_errored_log_records",
120+
Description: "Number of log records that were unable to be scraped. [alpha]",
121+
Unit: "{datapoints}",
122+
Data: metricdata.Sum[int64]{
123+
Temporality: metricdata.CumulativeTemporality,
124+
IsMonotonic: true,
125+
DataPoints: []metricdata.DataPoint[int64]{
126+
{
127+
Attributes: attribute.NewSet(
128+
attribute.String(receiverKey, receiver.String()),
129+
attribute.String(scraperKey, scraper.String())),
130+
Value: erroredLogRecords,
131+
},
132+
},
133+
},
134+
},
135+
}, metricdatatest.IgnoreTimestamp(), metricdatatest.IgnoreExemplars())
136+
}

0 commit comments

Comments
 (0)