Skip to content

Commit 470c0dd

Browse files
authored
Add scraper for logs (#11799)
#### Description Add scraper for logs #### Link to tracking issue Relates to #11238 #### Testing Added unit tests #### Documentation Added
1 parent d5ddbf7 commit 470c0dd

File tree

7 files changed

+154
-6
lines changed

7 files changed

+154
-6
lines changed

.chloggen/add-scraper-for-logs.yaml

+25
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
# Use this changelog template to create an entry for release notes.
2+
3+
# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
4+
change_type: enhancement
5+
6+
# The name of the component, or a single word describing the area of concern, (e.g. otlpreceiver)
7+
component: receiver/scraperhelper
8+
9+
# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
10+
note: Add scraper for logs
11+
12+
# One or more tracking issues or pull requests related to the change
13+
issues: [11238]
14+
15+
# (Optional) One or more lines of additional information to render under the primary note.
16+
# These lines will be padded with 2 spaces and then inserted directly into the document.
17+
# Use pipe (|) for multiline entries.
18+
subtext:
19+
20+
# Optional: The change log or logs in which this entry should be included.
21+
# e.g. '[user]' or '[user, api]'
22+
# Include 'user' if the change is relevant to end users.
23+
# Include 'api' if there is a change to a library API.
24+
# Default: '[user]'
25+
change_logs: [api]

scraper/README.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,8 @@ A scraper defines how to connect and scrape telemetry data from an external sour
55
<!-- status autogenerated section -->
66
| Status | |
77
| ------------- |-----------|
8-
| Stability | [development]: metrics |
8+
| Stability | [development]: metrics, logs |
99
| Issues | [![Open issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector?query=is%3Aissue%20is%3Aopen%20label%3Apkg%2F%20&label=open&color=orange&logo=opentelemetry)](https://github.com/open-telemetry/opentelemetry-collector/issues?q=is%3Aopen+is%3Aissue+label%3Apkg%2F) [![Closed issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector?query=is%3Aissue%20is%3Aclosed%20label%3Apkg%2F%20&label=closed&color=blue&logo=opentelemetry)](https://github.com/open-telemetry/opentelemetry-collector/issues?q=is%3Aclosed+is%3Aissue+label%3Apkg%2F) |
1010

1111
[development]: https://github.com/open-telemetry/opentelemetry-collector/blob/main/docs/component-stability.md#development
12-
<!-- end autogenerated section -->
12+
<!-- end autogenerated section -->

scraper/logs.go

+44
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
// Copyright The OpenTelemetry Authors
2+
// SPDX-License-Identifier: Apache-2.0
3+
4+
package scraper // import "go.opentelemetry.io/collector/scraper"
5+
6+
import (
7+
"context"
8+
9+
"go.opentelemetry.io/collector/component"
10+
"go.opentelemetry.io/collector/pdata/plog"
11+
)
12+
13+
// Logs is the base interface for logs scrapers.
// Implementations embed component.Component for lifecycle (Start/Shutdown)
// and add the single scrape entry point below.
type Logs interface {
	component.Component

	// ScrapeLogs scrapes and returns logs, or an error if the scrape fails.
	ScrapeLogs(context.Context) (plog.Logs, error)
}
20+
21+
// ScrapeLogsFunc is a function type matching the signature of
// Logs.ScrapeLogs, letting a plain function satisfy the scraping part of the
// Logs interface.
type ScrapeLogsFunc ScrapeFunc[plog.Logs]

// ScrapeLogs implements Logs.ScrapeLogs by invoking the function itself.
func (sf ScrapeLogsFunc) ScrapeLogs(ctx context.Context) (plog.Logs, error) {
	return sf(ctx)
}
27+
28+
// logs is the internal implementation of the Logs interface, composing the
// shared baseScraper (which presumably supplies the component lifecycle —
// defined elsewhere in this package) with a ScrapeLogsFunc that performs the
// actual scraping.
type logs struct {
	baseScraper
	ScrapeLogsFunc
}
32+
33+
// NewLogs creates a new Logs scraper.
34+
func NewLogs(scrape ScrapeLogsFunc, options ...Option) (Logs, error) {
35+
if scrape == nil {
36+
return nil, errNilFunc
37+
}
38+
bs := &logs{
39+
baseScraper: newBaseScraper(options),
40+
ScrapeLogsFunc: scrape,
41+
}
42+
43+
return bs, nil
44+
}

scraper/logs_test.go

+79
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
// Copyright The OpenTelemetry Authors
2+
// SPDX-License-Identifier: Apache-2.0
3+
4+
package scraper
5+
6+
import (
7+
"context"
8+
"errors"
9+
"sync"
10+
"testing"
11+
12+
"github.com/stretchr/testify/assert"
13+
"github.com/stretchr/testify/require"
14+
15+
"go.opentelemetry.io/collector/component"
16+
"go.opentelemetry.io/collector/component/componenttest"
17+
"go.opentelemetry.io/collector/pdata/plog"
18+
)
19+
20+
func TestNewLogs(t *testing.T) {
21+
mp, err := NewLogs(newTestScrapeLogsFunc(nil))
22+
require.NoError(t, err)
23+
24+
require.NoError(t, mp.Start(context.Background(), componenttest.NewNopHost()))
25+
md, err := mp.ScrapeLogs(context.Background())
26+
require.NoError(t, err)
27+
assert.Equal(t, plog.NewLogs(), md)
28+
require.NoError(t, mp.Shutdown(context.Background()))
29+
}
30+
31+
func TestNewLogs_WithOptions(t *testing.T) {
32+
want := errors.New("my_error")
33+
mp, err := NewLogs(newTestScrapeLogsFunc(nil),
34+
WithStart(func(context.Context, component.Host) error { return want }),
35+
WithShutdown(func(context.Context) error { return want }))
36+
require.NoError(t, err)
37+
38+
assert.Equal(t, want, mp.Start(context.Background(), componenttest.NewNopHost()))
39+
assert.Equal(t, want, mp.Shutdown(context.Background()))
40+
}
41+
42+
func TestNewLogs_NilRequiredFields(t *testing.T) {
43+
_, err := NewLogs(nil)
44+
require.Error(t, err)
45+
}
46+
47+
func TestNewLogs_ProcessLogsError(t *testing.T) {
48+
want := errors.New("my_error")
49+
mp, err := NewLogs(newTestScrapeLogsFunc(want))
50+
require.NoError(t, err)
51+
_, err = mp.ScrapeLogs(context.Background())
52+
require.ErrorIs(t, err, want)
53+
}
54+
55+
func TestLogsConcurrency(t *testing.T) {
56+
mp, err := NewLogs(newTestScrapeLogsFunc(nil))
57+
require.NoError(t, err)
58+
require.NoError(t, mp.Start(context.Background(), componenttest.NewNopHost()))
59+
60+
var wg sync.WaitGroup
61+
for i := 0; i < 10; i++ {
62+
wg.Add(1)
63+
go func() {
64+
defer wg.Done()
65+
for j := 0; j < 10000; j++ {
66+
_, errScrape := mp.ScrapeLogs(context.Background())
67+
assert.NoError(t, errScrape)
68+
}
69+
}()
70+
}
71+
wg.Wait()
72+
require.NoError(t, mp.Shutdown(context.Background()))
73+
}
74+
75+
func newTestScrapeLogsFunc(retError error) ScrapeLogsFunc {
76+
return func(_ context.Context) (plog.Logs, error) {
77+
return plog.NewLogs(), retError
78+
}
79+
}

scraper/metadata.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -4,4 +4,4 @@ github_project: open-telemetry/opentelemetry-collector
44
status:
55
class: pkg
66
stability:
7-
development: [metrics]
7+
development: [metrics, logs]

scraper/scraper.go

+1-1
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ import (
1212

1313
var errNilFunc = errors.New("nil scrape func")
1414

15-
// ScrapeFunc scrapes metrics.
15+
// ScrapeFunc scrapes data.
1616
type ScrapeFunc[T any] func(context.Context) (T, error)
1717

1818
// Option apply changes to internal options.

scraper/scrapererror/partialscrapeerror.go

+2-2
Original file line numberDiff line numberDiff line change
@@ -6,13 +6,13 @@ package scrapererror // import "go.opentelemetry.io/collector/scraper/scrapererr
66
import "errors"
77

88
// PartialScrapeError is an error to represent
9-
// that a subset of metrics were failed to be scraped.
9+
// that a subset of the data failed to be scraped.
1010
type PartialScrapeError struct {
1111
error
1212
Failed int
1313
}
1414

15-
// NewPartialScrapeError creates PartialScrapeError for failed metrics.
15+
// NewPartialScrapeError creates PartialScrapeError for failed data.
1616
// Use this error type only when a subset of data was failed to be scraped.
1717
func NewPartialScrapeError(err error, failed int) PartialScrapeError {
1818
return PartialScrapeError{

0 commit comments

Comments
 (0)