Commit

[chore] Remove use of deprecated scraperhelper structs/funcs (#31816)
TylerHelmuth committed Mar 19, 2024
1 parent fbcf535 commit e957541
Showing 173 changed files with 697 additions and 688 deletions.
27 changes: 27 additions & 0 deletions .chloggen/remove-deprecated-ScraperControllerSettings.yaml
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: breaking

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: general

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Update any component using `scraperhelper.ScraperControllerSettings` to use `scraperhelper.ControllerConfig`

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [31816]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext: This changes the config field name from `ScraperControllerSettings` to `ControllerConfig`

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: [api]
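
Taken together, the diffs below apply one mechanical pattern: the embedded `scraperhelper.ScraperControllerSettings` struct becomes `scraperhelper.ControllerConfig`, and `NewDefaultScraperControllerSettings(type)` becomes `NewDefaultControllerConfig()`. A minimal sketch of that pattern, using a hypothetical examplereceiver package (the package name, the Endpoint field, and the default values are illustrative, not taken from any component in this commit):

// Hypothetical examplereceiver package showing the post-migration shape.
package examplereceiver

import (
	"time"

	"go.opentelemetry.io/collector/component"
	"go.opentelemetry.io/collector/receiver/scraperhelper"
)

// defaultCollectionInterval is an illustrative value, not from a real component.
const defaultCollectionInterval = 60 * time.Second

type Config struct {
	// Before: scraperhelper.ScraperControllerSettings `mapstructure:",squash"`
	scraperhelper.ControllerConfig `mapstructure:",squash"`

	Endpoint string `mapstructure:"endpoint"`
}

func createDefaultConfig() component.Config {
	// Before: cfg := scraperhelper.NewDefaultScraperControllerSettings(metadata.Type)
	cfg := scraperhelper.NewDefaultControllerConfig()
	cfg.CollectionInterval = defaultCollectionInterval

	return &Config{
		// Before: ScraperControllerSettings: cfg,
		ControllerConfig: cfg,
		Endpoint:         "localhost:12345", // placeholder default endpoint
	}
}

Because the embedded struct is squashed by mapstructure and its tags are unchanged, the rename does not affect YAML configuration keys such as collection_interval; only Go code that referenced the old identifiers needs updating, which is why this entry targets the api change log.
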
12 changes: 6 additions & 6 deletions internal/sqlquery/config.go
@@ -12,12 +12,12 @@ import (
)

type Config struct {
- scraperhelper.ScraperControllerSettings `mapstructure:",squash"`
- Driver string `mapstructure:"driver"`
- DataSource string `mapstructure:"datasource"`
- Queries []Query `mapstructure:"queries"`
- StorageID *component.ID `mapstructure:"storage"`
- Telemetry TelemetryConfig `mapstructure:"telemetry"`
+ scraperhelper.ControllerConfig `mapstructure:",squash"`
+ Driver string `mapstructure:"driver"`
+ DataSource string `mapstructure:"datasource"`
+ Queries []Query `mapstructure:"queries"`
+ StorageID *component.ID `mapstructure:"storage"`
+ Telemetry TelemetryConfig `mapstructure:"telemetry"`
}

func (c Config) Validate() error {
4 changes: 2 additions & 2 deletions internal/sqlquery/metrics.go
@@ -12,7 +12,7 @@ import (
"go.opentelemetry.io/collector/receiver/scraperhelper"
)

- func rowToMetric(row StringMap, cfg MetricCfg, dest pmetric.Metric, startTime pcommon.Timestamp, ts pcommon.Timestamp, scrapeCfg scraperhelper.ScraperControllerSettings) error {
+ func rowToMetric(row StringMap, cfg MetricCfg, dest pmetric.Metric, startTime pcommon.Timestamp, ts pcommon.Timestamp, scrapeCfg scraperhelper.ControllerConfig) error {
dest.SetName(cfg.MetricName)
dest.SetDescription(cfg.Description)
dest.SetUnit(cfg.Unit)
@@ -63,7 +63,7 @@ func rowToMetric(row StringMap, cfg MetricCfg, dest pmetric.Metric, startTime pc
return nil
}

- func setTimestamp(cfg MetricCfg, dp pmetric.NumberDataPoint, startTime pcommon.Timestamp, ts pcommon.Timestamp, scrapeCfg scraperhelper.ScraperControllerSettings) {
+ func setTimestamp(cfg MetricCfg, dp pmetric.NumberDataPoint, startTime pcommon.Timestamp, ts pcommon.Timestamp, scrapeCfg scraperhelper.ControllerConfig) {
dp.SetTimestamp(ts)

// Cumulative sum should have a start time set to the beginning of the data points cumulation
4 changes: 2 additions & 2 deletions internal/sqlquery/scraper.go
@@ -27,7 +27,7 @@ type ClientProviderFunc func(Db, string, *zap.Logger, TelemetryConfig) DbClient
type Scraper struct {
id component.ID
Query Query
- ScrapeCfg scraperhelper.ScraperControllerSettings
+ ScrapeCfg scraperhelper.ControllerConfig
StartTime pcommon.Timestamp
ClientProviderFunc ClientProviderFunc
DbProviderFunc DbProviderFunc
@@ -39,7 +39,7 @@ type Scraper struct {

var _ scraperhelper.Scraper = (*Scraper)(nil)

- func NewScraper(id component.ID, query Query, scrapeCfg scraperhelper.ScraperControllerSettings, logger *zap.Logger, telemetry TelemetryConfig, dbProviderFunc DbProviderFunc, clientProviderFunc ClientProviderFunc) *Scraper {
+ func NewScraper(id component.ID, query Query, scrapeCfg scraperhelper.ControllerConfig, logger *zap.Logger, telemetry TelemetryConfig, dbProviderFunc DbProviderFunc, clientProviderFunc ClientProviderFunc) *Scraper {
return &Scraper{
id: id,
Query: query,
4 changes: 2 additions & 2 deletions receiver/activedirectorydsreceiver/config.go
@@ -10,6 +10,6 @@ import (
)

type Config struct {
- scraperhelper.ScraperControllerSettings `mapstructure:",squash"`
- metadata.MetricsBuilderConfig `mapstructure:",squash"`
+ scraperhelper.ControllerConfig `mapstructure:",squash"`
+ metadata.MetricsBuilderConfig `mapstructure:",squash"`
}
2 changes: 1 addition & 1 deletion receiver/activedirectorydsreceiver/config_test.go
@@ -38,7 +38,7 @@ func TestLoadConfig(t *testing.T) {
{
id: component.NewIDWithName(metadata.Type, ""),
expected: &Config{
- ScraperControllerSettings: scraperhelper.ScraperControllerSettings{
+ ControllerConfig: scraperhelper.ControllerConfig{
CollectionInterval: 2 * time.Minute,
InitialDelay: time.Second,
},
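
Test fixtures change in the same mechanical way, as the config_test.go hunk above shows. A small self-contained test sketch of the two idioms that recur throughout the updated tests — building an expected `ControllerConfig` literal, and starting from `NewDefaultControllerConfig()` and overriding individual fields. The assumption that the default `InitialDelay` is one second is inferred from the updated expectations in this commit:

// Hypothetical test file for the examplereceiver sketch above.
package examplereceiver

import (
	"testing"
	"time"

	"go.opentelemetry.io/collector/receiver/scraperhelper"
)

func TestControllerConfigDefaults(t *testing.T) {
	// Idiom 1: expected values written as a ControllerConfig literal
	// (previously a ScraperControllerSettings literal).
	expected := scraperhelper.ControllerConfig{
		CollectionInterval: 2 * time.Minute,
		InitialDelay:       time.Second,
	}

	// Idiom 2: start from the defaults and override what the test needs
	// (previously scraperhelper.NewDefaultScraperControllerSettings(metadata.Type)).
	cfg := scraperhelper.NewDefaultControllerConfig()
	cfg.CollectionInterval = 2 * time.Minute

	if cfg.CollectionInterval != expected.CollectionInterval {
		t.Fatalf("unexpected collection interval: %v", cfg.CollectionInterval)
	}
	if cfg.InitialDelay != expected.InitialDelay {
		t.Fatalf("unexpected initial delay: %v", cfg.InitialDelay)
	}
}

Dropping the metadata.Type argument from the default-config constructor is also what lets some test files, such as the apachesparkreceiver config_test.go further down, remove their metadata import entirely.
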
6 changes: 3 additions & 3 deletions receiver/activedirectorydsreceiver/factory.go
@@ -26,10 +26,10 @@ func NewFactory() receiver.Factory {
}

func createDefaultConfig() component.Config {
- cfg := scraperhelper.NewDefaultScraperControllerSettings(metadata.Type)
+ cfg := scraperhelper.NewDefaultControllerConfig()
cfg.CollectionInterval = defaultCollectionInterval
return &Config{
- ScraperControllerSettings: cfg,
- MetricsBuilderConfig: metadata.DefaultMetricsBuilderConfig(),
+ ControllerConfig: cfg,
+ MetricsBuilderConfig: metadata.DefaultMetricsBuilderConfig(),
}
}
2 changes: 1 addition & 1 deletion receiver/activedirectorydsreceiver/factory_windows.go
@@ -43,7 +43,7 @@ func createMetricsReceiver(
}

return scraperhelper.NewScraperControllerReceiver(
- &c.ScraperControllerSettings,
+ &c.ControllerConfig,
params,
consumer,
scraperhelper.AddScraper(scraper),
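
The controller wiring itself changes only in the field name passed to `scraperhelper.NewScraperControllerReceiver`, as the factory_windows.go hunk above shows. A sketch of a metrics-receiver constructor for the hypothetical examplereceiver package from earlier; the scraper construction is elided, and the signature mirrors the factories touched in this commit:

// Hypothetical factory wiring for the examplereceiver sketch above.
package examplereceiver

import (
	"context"

	"go.opentelemetry.io/collector/component"
	"go.opentelemetry.io/collector/consumer"
	"go.opentelemetry.io/collector/receiver"
	"go.opentelemetry.io/collector/receiver/scraperhelper"
)

func createMetricsReceiver(
	_ context.Context,
	params receiver.CreateSettings,
	rConf component.Config,
	consumer consumer.Metrics,
) (receiver.Metrics, error) {
	cfg := rConf.(*Config)

	// A real factory builds its scraperhelper.Scraper from cfg here; elided in this sketch.
	var scraper scraperhelper.Scraper

	return scraperhelper.NewScraperControllerReceiver(
		// Before: &cfg.ScraperControllerSettings,
		&cfg.ControllerConfig,
		params,
		consumer,
		scraperhelper.AddScraper(scraper),
	)
}

Passing a nil scraper as above would fail at startup; it is only a placeholder to keep the sketch compilable.
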
18 changes: 9 additions & 9 deletions receiver/aerospikereceiver/config.go
@@ -31,15 +31,15 @@ var (

// Config is the receiver configuration
type Config struct {
- scraperhelper.ScraperControllerSettings `mapstructure:",squash"`
- Endpoint string `mapstructure:"endpoint"`
- TLSName string `mapstructure:"tlsname"`
- Username string `mapstructure:"username"`
- Password configopaque.String `mapstructure:"password"`
- CollectClusterMetrics bool `mapstructure:"collect_cluster_metrics"`
- Timeout time.Duration `mapstructure:"timeout"`
- MetricsBuilderConfig metadata.MetricsBuilderConfig `mapstructure:",squash"`
- TLS *configtls.ClientConfig `mapstructure:"tls,omitempty"`
+ scraperhelper.ControllerConfig `mapstructure:",squash"`
+ Endpoint string `mapstructure:"endpoint"`
+ TLSName string `mapstructure:"tlsname"`
+ Username string `mapstructure:"username"`
+ Password configopaque.String `mapstructure:"password"`
+ CollectClusterMetrics bool `mapstructure:"collect_cluster_metrics"`
+ Timeout time.Duration `mapstructure:"timeout"`
+ MetricsBuilderConfig metadata.MetricsBuilderConfig `mapstructure:",squash"`
+ TLS *configtls.ClientConfig `mapstructure:"tls,omitempty"`
}

// Validate validates the values of the given Config, and returns an error if validation fails
48 changes: 24 additions & 24 deletions receiver/aerospikereceiver/config_test.go
@@ -26,76 +26,76 @@ func TestValidate(t *testing.T) {
{
name: "blank endpoint",
config: &Config{
- Endpoint: "",
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ Endpoint: "",
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expected: errEmptyEndpoint,
},
{
name: "missing port",
config: &Config{
- Endpoint: "localhost",
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ Endpoint: "localhost",
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expected: errBadEndpoint,
},
{
name: "bad endpoint",
config: &Config{
- Endpoint: "x;;ef;s;d:::ss:23423423423423423",
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ Endpoint: "x;;ef;s;d:::ss:23423423423423423",
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expected: errBadEndpoint,
},
{
name: "missing host",
config: &Config{
- Endpoint: ":3001",
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ Endpoint: ":3001",
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expected: errBadEndpoint,
},
{
name: "negative port",
config: &Config{
- Endpoint: "localhost:-2",
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ Endpoint: "localhost:-2",
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expected: errBadPort,
},
{
name: "bad port",
config: &Config{
- Endpoint: "localhost:9999999999999999999",
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ Endpoint: "localhost:9999999999999999999",
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expected: errBadPort,
},
{
name: "negative timeout",
config: &Config{
- Endpoint: "localhost:3000",
- Timeout: -1 * time.Second,
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ Endpoint: "localhost:3000",
+ Timeout: -1 * time.Second,
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expected: errNegativeTimeout,
},
{
name: "password but no username",
config: &Config{
- Endpoint: "localhost:3000",
- Username: "",
- Password: "secret",
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ Endpoint: "localhost:3000",
+ Username: "",
+ Password: "secret",
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expected: errEmptyUsername,
},
{
name: "username but no password",
config: &Config{
- Endpoint: "localhost:3000",
- Username: "ro_user",
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ Endpoint: "localhost:3000",
+ Username: "ro_user",
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expected: errEmptyPassword,
},
@@ -110,7 +110,7 @@ func TestValidate(t *testing.T) {
CAFile: "BADCAFILE",
},
},
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expected: errFailedTLSLoad,
},
@@ -123,7 +123,7 @@ func TestValidate(t *testing.T) {
Insecure: false,
TLSSetting: configtls.Config{},
},
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expected: errEmptyEndpointTLSName,
},
12 changes: 6 additions & 6 deletions receiver/aerospikereceiver/factory.go
@@ -54,17 +54,17 @@ func createMetricsReceiver(
}

return scraperhelper.NewScraperControllerReceiver(
- &cfg.ScraperControllerSettings, params, consumer,
+ &cfg.ControllerConfig, params, consumer,
scraperhelper.AddScraper(scraper),
)
}

func createDefaultConfig() component.Config {
return &Config{
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
- Endpoint: defaultEndpoint,
- Timeout: defaultTimeout,
- CollectClusterMetrics: defaultCollectClusterMetrics,
- MetricsBuilderConfig: metadata.DefaultMetricsBuilderConfig(),
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
+ Endpoint: defaultEndpoint,
+ Timeout: defaultTimeout,
+ CollectClusterMetrics: defaultCollectClusterMetrics,
+ MetricsBuilderConfig: metadata.DefaultMetricsBuilderConfig(),
}
}
2 changes: 1 addition & 1 deletion receiver/aerospikereceiver/integration_test.go
@@ -57,7 +57,7 @@ func integrationTest(cfgMod func(*Config)) func(*testing.T) {
func(t *testing.T, cfg component.Config, ci *scraperinttest.ContainerInfo) {
rCfg := cfg.(*Config)
rCfg.Endpoint = fmt.Sprintf("%s:%s", ci.Host(t), ci.MappedPort(t, aerospikePort))
- rCfg.ScraperControllerSettings.CollectionInterval = 100 * time.Millisecond
+ rCfg.ControllerConfig.CollectionInterval = 100 * time.Millisecond
cfgMod(rCfg)
}),
scraperinttest.WithCompareOptions(
6 changes: 3 additions & 3 deletions receiver/apachereceiver/config.go
@@ -14,9 +14,9 @@ import (
)

type Config struct {
- scraperhelper.ScraperControllerSettings `mapstructure:",squash"`
- confighttp.ClientConfig `mapstructure:",squash"`
- MetricsBuilderConfig metadata.MetricsBuilderConfig `mapstructure:",squash"`
+ scraperhelper.ControllerConfig `mapstructure:",squash"`
+ confighttp.ClientConfig `mapstructure:",squash"`
+ MetricsBuilderConfig metadata.MetricsBuilderConfig `mapstructure:",squash"`
}

var (
6 changes: 3 additions & 3 deletions receiver/apachereceiver/factory.go
@@ -31,11 +31,11 @@ func NewFactory() receiver.Factory {
}

func createDefaultConfig() component.Config {
- cfg := scraperhelper.NewDefaultScraperControllerSettings(metadata.Type)
+ cfg := scraperhelper.NewDefaultControllerConfig()
cfg.CollectionInterval = 10 * time.Second

return &Config{
- ScraperControllerSettings: cfg,
+ ControllerConfig: cfg,
ClientConfig: confighttp.ClientConfig{
Endpoint: defaultEndpoint,
Timeout: 10 * time.Second,
@@ -80,7 +80,7 @@ func createMetricsReceiver(
}

return scraperhelper.NewScraperControllerReceiver(
- &cfg.ScraperControllerSettings, params, consumer,
+ &cfg.ControllerConfig, params, consumer,
scraperhelper.AddScraper(scraper),
)
}
2 changes: 1 addition & 1 deletion receiver/apachereceiver/factory_test.go
@@ -34,7 +34,7 @@ func TestCreateMetricsReceiver(t *testing.T) {
context.Background(),
receivertest.NewNopCreateSettings(),
&Config{
- ScraperControllerSettings: scraperhelper.ScraperControllerSettings{
+ ControllerConfig: scraperhelper.ControllerConfig{
CollectionInterval: 10 * time.Second,
},
},
2 changes: 1 addition & 1 deletion receiver/apachereceiver/integration_test.go
@@ -42,7 +42,7 @@ func TestIntegration(t *testing.T) {
scraperinttest.WithCustomConfig(
func(t *testing.T, cfg component.Config, ci *scraperinttest.ContainerInfo) {
rCfg := cfg.(*Config)
- rCfg.ScraperControllerSettings.CollectionInterval = 100 * time.Millisecond
+ rCfg.ControllerConfig.CollectionInterval = 100 * time.Millisecond
rCfg.Endpoint = fmt.Sprintf("http://%s:%s/server-status?auto", ci.Host(t), ci.MappedPort(t, apachePort))
}),
scraperinttest.WithCompareOptions(
8 changes: 4 additions & 4 deletions receiver/apachesparkreceiver/config.go
@@ -25,10 +25,10 @@ var (

// Config defines the configuration for the various elements of the receiver agent.
type Config struct {
- scraperhelper.ScraperControllerSettings `mapstructure:",squash"`
- metadata.MetricsBuilderConfig `mapstructure:",squash"`
- confighttp.ClientConfig `mapstructure:",squash"`
- ApplicationNames []string `mapstructure:"application_names"`
+ scraperhelper.ControllerConfig `mapstructure:",squash"`
+ metadata.MetricsBuilderConfig `mapstructure:",squash"`
+ confighttp.ClientConfig `mapstructure:",squash"`
+ ApplicationNames []string `mapstructure:"application_names"`
}

// Validate validates missing and invalid configuration fields.
6 changes: 2 additions & 4 deletions receiver/apachesparkreceiver/config_test.go
@@ -9,8 +9,6 @@ import (
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/config/confighttp"
"go.opentelemetry.io/collector/receiver/scraperhelper"
-
- "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/apachesparkreceiver/internal/metadata"
)

func TestValidate(t *testing.T) {
@@ -22,7 +20,7 @@ func TestValidate(t *testing.T) {
{
desc: "default config",
cfg: &Config{
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expectedErr: nil,
},
@@ -32,7 +30,7 @@ func TestValidate(t *testing.T) {
ClientConfig: confighttp.ClientConfig{
Endpoint: "invalid://endpoint 12efg",
},
- ScraperControllerSettings: scraperhelper.NewDefaultScraperControllerSettings(metadata.Type),
+ ControllerConfig: scraperhelper.NewDefaultControllerConfig(),
},
expectedErr: errInvalidEndpoint,
},
