Skip to content

Commit

Permalink
Merge branch 'main' into switch-to-ibm-sarama
Browse files Browse the repository at this point in the history
  • Loading branch information
hainenber authored Sep 13, 2023
2 parents 93f02fd + 2126955 commit 7910ca8
Show file tree
Hide file tree
Showing 27 changed files with 411 additions and 53 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ Main (unreleased)

- Flow: improve river config validation step in `prometheus.scrape` by comparing `scrape_timeout` with `scrape_interval`. (@wildum)


### Other changes

- Use Go 1.21.1 for builds. (@rfratto)
Expand Down
File renamed without changes.
10 changes: 9 additions & 1 deletion converter/internal/common/validate.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,17 @@ import (
)

// UnsupportedNotDeepEquals adds an error-level diagnostic for the named
// config section when a and b are not deeply equal, without any extra
// explanatory message appended.
func UnsupportedNotDeepEquals(a any, b any, name string) diag.Diagnostics {
	return UnsupportedNotDeepEqualsMessage(a, b, name, "")
}

func UnsupportedNotDeepEqualsMessage(a any, b any, name string, message string) diag.Diagnostics {
var diags diag.Diagnostics
if !reflect.DeepEqual(a, b) {
diags.Add(diag.SeverityLevelError, fmt.Sprintf("unsupported %s config was provided.", name))
if message != "" {
diags.Add(diag.SeverityLevelError, fmt.Sprintf("unsupported %s config was provided: %s", name, message))
} else {
diags.Add(diag.SeverityLevelError, fmt.Sprintf("unsupported %s config was provided.", name))
}
}

return diags
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,5 @@ import (
)

// TestConvert runs the prometheusconvert golden-file suite over every .yaml
// file in testdata; the `true` flag asks the harness to also load the
// converted config.
func TestConvert(t *testing.T) {
	test_common.TestDirectory(t, "testdata", ".yaml", true, prometheusconvert.Convert)
}
2 changes: 1 addition & 1 deletion converter/internal/promtailconvert/promtailconvert_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,5 @@ import (
)

// TestConvert runs the promtailconvert golden-file suite over every .yaml
// file in testdata; the `true` flag asks the harness to also load the
// converted config.
func TestConvert(t *testing.T) {
	test_common.TestDirectory(t, "testdata", ".yaml", true, promtailconvert.Convert)
}
51 changes: 32 additions & 19 deletions converter/internal/staticconvert/internal/build/builder.go
Original file line number Diff line number Diff line change
Expand Up @@ -53,12 +53,28 @@ func NewIntegrationsV1ConfigBuilder(f *builder.File, diags *diag.Diagnostics, cf
}
}

func (b *IntegrationsV1ConfigBuilder) AppendIntegrations() {
func (b *IntegrationsV1ConfigBuilder) Build() {
b.appendLogging(b.cfg.Server)
b.appendServer(b.cfg.Server)
b.appendIntegrations()
}

func (b *IntegrationsV1ConfigBuilder) appendIntegrations() {
for _, integration := range b.cfg.Integrations.ConfigV1.Integrations {
if !integration.Common.Enabled {
continue
}

scrapeIntegration := b.cfg.Integrations.ConfigV1.ScrapeIntegrations
if integration.Common.ScrapeIntegration != nil {
scrapeIntegration = *integration.Common.ScrapeIntegration
}

if !scrapeIntegration {
b.diags.Add(diag.SeverityLevelError, fmt.Sprintf("unsupported integration which is not being scraped was provided: %s.", integration.Name()))
continue
}

var exports discovery.Exports
switch itg := integration.Config.(type) {
case *apache_http.Config:
Expand Down Expand Up @@ -116,27 +132,24 @@ func (b *IntegrationsV1ConfigBuilder) AppendIntegrations() {
}

func (b *IntegrationsV1ConfigBuilder) appendExporter(commonConfig *int_config.Common, name string, extraTargets []discovery.Target) {
scrapeConfigs := []*prom_config.ScrapeConfig{}
if b.cfg.Integrations.ConfigV1.ScrapeIntegrations {
scrapeConfig := prom_config.DefaultScrapeConfig
scrapeConfig.JobName = fmt.Sprintf("integrations/%s", name)
scrapeConfig.RelabelConfigs = commonConfig.RelabelConfigs
scrapeConfig.MetricRelabelConfigs = commonConfig.MetricRelabelConfigs
// TODO: Add support for scrapeConfig.HTTPClientConfig

scrapeConfig.ScrapeInterval = model.Duration(commonConfig.ScrapeInterval)
if commonConfig.ScrapeInterval == 0 {
scrapeConfig.ScrapeInterval = b.cfg.Integrations.ConfigV1.PrometheusGlobalConfig.ScrapeInterval
}

scrapeConfig.ScrapeTimeout = model.Duration(commonConfig.ScrapeTimeout)
if commonConfig.ScrapeTimeout == 0 {
scrapeConfig.ScrapeTimeout = b.cfg.Integrations.ConfigV1.PrometheusGlobalConfig.ScrapeTimeout
}
scrapeConfig := prom_config.DefaultScrapeConfig
scrapeConfig.JobName = fmt.Sprintf("integrations/%s", name)
scrapeConfig.RelabelConfigs = commonConfig.RelabelConfigs
scrapeConfig.MetricRelabelConfigs = commonConfig.MetricRelabelConfigs
scrapeConfig.HTTPClientConfig.TLSConfig = b.cfg.Integrations.ConfigV1.TLSConfig

scrapeConfig.ScrapeInterval = model.Duration(commonConfig.ScrapeInterval)
if commonConfig.ScrapeInterval == 0 {
scrapeConfig.ScrapeInterval = b.cfg.Integrations.ConfigV1.PrometheusGlobalConfig.ScrapeInterval
}

scrapeConfigs = []*prom_config.ScrapeConfig{&scrapeConfig}
scrapeConfig.ScrapeTimeout = model.Duration(commonConfig.ScrapeTimeout)
if commonConfig.ScrapeTimeout == 0 {
scrapeConfig.ScrapeTimeout = b.cfg.Integrations.ConfigV1.PrometheusGlobalConfig.ScrapeTimeout
}

scrapeConfigs := []*prom_config.ScrapeConfig{&scrapeConfig}

promConfig := &prom_config.Config{
GlobalConfig: b.cfg.Integrations.ConfigV1.PrometheusGlobalConfig,
ScrapeConfigs: scrapeConfigs,
Expand Down
27 changes: 27 additions & 0 deletions converter/internal/staticconvert/internal/build/logging.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
package build

import (
"reflect"

"github.com/grafana/agent/converter/internal/common"
"github.com/grafana/agent/pkg/flow/logging"
"github.com/grafana/agent/pkg/server"
)

// appendLogging emits a Flow `logging` block when the static server config
// requests non-default log options.
func (b *IntegrationsV1ConfigBuilder) appendLogging(config *server.Config) {
	opts := toLogging(config)
	if reflect.DeepEqual(*opts, logging.DefaultOptions) {
		// Defaults need no explicit block in the generated output.
		return
	}

	block := common.NewBlockWithOverride([]string{"logging"}, "", opts)
	b.f.Body().AppendBlock(block)
}

// toLogging maps the static server log level and format onto Flow logging
// options.
func toLogging(config *server.Config) *logging.Options {
	var opts logging.Options
	opts.Level = logging.Level(config.LogLevel.String())
	opts.Format = logging.Format(config.LogFormat.String())
	return &opts
}
61 changes: 61 additions & 0 deletions converter/internal/staticconvert/internal/build/server.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
package build

import (
"reflect"

"github.com/grafana/agent/converter/internal/common"
"github.com/grafana/agent/pkg/server"
"github.com/grafana/agent/service/http"
)

// appendServer emits a Flow `http` block when the static server config sets
// any TLS options for the HTTP server.
func (b *IntegrationsV1ConfigBuilder) appendServer(config *server.Config) {
	args := toServer(config)
	if reflect.DeepEqual(*args.TLS, http.TLSArguments{}) {
		// No TLS settings configured: nothing to translate.
		return
	}

	block := common.NewBlockWithOverride([]string{"http"}, "", args)
	b.f.Body().AppendBlock(block)
}

// toServer maps the static HTTP TLS config onto Flow http service arguments.
// It panics on an unrecognized client auth type string; the static config is
// presumed to have been validated upstream — TODO confirm with callers.
func toServer(config *server.Config) *http.Arguments {
	tlsConfig := config.HTTP.TLSConfig

	authType, err := server.GetClientAuthFromString(tlsConfig.ClientAuth)
	if err != nil {
		panic(err)
	}

	// Cert, Key, and ClientCA stay at their zero values: the static config
	// only carries file paths here, never inline PEM contents.
	return &http.Arguments{
		TLS: &http.TLSArguments{
			CertFile:         tlsConfig.TLSCertPath,
			KeyFile:          tlsConfig.TLSKeyPath,
			ClientCAFile:     tlsConfig.ClientCAs,
			ClientAuth:       http.ClientAuth(authType),
			CipherSuites:     toHTTPTLSCipher(tlsConfig.CipherSuites),
			CurvePreferences: toHTTPTLSCurve(tlsConfig.CurvePreferences),
			MinVersion:       http.TLSVersion(tlsConfig.MinVersion),
			MaxVersion:       http.TLSVersion(tlsConfig.MaxVersion),
		},
	}
}

// toHTTPTLSCipher converts server TLS cipher suite IDs into the http
// service's equivalent type. It returns nil (not an empty slice) for empty
// input so the caller's zero-value comparisons still hold.
func toHTTPTLSCipher(cipherSuites []server.TLSCipher) []http.TLSCipher {
	var result []http.TLSCipher
	// Fix garbled loop variable name ("cipcipherSuite" -> "cipherSuite").
	for _, cipherSuite := range cipherSuites {
		result = append(result, http.TLSCipher(cipherSuite))
	}

	return result
}

// toHTTPTLSCurve converts server TLS curve IDs into the http service's
// equivalent type, returning nil when no curves are configured.
func toHTTPTLSCurve(curvePreferences []server.TLSCurve) []http.TLSCurve {
	var result []http.TLSCurve
	for i := range curvePreferences {
		result = append(result, http.TLSCurve(curvePreferences[i]))
	}

	return result
}
4 changes: 2 additions & 2 deletions converter/internal/staticconvert/staticconvert.go
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,8 @@ func AppendAll(f *builder.File, staticConfig *config.Config) diag.Diagnostics {
diags.AddAll(appendStaticPrometheus(f, staticConfig))
diags.AddAll(appendStaticPromtail(f, staticConfig))
diags.AddAll(appendStaticIntegrationsV1(f, staticConfig))
// TODO integrations v2
// TODO otel
// TODO other

diags.AddAll(validate(staticConfig))

Expand Down Expand Up @@ -158,7 +158,7 @@ func appendStaticIntegrationsV1(f *builder.File, staticConfig *config.Config) di
var diags diag.Diagnostics

b := build.NewIntegrationsV1ConfigBuilder(f, &diags, staticConfig, &build.GlobalContext{LabelPrefix: "integrations"})
b.AppendIntegrations()
b.Build()

return diags
}
7 changes: 5 additions & 2 deletions converter/internal/staticconvert/staticconvert_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,12 @@ import (
)

// TestConvert runs the static converter golden-file suites. The
// testdata-race suite passes `false` to skip loading the converted config
// because downstream code has a known race condition; the Windows-specific
// suite runs only when GOOS is windows.
func TestConvert(t *testing.T) {
	test_common.TestDirectory(t, "testdata", ".yaml", true, staticconvert.Convert)

	// This test has a race condition due to downstream code so skip loading the config
	test_common.TestDirectory(t, "testdata-race", ".yaml", false, staticconvert.Convert)

	if runtime.GOOS == "windows" {
		test_common.TestDirectory(t, "testdata_windows", ".yaml", true, staticconvert.Convert)
	}
}
18 changes: 18 additions & 0 deletions converter/internal/staticconvert/testdata-race/example-cert.pem
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
-----BEGIN CERTIFICATE-----
MIIC6jCCAdICCQCOLEZvJLYQlDANBgkqhkiG9w0BAQsFADA3MQswCQYDVQQGEwJV
UzELMAkGA1UECAwCRkwxDDAKBgNVBAoMA09yZzENMAsGA1UEAwwEcm9vdDAeFw0y
MjAzMDkxNjM1NTRaFw0zMjAzMDYxNjM1NTRaMDcxCzAJBgNVBAYTAlVTMQswCQYD
VQQIDAJGTDEMMAoGA1UECgwDT3JnMQ0wCwYDVQQDDARyb290MIIBIjANBgkqhkiG
9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx63pDVP4z4psrU6i5qOCUSjUGFkGRUekdrJ9
FtkOEyoQSl2hpkF+QAGvM2L3+bqH8Y1CZ7yakkCncSmzpXShVg2D2nxHkwYVGhmz
rzwHttmewokrWtw72ta6v9gxljxNLjz+HsYovKFGbudnOcK3BxseluikrOM08fEi
SF7Y1FJkyr103K7yjtRyNH2tKHGiK73wjkLBkd6WWFIrtMbNP0McXqkipOSg9dwY
OKfuVDzD/fCkW24j2pgHAI+4TQWC6PSIGMVZ76I5hhYd0WLi/8KaBu/gfqmDjnBn
qqJONoAxT5kEmXWwE5jO0ZOWx88S2D9wmBNIx8HtMLh+7pVQ7QIDAQABMA0GCSqG
SIb3DQEBCwUAA4IBAQBM85fNb+7b+3q0+uDw/fgrUkYfAVjJX+uN3ONy50qnKWe7
SAqLC76HVHLa9hdT7OENQurCCrEtnV1Fzg0KNqtE8gW5rPrV44FZrC5YnpqrHoKp
VZeff+Mficioif5KkaELZILgduwYXe/H9r6qg87mHU4zpFlDUnUFCfLDtrO4fc79
BEpoUXLf5tCwRLUv/d0eeksMqUf5ES4tWfzUVLCjSEEcuX0GIgWdcyG3thCauPWC
9a/QEXqqDC46AgsvkHCNWRoC8TSob5usTJDflodHoree6eaWx6j8ZGA/Uc0ohalJ
XYGN7R9ge9KeqmwvYI6hr/n+WM92Jeqnz9BVWaiQ
-----END CERTIFICATE-----
27 changes: 27 additions & 0 deletions converter/internal/staticconvert/testdata-race/example-key.pem
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEpQIBAAKCAQEAx63pDVP4z4psrU6i5qOCUSjUGFkGRUekdrJ9FtkOEyoQSl2h
pkF+QAGvM2L3+bqH8Y1CZ7yakkCncSmzpXShVg2D2nxHkwYVGhmzrzwHttmewokr
Wtw72ta6v9gxljxNLjz+HsYovKFGbudnOcK3BxseluikrOM08fEiSF7Y1FJkyr10
3K7yjtRyNH2tKHGiK73wjkLBkd6WWFIrtMbNP0McXqkipOSg9dwYOKfuVDzD/fCk
W24j2pgHAI+4TQWC6PSIGMVZ76I5hhYd0WLi/8KaBu/gfqmDjnBnqqJONoAxT5kE
mXWwE5jO0ZOWx88S2D9wmBNIx8HtMLh+7pVQ7QIDAQABAoIBADh7XxLgD99U/oy/
U6D921ztuaDxfa6XJ1RUBMIzv6F4IoeGmLUYjYe5cj+M3SwMsWuIU6JYXTjFhRej
fidtKD3ZMNTalrxl2g45+vO0fVIhmKDagCMBbQTn/IdLtisS/5n2ssMttlQ1ImE4
n6BdDby61RpG0F3/HvjZBqOGALt92qaE8xmUKa8K7SVNnS7BSE+m9tn0pxJsvxCu
3WALdAELECLLKB2bpW5u+v5niBT7Min2Oi1uJbd5SWyWqGmiX8MQ+yXPjAmQxd5D
6L9okqOB6vkfgkuVCAc2d73NI3BE7HJqcE5PboY+ZVTcFdBGYMhvjLeXnUlMZREZ
B7TcT4ECgYEA9QNIoozXsRwpCQGDLm0a6ZGc1NjNUtd0udOqexTSPkdhvR0sNJep
3mjaWCBwipLTmBKs5gv+0i9V6S28r6Pq93EoJVToDPPLq+7UYMi/7vmshNWrMTBD
N/mWF92d7gSC8cgXSnZwAz40QwIZYU6OXJL5s1YN6r/1vLRoPsbkgVECgYEA0KI0
Ms4f9XqrrzzT9byaUUtXrSMyFVag995q5lvV5pipwkWOyWscD5tHt5GfOu15F4Ut
+k2pqXmO1FveUO9wMxFEP8LOKuoKUZ2jzJ7IUiz3TwMcQjlV7C6n5NtIsBrlElqW
C2/HYgSw+T87T63WK8467KLgQ09yEFEIg1p7Tt0CgYEAgEqz4cl1t1tTcU/FbK3c
hailQh4zhMkkaZkXj1Mbs1iVKPz5hKBVZgvpKHPz+dtfyCUfO2XUjCIVDf/Q6Pcf
tWke6E1JJF8Tqndn5TW4ql05pGRtO1hWGh0qJlz4sQTTu95Vs7vIcypDG0MiHv2P
NZIQBYNtzhmthp3AZ/6k78ECgYEAty6T8j+1I84PTA92c36jZ9llI+mRIdcsAjZR
We0sRAmqk56LHiJjQvit4WmEizLSbWpL0ke6PckzNRVsf1ecBdqVN/6NEnTnln14
wkJv1GcSxVcPyr2YyYS1eWVnzufuVU0gDO6Z+1/vGwj/xJf3QgMTDY58pdztY5Ii
jWI2fikCgYEAmGEmcPOu8IjYNN+YdQ1CeF909oSH++Nqr34IB5/e2Wr9WVknfHzZ
wIfzlUJUQO0so0LDaB7UQKk0Xk3+OP6Udw8xFfr/P5s++bvnKr3j5iHn6taqPs+v
PFxn+7KqdYVQ4RYRYLsy6NF+MhXt2sDAhiScxVnkh09t6sT1UG9xKW4=
-----END RSA PRIVATE KEY-----
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
(Error) global positions configuration is not supported - each Flow Mode's loki.source.file component has its own positions file in the component's data directory
(Warning) server.log_level is not supported - Flow mode components may produce different logs
(Error) unsupported integration which is not being scraped was provided: mssql.
(Error) mapping_config is not supported in statsd_exporter integrations config
(Warning) Please review your agent command line flags and ensure they are set in your Flow mode config file where necessary.
(Error) unsupported log_level server config was provided.
(Error) unsupported log_format server config was provided.
(Error) unsupported grpc_tls_config server config was provided.
(Error) unsupported http_tls_config server config was provided.
(Error) unsupported grpc_tls_config server config was provided: flow mode does not have a gRPC server to configure.
(Error) unsupported prefer_server_cipher_suites server config was provided.
(Error) unsupported windows_certificate_filter server config was provided.
(Error) unsupported wal_directory metrics config was provided. use the run command flag --storage.path for Flow mode instead.
(Error) unsupported integration agent was provided.
(Error) unsupported integration azure_exporter was provided.
(Error) unsupported integration cadvisor was provided.
(Error) unsupported disabled integration node_exporter.
(Warning) disabled integrations do nothing and are not included in the output: node_exporter.
(Error) unsupported traces config was provided.
(Error) unsupported agent_management config was provided.
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,20 @@ prometheus.remote_write "metrics_agent" {
}
}

logging {
level = "debug"
format = "json"
}

http {
tls {
cert_file = "./testdata/example-cert.pem"
key_file = "./testdata/example-key.pem"
client_ca_file = "./testdata/example-cert.pem"
client_auth_type = "VerifyClientCertIfGiven"
}
}

prometheus.exporter.statsd "integrations_statsd_exporter" { }

prometheus.scrape "integrations_statsd_exporter" {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,18 @@ server:
log_level: debug
log_format: json
http_tls_config:
cert_file: "/something.cert"
client_ca_file: "./testdata/example-cert.pem"
cert_file: "./testdata/example-cert.pem"
key_file: "./testdata/example-key.pem"
client_auth_type: "VerifyClientCertIfGiven"
prefer_server_cipher_suites: true
windows_certificate_filter:
server:
store: "something"
grpc_tls_config:
cert_file: "/something2.cert"
client_ca_file: "/something4.cert"
cert_file: "/something5.cert"
key_file: "/something6.cert"

metrics:
wal_directory: /tmp/agent
Expand Down Expand Up @@ -45,6 +54,9 @@ integrations:
- nodepool
cadvisor:
enabled: true
mssql:
enabled: true
scrape_integration: false
node_exporter:
enabled: false
statsd_exporter:
Expand Down
Loading

0 comments on commit 7910ca8

Please sign in to comment.