diff --git a/CHANGELOG.md b/CHANGELOG.md index aa09c98f9619..175c53cb175e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -61,6 +61,7 @@ Main (unreleased) - Flow: improve river config validation step in `prometheus.scrape` by comparing `scrape_timeout` with `scrape_interval`. (@wildum) + ### Other changes - Use Go 1.21.1 for builds. (@rfratto) diff --git a/converter/internal/common/common.go b/converter/internal/common/river_utils.go similarity index 100% rename from converter/internal/common/common.go rename to converter/internal/common/river_utils.go diff --git a/converter/internal/common/validate.go b/converter/internal/common/validate.go index 22890ff286b1..ca514a668eca 100644 --- a/converter/internal/common/validate.go +++ b/converter/internal/common/validate.go @@ -10,9 +10,17 @@ import ( ) func UnsupportedNotDeepEquals(a any, b any, name string) diag.Diagnostics { + return UnsupportedNotDeepEqualsMessage(a, b, name, "") +} + +func UnsupportedNotDeepEqualsMessage(a any, b any, name string, message string) diag.Diagnostics { var diags diag.Diagnostics if !reflect.DeepEqual(a, b) { - diags.Add(diag.SeverityLevelError, fmt.Sprintf("unsupported %s config was provided.", name)) + if message != "" { + diags.Add(diag.SeverityLevelError, fmt.Sprintf("unsupported %s config was provided: %s", name, message)) + } else { + diags.Add(diag.SeverityLevelError, fmt.Sprintf("unsupported %s config was provided.", name)) + } } return diags diff --git a/converter/internal/prometheusconvert/prometheusconvert_test.go b/converter/internal/prometheusconvert/prometheusconvert_test.go index 4f7f4e2422cb..bded9ce3069c 100644 --- a/converter/internal/prometheusconvert/prometheusconvert_test.go +++ b/converter/internal/prometheusconvert/prometheusconvert_test.go @@ -9,5 +9,5 @@ import ( ) func TestConvert(t *testing.T) { - test_common.TestDirectory(t, "testdata", ".yaml", prometheusconvert.Convert) + test_common.TestDirectory(t, "testdata", ".yaml", true, prometheusconvert.Convert) } diff --git a/converter/internal/promtailconvert/promtailconvert_test.go b/converter/internal/promtailconvert/promtailconvert_test.go index 136b9ba395c6..2ebeb3c1ada0 100644 --- a/converter/internal/promtailconvert/promtailconvert_test.go +++ b/converter/internal/promtailconvert/promtailconvert_test.go @@ -9,5 +9,5 @@ import ( ) func TestConvert(t *testing.T) { - test_common.TestDirectory(t, "testdata", ".yaml", promtailconvert.Convert) + test_common.TestDirectory(t, "testdata", ".yaml", true, promtailconvert.Convert) } diff --git a/converter/internal/staticconvert/internal/build/builder.go b/converter/internal/staticconvert/internal/build/builder.go index bfed002e53d0..a4d7ae3a3e1f 100644 --- a/converter/internal/staticconvert/internal/build/builder.go +++ b/converter/internal/staticconvert/internal/build/builder.go @@ -53,12 +53,28 @@ func NewIntegrationsV1ConfigBuilder(f *builder.File, diags *diag.Diagnostics, cf } } -func (b *IntegrationsV1ConfigBuilder) AppendIntegrations() { +func (b *IntegrationsV1ConfigBuilder) Build() { + b.appendLogging(b.cfg.Server) + b.appendServer(b.cfg.Server) + b.appendIntegrations() +} + +func (b *IntegrationsV1ConfigBuilder) appendIntegrations() { for _, integration := range b.cfg.Integrations.ConfigV1.Integrations { if !integration.Common.Enabled { continue } + scrapeIntegration := b.cfg.Integrations.ConfigV1.ScrapeIntegrations + if integration.Common.ScrapeIntegration != nil { + scrapeIntegration = *integration.Common.ScrapeIntegration + } + + if !scrapeIntegration { + 
b.diags.Add(diag.SeverityLevelError, fmt.Sprintf("unsupported integration which is not being scraped was provided: %s.", integration.Name())) + continue + } + var exports discovery.Exports switch itg := integration.Config.(type) { case *apache_http.Config: @@ -116,27 +132,24 @@ func (b *IntegrationsV1ConfigBuilder) AppendIntegrations() { } func (b *IntegrationsV1ConfigBuilder) appendExporter(commonConfig *int_config.Common, name string, extraTargets []discovery.Target) { - scrapeConfigs := []*prom_config.ScrapeConfig{} - if b.cfg.Integrations.ConfigV1.ScrapeIntegrations { - scrapeConfig := prom_config.DefaultScrapeConfig - scrapeConfig.JobName = fmt.Sprintf("integrations/%s", name) - scrapeConfig.RelabelConfigs = commonConfig.RelabelConfigs - scrapeConfig.MetricRelabelConfigs = commonConfig.MetricRelabelConfigs - // TODO: Add support for scrapeConfig.HTTPClientConfig - - scrapeConfig.ScrapeInterval = model.Duration(commonConfig.ScrapeInterval) - if commonConfig.ScrapeInterval == 0 { - scrapeConfig.ScrapeInterval = b.cfg.Integrations.ConfigV1.PrometheusGlobalConfig.ScrapeInterval - } - - scrapeConfig.ScrapeTimeout = model.Duration(commonConfig.ScrapeTimeout) - if commonConfig.ScrapeTimeout == 0 { - scrapeConfig.ScrapeTimeout = b.cfg.Integrations.ConfigV1.PrometheusGlobalConfig.ScrapeTimeout - } + scrapeConfig := prom_config.DefaultScrapeConfig + scrapeConfig.JobName = fmt.Sprintf("integrations/%s", name) + scrapeConfig.RelabelConfigs = commonConfig.RelabelConfigs + scrapeConfig.MetricRelabelConfigs = commonConfig.MetricRelabelConfigs + scrapeConfig.HTTPClientConfig.TLSConfig = b.cfg.Integrations.ConfigV1.TLSConfig + + scrapeConfig.ScrapeInterval = model.Duration(commonConfig.ScrapeInterval) + if commonConfig.ScrapeInterval == 0 { + scrapeConfig.ScrapeInterval = b.cfg.Integrations.ConfigV1.PrometheusGlobalConfig.ScrapeInterval + } - scrapeConfigs = []*prom_config.ScrapeConfig{&scrapeConfig} + scrapeConfig.ScrapeTimeout = model.Duration(commonConfig.ScrapeTimeout) + if commonConfig.ScrapeTimeout == 0 { + scrapeConfig.ScrapeTimeout = b.cfg.Integrations.ConfigV1.PrometheusGlobalConfig.ScrapeTimeout } + scrapeConfigs := []*prom_config.ScrapeConfig{&scrapeConfig} + promConfig := &prom_config.Config{ GlobalConfig: b.cfg.Integrations.ConfigV1.PrometheusGlobalConfig, ScrapeConfigs: scrapeConfigs, diff --git a/converter/internal/staticconvert/internal/build/logging.go b/converter/internal/staticconvert/internal/build/logging.go new file mode 100644 index 000000000000..c70bc03d30cb --- /dev/null +++ b/converter/internal/staticconvert/internal/build/logging.go @@ -0,0 +1,27 @@ +package build + +import ( + "reflect" + + "github.com/grafana/agent/converter/internal/common" + "github.com/grafana/agent/pkg/flow/logging" + "github.com/grafana/agent/pkg/server" +) + +func (b *IntegrationsV1ConfigBuilder) appendLogging(config *server.Config) { + args := toLogging(config) + if !reflect.DeepEqual(*args, logging.DefaultOptions) { + b.f.Body().AppendBlock(common.NewBlockWithOverride( + []string{"logging"}, + "", + args, + )) + } +} + +func toLogging(config *server.Config) *logging.Options { + return &logging.Options{ + Level: logging.Level(config.LogLevel.String()), + Format: logging.Format(config.LogFormat.String()), + } +} diff --git a/converter/internal/staticconvert/internal/build/server.go b/converter/internal/staticconvert/internal/build/server.go new file mode 100644 index 000000000000..c1c2ada3814f --- /dev/null +++ b/converter/internal/staticconvert/internal/build/server.go @@ -0,0 +1,61 @@ +package 
build + +import ( + "reflect" + + "github.com/grafana/agent/converter/internal/common" + "github.com/grafana/agent/pkg/server" + "github.com/grafana/agent/service/http" +) + +func (b *IntegrationsV1ConfigBuilder) appendServer(config *server.Config) { + args := toServer(config) + if !reflect.DeepEqual(*args.TLS, http.TLSArguments{}) { + b.f.Body().AppendBlock(common.NewBlockWithOverride( + []string{"http"}, + "", + args, + )) + } +} + +func toServer(config *server.Config) *http.Arguments { + authType, err := server.GetClientAuthFromString(config.HTTP.TLSConfig.ClientAuth) + if err != nil { + panic(err) + } + + return &http.Arguments{ + TLS: &http.TLSArguments{ + Cert: "", + CertFile: config.HTTP.TLSConfig.TLSCertPath, + Key: "", + KeyFile: config.HTTP.TLSConfig.TLSKeyPath, + ClientCA: "", + ClientCAFile: config.HTTP.TLSConfig.ClientCAs, + ClientAuth: http.ClientAuth(authType), + CipherSuites: toHTTPTLSCipher(config.HTTP.TLSConfig.CipherSuites), + CurvePreferences: toHTTPTLSCurve(config.HTTP.TLSConfig.CurvePreferences), + MinVersion: http.TLSVersion(config.HTTP.TLSConfig.MinVersion), + MaxVersion: http.TLSVersion(config.HTTP.TLSConfig.MaxVersion), + }, + } +} + +func toHTTPTLSCipher(cipherSuites []server.TLSCipher) []http.TLSCipher { + var result []http.TLSCipher + for _, cipherSuite := range cipherSuites { + result = append(result, http.TLSCipher(cipherSuite)) + } + + return result +} + +func toHTTPTLSCurve(curvePreferences []server.TLSCurve) []http.TLSCurve { + var result []http.TLSCurve + for _, curvePreference := range curvePreferences { + result = append(result, http.TLSCurve(curvePreference)) + } + + return result +} diff --git a/converter/internal/staticconvert/staticconvert.go b/converter/internal/staticconvert/staticconvert.go index 23d3ac3d93e1..d649dfcf20b9 100644 --- a/converter/internal/staticconvert/staticconvert.go +++ b/converter/internal/staticconvert/staticconvert.go @@ -66,8 +66,8 @@ func AppendAll(f *builder.File, staticConfig *config.Config) diag.Diagnostics { diags.AddAll(appendStaticPrometheus(f, staticConfig)) diags.AddAll(appendStaticPromtail(f, staticConfig)) diags.AddAll(appendStaticIntegrationsV1(f, staticConfig)) + // TODO integrations v2 // TODO otel - // TODO other diags.AddAll(validate(staticConfig)) @@ -158,7 +158,7 @@ func appendStaticIntegrationsV1(f *builder.File, staticConfig *config.Config) di var diags diag.Diagnostics b := build.NewIntegrationsV1ConfigBuilder(f, &diags, staticConfig, &build.GlobalContext{LabelPrefix: "integrations"}) - b.AppendIntegrations() + b.Build() return diags } diff --git a/converter/internal/staticconvert/staticconvert_test.go b/converter/internal/staticconvert/staticconvert_test.go index 188ef481b710..e7017b119059 100644 --- a/converter/internal/staticconvert/staticconvert_test.go +++ b/converter/internal/staticconvert/staticconvert_test.go @@ -10,9 +10,12 @@ import ( ) func TestConvert(t *testing.T) { - test_common.TestDirectory(t, "testdata", ".yaml", staticconvert.Convert) + test_common.TestDirectory(t, "testdata", ".yaml", true, staticconvert.Convert) + + // This test has a race condition due to downstream code, so skip loading the config + test_common.TestDirectory(t, "testdata-race", ".yaml", false, staticconvert.Convert) if runtime.GOOS == "windows" { - test_common.TestDirectory(t, "testdata_windows", ".yaml", staticconvert.Convert) + test_common.TestDirectory(t, "testdata_windows", ".yaml", true, staticconvert.Convert) } } diff --git a/converter/internal/staticconvert/testdata-race/example-cert.pem 
b/converter/internal/staticconvert/testdata-race/example-cert.pem new file mode 100644 index 000000000000..761f93aeea21 --- /dev/null +++ b/converter/internal/staticconvert/testdata-race/example-cert.pem @@ -0,0 +1,18 @@ +-----BEGIN CERTIFICATE----- +MIIC6jCCAdICCQCOLEZvJLYQlDANBgkqhkiG9w0BAQsFADA3MQswCQYDVQQGEwJV +UzELMAkGA1UECAwCRkwxDDAKBgNVBAoMA09yZzENMAsGA1UEAwwEcm9vdDAeFw0y +MjAzMDkxNjM1NTRaFw0zMjAzMDYxNjM1NTRaMDcxCzAJBgNVBAYTAlVTMQswCQYD +VQQIDAJGTDEMMAoGA1UECgwDT3JnMQ0wCwYDVQQDDARyb290MIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx63pDVP4z4psrU6i5qOCUSjUGFkGRUekdrJ9 +FtkOEyoQSl2hpkF+QAGvM2L3+bqH8Y1CZ7yakkCncSmzpXShVg2D2nxHkwYVGhmz +rzwHttmewokrWtw72ta6v9gxljxNLjz+HsYovKFGbudnOcK3BxseluikrOM08fEi +SF7Y1FJkyr103K7yjtRyNH2tKHGiK73wjkLBkd6WWFIrtMbNP0McXqkipOSg9dwY +OKfuVDzD/fCkW24j2pgHAI+4TQWC6PSIGMVZ76I5hhYd0WLi/8KaBu/gfqmDjnBn +qqJONoAxT5kEmXWwE5jO0ZOWx88S2D9wmBNIx8HtMLh+7pVQ7QIDAQABMA0GCSqG +SIb3DQEBCwUAA4IBAQBM85fNb+7b+3q0+uDw/fgrUkYfAVjJX+uN3ONy50qnKWe7 +SAqLC76HVHLa9hdT7OENQurCCrEtnV1Fzg0KNqtE8gW5rPrV44FZrC5YnpqrHoKp +VZeff+Mficioif5KkaELZILgduwYXe/H9r6qg87mHU4zpFlDUnUFCfLDtrO4fc79 +BEpoUXLf5tCwRLUv/d0eeksMqUf5ES4tWfzUVLCjSEEcuX0GIgWdcyG3thCauPWC +9a/QEXqqDC46AgsvkHCNWRoC8TSob5usTJDflodHoree6eaWx6j8ZGA/Uc0ohalJ +XYGN7R9ge9KeqmwvYI6hr/n+WM92Jeqnz9BVWaiQ +-----END CERTIFICATE----- diff --git a/converter/internal/staticconvert/testdata-race/example-key.pem b/converter/internal/staticconvert/testdata-race/example-key.pem new file mode 100644 index 000000000000..8be1dc127d7a --- /dev/null +++ b/converter/internal/staticconvert/testdata-race/example-key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpQIBAAKCAQEAx63pDVP4z4psrU6i5qOCUSjUGFkGRUekdrJ9FtkOEyoQSl2h +pkF+QAGvM2L3+bqH8Y1CZ7yakkCncSmzpXShVg2D2nxHkwYVGhmzrzwHttmewokr +Wtw72ta6v9gxljxNLjz+HsYovKFGbudnOcK3BxseluikrOM08fEiSF7Y1FJkyr10 +3K7yjtRyNH2tKHGiK73wjkLBkd6WWFIrtMbNP0McXqkipOSg9dwYOKfuVDzD/fCk +W24j2pgHAI+4TQWC6PSIGMVZ76I5hhYd0WLi/8KaBu/gfqmDjnBnqqJONoAxT5kE +mXWwE5jO0ZOWx88S2D9wmBNIx8HtMLh+7pVQ7QIDAQABAoIBADh7XxLgD99U/oy/ +U6D921ztuaDxfa6XJ1RUBMIzv6F4IoeGmLUYjYe5cj+M3SwMsWuIU6JYXTjFhRej +fidtKD3ZMNTalrxl2g45+vO0fVIhmKDagCMBbQTn/IdLtisS/5n2ssMttlQ1ImE4 +n6BdDby61RpG0F3/HvjZBqOGALt92qaE8xmUKa8K7SVNnS7BSE+m9tn0pxJsvxCu +3WALdAELECLLKB2bpW5u+v5niBT7Min2Oi1uJbd5SWyWqGmiX8MQ+yXPjAmQxd5D +6L9okqOB6vkfgkuVCAc2d73NI3BE7HJqcE5PboY+ZVTcFdBGYMhvjLeXnUlMZREZ +B7TcT4ECgYEA9QNIoozXsRwpCQGDLm0a6ZGc1NjNUtd0udOqexTSPkdhvR0sNJep +3mjaWCBwipLTmBKs5gv+0i9V6S28r6Pq93EoJVToDPPLq+7UYMi/7vmshNWrMTBD +N/mWF92d7gSC8cgXSnZwAz40QwIZYU6OXJL5s1YN6r/1vLRoPsbkgVECgYEA0KI0 +Ms4f9XqrrzzT9byaUUtXrSMyFVag995q5lvV5pipwkWOyWscD5tHt5GfOu15F4Ut ++k2pqXmO1FveUO9wMxFEP8LOKuoKUZ2jzJ7IUiz3TwMcQjlV7C6n5NtIsBrlElqW +C2/HYgSw+T87T63WK8467KLgQ09yEFEIg1p7Tt0CgYEAgEqz4cl1t1tTcU/FbK3c +hailQh4zhMkkaZkXj1Mbs1iVKPz5hKBVZgvpKHPz+dtfyCUfO2XUjCIVDf/Q6Pcf +tWke6E1JJF8Tqndn5TW4ql05pGRtO1hWGh0qJlz4sQTTu95Vs7vIcypDG0MiHv2P +NZIQBYNtzhmthp3AZ/6k78ECgYEAty6T8j+1I84PTA92c36jZ9llI+mRIdcsAjZR +We0sRAmqk56LHiJjQvit4WmEizLSbWpL0ke6PckzNRVsf1ecBdqVN/6NEnTnln14 +wkJv1GcSxVcPyr2YyYS1eWVnzufuVU0gDO6Z+1/vGwj/xJf3QgMTDY58pdztY5Ii +jWI2fikCgYEAmGEmcPOu8IjYNN+YdQ1CeF909oSH++Nqr34IB5/e2Wr9WVknfHzZ +wIfzlUJUQO0so0LDaB7UQKk0Xk3+OP6Udw8xFfr/P5s++bvnKr3j5iHn6taqPs+v +PFxn+7KqdYVQ4RYRYLsy6NF+MhXt2sDAhiScxVnkh09t6sT1UG9xKW4= +-----END RSA PRIVATE KEY----- diff --git a/converter/internal/staticconvert/testdata/unsupported.diags b/converter/internal/staticconvert/testdata-race/unsupported.diags similarity index 66% rename from converter/internal/staticconvert/testdata/unsupported.diags rename to 
converter/internal/staticconvert/testdata-race/unsupported.diags index 3d1a4e778a70..d75a2f33ce42 100644 --- a/converter/internal/staticconvert/testdata/unsupported.diags +++ b/converter/internal/staticconvert/testdata-race/unsupported.diags @@ -1,15 +1,15 @@ (Error) global positions configuration is not supported - each Flow Mode's loki.source.file component has its own positions file in the component's data directory (Warning) server.log_level is not supported - Flow mode components may produce different logs +(Error) unsupported integration which is not being scraped was provided: mssql. (Error) mapping_config is not supported in statsd_exporter integrations config (Warning) Please review your agent command line flags and ensure they are set in your Flow mode config file where necessary. -(Error) unsupported log_level server config was provided. -(Error) unsupported log_format server config was provided. -(Error) unsupported grpc_tls_config server config was provided. -(Error) unsupported http_tls_config server config was provided. +(Error) unsupported grpc_tls_config server config was provided: flow mode does not have a gRPC server to configure. +(Error) unsupported prefer_server_cipher_suites server config was provided. +(Error) unsupported windows_certificate_filter server config was provided. (Error) unsupported wal_directory metrics config was provided. use the run command flag --storage.path for Flow mode instead. (Error) unsupported integration agent was provided. (Error) unsupported integration azure_exporter was provided. (Error) unsupported integration cadvisor was provided. -(Error) unsupported disabled integration node_exporter. +(Warning) disabled integrations do nothing and are not included in the output: node_exporter. (Error) unsupported traces config was provided. (Error) unsupported agent_management config was provided. 
\ No newline at end of file diff --git a/converter/internal/staticconvert/testdata/unsupported.river b/converter/internal/staticconvert/testdata-race/unsupported.river similarity index 76% rename from converter/internal/staticconvert/testdata/unsupported.river rename to converter/internal/staticconvert/testdata-race/unsupported.river index a4b104b52877..849a491eda36 100644 --- a/converter/internal/staticconvert/testdata/unsupported.river +++ b/converter/internal/staticconvert/testdata-race/unsupported.river @@ -17,6 +17,20 @@ prometheus.remote_write "metrics_agent" { } } +logging { + level = "debug" + format = "json" +} + +http { + tls { + cert_file = "./testdata/example-cert.pem" + key_file = "./testdata/example-key.pem" + client_ca_file = "./testdata/example-cert.pem" + client_auth_type = "VerifyClientCertIfGiven" + } +} + prometheus.exporter.statsd "integrations_statsd_exporter" { } prometheus.scrape "integrations_statsd_exporter" { diff --git a/converter/internal/staticconvert/testdata/unsupported.yaml b/converter/internal/staticconvert/testdata-race/unsupported.yaml similarity index 79% rename from converter/internal/staticconvert/testdata/unsupported.yaml rename to converter/internal/staticconvert/testdata-race/unsupported.yaml index be9e30401501..a61b289c0647 100644 --- a/converter/internal/staticconvert/testdata/unsupported.yaml +++ b/converter/internal/staticconvert/testdata-race/unsupported.yaml @@ -2,9 +2,18 @@ server: log_level: debug log_format: json http_tls_config: - cert_file: "/something.cert" + client_ca_file: "./testdata/example-cert.pem" + cert_file: "./testdata/example-cert.pem" + key_file: "./testdata/example-key.pem" + client_auth_type: "VerifyClientCertIfGiven" + prefer_server_cipher_suites: true + windows_certificate_filter: + server: + store: "something" grpc_tls_config: - cert_file: "/something2.cert" + client_ca_file: "/something4.cert" + cert_file: "/something5.cert" + key_file: "/something6.cert" metrics: wal_directory: /tmp/agent @@ -45,6 +54,9 @@ integrations: - nodepool cadvisor: enabled: true + mssql: + enabled: true + scrape_integration: false node_exporter: enabled: false statsd_exporter: diff --git a/converter/internal/staticconvert/testdata/integrations.river b/converter/internal/staticconvert/testdata/integrations.river index d4f1495bc399..e411f4c556d5 100644 --- a/converter/internal/staticconvert/testdata/integrations.river +++ b/converter/internal/staticconvert/testdata/integrations.river @@ -6,6 +6,12 @@ prometheus.scrape "integrations_apache_http" { targets = prometheus.exporter.apache.integrations_apache_http.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/apache_http" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.remote_write "integrations" { @@ -30,6 +36,12 @@ prometheus.scrape "integrations_blackbox" { targets = prometheus.exporter.blackbox.integrations_blackbox.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/blackbox" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.snmp "integrations_snmp" { @@ -52,6 +64,12 @@ prometheus.scrape "integrations_snmp" { targets = prometheus.exporter.snmp.integrations_snmp.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/snmp" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + 
key_file = "/something9.cert" + } } prometheus.exporter.cloudwatch "integrations_cloudwatch_exporter" { @@ -84,6 +102,12 @@ prometheus.scrape "integrations_cloudwatch_exporter" { targets = prometheus.exporter.cloudwatch.integrations_cloudwatch_exporter.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/cloudwatch_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.consul "integrations_consul_exporter" { } @@ -92,6 +116,12 @@ prometheus.scrape "integrations_consul_exporter" { targets = prometheus.exporter.consul.integrations_consul_exporter.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/consul_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.dnsmasq "integrations_dnsmasq_exporter" { @@ -112,6 +142,12 @@ prometheus.scrape "integrations_dnsmasq_exporter" { targets = discovery.relabel.integrations_dnsmasq_exporter.output forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/dnsmasq_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.elasticsearch "integrations_elasticsearch_exporter" { } @@ -120,6 +156,12 @@ prometheus.scrape "integrations_elasticsearch_exporter" { targets = prometheus.exporter.elasticsearch.integrations_elasticsearch_exporter.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/elasticsearch_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.gcp "integrations_gcp_exporter" { @@ -132,6 +174,12 @@ prometheus.scrape "integrations_gcp_exporter" { targets = prometheus.exporter.gcp.integrations_gcp_exporter.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/gcp_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.github "integrations_github_exporter" { @@ -143,6 +191,12 @@ prometheus.scrape "integrations_github_exporter" { targets = prometheus.exporter.github.integrations_github_exporter.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/github_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.kafka "integrations_kafka_exporter" { } @@ -151,6 +205,12 @@ prometheus.scrape "integrations_kafka_exporter" { targets = prometheus.exporter.kafka.integrations_kafka_exporter.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/kafka_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.memcached "integrations_memcached_exporter" { @@ -171,6 +231,12 @@ prometheus.scrape "integrations_memcached_exporter" { targets = discovery.relabel.integrations_memcached_exporter.output forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/memcached_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.mongodb 
"integrations_mongodb_exporter" { @@ -198,6 +264,12 @@ prometheus.scrape "integrations_mongodb_exporter" { targets = discovery.relabel.integrations_mongodb_exporter.output forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/mongodb_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.mssql "integrations_mssql" { @@ -208,6 +280,12 @@ prometheus.scrape "integrations_mssql" { targets = prometheus.exporter.mssql.integrations_mssql.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/mssql" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.mysql "integrations_mysqld_exporter" { @@ -228,6 +306,12 @@ prometheus.scrape "integrations_mysqld_exporter" { targets = discovery.relabel.integrations_mysqld_exporter.output forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/mysqld_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.unix { } @@ -255,6 +339,12 @@ prometheus.scrape "integrations_node_exporter" { targets = discovery.relabel.integrations_node_exporter.output forward_to = [prometheus.relabel.integrations_node_exporter.receiver] job_name = "integrations/node_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.relabel "integrations_node_exporter" { @@ -280,6 +370,12 @@ prometheus.scrape "integrations_oracledb" { forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/oracledb" scrape_timeout = "1m0s" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.postgres "integrations_postgres_exporter" { @@ -300,6 +396,12 @@ prometheus.scrape "integrations_postgres_exporter" { targets = discovery.relabel.integrations_postgres_exporter.output forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/postgres_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.process "integrations_process_exporter" { @@ -313,6 +415,12 @@ prometheus.scrape "integrations_process_exporter" { targets = prometheus.exporter.process.integrations_process_exporter.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/process_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.redis "integrations_redis_exporter" { @@ -333,6 +441,12 @@ prometheus.scrape "integrations_redis_exporter" { targets = discovery.relabel.integrations_redis_exporter.output forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/redis_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.snowflake "integrations_snowflake" { @@ -346,6 +460,12 @@ prometheus.scrape "integrations_snowflake" { targets = prometheus.exporter.snowflake.integrations_snowflake.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/snowflake" + + tls_config { + ca_file = 
"/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.squid "integrations_squid" { @@ -357,6 +477,12 @@ prometheus.scrape "integrations_squid" { forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/squid" scrape_timeout = "1m0s" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } prometheus.exporter.statsd "integrations_statsd_exporter" { } @@ -365,4 +491,10 @@ prometheus.scrape "integrations_statsd_exporter" { targets = prometheus.exporter.statsd.integrations_statsd_exporter.targets forward_to = [prometheus.remote_write.integrations.receiver] job_name = "integrations/statsd_exporter" + + tls_config { + ca_file = "/something7.cert" + cert_file = "/something8.cert" + key_file = "/something9.cert" + } } diff --git a/converter/internal/staticconvert/testdata/integrations.yaml b/converter/internal/staticconvert/testdata/integrations.yaml index 8e471a625568..27884f65f6ac 100644 --- a/converter/internal/staticconvert/testdata/integrations.yaml +++ b/converter/internal/staticconvert/testdata/integrations.yaml @@ -4,6 +4,10 @@ metrics: - url: http://localhost:9009/api/prom/push integrations: + http_tls_config: + ca_file: "/something7.cert" + cert_file: "/something8.cert" + key_file: "/something9.cert" scrape_integrations: true apache_http: enabled: true diff --git a/converter/internal/staticconvert/validate.go b/converter/internal/staticconvert/validate.go index 695f2454b0d6..24ced7fff293 100644 --- a/converter/internal/staticconvert/validate.go +++ b/converter/internal/staticconvert/validate.go @@ -65,14 +65,15 @@ func validateServer(serverConfig *server.Config) diag.Diagnostics { var diags diag.Diagnostics defaultServerConfig := server.DefaultConfig() - diags.AddAll(common.UnsupportedNotDeepEquals(serverConfig.LogLevel.Level.Logrus, defaultServerConfig.LogLevel.Level.Logrus, "log_level server")) - diags.AddAll(common.UnsupportedNotDeepEquals(serverConfig.LogFormat, defaultServerConfig.LogFormat, "log_format server")) - diags.AddAll(common.UnsupportedNotDeepEquals(serverConfig.GRPC, defaultServerConfig.GRPC, "grpc_tls_config server")) - diags.AddAll(common.UnsupportedNotDeepEquals(serverConfig.HTTP, defaultServerConfig.HTTP, "http_tls_config server")) + diags.AddAll(common.UnsupportedNotDeepEqualsMessage(serverConfig.GRPC, defaultServerConfig.GRPC, "grpc_tls_config server", "flow mode does not have a gRPC server to configure.")) + diags.AddAll(common.UnsupportedNotEquals(serverConfig.HTTP.TLSConfig.PreferServerCipherSuites, defaultServerConfig.HTTP.TLSConfig.PreferServerCipherSuites, "prefer_server_cipher_suites server")) + diags.AddAll(common.UnsupportedNotDeepEquals(serverConfig.HTTP.TLSConfig.WindowsCertificateFilter, defaultServerConfig.HTTP.TLSConfig.WindowsCertificateFilter, "windows_certificate_filter server")) return diags } +// validateMetrics validates the metrics config for anything not already +// covered by appendStaticPrometheus. func validateMetrics(metricsConfig metrics.Config, grpcListenPort int) diag.Diagnostics { var diags diag.Diagnostics @@ -100,7 +101,7 @@ func validateIntegrations(integrationsConfig config.VersionedIntegrations) diag. 
for _, integration := range integrationsConfig.ConfigV1.Integrations { if !integration.Common.Enabled { - diags.Add(diag.SeverityLevelError, fmt.Sprintf("unsupported disabled integration %s.", integration.Name())) + diags.Add(diag.SeverityLevelWarn, fmt.Sprintf("disabled integrations do nothing and are not included in the output: %s.", integration.Name())) continue } @@ -144,6 +145,8 @@ func validateTraces(tracesConfig traces.Config) diag.Diagnostics { return diags } +// validateLogs validates the logs config for anything not already covered +// by appendStaticPromtail. func validateLogs(logsConfig *logs.Config) diag.Diagnostics { var diags diag.Diagnostics diff --git a/converter/internal/test_common/testing.go b/converter/internal/test_common/testing.go index 1d47079acb5a..ebcd83f302fb 100644 --- a/converter/internal/test_common/testing.go +++ b/converter/internal/test_common/testing.go @@ -38,7 +38,7 @@ const ( // 4. If the current filename.sourceSuffix has a matching filename.river, read // the contents of filename.river and validate that they match the river // configuration generated by calling convert in step 1. -func TestDirectory(t *testing.T, folderPath string, sourceSuffix string, convert func(in []byte) ([]byte, diag.Diagnostics)) { +func TestDirectory(t *testing.T, folderPath string, sourceSuffix string, loadFlowConfig bool, convert func(in []byte) ([]byte, diag.Diagnostics)) { require.NoError(t, filepath.WalkDir(folderPath, func(path string, d fs.DirEntry, _ error) error { if d.IsDir() { return nil @@ -63,7 +63,7 @@ func TestDirectory(t *testing.T, folderPath string, sourceSuffix string, convert validateDiags(t, expectedDiags, actualDiags) expectedRiver := getExpectedRiver(t, riverFile) - validateRiver(t, expectedRiver, actualRiver) + validateRiver(t, expectedRiver, actualRiver, loadFlowConfig) }) } @@ -152,7 +152,7 @@ func fileExists(path string) bool { } // validateRiver makes sure the expected river and actual river are a match -func validateRiver(t *testing.T, expectedRiver []byte, actualRiver []byte) { +func validateRiver(t *testing.T, expectedRiver []byte, actualRiver []byte, loadFlowConfig bool) { if len(expectedRiver) > 0 { if !reflect.DeepEqual(expectedRiver, actualRiver) { fmt.Println("============== ACTUAL =============") @@ -162,7 +162,9 @@ func validateRiver(t *testing.T, expectedRiver []byte, actualRiver []byte) { require.Equal(t, string(expectedRiver), string(normalizeLineEndings(actualRiver))) - attemptLoadingFlowConfig(t, actualRiver) + if loadFlowConfig { + attemptLoadingFlowConfig(t, actualRiver) + } } } diff --git a/operations/helm/charts/grafana-agent/CHANGELOG.md b/operations/helm/charts/grafana-agent/CHANGELOG.md index 1969dc92c7fe..7104cf1d004f 100644 --- a/operations/helm/charts/grafana-agent/CHANGELOG.md +++ b/operations/helm/charts/grafana-agent/CHANGELOG.md @@ -10,6 +10,10 @@ internal API changes are not present. Unreleased ---------- +### Enhancements + +- An image's digest can now be used in place of a tag. (@hainenber) + 0.24.0 (2023-09-08) ------------------- diff --git a/operations/helm/charts/grafana-agent/README.md b/operations/helm/charts/grafana-agent/README.md index 7240c4e332e4..f2982e5f9c1d 100644 --- a/operations/helm/charts/grafana-agent/README.md +++ b/operations/helm/charts/grafana-agent/README.md @@ -62,6 +62,7 @@ use the older mode (called "static mode"), set the `agent.mode` value to | agent.storagePath | string | `"/tmp/agent"` | Path to where Grafana Agent stores data (for example, the Write-Ahead Log). 
By default, data is lost between reboots. | | configReloader.customArgs | list | `[]` | Override the args passed to the container. | | configReloader.enabled | bool | `true` | Enables automatically reloading when the agent config changes. | +| configReloader.image.digest | string | `""` | SHA256 digest of image to use for config reloading (either in format "sha256:XYZ" or "XYZ"). When set, will override `configReloader.image.tag` | | configReloader.image.registry | string | `"docker.io"` | Config reloader image registry (defaults to docker.io) | | configReloader.image.repository | string | `"jimmidyson/configmap-reload"` | Repository to get config reloader image from. | | configReloader.image.tag | string | `"v0.8.0"` | Tag of image to use for config reloading. | @@ -92,6 +93,7 @@ use the older mode (called "static mode"), set the `agent.mode` value to | global.image.pullSecrets | list | `[]` | Optional set of global image pull secrets. | | global.image.registry | string | `""` | Global image registry to use if it needs to be overriden for some specific use cases (e.g local registries, custom images, ...) | | global.podSecurityContext | object | `{}` | Security context to apply to the Grafana Agent pod. | +| image.digest | string | `nil` | Grafana Agent image's SHA256 digest (either in format "sha256:XYZ" or "XYZ"). When set, will override `image.tag`. | | image.pullPolicy | string | `"IfNotPresent"` | Grafana Agent image pull policy. | | image.pullSecrets | list | `[]` | Optional set of image pull secrets. | | image.registry | string | `"docker.io"` | Grafana Agent image registry (defaults to docker.io) | diff --git a/operations/helm/charts/grafana-agent/templates/_helpers.tpl b/operations/helm/charts/grafana-agent/templates/_helpers.tpl index 0aa65bf0e964..b9336e976d62 100644 --- a/operations/helm/charts/grafana-agent/templates/_helpers.tpl +++ b/operations/helm/charts/grafana-agent/templates/_helpers.tpl @@ -82,13 +82,36 @@ Create the name of the service account to use {{- end }} {{/* -Calculate name of image tag to use. +Calculate name of image ID to use for "grafana-agent". */}} -{{- define "grafana-agent.imageTag" -}} -{{- if .Values.image.tag -}} -{{- .Values.image.tag }} -{{- else -}} -{{- .Chart.AppVersion }} +{{- define "grafana-agent.imageId" -}} +{{- if .Values.image.digest }} +{{- $digest := .Values.image.digest }} +{{- if not (hasPrefix "sha256:" $digest) }} +{{- $digest = printf "sha256:%s" $digest }} +{{- end }} +{{- printf "@%s" $digest }} +{{- else if .Values.image.tag }} +{{- printf ":%s" .Values.image.tag }} +{{- else }} +{{- printf ":%s" .Chart.AppVersion }} +{{- end }} +{{- end }} + +{{/* +Calculate name of image ID to use for "config-reloader". 
+*/}} +{{- define "config-reloader.imageId" -}} +{{- if .Values.configReloader.image.digest }} +{{- $digest := .Values.configReloader.image.digest }} +{{- if not (hasPrefix "sha256:" $digest) }} +{{- $digest = printf "sha256:%s" $digest }} +{{- end }} +{{- printf "@%s" $digest }} +{{- else if .Values.configReloader.image.tag }} +{{- printf ":%s" .Values.configReloader.image.tag }} +{{- else }} +{{- printf ":%s" "v0.8.0" }} {{- end }} {{- end }} diff --git a/operations/helm/charts/grafana-agent/templates/containers/_agent.yaml b/operations/helm/charts/grafana-agent/templates/containers/_agent.yaml index 7563e11b96a7..2de13343258c 100644 --- a/operations/helm/charts/grafana-agent/templates/containers/_agent.yaml +++ b/operations/helm/charts/grafana-agent/templates/containers/_agent.yaml @@ -1,6 +1,6 @@ {{- define "grafana-agent.container" -}} - name: grafana-agent - image: {{ .Values.global.image.registry | default .Values.image.registry }}/{{ .Values.image.repository }}:{{ include "grafana-agent.imageTag" . }} + image: {{ .Values.global.image.registry | default .Values.image.registry }}/{{ .Values.image.repository }}{{ include "grafana-agent.imageId" . }} imagePullPolicy: {{ .Values.image.pullPolicy }} args: {{- if eq .Values.agent.mode "flow"}} diff --git a/operations/helm/charts/grafana-agent/templates/containers/_watch.yaml b/operations/helm/charts/grafana-agent/templates/containers/_watch.yaml index 410e56eeeafe..250e4b4f0da7 100644 --- a/operations/helm/charts/grafana-agent/templates/containers/_watch.yaml +++ b/operations/helm/charts/grafana-agent/templates/containers/_watch.yaml @@ -1,7 +1,7 @@ {{- define "grafana-agent.watch-container" -}} {{- if .Values.configReloader.enabled -}} - name: config-reloader - image: {{ .Values.global.image.registry | default .Values.configReloader.image.registry }}/{{ .Values.configReloader.image.repository }}:{{ .Values.configReloader.image.tag }} + image: {{ .Values.global.image.registry | default .Values.configReloader.image.registry }}/{{ .Values.configReloader.image.repository }}{{ include "config-reloader.imageId" . }} {{- if .Values.configReloader.customArgs }} args: {{- toYaml .Values.configReloader.customArgs | nindent 4 }} diff --git a/operations/helm/charts/grafana-agent/values.yaml b/operations/helm/charts/grafana-agent/values.yaml index b51efbeafdfb..da4a1b737073 100644 --- a/operations/helm/charts/grafana-agent/values.yaml +++ b/operations/helm/charts/grafana-agent/values.yaml @@ -99,6 +99,8 @@ image: # -- (string) Grafana Agent image tag. When empty, the Chart's appVersion is # used. tag: null + # -- Grafana Agent image's SHA256 digest (either in format "sha256:XYZ" or "XYZ"). When set, will override `image.tag`. + digest: null # -- Grafana Agent image pull policy. pullPolicy: IfNotPresent # -- Optional set of image pull secrets. @@ -128,6 +130,8 @@ configReloader: repository: jimmidyson/configmap-reload # -- Tag of image to use for config reloading. tag: v0.8.0 + # -- SHA256 digest of image to use for config reloading (either in format "sha256:XYZ" or "XYZ"). When set, will override `configReloader.image.tag` + digest: "" # -- Override the args passed to the container. customArgs: [] # -- Resource requests and limits to apply to the config reloader container. 
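Aside, not part of the patch: both new `imageId` helpers resolve the container image reference the same way — a digest overrides the tag, a bare digest gains a `sha256:` prefix, and the fallback is the chart's appVersion (agent) or the pinned `v0.8.0` (config reloader). A minimal Go sketch of that resolution order; the repository and version strings in `main` are illustrative, not taken from the chart:

```go
package main

import (
	"fmt"
	"strings"
)

// imageRef mirrors the helper logic added in _helpers.tpl: a digest (with a
// "sha256:" prefix added when missing) overrides the tag, and defaultTag is
// used when both digest and tag are empty.
func imageRef(repo, tag, digest, defaultTag string) string {
	switch {
	case digest != "":
		if !strings.HasPrefix(digest, "sha256:") {
			digest = "sha256:" + digest
		}
		return repo + "@" + digest
	case tag != "":
		return repo + ":" + tag
	default:
		return repo + ":" + defaultTag
	}
}

func main() {
	// Hypothetical digest: prints <repo>@sha256:abc123.
	fmt.Println(imageRef("docker.io/grafana/agent", "", "abc123", "vX.Y.Z"))
	// Config reloader defaults from values.yaml: prints <repo>:v0.8.0.
	fmt.Println(imageRef("docker.io/jimmidyson/configmap-reload", "v0.8.0", "", "v0.8.0"))
}
```

So with `configReloader.image.digest: abc123` set in values, the rendered container image ends in `@sha256:abc123` rather than `:v0.8.0`.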
diff --git a/pkg/server/tls.go b/pkg/server/tls.go index 3e784b01586e..59fff20c87e8 100644 --- a/pkg/server/tls.go +++ b/pkg/server/tls.go @@ -265,7 +265,7 @@ func (l *tlsListener) applyNormalTLS(c TLSConfig) error { newConfig.ClientCAs = clientCAPool } - clientAuth, err := getClientAuthFromString(c.ClientAuth) + clientAuth, err := GetClientAuthFromString(c.ClientAuth) if err != nil { return err } @@ -290,7 +290,7 @@ func (l *tlsListener) getCertificate(*tls.ClientHelloInfo) (*tls.Certificate, er return &cert, nil } -func getClientAuthFromString(clientAuth string) (tls.ClientAuthType, error) { +func GetClientAuthFromString(clientAuth string) (tls.ClientAuthType, error) { switch clientAuth { case "RequestClientCert": return tls.RequestClientCert, nil diff --git a/pkg/server/tls_certstore_windows.go b/pkg/server/tls_certstore_windows.go index 72c7ce0fd042..60a28e689a3e 100644 --- a/pkg/server/tls_certstore_windows.go +++ b/pkg/server/tls_certstore_windows.go @@ -100,7 +100,7 @@ func (l *tlsListener) applyWindowsCertificateStore(c TLSConfig) error { MaxVersion: tls.VersionTLS12, } - ca, err := getClientAuthFromString(c.ClientAuth) + ca, err := GetClientAuthFromString(c.ClientAuth) if err != nil { return err }
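The final two hunks only widen visibility: `getClientAuthFromString` is exported as `GetClientAuthFromString` so the converter's new `toServer` can translate a static-mode `client_auth_type` string into the Flow `http` service's TLS arguments. A rough sketch of that call path, assuming the exported API behaves as shown in the diff:

```go
package main

import (
	"fmt"

	"github.com/grafana/agent/pkg/server"
	"github.com/grafana/agent/service/http"
)

func main() {
	// "VerifyClientCertIfGiven" is the client_auth_type used in the
	// testdata-race fixture; GetClientAuthFromString maps such strings to
	// crypto/tls ClientAuthType values and returns an error on unknown input.
	authType, err := server.GetClientAuthFromString("VerifyClientCertIfGiven")
	if err != nil {
		panic(err)
	}

	// toServer wraps the value in the Flow http service's ClientAuth type
	// before the builder emits the `http { tls { ... } }` block.
	args := http.TLSArguments{ClientAuth: http.ClientAuth(authType)}
	fmt.Printf("client_auth_type resolves to %v\n", args.ClientAuth)
}
```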