add static conversion for blackbox and cloudwatch (#4818)
Signed-off-by: erikbaranowski <[email protected]>
erikbaranowski authored Aug 16, 2023
1 parent 7fab464 commit 989aae1
Showing 7 changed files with 271 additions and 11 deletions.
@@ -0,0 +1,53 @@
package build

import (
	"fmt"
	"time"

	"github.com/grafana/agent/component/discovery"
	"github.com/grafana/agent/component/prometheus/exporter/blackbox"
	"github.com/grafana/agent/converter/internal/common"
	"github.com/grafana/agent/converter/internal/prometheusconvert"
	"github.com/grafana/agent/pkg/integrations/blackbox_exporter"
	"github.com/grafana/agent/pkg/river/rivertypes"
)

func (b *IntegrationsV1ConfigBuilder) appendBlackboxExporter(config *blackbox_exporter.Config) discovery.Exports {
	args := toBlackboxExporter(config)
	compLabel := common.LabelForParts(b.globalCtx.LabelPrefix, config.Name())
	b.f.Body().AppendBlock(common.NewBlockWithOverride(
		[]string{"prometheus", "exporter", "blackbox"},
		compLabel,
		args,
	))

	return prometheusconvert.NewDiscoverExports(fmt.Sprintf("prometheus.exporter.blackbox.%s.targets", compLabel))
}

func toBlackboxExporter(config *blackbox_exporter.Config) *blackbox.Arguments {
	return &blackbox.Arguments{
		ConfigFile:         config.BlackboxConfigFile,
		Config:             rivertypes.OptionalSecret{},
		Targets:            toBlackboxTargets(config.BlackboxTargets),
		ProbeTimeoutOffset: time.Duration(config.ProbeTimeoutOffset),
		ConfigStruct:       config.BlackboxConfig,
	}
}

func toBlackboxTargets(blackboxTargets []blackbox_exporter.BlackboxTarget) blackbox.TargetBlock {
	var targetBlock blackbox.TargetBlock

	for _, bt := range blackboxTargets {
		targetBlock = append(targetBlock, toBlackboxTarget(bt))
	}

	return targetBlock
}

func toBlackboxTarget(target blackbox_exporter.BlackboxTarget) blackbox.BlackboxTarget {
	return blackbox.BlackboxTarget{
		Name:   target.Name,
		Target: target.Target,
		Module: target.Module,
	}
}
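
A minimal sketch (not part of this commit) of how the target mapping above behaves; it assumes a test file in the same build package and uses only the field names visible in the diff:

package build

import (
	"testing"

	"github.com/grafana/agent/pkg/integrations/blackbox_exporter"
)

// TestToBlackboxTarget checks that a static-mode blackbox target is copied
// field-for-field onto the Flow component's target block.
func TestToBlackboxTarget(t *testing.T) {
	in := blackbox_exporter.BlackboxTarget{
		Name:   "example",
		Target: "http://example.com",
		Module: "http_2xx",
	}

	out := toBlackboxTarget(in)
	if out.Name != in.Name || out.Target != in.Target || out.Module != in.Module {
		t.Fatalf("unexpected mapping: %+v", out)
	}
}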
7 changes: 6 additions & 1 deletion converter/internal/staticconvert/internal/build/builder.go
@@ -9,6 +9,8 @@ import (
"github.com/grafana/agent/converter/internal/prometheusconvert"
"github.com/grafana/agent/pkg/config"
"github.com/grafana/agent/pkg/integrations/apache_http"
"github.com/grafana/agent/pkg/integrations/blackbox_exporter"
"github.com/grafana/agent/pkg/integrations/cloudwatch_exporter"
int_config "github.com/grafana/agent/pkg/integrations/config"
"github.com/grafana/agent/pkg/integrations/node_exporter"
"github.com/grafana/agent/pkg/river/token/builder"
@@ -44,6 +46,10 @@ func (b *IntegrationsV1ConfigBuilder) AppendIntegrations() {
			exports = b.appendApacheExporter(itg)
		case *node_exporter.Config:
			exports = b.appendNodeExporter(itg)
		case *blackbox_exporter.Config:
			exports = b.appendBlackboxExporter(itg)
		case *cloudwatch_exporter.Config:
			exports = b.appendCloudwatchExporter(itg)
		}

		if len(exports.Targets) > 0 {
@@ -56,7 +62,6 @@ func (b *IntegrationsV1ConfigBuilder) appendExporter(commonConfig *int_config.Co
	scrapeConfigs := []*prom_config.ScrapeConfig{}
	if b.cfg.Integrations.ConfigV1.ScrapeIntegrations {
		scrapeConfig := prom_config.DefaultScrapeConfig
		scrapeConfig.MetricsPath = fmt.Sprintf("integrations/%s/metrics", name)
		scrapeConfig.JobName = fmt.Sprintf("integrations/%s", name)
		scrapeConfig.RelabelConfigs = commonConfig.RelabelConfigs
		scrapeConfig.MetricRelabelConfigs = commonConfig.MetricRelabelConfigs
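
Each integration the converter supports is wired in twice: a case in the AppendIntegrations switch above that emits the Flow component, and a matching case in validateIntegrations (validate.go, further down) so the config stops being reported as unsupported. A standalone sketch of the dispatch idea, using hypothetical config types rather than Agent's real ones:

package main

import "fmt"

// Hypothetical stand-ins for the static integration config types.
type blackboxConfig struct{}
type cloudwatchConfig struct{}

// componentFor mirrors the switch above: one case per supported integration,
// with everything else treated as unsupported.
func componentFor(cfg interface{}) string {
	switch cfg.(type) {
	case *blackboxConfig:
		return "prometheus.exporter.blackbox"
	case *cloudwatchConfig:
		return "prometheus.exporter.cloudwatch"
	default:
		return "unsupported"
	}
}

func main() {
	fmt.Println(componentFor(&blackboxConfig{}))   // prometheus.exporter.blackbox
	fmt.Println(componentFor(&cloudwatchConfig{})) // prometheus.exporter.cloudwatch
	fmt.Println(componentFor(42))                  // unsupported
}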
@@ -0,0 +1,96 @@
package build

import (
	"fmt"

	"github.com/grafana/agent/component/discovery"
	"github.com/grafana/agent/component/prometheus/exporter/cloudwatch"
	"github.com/grafana/agent/converter/internal/common"
	"github.com/grafana/agent/converter/internal/prometheusconvert"
	"github.com/grafana/agent/pkg/integrations/cloudwatch_exporter"
)

func (b *IntegrationsV1ConfigBuilder) appendCloudwatchExporter(config *cloudwatch_exporter.Config) discovery.Exports {
	args := toCloudwatchExporter(config)
	compLabel := common.LabelForParts(b.globalCtx.LabelPrefix, config.Name())
	b.f.Body().AppendBlock(common.NewBlockWithOverride(
		[]string{"prometheus", "exporter", "cloudwatch"},
		compLabel,
		args,
	))

	return prometheusconvert.NewDiscoverExports(fmt.Sprintf("prometheus.exporter.cloudwatch.%s.targets", compLabel))
}

func toCloudwatchExporter(config *cloudwatch_exporter.Config) *cloudwatch.Arguments {
	return &cloudwatch.Arguments{
		STSRegion:             config.STSRegion,
		FIPSDisabled:          config.FIPSDisabled,
		Debug:                 config.Debug,
		DiscoveryExportedTags: config.Discovery.ExportedTags,
		Discovery:             toDiscoveryJobs(config.Discovery.Jobs),
		Static:                []cloudwatch.StaticJob{},
	}
}

func toDiscoveryJobs(jobs []*cloudwatch_exporter.DiscoveryJob) []cloudwatch.DiscoveryJob {
	var out []cloudwatch.DiscoveryJob
	for _, job := range jobs {
		out = append(out, toDiscoveryJob(job))
	}
	return out
}

func toDiscoveryJob(job *cloudwatch_exporter.DiscoveryJob) cloudwatch.DiscoveryJob {
	return cloudwatch.DiscoveryJob{
		Auth: cloudwatch.RegionAndRoles{
			Regions: job.InlineRegionAndRoles.Regions,
			Roles:   toRoles(job.InlineRegionAndRoles.Roles),
		},
		CustomTags:                toTags(job.CustomTags),
		SearchTags:                toTags(job.SearchTags),
		Type:                      job.Type,
		DimensionNameRequirements: job.DimensionNameRequirements,
		Metrics:                   toMetrics(job.Metrics),
	}
}

func toRoles(roles []cloudwatch_exporter.Role) []cloudwatch.Role {
	var out []cloudwatch.Role
	for _, role := range roles {
		out = append(out, toRole(role))
	}
	return out
}

func toRole(role cloudwatch_exporter.Role) cloudwatch.Role {
	return cloudwatch.Role{
		RoleArn:    role.RoleArn,
		ExternalID: role.ExternalID,
	}
}

func toTags(tags []cloudwatch_exporter.Tag) cloudwatch.Tags {
	out := make(cloudwatch.Tags, 0)
	for _, tag := range tags {
		out[tag.Key] = tag.Value
	}
	return out
}

func toMetrics(metrics []cloudwatch_exporter.Metric) []cloudwatch.Metric {
	var out []cloudwatch.Metric
	for _, metric := range metrics {
		out = append(out, toMetric(metric))
	}
	return out
}

func toMetric(metric cloudwatch_exporter.Metric) cloudwatch.Metric {
	return cloudwatch.Metric{
		Name:       metric.Name,
		Statistics: metric.Statistics,
		Period:     metric.Period,
		Length:     metric.Length,
	}
}
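
A minimal sketch (not part of this commit) of the tag conversion above: the static exporter's []Tag slice becomes the Flow component's map-keyed cloudwatch.Tags, so a later entry with a duplicate key would overwrite an earlier one. Field names come from the diff; the test file and sample values are assumptions:

package build

import (
	"testing"

	"github.com/grafana/agent/pkg/integrations/cloudwatch_exporter"
)

// TestToTags checks that key/value pairs survive the slice-to-map conversion.
func TestToTags(t *testing.T) {
	in := []cloudwatch_exporter.Tag{
		{Key: "scrape", Value: "true"},
		{Key: "team", Value: "platform"},
	}

	out := toTags(in)
	if len(out) != 2 || out["scrape"] != "true" || out["team"] != "platform" {
		t.Fatalf("unexpected tag mapping: %+v", out)
	}
}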
4 changes: 3 additions & 1 deletion converter/internal/staticconvert/testdata/integrations.diags
@@ -1 +1,3 @@
(Warning) Please review your agent command line flags and ensure they are set in your Flow mode config file where necessary.
(Warning) Please review your agent command line flags and ensure they are set in your Flow mode config file where necessary.
(Error) unsupported integration azure_exporter was provided.
(Error) unsupported integration cadvisor was provided.
58 changes: 50 additions & 8 deletions converter/internal/staticconvert/testdata/integrations.river
@@ -3,10 +3,9 @@ prometheus.exporter.apache "integrations_apache_http" {
}

prometheus.scrape "integrations_apache_http" {
  targets      = prometheus.exporter.apache.integrations_apache_http.targets
  forward_to   = [prometheus.remote_write.integrations.receiver]
  job_name     = "integrations/apache_http"
  metrics_path = "integrations/apache_http/metrics"
  targets    = prometheus.exporter.apache.integrations_apache_http.targets
  forward_to = [prometheus.remote_write.integrations.receiver]
  job_name   = "integrations/apache_http"
}

prometheus.remote_write "integrations" {
@@ -19,6 +18,50 @@ prometheus.remote_write "integrations" {
}
}

prometheus.exporter.blackbox "integrations_blackbox" {
  target "example" {
    address = "http://example.com"
    module  = "http_2xx"
  }
  probe_timeout_offset = "0s"
}

prometheus.scrape "integrations_blackbox" {
  targets    = prometheus.exporter.blackbox.integrations_blackbox.targets
  forward_to = [prometheus.remote_write.integrations.receiver]
  job_name   = "integrations/blackbox"
}

prometheus.exporter.cloudwatch "integrations_cloudwatch_exporter" {
  sts_region    = "us-east-2"
  fips_disabled = false

  discovery {
    regions     = ["us-east-2"]
    custom_tags = {}
    search_tags = {}
    type        = "AWS/EC2"

    metric {
      name       = "CPUUtilization"
      statistics = ["Average"]
      period     = "5m0s"
    }

    metric {
      name       = "NetworkPacketsIn"
      statistics = ["Average"]
      period     = "5m0s"
    }
  }
}

prometheus.scrape "integrations_cloudwatch_exporter" {
  targets    = prometheus.exporter.cloudwatch.integrations_cloudwatch_exporter.targets
  forward_to = [prometheus.remote_write.integrations.receiver]
  job_name   = "integrations/cloudwatch_exporter"
}

prometheus.exporter.unix { }

discovery.relabel "integrations_node_exporter" {
@@ -41,10 +84,9 @@ discovery.relabel "integrations_node_exporter" {
}

prometheus.scrape "integrations_node_exporter" {
  targets      = discovery.relabel.integrations_node_exporter.output
  forward_to   = [prometheus.relabel.integrations_node_exporter.receiver]
  job_name     = "integrations/node_exporter"
  metrics_path = "integrations/node_exporter/metrics"
  targets    = discovery.relabel.integrations_node_exporter.output
  forward_to = [prometheus.relabel.integrations_node_exporter.receiver]
  job_name   = "integrations/node_exporter"
}

prometheus.relabel "integrations_node_exporter" {
60 changes: 59 additions & 1 deletion converter/internal/staticconvert/testdata/integrations.yaml
@@ -21,4 +21,62 @@ integrations:
        target_label: __metric_param_target1
      - source_labels: [__metric_address2__]
        target_label: __metric_param_target2
  scrape_integrations: true
  scrape_integrations: true
  azure_exporter:
    enabled: true
    scrape_interval: 60s
    subscriptions:
      - ABCDEFGH-1234-ABCD-1234-ABCDEFGHIJKL
    resource_type: microsoft.containerservice/managedclusters
    metrics:
      - node_cpu_usage_millicores
      - node_cpu_usage_percentage
      - node_disk_usage_bytes
      - node_disk_usage_percentage
      - node_memory_rss_bytes
      - node_memory_rss_percentage
      - node_memory_working_set_bytes
      - node_memory_working_set_percentage
      - node_network_in_bytes
      - node_network_out_bytes
    included_resource_tags:
      - environment
    included_dimensions:
      - node
      - nodepool
  blackbox:
    enabled: true
    blackbox_targets:
      - name: example
        address: http://example.com
        module: http_2xx
    blackbox_config:
      modules:
        http_2xx:
          prober: http
          timeout: 5s
          http:
            method: POST
            headers:
              Content-Type: application/json
            body: '{}'
            preferred_ip_protocol: "ip4"
  cadvisor:
    enabled: true
  cloudwatch_exporter:
    enabled: true
    sts_region: us-east-2
    discovery:
      jobs:
        - type: AWS/EC2
          regions:
            - us-east-2
          metrics:
            - name: CPUUtilization
              period: 5m
              statistics:
                - Average
            - name: NetworkPacketsIn
              period: 5m
              statistics:
                - Average
4 changes: 4 additions & 0 deletions converter/internal/staticconvert/validate.go
@@ -7,6 +7,8 @@ import (
"github.com/grafana/agent/converter/internal/common"
"github.com/grafana/agent/pkg/config"
"github.com/grafana/agent/pkg/integrations/apache_http"
"github.com/grafana/agent/pkg/integrations/blackbox_exporter"
"github.com/grafana/agent/pkg/integrations/cloudwatch_exporter"
"github.com/grafana/agent/pkg/integrations/node_exporter"
"github.com/grafana/agent/pkg/logs"
"github.com/grafana/agent/pkg/metrics"
@@ -85,6 +87,8 @@ func validateIntegrations(integrationsConfig config.VersionedIntegrations) diag.
		switch itg := integration.Config.(type) {
		case *apache_http.Config:
		case *node_exporter.Config:
		case *blackbox_exporter.Config:
		case *cloudwatch_exporter.Config:
		default:
			diags.Add(diag.SeverityLevelError, fmt.Sprintf("unsupported integration %s was provided.", itg.Name()))
}
Expand Down

0 comments on commit 989aae1

Please sign in to comment.