diff --git a/go.mod b/go.mod index d372d91f0d..700e08984b 100644 --- a/go.mod +++ b/go.mod @@ -29,7 +29,7 @@ require ( github.com/IBM/ibm-hpcs-tke-sdk v0.0.0-20211109141421-a4b61b05f7d1 github.com/IBM/ibm-hpcs-uko-sdk v0.0.20-beta github.com/IBM/keyprotect-go-client v0.15.1 - github.com/IBM/logs-go-sdk v0.3.0 + github.com/IBM/logs-go-sdk v0.4.0 github.com/IBM/logs-router-go-sdk v1.0.5 github.com/IBM/mqcloud-go-sdk v0.2.0 github.com/IBM/networking-go-sdk v0.49.0 diff --git a/go.sum b/go.sum index 333fc0efd3..a592622189 100644 --- a/go.sum +++ b/go.sum @@ -148,8 +148,8 @@ github.com/IBM/ibm-hpcs-uko-sdk v0.0.20-beta/go.mod h1:MLVNHMYoKsvovJZ4v1gQCpIYt github.com/IBM/keyprotect-go-client v0.5.1/go.mod h1:5TwDM/4FRJq1ZOlwQL1xFahLWQ3TveR88VmL1u3njyI= github.com/IBM/keyprotect-go-client v0.15.1 h1:m4qzqF5zOumRxKZ8s7vtK7A/UV/D278L8xpRG+WgT0s= github.com/IBM/keyprotect-go-client v0.15.1/go.mod h1:asXtHwL/4uCHA221Vd/7SkXEi2pcRHDzPyyksc1DthE= -github.com/IBM/logs-go-sdk v0.3.0 h1:FHzTCCMyp9DvQGXgkppzcOPywC4ggt7x8xu0MR5h8xI= -github.com/IBM/logs-go-sdk v0.3.0/go.mod h1:yv/GCXC4/p+MZEeXl4xjZAOMvDAVRwu61WyHZFKFXQM= +github.com/IBM/logs-go-sdk v0.4.0 h1:CyUjm19EUtcJjf4mxsj6Rc7gkZDT8JEY5rLUIz8Eoag= +github.com/IBM/logs-go-sdk v0.4.0/go.mod h1:yv/GCXC4/p+MZEeXl4xjZAOMvDAVRwu61WyHZFKFXQM= github.com/IBM/logs-router-go-sdk v1.0.5 h1:r0kC1+HfmSeQCD6zQTUp4PDI/zp4Ueo1Zo19ipHuNlw= github.com/IBM/logs-router-go-sdk v1.0.5/go.mod h1:tCN2vFgu5xG0ob9iJcxi5M4bJ6mWmu3nhmRPnvlwev0= github.com/IBM/mqcloud-go-sdk v0.2.0 h1:QOWk8ZGk0QfIL0MOGTKzNdM3Qe0Hk+ifAFtNSFQo5HU= diff --git a/ibm/provider/provider.go b/ibm/provider/provider.go index f73bda2686..de1615c540 100644 --- a/ibm/provider/provider.go +++ b/ibm/provider/provider.go @@ -1040,6 +1040,8 @@ func Provider() *schema.Provider { "ibm_logs_data_usage_metrics": logs.AddLogsInstanceFields(logs.DataSourceIbmLogsDataUsageMetrics()), "ibm_logs_enrichments": logs.AddLogsInstanceFields(logs.DataSourceIbmLogsEnrichments()), 
"ibm_logs_data_access_rules": logs.AddLogsInstanceFields(logs.DataSourceIbmLogsDataAccessRules()), + "ibm_logs_stream": logs.AddLogsInstanceFields(logs.DataSourceIbmLogsStream()), + "ibm_logs_streams": logs.AddLogsInstanceFields(logs.DataSourceIbmLogsStreams()), // Logs Router Service "ibm_logs_router_tenants": logsrouting.DataSourceIBMLogsRouterTenants(), @@ -1681,6 +1683,7 @@ func Provider() *schema.Provider { "ibm_logs_data_usage_metrics": logs.AddLogsInstanceFields(logs.ResourceIbmLogsDataUsageMetrics()), "ibm_logs_enrichment": logs.AddLogsInstanceFields(logs.ResourceIbmLogsEnrichment()), "ibm_logs_data_access_rule": logs.AddLogsInstanceFields(logs.ResourceIbmLogsDataAccessRule()), + "ibm_logs_stream": logs.AddLogsInstanceFields(logs.ResourceIbmLogsStream()), // Logs Router Service "ibm_logs_router_tenant": logsrouting.ResourceIBMLogsRouterTenant(), @@ -2174,6 +2177,7 @@ func Validator() validate.ValidatorDict { "ibm_logs_dashboard_folder": logs.ResourceIbmLogsDashboardFolderValidator(), "ibm_logs_enrichment": logs.ResourceIbmLogsEnrichmentValidator(), "ibm_logs_data_access_rule": logs.ResourceIbmLogsDataAccessRuleValidator(), + "ibm_logs_stream": logs.ResourceIbmLogsStreamValidator(), // Added for Logs Router Service "ibm_logs_router_tenant": logsrouting.ResourceIBMLogsRouterTenantValidator(), diff --git a/ibm/service/logs/data_source_ibm_logs_outgoing_webhook.go b/ibm/service/logs/data_source_ibm_logs_outgoing_webhook.go index 5ab42bb485..65ca8e5a73 100644 --- a/ibm/service/logs/data_source_ibm_logs_outgoing_webhook.go +++ b/ibm/service/logs/data_source_ibm_logs_outgoing_webhook.go @@ -69,6 +69,11 @@ func DataSourceIbmLogsOutgoingWebhook() *schema.Resource { Computed: true, Description: "The ID of the selected IBM Event Notifications instance.", }, + "endpoint_type": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The endpoint type of integration", + }, "region_id": &schema.Schema{ Type: schema.TypeString, Computed: true, @@ 
-167,6 +172,9 @@ func DataSourceIbmLogsOutgoingWebhookOutgoingWebhooksV1IbmEventNotificationsConf modelMap := make(map[string]interface{}) modelMap["event_notifications_instance_id"] = model.EventNotificationsInstanceID.String() modelMap["region_id"] = *model.RegionID + if model.EndpointType != nil { + modelMap["endpoint_type"] = *model.EndpointType + } if model.SourceID != nil { modelMap["source_id"] = *model.SourceID } diff --git a/ibm/service/logs/data_source_ibm_logs_stream.go b/ibm/service/logs/data_source_ibm_logs_stream.go new file mode 100644 index 0000000000..ddc0ca3b14 --- /dev/null +++ b/ibm/service/logs/data_source_ibm_logs_stream.go @@ -0,0 +1,175 @@ +// Copyright IBM Corp. 2024 All Rights Reserved. +// Licensed under the Mozilla Public License v2.0 + +/* + * IBM OpenAPI Terraform Generator Version: 3.96.0-d6dec9d7-20241008-212902 + */ + +package logs + +import ( + "context" + "fmt" + "log" + "strconv" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + "github.com/IBM-Cloud/terraform-provider-ibm/ibm/conns" + "github.com/IBM-Cloud/terraform-provider-ibm/ibm/flex" + "github.com/IBM/go-sdk-core/v5/core" + "github.com/IBM/logs-go-sdk/logsv0" +) + +func DataSourceIbmLogsStream() *schema.Resource { + return &schema.Resource{ + ReadContext: dataSourceIbmLogsStreamRead, + + Schema: map[string]*schema.Schema{ + "name": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The name of the Event stream.", + }, + "logs_streams_id": &schema.Schema{ + Type: schema.TypeString, + Required: true, + Description: "The ID of the Event stream.", + }, + "is_active": &schema.Schema{ + Type: schema.TypeBool, + Computed: true, + Description: "Whether the Event stream is active.", + }, + "dpxl_expression": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The DPXL expression of the Event stream.", + }, + "created_at": &schema.Schema{ + Type: schema.TypeString,
+ Computed: true, + Description: "The creation time of the Event stream.", + }, + "updated_at": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The update time of the Event stream.", + }, + "compression_type": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The compression type of the stream.", + }, + "ibm_event_streams": &schema.Schema{ + Type: schema.TypeList, + Computed: true, + Description: "Configuration for IBM Event Streams.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "brokers": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The brokers of the IBM Event Streams.", + }, + "topic": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The topic of the IBM Event Streams.", + }, + }, + }, + }, + }, + } +} + +func dataSourceIbmLogsStreamRead(context context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + logsClient, err := meta.(conns.ClientSession).LogsV0() + if err != nil { + tfErr := flex.DiscriminatedTerraformErrorf(err, err.Error(), "(Data) ibm_logs_stream", "read", "initialize-client") + log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + region := getLogsInstanceRegion(logsClient, d) + instanceId := d.Get("instance_id").(string) + logsClient = getClientWithLogsInstanceEndpoint(logsClient, instanceId, region, getLogsInstanceEndpointType(logsClient, d)) + + streamsID, _ := strconv.ParseInt(d.Get("logs_streams_id").(string), 10, 64) + + getEventStreamTargetsOptions := &logsv0.GetEventStreamTargetsOptions{} + + streams, _, err := logsClient.GetEventStreamTargetsWithContext(context, getEventStreamTargetsOptions) + if err != nil { + tfErr := flex.TerraformErrorf(err, fmt.Sprintf("GetEventStreamTargetsWithContext failed: %s", err.Error()), "(Data) ibm_logs_stream", "read") + log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + if streams != nil { + 
streamIds := make(map[int64]interface{}, 0) + for _, stream := range streams.Streams { + streamIds[*stream.ID] = nil + if *stream.ID == streamsID { + d.SetId(fmt.Sprintf("%d", *stream.ID)) + + if err = d.Set("name", stream.Name); err != nil { + return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting name: %s", err), "(Data) ibm_logs_stream", "read", "set-name").GetDiag() + } + + if !core.IsNil(stream.IsActive) { + if err = d.Set("is_active", stream.IsActive); err != nil { + return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting is_active: %s", err), "(Data) ibm_logs_stream", "read", "set-is_active").GetDiag() + } + } + + if err = d.Set("dpxl_expression", stream.DpxlExpression); err != nil { + return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting dpxl_expression: %s", err), "(Data) ibm_logs_stream", "read", "set-dpxl_expression").GetDiag() + } + + if !core.IsNil(stream.CreatedAt) { + if err = d.Set("created_at", flex.DateTimeToString(stream.CreatedAt)); err != nil { + return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting created_at: %s", err), "(Data) ibm_logs_stream", "read", "set-created_at").GetDiag() + } + } + + if !core.IsNil(stream.UpdatedAt) { + if err = d.Set("updated_at", flex.DateTimeToString(stream.UpdatedAt)); err != nil { + return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting updated_at: %s", err), "(Data) ibm_logs_stream", "read", "set-updated_at").GetDiag() + } + } + + if !core.IsNil(stream.CompressionType) { + if err = d.Set("compression_type", stream.CompressionType); err != nil { + return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting compression_type: %s", err), "(Data) ibm_logs_stream", "read", "set-compression_type").GetDiag() + } + } + + if !core.IsNil(stream.IbmEventStreams) { + ibmEventStreams := []map[string]interface{}{} + ibmEventStreamsMap, err := DataSourceIbmLogsStreamIbmEventStreamsToMap(stream.IbmEventStreams) + if err != 
nil { + return flex.DiscriminatedTerraformErrorf(err, err.Error(), "(Data) ibm_logs_stream", "read", "ibm_event_streams-to-map").GetDiag() + } + ibmEventStreams = append(ibmEventStreams, ibmEventStreamsMap) + if err = d.Set("ibm_event_streams", ibmEventStreams); err != nil { + return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting ibm_event_streams: %s", err), "(Data) ibm_logs_stream", "read", "set-ibm_event_streams").GetDiag() + } + } + } + } + if _, ok := streamIds[streamsID]; !ok { + d.SetId("") + return flex.TerraformErrorf(err, fmt.Sprintf("Stream ID (%d) not found ", streamsID), "(Data) ibm_logs_stream", "read").GetDiag() + } + } + + return nil +} + +func DataSourceIbmLogsStreamIbmEventStreamsToMap(model *logsv0.IbmEventStreams) (map[string]interface{}, error) { + modelMap := make(map[string]interface{}) + modelMap["brokers"] = *model.Brokers + modelMap["topic"] = *model.Topic + return modelMap, nil +} diff --git a/ibm/service/logs/data_source_ibm_logs_stream_test.go b/ibm/service/logs/data_source_ibm_logs_stream_test.go new file mode 100644 index 0000000000..5ec65aefc6 --- /dev/null +++ b/ibm/service/logs/data_source_ibm_logs_stream_test.go @@ -0,0 +1,115 @@ +// Copyright IBM Corp. 2024 All Rights Reserved. 
+// Licensed under the Mozilla Public License v2.0 + +/* + * IBM OpenAPI Terraform Generator Version: 3.96.0-d6dec9d7-20241008-212902 + */ + +package logs_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + acc "github.com/IBM-Cloud/terraform-provider-ibm/ibm/acctest" +) + +func TestAccIbmLogsStreamDataSourceBasic(t *testing.T) { + streamName := fmt.Sprintf("tf_name_%d", acctest.RandIntRange(10, 100)) + streamDpxlExpression := "contains(kubernetes.labels.CX_AZ, 'eu-west-1')" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acc.TestAccPreCheckCloudLogs(t) }, + Providers: acc.TestAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccCheckIbmLogsStreamDataSourceConfigBasic(streamName, streamDpxlExpression), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "id"), + resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "name"), + resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "dpxl_expression"), + ), + }, + }, + }) +} + +func TestAccIbmLogsStreamDataSourceAllArgs(t *testing.T) { + streamName := fmt.Sprintf("tf_name_%d", acctest.RandIntRange(10, 100)) + streamIsActive := "false" + streamDpxlExpression := "contains(kubernetes.labels.CX_AZ, 'eu-west-1')" + streamCompressionType := "gzip" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acc.TestAccPreCheckCloudLogs(t) }, + Providers: acc.TestAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccCheckIbmLogsStreamDataSourceConfig(streamName, streamIsActive, streamDpxlExpression, streamCompressionType), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "id"), + 
resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "name"), + resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "dpxl_expression"), + resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "created_at"), + resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "updated_at"), + resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "compression_type"), + resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "ibm_event_streams.#"), + ), + }, + }, + }) +} + +func testAccCheckIbmLogsStreamDataSourceConfigBasic(streamName string, streamDpxlExpression string) string { + return fmt.Sprintf(` + resource "ibm_logs_stream" "logs_stream_instance" { + instance_id = "%s" + region = "%s" + name = "%s" + dpxl_expression = "%s" + compression_type = "gzip" + ibm_event_streams { + brokers = "kafka01.example.com:9093" + topic = "live.screen" + } + } + + data "ibm_logs_stream" "logs_stream_instance" { + instance_id = "%[1]s" + region = "%[2]s" + logs_streams_id = ibm_logs_stream.logs_stream_instance.streams_id + depends_on = [ + ibm_logs_stream.logs_stream_instance + ] + } + `, acc.LogsInstanceId, acc.LogsInstanceRegion, streamName, streamDpxlExpression) +} + +func testAccCheckIbmLogsStreamDataSourceConfig(streamName string, streamIsActive string, streamDpxlExpression string, streamCompressionType string) string { + return fmt.Sprintf(` + resource "ibm_logs_stream" "logs_stream_instance" { + instance_id = "%s" + region = "%s" + name = "%s" + is_active = %s + dpxl_expression = "%s" + compression_type = "%s" + ibm_event_streams { + brokers = "kafka01.example.com:9093" + topic = "live.screen" + } + } + + data "ibm_logs_stream" "logs_stream_instance" { + instance_id = "%[1]s" + region = "%[2]s" + logs_streams_id = ibm_logs_stream.logs_stream_instance.streams_id + depends_on = [ + ibm_logs_stream.logs_stream_instance + ] + } + `, 
acc.LogsInstanceId, acc.LogsInstanceRegion, streamName, streamIsActive, streamDpxlExpression, streamCompressionType) +} diff --git a/ibm/service/logs/data_source_ibm_logs_streams.go b/ibm/service/logs/data_source_ibm_logs_streams.go new file mode 100644 index 0000000000..eae5cf08e7 --- /dev/null +++ b/ibm/service/logs/data_source_ibm_logs_streams.go @@ -0,0 +1,172 @@ +// Copyright IBM Corp. 2024 All Rights Reserved. +// Licensed under the Mozilla Public License v2.0 + +/* + * IBM OpenAPI Terraform Generator Version: 3.96.0-d6dec9d7-20241008-212902 + */ + +package logs + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + "github.com/IBM-Cloud/terraform-provider-ibm/ibm/conns" + "github.com/IBM-Cloud/terraform-provider-ibm/ibm/flex" + "github.com/IBM/logs-go-sdk/logsv0" +) + +func DataSourceIbmLogsStreams() *schema.Resource { + return &schema.Resource{ + ReadContext: dataSourceIbmLogsStreamsRead, + + Schema: map[string]*schema.Schema{ + "streams": &schema.Schema{ + Type: schema.TypeList, + Computed: true, + Description: "Collection of Event Streams.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "id": &schema.Schema{ + Type: schema.TypeInt, + Computed: true, + Description: "The ID of the Event stream.", + }, + "name": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The name of the Event stream.", + }, + "is_active": &schema.Schema{ + Type: schema.TypeBool, + Computed: true, + Description: "Whether the Event stream is active.", + }, + "dpxl_expression": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The DPXL expression of the Event stream.", + }, + "created_at": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The creation time of the Event stream.", + }, + "updated_at": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: 
"The update time of the Event stream.", + }, + "compression_type": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The compression type of the stream.", + }, + "ibm_event_streams": &schema.Schema{ + Type: schema.TypeList, + Computed: true, + Description: "Configuration for IBM Event Streams.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "brokers": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The brokers of the IBM Event Streams.", + }, + "topic": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The topic of the IBM Event Streams.", + }, + }, + }, + }, + }, + }, + }, + }, + } +} + +func dataSourceIbmLogsStreamsRead(context context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + logsClient, err := meta.(conns.ClientSession).LogsV0() + if err != nil { + tfErr := flex.DiscriminatedTerraformErrorf(err, err.Error(), "(Data) ibm_logs_streams", "read", "initialize-client") + log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + region := getLogsInstanceRegion(logsClient, d) + instanceId := d.Get("instance_id").(string) + logsClient = getClientWithLogsInstanceEndpoint(logsClient, instanceId, region, getLogsInstanceEndpointType(logsClient, d)) + + getEventStreamTargetsOptions := &logsv0.GetEventStreamTargetsOptions{} + + streamCollection, _, err := logsClient.GetEventStreamTargetsWithContext(context, getEventStreamTargetsOptions) + if err != nil { + tfErr := flex.TerraformErrorf(err, fmt.Sprintf("GetEventStreamTargetsWithContext failed: %s", err.Error()), "(Data) ibm_logs_streams", "read") + log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + + d.SetId(dataSourceIbmLogsStreamsID(d)) + + streams := []map[string]interface{}{} + for _, streamsItem := range streamCollection.Streams { + streamsItemMap, err := DataSourceIbmLogsStreamsStreamToMap(&streamsItem) // #nosec G601 + if err != nil { 
+ return flex.DiscriminatedTerraformErrorf(err, err.Error(), "(Data) ibm_logs_streams", "read", "streams-to-map").GetDiag() + } + streams = append(streams, streamsItemMap) + } + if err = d.Set("streams", streams); err != nil { + return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting streams: %s", err), "(Data) ibm_logs_streams", "read", "set-streams").GetDiag() + } + + return nil +} + +// dataSourceIbmLogsStreamsID returns a reasonable ID for the list. +func dataSourceIbmLogsStreamsID(d *schema.ResourceData) string { + return time.Now().UTC().String() +} + +func DataSourceIbmLogsStreamsStreamToMap(model *logsv0.Stream) (map[string]interface{}, error) { + modelMap := make(map[string]interface{}) + if model.ID != nil { + modelMap["id"] = flex.IntValue(model.ID) + } + modelMap["name"] = *model.Name + if model.IsActive != nil { + modelMap["is_active"] = *model.IsActive + } + modelMap["dpxl_expression"] = *model.DpxlExpression + if model.CreatedAt != nil { + modelMap["created_at"] = model.CreatedAt.String() + } + if model.UpdatedAt != nil { + modelMap["updated_at"] = model.UpdatedAt.String() + } + if model.CompressionType != nil { + modelMap["compression_type"] = *model.CompressionType + } + if model.IbmEventStreams != nil { + ibmEventStreamsMap, err := DataSourceIbmLogsStreamsIbmEventStreamsToMap(model.IbmEventStreams) + if err != nil { + return modelMap, err + } + modelMap["ibm_event_streams"] = []map[string]interface{}{ibmEventStreamsMap} + } + return modelMap, nil +} + +func DataSourceIbmLogsStreamsIbmEventStreamsToMap(model *logsv0.IbmEventStreams) (map[string]interface{}, error) { + modelMap := make(map[string]interface{}) + modelMap["brokers"] = *model.Brokers + modelMap["topic"] = *model.Topic + return modelMap, nil +} diff --git a/ibm/service/logs/data_source_ibm_logs_streams_test.go b/ibm/service/logs/data_source_ibm_logs_streams_test.go new file mode 100644 index 0000000000..e2587150e8 --- /dev/null +++ 
b/ibm/service/logs/data_source_ibm_logs_streams_test.go @@ -0,0 +1,116 @@ +// Copyright IBM Corp. 2024 All Rights Reserved. +// Licensed under the Mozilla Public License v2.0 + +/* + * IBM OpenAPI Terraform Generator Version: 3.96.0-d6dec9d7-20241008-212902 + */ + +package logs_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + acc "github.com/IBM-Cloud/terraform-provider-ibm/ibm/acctest" +) + +func TestAccIbmLogsStreamsDataSourceBasic(t *testing.T) { + streamName := fmt.Sprintf("tf_name_%d", acctest.RandIntRange(10, 100)) + streamDpxlExpression := "contains(kubernetes.labels.CX_AZ, 'eu-west-1')" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acc.TestAccPreCheckCloudLogs(t) }, + Providers: acc.TestAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccCheckIbmLogsStreamsDataSourceConfigBasic(streamName, streamDpxlExpression), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.ibm_logs_streams.logs_streams_instance", "id"), + resource.TestCheckResourceAttrSet("data.ibm_logs_streams.logs_streams_instance", "streams.#"), + resource.TestCheckResourceAttr("data.ibm_logs_streams.logs_streams_instance", "streams.0.name", streamName), + resource.TestCheckResourceAttr("data.ibm_logs_streams.logs_streams_instance", "streams.0.dpxl_expression", streamDpxlExpression), + ), + }, + }, + }) +} + +func TestAccIbmLogsStreamsDataSourceAllArgs(t *testing.T) { + streamName := fmt.Sprintf("tf_name_%d", acctest.RandIntRange(10, 100)) + streamIsActive := "false" + streamDpxlExpression := "contains(kubernetes.labels.CX_AZ, 'eu-west-1')" + streamCompressionType := "gzip" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acc.TestAccPreCheckCloudLogs(t) }, + Providers: acc.TestAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: 
testAccCheckIbmLogsStreamsDataSourceConfig(streamName, streamIsActive, streamDpxlExpression, streamCompressionType), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.ibm_logs_streams.logs_streams_instance", "id"), + resource.TestCheckResourceAttrSet("data.ibm_logs_streams.logs_streams_instance", "streams.#"), + resource.TestCheckResourceAttrSet("data.ibm_logs_streams.logs_streams_instance", "streams.0.id"), + resource.TestCheckResourceAttr("data.ibm_logs_streams.logs_streams_instance", "streams.0.name", streamName), + resource.TestCheckResourceAttr("data.ibm_logs_streams.logs_streams_instance", "streams.0.is_active", streamIsActive), + resource.TestCheckResourceAttr("data.ibm_logs_streams.logs_streams_instance", "streams.0.dpxl_expression", streamDpxlExpression), + resource.TestCheckResourceAttrSet("data.ibm_logs_streams.logs_streams_instance", "streams.0.created_at"), + resource.TestCheckResourceAttrSet("data.ibm_logs_streams.logs_streams_instance", "streams.0.updated_at"), + resource.TestCheckResourceAttr("data.ibm_logs_streams.logs_streams_instance", "streams.0.compression_type", streamCompressionType), + ), + }, + }, + }) +} + +func testAccCheckIbmLogsStreamsDataSourceConfigBasic(streamName string, streamDpxlExpression string) string { + return fmt.Sprintf(` + resource "ibm_logs_stream" "logs_stream_instance" { + instance_id = "%s" + region = "%s" + name = "%s" + dpxl_expression = "%s" + compression_type = "gzip" + ibm_event_streams { + brokers = "kafka01.example.com:9093" + topic = "live.screen" + } + } + + data "ibm_logs_streams" "logs_streams_instance" { + instance_id = "%[1]s" + region = "%[2]s" + depends_on = [ + ibm_logs_stream.logs_stream_instance + ] + } + `, acc.LogsInstanceId, acc.LogsInstanceRegion, streamName, streamDpxlExpression) +} + +func testAccCheckIbmLogsStreamsDataSourceConfig(streamName string, streamIsActive string, streamDpxlExpression string, streamCompressionType string) string { + return fmt.Sprintf(` + 
resource "ibm_logs_stream" "logs_stream_instance" { + instance_id = "%s" + region = "%s" + name = "%s" + is_active = %s + dpxl_expression = "%s" + compression_type = "%s" + ibm_event_streams { + brokers = "kafka01.example.com:9093" + topic = "live.screen" + } + } + + data "ibm_logs_streams" "logs_streams_instance" { + instance_id = "%[1]s" + region = "%[2]s" + depends_on = [ + ibm_logs_stream.logs_stream_instance + ] + } + `, acc.LogsInstanceId, acc.LogsInstanceRegion, streamName, streamIsActive, streamDpxlExpression, streamCompressionType) +} diff --git a/ibm/service/logs/resource_ibm_logs_alert.go b/ibm/service/logs/resource_ibm_logs_alert.go index 291fa6d61d..543f57686e 100644 --- a/ibm/service/logs/resource_ibm_logs_alert.go +++ b/ibm/service/logs/resource_ibm_logs_alert.go @@ -224,6 +224,7 @@ func ResourceIbmLogsAlert() *schema.Resource { Type: schema.TypeList, MaxItems: 1, Optional: true, + Computed: true, Description: "Deadman configuration.", Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -1250,7 +1251,7 @@ func ResourceIbmLogsAlert() *schema.Resource { }, "notification_groups": &schema.Schema{ Type: schema.TypeList, - Required: true, + Optional: true, Description: "Alert notification groups.", Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -1776,13 +1777,16 @@ func resourceIbmLogsAlertRead(context context.Context, d *schema.ResourceData, m if err = d.Set("notification_groups", notificationGroups); err != nil { return diag.FromErr(fmt.Errorf("Error setting notification_groups: %s", err)) } - filtersMap, err := ResourceIbmLogsAlertAlertsV1AlertFiltersToMap(alert.Filters) - if err != nil { - return diag.FromErr(err) - } - if err = d.Set("filters", []map[string]interface{}{filtersMap}); err != nil { - return diag.FromErr(fmt.Errorf("Error setting filters: %s", err)) + if alert.Filters != nil { + filtersMap, err := ResourceIbmLogsAlertAlertsV1AlertFiltersToMap(alert.Filters) + if err != nil { + return diag.FromErr(err) + } + if err 
= d.Set("filters", []map[string]interface{}{filtersMap}); err != nil { + return diag.FromErr(fmt.Errorf("Error setting filters: %s", err)) + } } + if !core.IsNil(alert.ActiveWhen) { activeWhenMap, err := ResourceIbmLogsAlertAlertsV1AlertActiveWhenToMap(alert.ActiveWhen) if err != nil { diff --git a/ibm/service/logs/resource_ibm_logs_data_access_rule_test.go b/ibm/service/logs/resource_ibm_logs_data_access_rule_test.go index b6f6485301..c8f33f6472 100644 --- a/ibm/service/logs/resource_ibm_logs_data_access_rule_test.go +++ b/ibm/service/logs/resource_ibm_logs_data_access_rule_test.go @@ -22,9 +22,9 @@ import ( func TestAccIbmLogsDataAccessRuleBasic(t *testing.T) { var conf logsv0.DataAccessRule displayName := fmt.Sprintf("tf_display_name_%d", acctest.RandIntRange(10, 100)) - defaultExpression := "true" + defaultExpression := "true" displayNameUpdate := fmt.Sprintf("tf_display_name_%d", acctest.RandIntRange(10, 100)) - defaultExpressionUpdate := "false" + defaultExpressionUpdate := "false" resource.Test(t, resource.TestCase{ PreCheck: func() { acc.TestAccPreCheckCloudLogs(t) }, diff --git a/ibm/service/logs/resource_ibm_logs_outgoing_webhook.go b/ibm/service/logs/resource_ibm_logs_outgoing_webhook.go index 55fa0ce936..ff9c4a1045 100644 --- a/ibm/service/logs/resource_ibm_logs_outgoing_webhook.go +++ b/ibm/service/logs/resource_ibm_logs_outgoing_webhook.go @@ -63,6 +63,12 @@ func ResourceIbmLogsOutgoingWebhook() *schema.Resource { Required: true, Description: "The region ID of the selected IBM Event Notifications instance.", }, + "endpoint_type": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: "The endpoint type of integration.", + }, "source_id": &schema.Schema{ Type: schema.TypeString, Optional: true, @@ -342,6 +348,9 @@ func ResourceIbmLogsOutgoingWebhookMapToOutgoingWebhooksV1IbmEventNotificationsC model := &logsv0.OutgoingWebhooksV1IbmEventNotificationsConfig{} model.EventNotificationsInstanceID = 
core.UUIDPtr(strfmt.UUID(modelMap["event_notifications_instance_id"].(string))) model.RegionID = core.StringPtr(modelMap["region_id"].(string)) + if modelMap["endpoint_type"] != nil && modelMap["endpoint_type"].(string) != "" { + model.EndpointType = core.StringPtr(modelMap["endpoint_type"].(string)) + } if modelMap["source_id"] != nil && modelMap["source_id"].(string) != "" { model.SourceID = core.StringPtr(modelMap["source_id"].(string)) } @@ -389,6 +398,9 @@ func ResourceIbmLogsOutgoingWebhookOutgoingWebhooksV1IbmEventNotificationsConfig modelMap := make(map[string]interface{}) modelMap["event_notifications_instance_id"] = model.EventNotificationsInstanceID.String() modelMap["region_id"] = *model.RegionID + if model.EndpointType != nil { + modelMap["endpoint_type"] = *model.EndpointType + } if model.SourceID != nil { modelMap["source_id"] = *model.SourceID } diff --git a/ibm/service/logs/resource_ibm_logs_stream.go b/ibm/service/logs/resource_ibm_logs_stream.go new file mode 100644 index 0000000000..5989d16061 --- /dev/null +++ b/ibm/service/logs/resource_ibm_logs_stream.go @@ -0,0 +1,361 @@ +// Copyright IBM Corp. 2024 All Rights Reserved. 
+// Licensed under the Mozilla Public License v2.0 + +/* + * IBM OpenAPI Terraform Generator Version: 3.96.0-d6dec9d7-20241008-212902 + */ + +package logs + +import ( + "context" + "fmt" + "log" + "strconv" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + "github.com/IBM-Cloud/terraform-provider-ibm/ibm/conns" + "github.com/IBM-Cloud/terraform-provider-ibm/ibm/flex" + "github.com/IBM-Cloud/terraform-provider-ibm/ibm/validate" + "github.com/IBM/go-sdk-core/v5/core" + "github.com/IBM/logs-go-sdk/logsv0" +) + +func ResourceIbmLogsStream() *schema.Resource { + return &schema.Resource{ + CreateContext: resourceIbmLogsStreamCreate, + ReadContext: resourceIbmLogsStreamRead, + UpdateContext: resourceIbmLogsStreamUpdate, + DeleteContext: resourceIbmLogsStreamDelete, + Importer: &schema.ResourceImporter{}, + + Schema: map[string]*schema.Schema{ + "name": &schema.Schema{ + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.InvokeValidator("ibm_logs_stream", "name"), + Description: "The name of the Event stream.", + }, + "is_active": &schema.Schema{ + Type: schema.TypeBool, + Optional: true, + Description: "Whether the Event stream is active.", + }, + "dpxl_expression": &schema.Schema{ + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.InvokeValidator("ibm_logs_stream", "dpxl_expression"), + Description: "The DPXL expression of the Event stream.", + }, + "compression_type": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: validate.InvokeValidator("ibm_logs_stream", "compression_type"), + Description: "The compression type of the stream.", + }, + "ibm_event_streams": &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Configuration for IBM Event Streams.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "brokers": &schema.Schema{ + Type: schema.TypeString, + Required: true, + Description: 
"The brokers of the IBM Event Streams.", + }, + "topic": &schema.Schema{ + Type: schema.TypeString, + Required: true, + Description: "The topic of the IBM Event Streams.", + }, + }, + }, + }, + "streams_id": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The ID of the Event stream.", + }, + "created_at": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The creation time of the Event stream.", + }, + "updated_at": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The update time of the Event stream.", + }, + }, + } +} + +func ResourceIbmLogsStreamValidator() *validate.ResourceValidator { + validateSchema := make([]validate.ValidateSchema, 0) + validateSchema = append(validateSchema, + validate.ValidateSchema{ + Identifier: "name", + ValidateFunctionIdentifier: validate.ValidateRegexpLen, + Type: validate.TypeString, + Required: true, + Regexp: `^[a-zA-ZÀ-ÖØ-öø-ÿĀ-ſΑ-ωА-я一-龥ぁ-ゔァ-ヴー々〆〤0-9_\.,\-"{}()\[\]=!:#\/$|' ]+$`, + MinValueLength: 1, + MaxValueLength: 4096, + }, + validate.ValidateSchema{ + Identifier: "dpxl_expression", + ValidateFunctionIdentifier: validate.ValidateRegexpLen, + Type: validate.TypeString, + Required: true, + Regexp: `^[\p{L}\p{N}\p{P}\p{Z}\p{S}\p{M}]+$`, + MinValueLength: 1, + MaxValueLength: 4096, + }, + validate.ValidateSchema{ + Identifier: "compression_type", + ValidateFunctionIdentifier: validate.ValidateAllowedStringValue, + Type: validate.TypeString, + Optional: true, + AllowedValues: "gzip, unspecified", + }, + ) + + resourceValidator := validate.ResourceValidator{ResourceName: "ibm_logs_stream", Schema: validateSchema} + return &resourceValidator +} + +func resourceIbmLogsStreamCreate(context context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + logsClient, err := meta.(conns.ClientSession).LogsV0() + if err != nil { + tfErr := flex.TerraformErrorf(err, err.Error(), "ibm_logs_rule_group", "create") + log.Printf("[DEBUG]\n%s", 
tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + + region := getLogsInstanceRegion(logsClient, d) + instanceId := d.Get("instance_id").(string) + logsClient = getClientWithLogsInstanceEndpoint(logsClient, instanceId, region, getLogsInstanceEndpointType(logsClient, d)) + + upsertEventStreamTargetOptions := &logsv0.CreateEventStreamTargetOptions{} + + upsertEventStreamTargetOptions.SetName(d.Get("name").(string)) + upsertEventStreamTargetOptions.SetDpxlExpression(d.Get("dpxl_expression").(string)) + if _, ok := d.GetOk("is_active"); ok { + upsertEventStreamTargetOptions.SetIsActive(d.Get("is_active").(bool)) + } + if _, ok := d.GetOk("compression_type"); ok { + upsertEventStreamTargetOptions.SetCompressionType(d.Get("compression_type").(string)) + } + if _, ok := d.GetOk("ibm_event_streams"); ok { + ibmEventStreamsModel, err := ResourceIbmLogsStreamMapToIbmEventStreams(d.Get("ibm_event_streams.0").(map[string]interface{})) + if err != nil { + return flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "create", "parse-ibm_event_streams").GetDiag() + } + upsertEventStreamTargetOptions.SetIbmEventStreams(ibmEventStreamsModel) + } + + stream, _, err := logsClient.CreateEventStreamTargetWithContext(context, upsertEventStreamTargetOptions) + if err != nil { + tfErr := flex.TerraformErrorf(err, fmt.Sprintf("CreateEventStreamTargetWithContext failed: %s", err.Error()), "ibm_logs_stream", "create") + log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + streamsID := fmt.Sprintf("%s/%s/%d", region, instanceId, *stream.ID) + d.SetId(streamsID) + + return resourceIbmLogsStreamRead(context, d, meta) +} + +func resourceIbmLogsStreamRead(context context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + logsClient, err := meta.(conns.ClientSession).LogsV0() + if err != nil { + tfErr := flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "read", "initialize-client") + 
log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + logsClient, region, instanceId, streamsID, err := updateClientURLWithInstanceEndpoint(d.Id(), logsClient, d) + if err != nil { + return diag.FromErr(err) + } + streamsIDInt, _ := strconv.ParseInt(streamsID, 10, 64) + + getEventStreamTargetsOptions := &logsv0.GetEventStreamTargetsOptions{} + + // getEventStreamTargetsOptions.SetID(streamsID) + + streamCollection, response, err := logsClient.GetEventStreamTargetsWithContext(context, getEventStreamTargetsOptions) + if err != nil { + if response != nil && response.StatusCode == 404 { + d.SetId("") + return nil + } + tfErr := flex.TerraformErrorf(err, fmt.Sprintf("GetEventStreamTargetsWithContext failed: %s", err.Error()), "ibm_logs_stream", "read") + log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + if streamCollection != nil { + streamIds := make(map[int64]interface{}, 0) + for _, stream := range streamCollection.Streams { + streamIds[*stream.ID] = nil + if *stream.ID == streamsIDInt { + if err = d.Set("streams_id", streamsID); err != nil { + return diag.FromErr(fmt.Errorf("Error setting rule_group_id: %s", err)) + } + if err = d.Set("instance_id", instanceId); err != nil { + return diag.FromErr(fmt.Errorf("Error setting instance_id: %s", err)) + } + if err = d.Set("region", region); err != nil { + return diag.FromErr(fmt.Errorf("Error setting region: %s", err)) + } + + if err = d.Set("name", stream.Name); err != nil { + err = fmt.Errorf("Error setting name: %s", err) + return flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "read", "set-name").GetDiag() + } + if !core.IsNil(stream.IsActive) { + if err = d.Set("is_active", stream.IsActive); err != nil { + err = fmt.Errorf("Error setting is_active: %s", err) + return flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "read", "set-is_active").GetDiag() + } + } + if err = d.Set("dpxl_expression", 
stream.DpxlExpression); err != nil { + err = fmt.Errorf("Error setting dpxl_expression: %s", err) + return flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "read", "set-dpxl_expression").GetDiag() + } + if !core.IsNil(stream.CompressionType) { + if err = d.Set("compression_type", stream.CompressionType); err != nil { + err = fmt.Errorf("Error setting compression_type: %s", err) + return flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "read", "set-compression_type").GetDiag() + } + } + if !core.IsNil(stream.IbmEventStreams) { + ibmEventStreamsMap, err := ResourceIbmLogsStreamIbmEventStreamsToMap(stream.IbmEventStreams) + if err != nil { + return flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "read", "ibm_event_streams-to-map").GetDiag() + } + if err = d.Set("ibm_event_streams", []map[string]interface{}{ibmEventStreamsMap}); err != nil { + err = fmt.Errorf("Error setting ibm_event_streams: %s", err) + return flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "read", "set-ibm_event_streams").GetDiag() + } + } + if !core.IsNil(stream.CreatedAt) { + if err = d.Set("created_at", flex.DateTimeToString(stream.CreatedAt)); err != nil { + err = fmt.Errorf("Error setting created_at: %s", err) + return flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "read", "set-created_at").GetDiag() + } + } + if !core.IsNil(stream.UpdatedAt) { + if err = d.Set("updated_at", flex.DateTimeToString(stream.UpdatedAt)); err != nil { + err = fmt.Errorf("Error setting updated_at: %s", err) + return flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "read", "set-updated_at").GetDiag() + } + } + } + } + if _, ok := streamIds[streamsIDInt]; !ok { + d.SetId("") + return nil + } + } + + return nil +} + +func resourceIbmLogsStreamUpdate(context context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + logsClient, err := 
meta.(conns.ClientSession).LogsV0() + if err != nil { + tfErr := flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "update", "initialize-client") + log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + logsClient, _, _, streamsID, err := updateClientURLWithInstanceEndpoint(d.Id(), logsClient, d) + if err != nil { + return diag.FromErr(err) + } + streamsIDInt, _ := strconv.ParseInt(streamsID, 10, 64) + updateEventStreamTargetOptions := &logsv0.UpdateEventStreamTargetOptions{} + + updateEventStreamTargetOptions.SetID(streamsIDInt) + + hasChange := false + + if d.HasChange("name") || + d.HasChange("dpxl_expression") || + d.HasChange("is_active") || + d.HasChange("compression_type") || + d.HasChange("ibm_event_streams") { + + updateEventStreamTargetOptions.SetName(d.Get("name").(string)) + + updateEventStreamTargetOptions.SetDpxlExpression(d.Get("dpxl_expression").(string)) + + updateEventStreamTargetOptions.SetIsActive(d.Get("is_active").(bool)) + + updateEventStreamTargetOptions.SetCompressionType(d.Get("compression_type").(string)) + + ibmEventStreams, err := ResourceIbmLogsStreamMapToIbmEventStreams(d.Get("ibm_event_streams.0").(map[string]interface{})) + if err != nil { + return flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "update", "parse-ibm_event_streams").GetDiag() + } + updateEventStreamTargetOptions.SetIbmEventStreams(ibmEventStreams) + + hasChange = true + } + + if hasChange { + _, _, err = logsClient.UpdateEventStreamTargetWithContext(context, updateEventStreamTargetOptions) + if err != nil { + tfErr := flex.TerraformErrorf(err, fmt.Sprintf("UpdateEventStreamTargetWithContext failed: %s", err.Error()), "ibm_logs_stream", "update") + log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + } + + return resourceIbmLogsStreamRead(context, d, meta) +} + +func resourceIbmLogsStreamDelete(context context.Context, d *schema.ResourceData, meta interface{}) 
diag.Diagnostics { + logsClient, err := meta.(conns.ClientSession).LogsV0() + if err != nil { + tfErr := flex.DiscriminatedTerraformErrorf(err, err.Error(), "ibm_logs_stream", "delete", "initialize-client") + log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + logsClient, _, _, streamsID, err := updateClientURLWithInstanceEndpoint(d.Id(), logsClient, d) + if err != nil { + return diag.FromErr(err) + } + streamsIDInt, _ := strconv.ParseInt(streamsID, 10, 64) + deleteEventStreamTargetOptions := &logsv0.DeleteEventStreamTargetOptions{} + + deleteEventStreamTargetOptions.SetID(streamsIDInt) + + _, err = logsClient.DeleteEventStreamTargetWithContext(context, deleteEventStreamTargetOptions) + if err != nil { + tfErr := flex.TerraformErrorf(err, fmt.Sprintf("DeleteEventStreamTargetWithContext failed: %s", err.Error()), "ibm_logs_stream", "delete") + log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage()) + return tfErr.GetDiag() + } + + d.SetId("") + + return nil +} + +func ResourceIbmLogsStreamMapToIbmEventStreams(modelMap map[string]interface{}) (*logsv0.IbmEventStreams, error) { + model := &logsv0.IbmEventStreams{} + model.Brokers = core.StringPtr(modelMap["brokers"].(string)) + model.Topic = core.StringPtr(modelMap["topic"].(string)) + return model, nil +} + +func ResourceIbmLogsStreamIbmEventStreamsToMap(model *logsv0.IbmEventStreams) (map[string]interface{}, error) { + modelMap := make(map[string]interface{}) + modelMap["brokers"] = *model.Brokers + modelMap["topic"] = *model.Topic + return modelMap, nil +} diff --git a/ibm/service/logs/resource_ibm_logs_stream_test.go b/ibm/service/logs/resource_ibm_logs_stream_test.go new file mode 100644 index 0000000000..3f51d26e25 --- /dev/null +++ b/ibm/service/logs/resource_ibm_logs_stream_test.go @@ -0,0 +1,193 @@ +// Copyright IBM Corp. 2024 All Rights Reserved. 
+// Licensed under the Mozilla Public License v2.0 + +package logs_test + +import ( + "fmt" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + acc "github.com/IBM-Cloud/terraform-provider-ibm/ibm/acctest" + "github.com/IBM-Cloud/terraform-provider-ibm/ibm/conns" + "github.com/IBM-Cloud/terraform-provider-ibm/ibm/flex" + "github.com/IBM/logs-go-sdk/logsv0" +) + +func TestAccIbmLogsStreamBasic(t *testing.T) { + var conf logsv0.Stream + name := fmt.Sprintf("tf_name_%d", acctest.RandIntRange(10, 100)) + dpxlExpression := "contains(kubernetes.labels.CX_AZ, 'eu-west-1')" + nameUpdate := fmt.Sprintf("tf_name_%d", acctest.RandIntRange(10, 100)) + dpxlExpressionUpdate := "contains(kubernetes.labels.CX_AZ, 'eu-west-2')" + resource.Test(t, resource.TestCase{ + PreCheck: func() { acc.TestAccPreCheckCloudLogs(t) }, + Providers: acc.TestAccProviders, + CheckDestroy: testAccCheckIbmLogsStreamDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccCheckIbmLogsStreamConfigBasic(name, dpxlExpression), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckIbmLogsStreamExists("ibm_logs_stream.logs_stream_instance", conf), + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "name", name), + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "dpxl_expression", dpxlExpression), + ), + }, + resource.TestStep{ + Config: testAccCheckIbmLogsStreamConfigBasic(nameUpdate, dpxlExpressionUpdate), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "name", nameUpdate), + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "dpxl_expression", dpxlExpressionUpdate), + ), + }, + }, + }) +} + +func TestAccIbmLogsStreamAllArgs(t *testing.T) { + var conf logsv0.Stream + name := 
fmt.Sprintf("tf_name_%d", acctest.RandIntRange(10, 100)) + isActive := "false" + dpxlExpression := "contains(kubernetes.labels.CX_AZ, 'eu-west-1')" + compressionType := "gzip" + nameUpdate := fmt.Sprintf("tf_name_%d", acctest.RandIntRange(10, 100)) + isActiveUpdate := "true" + dpxlExpressionUpdate := "contains(kubernetes.labels.CX_AZ, 'eu-west-2')" + compressionTypeUpdate := "gzip" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acc.TestAccPreCheckCloudLogs(t) }, + Providers: acc.TestAccProviders, + CheckDestroy: testAccCheckIbmLogsStreamDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccCheckIbmLogsStreamConfig(name, isActive, dpxlExpression, compressionType), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckIbmLogsStreamExists("ibm_logs_stream.logs_stream_instance", conf), + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "name", name), + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "is_active", isActive), + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "dpxl_expression", dpxlExpression), + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "compression_type", compressionType), + ), + }, + resource.TestStep{ + Config: testAccCheckIbmLogsStreamConfig(nameUpdate, isActiveUpdate, dpxlExpressionUpdate, compressionTypeUpdate), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "name", nameUpdate), + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "is_active", isActiveUpdate), + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "dpxl_expression", dpxlExpressionUpdate), + resource.TestCheckResourceAttr("ibm_logs_stream.logs_stream_instance", "compression_type", compressionTypeUpdate), + ), + }, + resource.TestStep{ + ResourceName: "ibm_logs_stream.logs_stream_instance", + ImportState: true, + ImportStateVerify: 
true, + }, + }, + }) +} + +func testAccCheckIbmLogsStreamConfigBasic(name string, dpxlExpression string) string { + return fmt.Sprintf(` + resource "ibm_logs_stream" "logs_stream_instance" { + instance_id = "%s" + region = "%s" + name = "%s" + dpxl_expression = "%s" + compression_type = "gzip" + ibm_event_streams { + brokers = "kafka01.example.com:9093" + topic = "live.screen" + } + } + `, acc.LogsInstanceId, acc.LogsInstanceRegion, name, dpxlExpression) +} + +func testAccCheckIbmLogsStreamConfig(name string, isActive string, dpxlExpression string, compressionType string) string { + return fmt.Sprintf(` + + resource "ibm_logs_stream" "logs_stream_instance" { + instance_id = "%s" + region = "%s" + name = "%s" + is_active = %s + dpxl_expression = "%s" + compression_type = "%s" + ibm_event_streams { + brokers = "kafka01.example.com:9093" + topic = "live.screen" + } + } + `, acc.LogsInstanceId, acc.LogsInstanceRegion, name, isActive, dpxlExpression, compressionType) +} + +func testAccCheckIbmLogsStreamExists(n string, obj logsv0.Stream) resource.TestCheckFunc { + + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + logsClient, err := acc.TestAccProvider.Meta().(conns.ClientSession).LogsV0() + if err != nil { + return err + } + logsClient = getTestClientWithLogsInstanceEndpoint(logsClient) + + resourceID, err := flex.IdParts(rs.Primary.ID) + if err != nil { + return err + } + streamsIDInt, _ := strconv.ParseInt(resourceID[2], 10, 64) + + getEventStreamTargetsOptions := &logsv0.GetEventStreamTargetsOptions{} + + stream, _, err := logsClient.GetEventStreamTargets(getEventStreamTargetsOptions) + if err != nil { + return err + } + for _, stream := range stream.Streams { + if stream.ID == &streamsIDInt { + obj = stream + return nil + } + } + return nil + } +} + +func testAccCheckIbmLogsStreamDestroy(s *terraform.State) error { + logsClient, err := 
acc.TestAccProvider.Meta().(conns.ClientSession).LogsV0() + if err != nil { + return err + } + logsClient = getTestClientWithLogsInstanceEndpoint(logsClient) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "ibm_logs_stream" { + continue + } + + getEventStreamTargetsOptions := &logsv0.GetEventStreamTargetsOptions{} + resourceID, err := flex.IdParts(rs.Primary.ID) + if err != nil { + return err + } + streamsIDInt, _ := strconv.ParseInt(resourceID[2], 10, 64) + // Try to find the key + streams, _, _ := logsClient.GetEventStreamTargets(getEventStreamTargetsOptions) + for _, stream := range streams.Streams { + if stream.ID == &streamsIDInt { + return fmt.Errorf("logs_streams still exists: %s", rs.Primary.ID) + } + } + } + + return nil +} diff --git a/website/docs/d/logs_outgoing_webhook.html.markdown b/website/docs/d/logs_outgoing_webhook.html.markdown index 05bca20040..e8828428ef 100644 --- a/website/docs/d/logs_outgoing_webhook.html.markdown +++ b/website/docs/d/logs_outgoing_webhook.html.markdown @@ -45,7 +45,8 @@ Nested schema for **ibm_event_notifications**: * Constraints: The maximum length is `36` characters. The minimum length is `36` characters. The value must match regular expression `/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/`. * `region_id` - (String) The region ID of the selected IBM Event Notifications instance. * Constraints: The maximum length is `4096` characters. The minimum length is `4` characters. The value must match regular expression `/^[a-z]{2}-[a-z]+$/`. - * `source_id` - (String) The ID of the created source in the IBM Event Notifications instance. Corresponds to the Cloud Logs instance crn. Not required when creating an Outbound Integration. + * `endpoint_type` - (String) The endpoint type of integration. + * `source_id` - (String) The ID of the created source in the IBM Event Notifications instance. Corresponds to the Cloud Logs instance crn. Not required when creating an Outbound Integration. 
* Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. * `source_name` - (String) The name of the created source in the IBM Event Notifications instance. Not required when creating an Outbound Integration. * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. diff --git a/website/docs/d/logs_stream.html.markdown b/website/docs/d/logs_stream.html.markdown new file mode 100644 index 0000000000..8f865188be --- /dev/null +++ b/website/docs/d/logs_stream.html.markdown @@ -0,0 +1,51 @@ +--- +layout: "ibm" +page_title: "IBM : ibm_logs_stream" +description: |- + Get information about logs_stream +subcategory: "Cloud Logs" +--- + +# ibm_logs_stream + +Provides a read-only data source to retrieve information about a logs_stream. You can then reference the fields of the data source in other resources within the same configuration by using interpolation syntax. + +## Example Usage + +```hcl +data "ibm_logs_stream" "logs_stream" { + instance_id = ibm_logs_stream.logs_stream_instance.instance_id + region = ibm_logs_stream.logs_stream_instance.region + logs_streams_id = ibm_logs_stream.logs_stream_instance.streams_id +} +``` + +## Argument Reference + +You can specify the following arguments for this data source. + +* `instance_id` - (Required, String) Cloud Logs Instance GUID. +* `region` - (Optional, String) Cloud Logs Instance Region. +* `logs_streams_id` - (Required, String) Streams ID. + +## Attribute Reference + +After your data source is created, you can read values from the following attributes. + +* `id` - The unique identifier of the logs_stream. +* `compression_type` - (String) The compression type of the stream. + * Constraints: Allowable values are: `unspecified`, `gzip`. 
+* `created_at` - (String) The creation time of the Event stream. +* `dpxl_expression` - (String) The DPXL expression of the Event stream. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. +* `ibm_event_streams` - (List) Configuration for IBM Event Streams. +Nested schema for **ibm_event_streams**: + * `brokers` - (String) The brokers of the IBM Event Streams. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. + * `topic` - (String) The topic of the IBM Event Streams. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. +* `is_active` - (Boolean) Whether the Event stream is active. +* `name` - (String) The name of the Event stream. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. +* `updated_at` - (String) The update time of the Event stream. + diff --git a/website/docs/d/logs_streams.html.markdown b/website/docs/d/logs_streams.html.markdown new file mode 100644 index 0000000000..c58ead6501 --- /dev/null +++ b/website/docs/d/logs_streams.html.markdown @@ -0,0 +1,53 @@ +--- +layout: "ibm" +page_title: "IBM : ibm_logs_streams" +description: |- + Get information about logs_streams +subcategory: "Cloud Logs" +--- + +# ibm_logs_streams + +Provides a read-only data source to retrieve information about logs_streams. You can then reference the fields of the data source in other resources within the same configuration by using interpolation syntax. 
+ +## Example Usage + +```hcl +data "ibm_logs_streams" "logs_streams" { + instance_id = ibm_resource_instance.logs_instance.guid + region = ibm_resource_instance.logs_instance.location +} +``` + +## Argument Reference + +You can specify the following arguments for this data source. +* `instance_id` - (Required, Forces new resource, String) Cloud Logs Instance GUID. +* `region` - (Optional, Forces new resource, String) Cloud Logs Instance Region. + +## Attribute Reference + +After your data source is created, you can read values from the following attributes. + +* `id` - The unique identifier of the logs_streams. +* `streams` - (List) Collection of Event Streams. + * Constraints: The maximum length is `4096` items. The minimum length is `0` items. +Nested schema for **streams**: + * `compression_type` - (String) The compression type of the stream. + * Constraints: Allowable values are: `unspecified`, `gzip`. + * `created_at` - (String) The creation time of the Event stream. + * `dpxl_expression` - (String) The DPXL expression of the Event stream. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. + * `ibm_event_streams` - (List) Configuration for IBM Event Streams. + Nested schema for **ibm_event_streams**: + * `brokers` - (String) The brokers of the IBM Event Streams. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. + * `topic` - (String) The topic of the IBM Event Streams. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. + * `id` - (Integer) The ID of the Event stream. + * Constraints: The maximum value is `4294967295`. The minimum value is `0`.
+ * `is_active` - (Boolean) Whether the Event stream is active. + * `name` - (String) The name of the Event stream. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. + * `updated_at` - (String) The update time of the Event stream. + diff --git a/website/docs/r/logs_alert.html.markdown b/website/docs/r/logs_alert.html.markdown index 5b19c2b95f..4cedb00dfc 100644 --- a/website/docs/r/logs_alert.html.markdown +++ b/website/docs/r/logs_alert.html.markdown @@ -6,12 +6,12 @@ description: |- subcategory: "Cloud Logs" --- - # ibm_logs_alert Create, update, and delete logs_alerts with this resource. ## Example Usage + ```hcl resource "ibm_resource_instance" "logs_instance" { name = "logs-instance" @@ -69,13 +69,19 @@ Nested schema for **active_when**: * `end` - (Required, List) Start time. Nested schema for **end**: * `hours` - (Optional, Integer) Hours of the day. + * Constraints: The maximum value is `24`. * `minutes` - (Optional, Integer) Minutes of the hour. + * Constraints: The maximum value is `59`. * `seconds` - (Optional, Integer) Seconds of the minute. + * Constraints: The maximum value is `59`. * `start` - (Required, List) Start time. Nested schema for **start**: * `hours` - (Optional, Integer) Hours of the day. + * Constraints: The maximum value is `24`. * `minutes` - (Optional, Integer) Minutes of the hour. + * Constraints: The maximum value is `59`. * `seconds` - (Optional, Integer) Seconds of the minute. + * Constraints: The maximum value is `59`. * `condition` - (Required, List) Alert condition. Nested schema for **condition**: * `flow` - (Optional, List) Condition for flow alert. @@ -84,35 +90,35 @@ Nested schema for **condition**: * `parameters` - (Optional, List) The Less than alert condition parameters. Nested schema for **parameters**: * `cardinality_fields` - (Optional, List) Cardinality fields for unique count alert. 
- * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `4096` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `4096` items. The minimum length is `0` items. * `group_by` - (Optional, List) The group by fields for the alert condition. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `3` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `3` items. The minimum length is `0` items. * `ignore_infinity` - (Optional, Boolean) Should the evaluation ignore infinity value. * `metric_alert_parameters` - (Optional, List) The lucene metric alert parameters if it is a lucene metric alert. Nested schema for **metric_alert_parameters**: * `arithmetic_operator` - (Required, String) The arithmetic operator of the metric promql alert. * Constraints: Allowable values are: `avg_or_unspecified`, `min`, `max`, `sum`, `count`, `percentile`. * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator modifier of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `metric_field` - (Required, String) The metric field of the metric alert. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `metric_source` - (Required, String) The metric source of the metric alert. 
* Constraints: Allowable values are: `logs2metrics_or_unspecified`, `prometheus`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `sample_threshold_percentage` - (Optional, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `metric_alert_promql_parameters` - (Optional, List) The promql metric alert parameters if is is a promql metric alert. Nested schema for **metric_alert_promql_parameters**: * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `promql_text` - (Required, String) The promql text of the metric alert by fields for the alert condition. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `sample_threshold_percentage` - (Required, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. 
* `related_extended_data` - (Optional, List) Deadman configuration. Nested schema for **related_extended_data**: @@ -121,7 +127,7 @@ Nested schema for **condition**: * `should_trigger_deadman` - (Optional, Boolean) Should we trigger deadman. * `relative_timeframe` - (Optional, String) The relative timeframe for time relative alerts. * Constraints: Allowable values are: `hour_or_unspecified`, `day`, `week`, `month`. - * `threshold` - (Required, Float) The threshold for the alert condition. + * `threshold` - (Optional, Float) The threshold for the alert condition. * `timeframe` - (Required, String) The timeframe for the alert condition. * Constraints: Allowable values are: `timeframe_5_min_or_unspecified`, `timeframe_10_min`, `timeframe_20_min`, `timeframe_30_min`, `timeframe_1_h`, `timeframe_2_h`, `timeframe_3_h`, `timeframe_4_h`, `timeframe_6_h`, `timeframe_12_h`, `timeframe_24_h`, `timeframe_48_h`, `timeframe_72_h`, `timeframe_1_w`, `timeframe_1_m`, `timeframe_2_m`, `timeframe_3_m`, `timeframe_15_min`, `timeframe_1_min`, `timeframe_2_min`, `timeframe_36_h`. * `stages` - (Optional, List) The Flow alert condition parameters. @@ -138,14 +144,14 @@ Nested schema for **condition**: * Constraints: The maximum length is `4096` items. The minimum length is `0` items. Nested schema for **values**: * `id` - (Optional, String) The alert ID. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `not` - (Optional, Boolean) The alert not. * `next_op` - (Optional, String) Operator for the alerts. * Constraints: Allowable values are: `and`, `or`. * `timeframe` - (Optional, List) Timeframe for the flow. 
Nested schema for **timeframe**: * `ms` - (Optional, Integer) Timeframe in milliseconds. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `immediate` - (Optional, List) Condition for immediate standard alert. Nested schema for **immediate**: * `less_than` - (Optional, List) Condition for less than alert. @@ -153,35 +159,35 @@ Nested schema for **condition**: * `parameters` - (Required, List) The Less than alert condition parameters. Nested schema for **parameters**: * `cardinality_fields` - (Optional, List) Cardinality fields for unique count alert. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `4096` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `4096` items. The minimum length is `0` items. * `group_by` - (Optional, List) The group by fields for the alert condition. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `3` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `3` items. The minimum length is `0` items. * `ignore_infinity` - (Optional, Boolean) Should the evaluation ignore infinity value. * `metric_alert_parameters` - (Optional, List) The lucene metric alert parameters if it is a lucene metric alert. Nested schema for **metric_alert_parameters**: * `arithmetic_operator` - (Required, String) The arithmetic operator of the metric promql alert. * Constraints: Allowable values are: `avg_or_unspecified`, `min`, `max`, `sum`, `count`, `percentile`. * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator modifier of the metric promql alert. 
- * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `metric_field` - (Required, String) The metric field of the metric alert. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `metric_source` - (Required, String) The metric source of the metric alert. * Constraints: Allowable values are: `logs2metrics_or_unspecified`, `prometheus`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `sample_threshold_percentage` - (Optional, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `metric_alert_promql_parameters` - (Optional, List) The promql metric alert parameters if is is a promql metric alert. Nested schema for **metric_alert_promql_parameters**: * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `promql_text` - (Required, String) The promql text of the metric alert by fields for the alert condition. 
- * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `sample_threshold_percentage` - (Required, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `related_extended_data` - (Optional, List) Deadman configuration. Nested schema for **related_extended_data**: @@ -190,7 +196,7 @@ Nested schema for **condition**: * `should_trigger_deadman` - (Optional, Boolean) Should we trigger deadman. * `relative_timeframe` - (Optional, String) The relative timeframe for time relative alerts. * Constraints: Allowable values are: `hour_or_unspecified`, `day`, `week`, `month`. - * `threshold` - (Required, Float) The threshold for the alert condition. + * `threshold` - (Optional, Float) The threshold for the alert condition. * `timeframe` - (Required, String) The timeframe for the alert condition. * Constraints: Allowable values are: `timeframe_5_min_or_unspecified`, `timeframe_10_min`, `timeframe_20_min`, `timeframe_30_min`, `timeframe_1_h`, `timeframe_2_h`, `timeframe_3_h`, `timeframe_4_h`, `timeframe_6_h`, `timeframe_12_h`, `timeframe_24_h`, `timeframe_48_h`, `timeframe_72_h`, `timeframe_1_w`, `timeframe_1_m`, `timeframe_2_m`, `timeframe_3_m`, `timeframe_15_min`, `timeframe_1_min`, `timeframe_2_min`, `timeframe_36_h`. * `less_than_usual` - (Optional, List) Condition for less than usual alert. @@ -198,35 +204,35 @@ Nested schema for **condition**: * `parameters` - (Required, List) The Less than alert condition parameters. 
Nested schema for **parameters**: * `cardinality_fields` - (Optional, List) Cardinality fields for unique count alert. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `4096` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `4096` items. The minimum length is `0` items. * `group_by` - (Optional, List) The group by fields for the alert condition. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `3` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `3` items. The minimum length is `0` items. * `ignore_infinity` - (Optional, Boolean) Should the evaluation ignore infinity value. * `metric_alert_parameters` - (Optional, List) The lucene metric alert parameters if it is a lucene metric alert. Nested schema for **metric_alert_parameters**: * `arithmetic_operator` - (Required, String) The arithmetic operator of the metric promql alert. * Constraints: Allowable values are: `avg_or_unspecified`, `min`, `max`, `sum`, `count`, `percentile`. * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator modifier of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `metric_field` - (Required, String) The metric field of the metric alert. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. 
The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `metric_source` - (Required, String) The metric source of the metric alert. * Constraints: Allowable values are: `logs2metrics_or_unspecified`, `prometheus`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `sample_threshold_percentage` - (Optional, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `metric_alert_promql_parameters` - (Optional, List) The promql metric alert parameters if is is a promql metric alert. Nested schema for **metric_alert_promql_parameters**: * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `promql_text` - (Required, String) The promql text of the metric alert by fields for the alert condition. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `sample_threshold_percentage` - (Required, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. 
The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `related_extended_data` - (Optional, List) Deadman configuration. Nested schema for **related_extended_data**: @@ -235,7 +241,7 @@ Nested schema for **condition**: * `should_trigger_deadman` - (Optional, Boolean) Should we trigger deadman. * `relative_timeframe` - (Optional, String) The relative timeframe for time relative alerts. * Constraints: Allowable values are: `hour_or_unspecified`, `day`, `week`, `month`. - * `threshold` - (Required, Float) The threshold for the alert condition. + * `threshold` - (Optional, Float) The threshold for the alert condition. * `timeframe` - (Required, String) The timeframe for the alert condition. * Constraints: Allowable values are: `timeframe_5_min_or_unspecified`, `timeframe_10_min`, `timeframe_20_min`, `timeframe_30_min`, `timeframe_1_h`, `timeframe_2_h`, `timeframe_3_h`, `timeframe_4_h`, `timeframe_6_h`, `timeframe_12_h`, `timeframe_24_h`, `timeframe_48_h`, `timeframe_72_h`, `timeframe_1_w`, `timeframe_1_m`, `timeframe_2_m`, `timeframe_3_m`, `timeframe_15_min`, `timeframe_1_min`, `timeframe_2_min`, `timeframe_36_h`. * `more_than` - (Optional, List) Condition for more than alert. @@ -245,35 +251,35 @@ Nested schema for **condition**: * `parameters` - (Required, List) The Less than alert condition parameters. Nested schema for **parameters**: * `cardinality_fields` - (Optional, List) Cardinality fields for unique count alert. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `4096` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `4096` items. The minimum length is `0` items. * `group_by` - (Optional, List) The group by fields for the alert condition. 
- * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `3` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `3` items. The minimum length is `0` items. * `ignore_infinity` - (Optional, Boolean) Should the evaluation ignore infinity value. * `metric_alert_parameters` - (Optional, List) The lucene metric alert parameters if it is a lucene metric alert. Nested schema for **metric_alert_parameters**: * `arithmetic_operator` - (Required, String) The arithmetic operator of the metric promql alert. * Constraints: Allowable values are: `avg_or_unspecified`, `min`, `max`, `sum`, `count`, `percentile`. * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator modifier of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `metric_field` - (Required, String) The metric field of the metric alert. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `metric_source` - (Required, String) The metric source of the metric alert. * Constraints: Allowable values are: `logs2metrics_or_unspecified`, `prometheus`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `sample_threshold_percentage` - (Optional, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. 
The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `metric_alert_promql_parameters` - (Optional, List) The promql metric alert parameters if is is a promql metric alert. Nested schema for **metric_alert_promql_parameters**: * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `promql_text` - (Required, String) The promql text of the metric alert by fields for the alert condition. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `sample_threshold_percentage` - (Required, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `related_extended_data` - (Optional, List) Deadman configuration. Nested schema for **related_extended_data**: @@ -282,7 +288,7 @@ Nested schema for **condition**: * `should_trigger_deadman` - (Optional, Boolean) Should we trigger deadman. * `relative_timeframe` - (Optional, String) The relative timeframe for time relative alerts. * Constraints: Allowable values are: `hour_or_unspecified`, `day`, `week`, `month`. 
- * `threshold` - (Required, Float) The threshold for the alert condition. + * `threshold` - (Optional, Float) The threshold for the alert condition. * `timeframe` - (Required, String) The timeframe for the alert condition. * Constraints: Allowable values are: `timeframe_5_min_or_unspecified`, `timeframe_10_min`, `timeframe_20_min`, `timeframe_30_min`, `timeframe_1_h`, `timeframe_2_h`, `timeframe_3_h`, `timeframe_4_h`, `timeframe_6_h`, `timeframe_12_h`, `timeframe_24_h`, `timeframe_48_h`, `timeframe_72_h`, `timeframe_1_w`, `timeframe_1_m`, `timeframe_2_m`, `timeframe_3_m`, `timeframe_15_min`, `timeframe_1_min`, `timeframe_2_min`, `timeframe_36_h`. * `more_than_usual` - (Optional, List) Condition for more than usual alert. @@ -290,35 +296,35 @@ Nested schema for **condition**: * `parameters` - (Required, List) The Less than alert condition parameters. Nested schema for **parameters**: * `cardinality_fields` - (Optional, List) Cardinality fields for unique count alert. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `4096` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `4096` items. The minimum length is `0` items. * `group_by` - (Optional, List) The group by fields for the alert condition. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `3` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `3` items. The minimum length is `0` items. * `ignore_infinity` - (Optional, Boolean) Should the evaluation ignore infinity value. * `metric_alert_parameters` - (Optional, List) The lucene metric alert parameters if it is a lucene metric alert. 
Nested schema for **metric_alert_parameters**: * `arithmetic_operator` - (Required, String) The arithmetic operator of the metric promql alert. * Constraints: Allowable values are: `avg_or_unspecified`, `min`, `max`, `sum`, `count`, `percentile`. * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator modifier of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `metric_field` - (Required, String) The metric field of the metric alert. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `metric_source` - (Required, String) The metric source of the metric alert. * Constraints: Allowable values are: `logs2metrics_or_unspecified`, `prometheus`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `sample_threshold_percentage` - (Optional, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `metric_alert_promql_parameters` - (Optional, List) The promql metric alert parameters if is is a promql metric alert. Nested schema for **metric_alert_promql_parameters**: * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. 
+ * Constraints: The maximum value is `4294967295`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `promql_text` - (Required, String) The promql text of the metric alert by fields for the alert condition. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `sample_threshold_percentage` - (Required, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `related_extended_data` - (Optional, List) Deadman configuration. Nested schema for **related_extended_data**: @@ -327,7 +333,7 @@ Nested schema for **condition**: * `should_trigger_deadman` - (Optional, Boolean) Should we trigger deadman. * `relative_timeframe` - (Optional, String) The relative timeframe for time relative alerts. * Constraints: Allowable values are: `hour_or_unspecified`, `day`, `week`, `month`. - * `threshold` - (Required, Float) The threshold for the alert condition. + * `threshold` - (Optional, Float) The threshold for the alert condition. * `timeframe` - (Required, String) The timeframe for the alert condition. 
* Constraints: Allowable values are: `timeframe_5_min_or_unspecified`, `timeframe_10_min`, `timeframe_20_min`, `timeframe_30_min`, `timeframe_1_h`, `timeframe_2_h`, `timeframe_3_h`, `timeframe_4_h`, `timeframe_6_h`, `timeframe_12_h`, `timeframe_24_h`, `timeframe_48_h`, `timeframe_72_h`, `timeframe_1_w`, `timeframe_1_m`, `timeframe_2_m`, `timeframe_3_m`, `timeframe_15_min`, `timeframe_1_min`, `timeframe_2_min`, `timeframe_36_h`. * `new_value` - (Optional, List) Condition for new value alert. @@ -335,35 +341,35 @@ Nested schema for **condition**: * `parameters` - (Required, List) The Less than alert condition parameters. Nested schema for **parameters**: * `cardinality_fields` - (Optional, List) Cardinality fields for unique count alert. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `4096` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `4096` items. The minimum length is `0` items. * `group_by` - (Optional, List) The group by fields for the alert condition. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `3` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `3` items. The minimum length is `0` items. * `ignore_infinity` - (Optional, Boolean) Should the evaluation ignore infinity value. * `metric_alert_parameters` - (Optional, List) The lucene metric alert parameters if it is a lucene metric alert. Nested schema for **metric_alert_parameters**: * `arithmetic_operator` - (Required, String) The arithmetic operator of the metric promql alert. * Constraints: Allowable values are: `avg_or_unspecified`, `min`, `max`, `sum`, `count`, `percentile`. 
* `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator modifier of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `metric_field` - (Required, String) The metric field of the metric alert. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `metric_source` - (Required, String) The metric source of the metric alert. * Constraints: Allowable values are: `logs2metrics_or_unspecified`, `prometheus`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `sample_threshold_percentage` - (Optional, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `metric_alert_promql_parameters` - (Optional, List) The promql metric alert parameters if is is a promql metric alert. Nested schema for **metric_alert_promql_parameters**: * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. 
* `promql_text` - (Required, String) The promql text of the metric alert by fields for the alert condition. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `sample_threshold_percentage` - (Required, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `related_extended_data` - (Optional, List) Deadman configuration. Nested schema for **related_extended_data**: @@ -372,7 +378,7 @@ Nested schema for **condition**: * `should_trigger_deadman` - (Optional, Boolean) Should we trigger deadman. * `relative_timeframe` - (Optional, String) The relative timeframe for time relative alerts. * Constraints: Allowable values are: `hour_or_unspecified`, `day`, `week`, `month`. - * `threshold` - (Required, Float) The threshold for the alert condition. + * `threshold` - (Optional, Float) The threshold for the alert condition. * `timeframe` - (Required, String) The timeframe for the alert condition. * Constraints: Allowable values are: `timeframe_5_min_or_unspecified`, `timeframe_10_min`, `timeframe_20_min`, `timeframe_30_min`, `timeframe_1_h`, `timeframe_2_h`, `timeframe_3_h`, `timeframe_4_h`, `timeframe_6_h`, `timeframe_12_h`, `timeframe_24_h`, `timeframe_48_h`, `timeframe_72_h`, `timeframe_1_w`, `timeframe_1_m`, `timeframe_2_m`, `timeframe_3_m`, `timeframe_15_min`, `timeframe_1_min`, `timeframe_2_min`, `timeframe_36_h`. * `unique_count` - (Optional, List) Condition for unique count alert. 
@@ -380,35 +386,35 @@ Nested schema for **condition**: * `parameters` - (Required, List) The Less than alert condition parameters. Nested schema for **parameters**: * `cardinality_fields` - (Optional, List) Cardinality fields for unique count alert. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `4096` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `4096` items. The minimum length is `0` items. * `group_by` - (Optional, List) The group by fields for the alert condition. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `3` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `3` items. The minimum length is `0` items. * `ignore_infinity` - (Optional, Boolean) Should the evaluation ignore infinity value. * `metric_alert_parameters` - (Optional, List) The lucene metric alert parameters if it is a lucene metric alert. Nested schema for **metric_alert_parameters**: * `arithmetic_operator` - (Required, String) The arithmetic operator of the metric promql alert. * Constraints: Allowable values are: `avg_or_unspecified`, `min`, `max`, `sum`, `count`, `percentile`. * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator modifier of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `metric_field` - (Required, String) The metric field of the metric alert. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. 
+ * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `metric_source` - (Required, String) The metric source of the metric alert. * Constraints: Allowable values are: `logs2metrics_or_unspecified`, `prometheus`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `sample_threshold_percentage` - (Optional, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `metric_alert_promql_parameters` - (Optional, List) The promql metric alert parameters if is is a promql metric alert. Nested schema for **metric_alert_promql_parameters**: * `arithmetic_operator_modifier` - (Optional, Integer) The arithmetic operator of the metric promql alert. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `non_null_percentage` - (Optional, Integer) Non null percentage of the evaluation. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `promql_text` - (Required, String) The promql text of the metric alert by fields for the alert condition. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. 
* `sample_threshold_percentage` - (Required, Integer) The threshold percentage. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `swap_null_values` - (Optional, Boolean) Should we swap null values with zero. * `related_extended_data` - (Optional, List) Deadman configuration. Nested schema for **related_extended_data**: @@ -417,76 +423,79 @@ Nested schema for **condition**: * `should_trigger_deadman` - (Optional, Boolean) Should we trigger deadman. * `relative_timeframe` - (Optional, String) The relative timeframe for time relative alerts. * Constraints: Allowable values are: `hour_or_unspecified`, `day`, `week`, `month`. - * `threshold` - (Required, Float) The threshold for the alert condition. + * `threshold` - (Optional, Float) The threshold for the alert condition. * `timeframe` - (Required, String) The timeframe for the alert condition. * Constraints: Allowable values are: `timeframe_5_min_or_unspecified`, `timeframe_10_min`, `timeframe_20_min`, `timeframe_30_min`, `timeframe_1_h`, `timeframe_2_h`, `timeframe_3_h`, `timeframe_4_h`, `timeframe_6_h`, `timeframe_12_h`, `timeframe_24_h`, `timeframe_48_h`, `timeframe_72_h`, `timeframe_1_w`, `timeframe_1_m`, `timeframe_2_m`, `timeframe_3_m`, `timeframe_15_min`, `timeframe_1_min`, `timeframe_2_min`, `timeframe_36_h`. * `description` - (Optional, String) Alert description. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[A-Za-z0-9_\\-\\s]+$/`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}0-9_\\-\\s]+$/`. * `expiration` - (Optional, List) Alert expiration date. Nested schema for **expiration**: * `day` - (Optional, Integer) Day of the month. + * Constraints: The maximum value is `31`. The minimum value is `1`. 
* `month` - (Optional, Integer) Month of the year. + * Constraints: The maximum value is `12`. The minimum value is `1`. * `year` - (Optional, Integer) Year. -* `filters` - (Required, List) Alert filters. + * Constraints: The maximum value is `2147483647`. +* `filters` - (Optional, List) Alert filters. Nested schema for **filters**: * `alias` - (Optional, String) The alias of the filter. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `filter_type` - (Optional, String) The type of the filter. * Constraints: Allowable values are: `text_or_unspecified`, `template`, `ratio`, `unique_count`, `time_relative`, `metric`, `flow`. * `metadata` - (Optional, List) The metadata filters. Nested schema for **metadata**: * `applications` - (Optional, List) The applications to filter. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `100` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `100` items. The minimum length is `0` items. * `subsystems` - (Optional, List) The subsystems to filter. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `100` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `100` items. The minimum length is `0` items. * `ratio_alerts` - (Optional, List) The ratio alerts. * Constraints: The maximum length is `4096` items. The minimum length is `0` items. 
Nested schema for **ratio_alerts**: * `alias` - (Required, String) The alias of the filter. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `applications` - (Optional, List) The applications to filter. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `100` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `100` items. The minimum length is `0` items. * `group_by` - (Optional, List) The group by fields. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `100` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `100` items. The minimum length is `0` items. * `severities` - (Optional, List) The severities to filter. * Constraints: Allowable list items are: `debug_or_unspecified`, `verbose`, `info`, `warning`, `error`, `critical`. The maximum length is `4096` items. The minimum length is `0` items. * `subsystems` - (Optional, List) The subsystems to filter. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `100` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `100` items. The minimum length is `0` items. * `text` - (Optional, String) The text to filter. 
- * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `severities` - (Optional, List) The severity of the logs to filter. * Constraints: Allowable list items are: `debug_or_unspecified`, `verbose`, `info`, `warning`, `error`, `critical`. The maximum length is `4096` items. The minimum length is `0` items. * `text` - (Optional, String) The text to filter. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `incident_settings` - (Optional, List) Incident settings, will create the incident based on this configuration. Nested schema for **incident_settings**: * `notify_on` - (Optional, String) Notify on setting. * Constraints: Allowable values are: `triggered_only`, `triggered_and_resolved`. * `retriggering_period_seconds` - (Optional, Integer) The retriggering period of the alert in seconds. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `use_as_notification_settings` - (Optional, Boolean) Use these settings for all notificaion webhook. * `is_active` - (Required, Boolean) Alert is active. * `meta_labels` - (Optional, List) The Meta labels to add to the alert. * Constraints: The maximum length is `200` items. The minimum length is `0` items. Nested schema for **meta_labels**: * `key` - (Optional, String) The key of the label. 
- * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `value` - (Optional, String) The value of the label. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. * `meta_labels_strings` - (Optional, List) The Meta labels to add to the alert as string with ':' separator. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `4096` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `4096` items. The minimum length is `0` items. * `name` - (Required, String) Alert name. - * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. -* `notification_groups` - (Required, List) Alert notification groups. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. +* `notification_groups` - (Optional, List) Alert notification groups. * Constraints: The maximum length is `10` items. The minimum length is `1` item. Nested schema for **notification_groups**: * `group_by_fields` - (Optional, List) Group by fields to group the values by. 
- * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `20` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `20` items. The minimum length is `0` items. * `notifications` - (Optional, List) Webhook target settings for the the notification. * Constraints: The maximum length is `20` items. The minimum length is `0` items. Nested schema for **notifications**: * `integration_id` - (Optional, Integer) Integration ID. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `notify_on` - (Optional, String) Notify on setting. * Constraints: Allowable values are: `triggered_only`, `triggered_and_resolved`. * `recipients` - (Optional, List) Recipients. @@ -494,9 +503,9 @@ Nested schema for **notification_groups**: * `emails` - (Optional, List) Email addresses. * Constraints: The list items must match regular expression `/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$/`. The maximum length is `20` items. The minimum length is `0` items. * `retriggering_period_seconds` - (Optional, Integer) Retriggering period of the alert in seconds. - * Constraints: The maximum value is `4294967295`. The minimum value is `0`. + * Constraints: The maximum value is `4294967295`. * `notification_payload_filters` - (Optional, List) JSON keys to include in the alert notification, if left empty get the full log text in the alert notification. - * Constraints: The list items must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. The maximum length is `100` items. The minimum length is `0` items. + * Constraints: The list items must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. The maximum length is `100` items. The minimum length is `0` items. 
* `severity` - (Required, String) Alert severity. * Constraints: Allowable values are: `info_or_unspecified`, `warning`, `critical`, `error`. diff --git a/website/docs/r/logs_outgoing_webhook.html.markdown b/website/docs/r/logs_outgoing_webhook.html.markdown index d70478e743..4957b45b24 100644 --- a/website/docs/r/logs_outgoing_webhook.html.markdown +++ b/website/docs/r/logs_outgoing_webhook.html.markdown @@ -39,6 +39,7 @@ Nested schema for **ibm_event_notifications**: * Constraints: The maximum length is `36` characters. The minimum length is `36` characters. The value must match regular expression `/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/`. * `region_id` - (Required, String) The region ID of the selected IBM Event Notifications instance. * Constraints: The maximum length is `4096` characters. The minimum length is `4` characters. The value must match regular expression `/^[a-z]{2}-[a-z]+$/`. + * `endpoint_type` - (Optional, String) The endpoint type of integration. Allowed values: `private` and `public`. Default is `public`. * `source_id` - (Optional, String) The ID of the created source in the IBM Event Notifications instance. Corresponds to the Cloud Logs instance crn. Not required when creating an Outbound Integration. * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$`. * `source_name` - (Optional, String) The name of the created source in the IBM Event Notifications instance. Not required when creating an Outbound Integration. diff --git a/website/docs/r/logs_stream.html.markdown b/website/docs/r/logs_stream.html.markdown new file mode 100644 index 0000000000..c40ba3cd3e --- /dev/null +++ b/website/docs/r/logs_stream.html.markdown @@ -0,0 +1,66 @@ +--- +layout: "ibm" +page_title: "IBM : ibm_logs_stream" +description: |- + Manages logs_stream. 
+subcategory: "Cloud Logs" +--- + +# ibm_logs_stream + +Create, update, and delete logs_streams with this resource. + +## Example Usage + +```hcl +resource "ibm_logs_stream" "logs_stream_instance" { + instance_id = ibm_resource_instance.logs_instance.guid + region = ibm_resource_instance.logs_instance.location + compression_type = "gzip" + is_active = true + dpxl_expression = "contains(kubernetes.labels.CX_AZ, 'eu-west-1')" + ibm_event_streams { + brokers = "kafka01.example.com:9093" + topic = "live.screen.v2" + } + name = "Live Screen" +} +``` + +## Argument Reference + +You can specify the following arguments for this resource. +* `instance_id` - (Required, Forces new resource, String) Cloud Logs Instance GUID. +* `region` - (Optional, Forces new resource, String) Cloud Logs Instance Region. +* `compression_type` - (Optional, String) The compression type of the stream. + * Constraints: Allowable values are: `unspecified`, `gzip`. +* `dpxl_expression` - (Required, String) The DPXL expression of the Event stream. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. +* `ibm_event_streams` - (Optional, List) Configuration for IBM Event Streams. +Nested schema for **ibm_event_streams**: + * `brokers` - (Required, String) The brokers of the IBM Event Streams. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. + * `topic` - (Required, String) The topic of the IBM Event Streams. + * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. +* `is_active` - (Optional, Boolean) Whether the Event stream is active. +* `name` - (Required, String) The name of the Event stream. 
+ * Constraints: The maximum length is `4096` characters. The minimum length is `1` character. The value must match regular expression `/^[\\p{L}\\p{N}\\p{P}\\p{Z}\\p{S}\\p{M}]+$/`. + +## Attribute Reference + +After your resource is created, you can read values from the listed arguments and the following attributes. + +* `id` - The unique identifier of the logs_stream resource. +* `streams_id` - The unique identifier of the logs_stream. +* `created_at` - (String) The creation time of the Event stream. +* `updated_at` - (String) The update time of the Event stream. + + +## Import + +You can import the `ibm_logs_stream` resource by using `id`. The `id` is a combination of `region`, `instance_id` and `streams_id`. + +# Syntax +
+$ terraform import ibm_logs_stream.logs_stream eu-gb/3dc02998-0b50-4ea8-b68a-4779d716fa1f/1
+