Skip to content

Commit

Permalink
Support streams in cloud logs (#5913)
Browse files Browse the repository at this point in the history
* Fix alerts filters resource

* Support Streams API

* fix streams API

* Fix logs alert immediate condition

* Fix streams docs

* Support endpoint type

* fix streams testcases

* Update logs SDK to 0.4.0
  • Loading branch information
kavya498 authored Jan 10, 2025
1 parent d6f3e79 commit 5882507
Show file tree
Hide file tree
Showing 19 changed files with 1,455 additions and 114 deletions.
2 changes: 1 addition & 1 deletion go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ require (
github.com/IBM/ibm-hpcs-tke-sdk v0.0.0-20211109141421-a4b61b05f7d1
github.com/IBM/ibm-hpcs-uko-sdk v0.0.20-beta
github.com/IBM/keyprotect-go-client v0.15.1
github.com/IBM/logs-go-sdk v0.3.0
github.com/IBM/logs-go-sdk v0.4.0
github.com/IBM/logs-router-go-sdk v1.0.5
github.com/IBM/mqcloud-go-sdk v0.2.0
github.com/IBM/networking-go-sdk v0.49.0
Expand Down
4 changes: 2 additions & 2 deletions go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -148,8 +148,8 @@ github.com/IBM/ibm-hpcs-uko-sdk v0.0.20-beta/go.mod h1:MLVNHMYoKsvovJZ4v1gQCpIYt
github.com/IBM/keyprotect-go-client v0.5.1/go.mod h1:5TwDM/4FRJq1ZOlwQL1xFahLWQ3TveR88VmL1u3njyI=
github.com/IBM/keyprotect-go-client v0.15.1 h1:m4qzqF5zOumRxKZ8s7vtK7A/UV/D278L8xpRG+WgT0s=
github.com/IBM/keyprotect-go-client v0.15.1/go.mod h1:asXtHwL/4uCHA221Vd/7SkXEi2pcRHDzPyyksc1DthE=
github.com/IBM/logs-go-sdk v0.3.0 h1:FHzTCCMyp9DvQGXgkppzcOPywC4ggt7x8xu0MR5h8xI=
github.com/IBM/logs-go-sdk v0.3.0/go.mod h1:yv/GCXC4/p+MZEeXl4xjZAOMvDAVRwu61WyHZFKFXQM=
github.com/IBM/logs-go-sdk v0.4.0 h1:CyUjm19EUtcJjf4mxsj6Rc7gkZDT8JEY5rLUIz8Eoag=
github.com/IBM/logs-go-sdk v0.4.0/go.mod h1:yv/GCXC4/p+MZEeXl4xjZAOMvDAVRwu61WyHZFKFXQM=
github.com/IBM/logs-router-go-sdk v1.0.5 h1:r0kC1+HfmSeQCD6zQTUp4PDI/zp4Ueo1Zo19ipHuNlw=
github.com/IBM/logs-router-go-sdk v1.0.5/go.mod h1:tCN2vFgu5xG0ob9iJcxi5M4bJ6mWmu3nhmRPnvlwev0=
github.com/IBM/mqcloud-go-sdk v0.2.0 h1:QOWk8ZGk0QfIL0MOGTKzNdM3Qe0Hk+ifAFtNSFQo5HU=
Expand Down
4 changes: 4 additions & 0 deletions ibm/provider/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -1040,6 +1040,8 @@ func Provider() *schema.Provider {
"ibm_logs_data_usage_metrics": logs.AddLogsInstanceFields(logs.DataSourceIbmLogsDataUsageMetrics()),
"ibm_logs_enrichments": logs.AddLogsInstanceFields(logs.DataSourceIbmLogsEnrichments()),
"ibm_logs_data_access_rules": logs.AddLogsInstanceFields(logs.DataSourceIbmLogsDataAccessRules()),
"ibm_logs_stream": logs.AddLogsInstanceFields(logs.DataSourceIbmLogsStream()),
"ibm_logs_streams": logs.AddLogsInstanceFields(logs.DataSourceIbmLogsStreams()),

// Logs Router Service
"ibm_logs_router_tenants": logsrouting.DataSourceIBMLogsRouterTenants(),
Expand Down Expand Up @@ -1681,6 +1683,7 @@ func Provider() *schema.Provider {
"ibm_logs_data_usage_metrics": logs.AddLogsInstanceFields(logs.ResourceIbmLogsDataUsageMetrics()),
"ibm_logs_enrichment": logs.AddLogsInstanceFields(logs.ResourceIbmLogsEnrichment()),
"ibm_logs_data_access_rule": logs.AddLogsInstanceFields(logs.ResourceIbmLogsDataAccessRule()),
"ibm_logs_stream": logs.AddLogsInstanceFields(logs.ResourceIbmLogsStream()),

// Logs Router Service
"ibm_logs_router_tenant": logsrouting.ResourceIBMLogsRouterTenant(),
Expand Down Expand Up @@ -2174,6 +2177,7 @@ func Validator() validate.ValidatorDict {
"ibm_logs_dashboard_folder": logs.ResourceIbmLogsDashboardFolderValidator(),
"ibm_logs_enrichment": logs.ResourceIbmLogsEnrichmentValidator(),
"ibm_logs_data_access_rule": logs.ResourceIbmLogsDataAccessRuleValidator(),
"ibm_logs_stream": logs.ResourceIbmLogsStreamValidator(),

// Added for Logs Router Service
"ibm_logs_router_tenant": logsrouting.ResourceIBMLogsRouterTenantValidator(),
Expand Down
8 changes: 8 additions & 0 deletions ibm/service/logs/data_source_ibm_logs_outgoing_webhook.go
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,11 @@ func DataSourceIbmLogsOutgoingWebhook() *schema.Resource {
Computed: true,
Description: "The ID of the selected IBM Event Notifications instance.",
},
"endpoint_type": &schema.Schema{
Type: schema.TypeString,
Computed: true,
Description: "The endpoint type of integration",
},
"region_id": &schema.Schema{
Type: schema.TypeString,
Computed: true,
Expand Down Expand Up @@ -167,6 +172,9 @@ func DataSourceIbmLogsOutgoingWebhookOutgoingWebhooksV1IbmEventNotificationsConf
modelMap := make(map[string]interface{})
modelMap["event_notifications_instance_id"] = model.EventNotificationsInstanceID.String()
modelMap["region_id"] = *model.RegionID
if model.EndpointType != nil {
modelMap["endpoint_type"] = *model.SourceID
}
if model.SourceID != nil {
modelMap["source_id"] = *model.SourceID
}
Expand Down
175 changes: 175 additions & 0 deletions ibm/service/logs/data_source_ibm_logs_stream.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,175 @@
// Copyright IBM Corp. 2024 All Rights Reserved.
// Licensed under the Mozilla Public License v2.0

/*
* IBM OpenAPI Terraform Generator Version: 3.96.0-d6dec9d7-20241008-212902
*/

package logs

import (
"context"
"fmt"
"log"
"strconv"

"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"

"github.com/IBM-Cloud/terraform-provider-ibm/ibm/conns"
"github.com/IBM-Cloud/terraform-provider-ibm/ibm/flex"
"github.com/IBM/go-sdk-core/v5/core"
"github.com/IBM/logs-go-sdk/logsv0"
)

// DataSourceIbmLogsStream defines the schema for the ibm_logs_stream data
// source, which looks up a single Event stream by its numeric ID
// (logs_streams_id) and exposes its attributes as computed fields.
func DataSourceIbmLogsStream() *schema.Resource {
	streamSchema := map[string]*schema.Schema{
		// The lookup key: callers must supply the stream's ID as a string.
		"logs_streams_id": {
			Type:        schema.TypeString,
			Required:    true,
			Description: "The ID of the Event stream.",
		},
		"name": {
			Type:        schema.TypeString,
			Computed:    true,
			Description: "The name of the Event stream.",
		},
		"is_active": {
			Type:        schema.TypeBool,
			Computed:    true,
			Description: "Whether the Event stream is active.",
		},
		"dpxl_expression": {
			Type:        schema.TypeString,
			Computed:    true,
			Description: "The DPXL expression of the Event stream.",
		},
		"created_at": {
			Type:        schema.TypeString,
			Computed:    true,
			Description: "The creation time of the Event stream.",
		},
		"updated_at": {
			Type:        schema.TypeString,
			Computed:    true,
			Description: "The update time of the Event stream.",
		},
		"compression_type": {
			Type:        schema.TypeString,
			Computed:    true,
			Description: "The compression type of the stream.",
		},
		// Nested block describing the IBM Event Streams (Kafka) target.
		"ibm_event_streams": {
			Type:        schema.TypeList,
			Computed:    true,
			Description: "Configuration for IBM Event Streams.",
			Elem: &schema.Resource{
				Schema: map[string]*schema.Schema{
					"brokers": {
						Type:        schema.TypeString,
						Computed:    true,
						Description: "The brokers of the IBM Event Streams.",
					},
					"topic": {
						Type:        schema.TypeString,
						Computed:    true,
						Description: "The topic of the IBM Event Streams.",
					},
				},
			},
		},
	}

	return &schema.Resource{
		ReadContext: dataSourceIbmLogsStreamRead,
		Schema:      streamSchema,
	}
}

// dataSourceIbmLogsStreamRead lists all Event stream targets for the Cloud
// Logs instance and populates state from the single stream whose ID matches
// the logs_streams_id argument. If no stream with that ID exists, the
// resource ID is cleared and an error diagnostic is returned.
func dataSourceIbmLogsStreamRead(context context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
	logsClient, err := meta.(conns.ClientSession).LogsV0()
	if err != nil {
		tfErr := flex.DiscriminatedTerraformErrorf(err, err.Error(), "(Data) ibm_logs_stream", "read", "initialize-client")
		log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage())
		return tfErr.GetDiag()
	}
	// Scope the client to the instance- and region-specific service endpoint.
	region := getLogsInstanceRegion(logsClient, d)
	instanceId := d.Get("instance_id").(string)
	logsClient = getClientWithLogsInstanceEndpoint(logsClient, instanceId, region, getLogsInstanceEndpointType(logsClient, d))

	// The API uses int64 stream IDs; reject a non-numeric argument up front
	// instead of silently searching for ID 0 (the previous behavior when the
	// parse error was discarded).
	streamsID, err := strconv.ParseInt(d.Get("logs_streams_id").(string), 10, 64)
	if err != nil {
		tfErr := flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("logs_streams_id must be a numeric string: %s", err.Error()), "(Data) ibm_logs_stream", "read", "parse-logs_streams_id")
		log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage())
		return tfErr.GetDiag()
	}

	// There is no per-ID GET endpoint; list all targets and filter locally.
	getEventStreamTargetsOptions := &logsv0.GetEventStreamTargetsOptions{}

	streams, _, err := logsClient.GetEventStreamTargetsWithContext(context, getEventStreamTargetsOptions)
	if err != nil {
		tfErr := flex.TerraformErrorf(err, fmt.Sprintf("GetEventStreamTargetsWithContext failed: %s", err.Error()), "(Data) ibm_logs_stream", "read")
		log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage())
		return tfErr.GetDiag()
	}
	if streams != nil {
		found := false
		for _, stream := range streams.Streams {
			if *stream.ID != streamsID {
				continue
			}
			found = true
			d.SetId(fmt.Sprintf("%d", *stream.ID))

			if err = d.Set("name", stream.Name); err != nil {
				return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting name: %s", err), "(Data) ibm_logs_stream", "read", "set-name").GetDiag()
			}

			if !core.IsNil(stream.IsActive) {
				if err = d.Set("is_active", stream.IsActive); err != nil {
					return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting is_active: %s", err), "(Data) ibm_logs_stream", "read", "set-is_active").GetDiag()
				}
			}

			if err = d.Set("dpxl_expression", stream.DpxlExpression); err != nil {
				return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting dpxl_expression: %s", err), "(Data) ibm_logs_stream", "read", "set-dpxl_expression").GetDiag()
			}

			if !core.IsNil(stream.CreatedAt) {
				if err = d.Set("created_at", flex.DateTimeToString(stream.CreatedAt)); err != nil {
					return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting created_at: %s", err), "(Data) ibm_logs_stream", "read", "set-created_at").GetDiag()
				}
			}

			if !core.IsNil(stream.UpdatedAt) {
				if err = d.Set("updated_at", flex.DateTimeToString(stream.UpdatedAt)); err != nil {
					return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting updated_at: %s", err), "(Data) ibm_logs_stream", "read", "set-updated_at").GetDiag()
				}
			}

			if !core.IsNil(stream.CompressionType) {
				if err = d.Set("compression_type", stream.CompressionType); err != nil {
					return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting compression_type: %s", err), "(Data) ibm_logs_stream", "read", "set-compression_type").GetDiag()
				}
			}

			if !core.IsNil(stream.IbmEventStreams) {
				ibmEventStreams := []map[string]interface{}{}
				ibmEventStreamsMap, err := DataSourceIbmLogsStreamIbmEventStreamsToMap(stream.IbmEventStreams)
				if err != nil {
					return flex.DiscriminatedTerraformErrorf(err, err.Error(), "(Data) ibm_logs_stream", "read", "ibm_event_streams-to-map").GetDiag()
				}
				ibmEventStreams = append(ibmEventStreams, ibmEventStreamsMap)
				if err = d.Set("ibm_event_streams", ibmEventStreams); err != nil {
					return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting ibm_event_streams: %s", err), "(Data) ibm_logs_stream", "read", "set-ibm_event_streams").GetDiag()
				}
			}
		}
		if !found {
			d.SetId("")
			// The list call itself succeeded, so wrap a fresh error rather
			// than the nil err left over from the API call above.
			return flex.TerraformErrorf(fmt.Errorf("stream ID (%d) not found", streamsID), fmt.Sprintf("Stream ID (%d) not found ", streamsID), "(Data) ibm_logs_stream", "read").GetDiag()
		}
	}

	return nil
}

// DataSourceIbmLogsStreamIbmEventStreamsToMap flattens an IbmEventStreams
// model into the map used by the ibm_event_streams schema block.
// Pointer fields are dereferenced only when non-nil so a stream target with
// a partially populated configuration does not panic the provider.
func DataSourceIbmLogsStreamIbmEventStreamsToMap(model *logsv0.IbmEventStreams) (map[string]interface{}, error) {
	modelMap := make(map[string]interface{})
	if model.Brokers != nil {
		modelMap["brokers"] = *model.Brokers
	}
	if model.Topic != nil {
		modelMap["topic"] = *model.Topic
	}
	return modelMap, nil
}
115 changes: 115 additions & 0 deletions ibm/service/logs/data_source_ibm_logs_stream_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
// Copyright IBM Corp. 2024 All Rights Reserved.
// Licensed under the Mozilla Public License v2.0

/*
* IBM OpenAPI Terraform Generator Version: 3.96.0-d6dec9d7-20241008-212902
*/

package logs_test

import (
"fmt"
"testing"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"

acc "github.com/IBM-Cloud/terraform-provider-ibm/ibm/acctest"
)

// TestAccIbmLogsStreamDataSourceBasic creates a minimal ibm_logs_stream
// resource and verifies the matching data source exposes its core attributes.
func TestAccIbmLogsStreamDataSourceBasic(t *testing.T) {
	name := fmt.Sprintf("tf_name_%d", acctest.RandIntRange(10, 100))
	dpxlExpression := "<v1>contains(kubernetes.labels.CX_AZ, 'eu-west-1')"

	steps := []resource.TestStep{
		{
			Config: testAccCheckIbmLogsStreamDataSourceConfigBasic(name, dpxlExpression),
			Check: resource.ComposeTestCheckFunc(
				resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "id"),
				resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "name"),
				resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "dpxl_expression"),
			),
		},
	}

	resource.Test(t, resource.TestCase{
		PreCheck:  func() { acc.TestAccPreCheckCloudLogs(t) },
		Providers: acc.TestAccProviders,
		Steps:     steps,
	})
}

// TestAccIbmLogsStreamDataSourceAllArgs creates an ibm_logs_stream resource
// with every supported argument set and verifies the data source exposes the
// full attribute set, including timestamps and the nested Event Streams block.
func TestAccIbmLogsStreamDataSourceAllArgs(t *testing.T) {
	name := fmt.Sprintf("tf_name_%d", acctest.RandIntRange(10, 100))
	isActive := "false"
	dpxlExpression := "<v1>contains(kubernetes.labels.CX_AZ, 'eu-west-1')"
	compressionType := "gzip"

	steps := []resource.TestStep{
		{
			Config: testAccCheckIbmLogsStreamDataSourceConfig(name, isActive, dpxlExpression, compressionType),
			Check: resource.ComposeTestCheckFunc(
				resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "id"),
				resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "name"),
				resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "dpxl_expression"),
				resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "created_at"),
				resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "updated_at"),
				resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "compression_type"),
				resource.TestCheckResourceAttrSet("data.ibm_logs_stream.logs_stream_instance", "ibm_event_streams.#"),
			),
		},
	}

	resource.Test(t, resource.TestCase{
		PreCheck:  func() { acc.TestAccPreCheckCloudLogs(t) },
		Providers: acc.TestAccProviders,
		Steps:     steps,
	})
}

// testAccCheckIbmLogsStreamDataSourceConfigBasic renders HCL for a minimal
// ibm_logs_stream resource plus a data source that looks it up by stream ID.
func testAccCheckIbmLogsStreamDataSourceConfigBasic(name string, dpxlExpression string) string {
	// %[1]s / %[2]s reuse the instance ID and region in the data source block.
	template := `
	resource "ibm_logs_stream" "logs_stream_instance" {
		instance_id      = "%s"
		region           = "%s"
		name             = "%s"
		dpxl_expression  = "%s"
		compression_type = "gzip"
		ibm_event_streams {
			brokers = "kafka01.example.com:9093"
			topic   = "live.screen"
		}
	}
	data "ibm_logs_stream" "logs_stream_instance" {
		instance_id     = "%[1]s"
		region          = "%[2]s"
		logs_streams_id = ibm_logs_stream.logs_stream_instance.streams_id
		depends_on = [
			ibm_logs_stream.logs_stream_instance
		]
	}
	`
	return fmt.Sprintf(template, acc.LogsInstanceId, acc.LogsInstanceRegion, name, dpxlExpression)
}

// testAccCheckIbmLogsStreamDataSourceConfig renders HCL for an
// ibm_logs_stream resource with all arguments set, plus a data source that
// looks it up by stream ID.
func testAccCheckIbmLogsStreamDataSourceConfig(name string, isActive string, dpxlExpression string, compressionType string) string {
	// %[1]s / %[2]s reuse the instance ID and region in the data source block.
	template := `
	resource "ibm_logs_stream" "logs_stream_instance" {
		instance_id      = "%s"
		region           = "%s"
		name             = "%s"
		is_active        = %s
		dpxl_expression  = "%s"
		compression_type = "%s"
		ibm_event_streams {
			brokers = "kafka01.example.com:9093"
			topic   = "live.screen"
		}
	}
	data "ibm_logs_stream" "logs_stream_instance" {
		instance_id     = "%[1]s"
		region          = "%[2]s"
		logs_streams_id = ibm_logs_stream.logs_stream_instance.streams_id
		depends_on = [
			ibm_logs_stream.logs_stream_instance
		]
	}
	`
	return fmt.Sprintf(template, acc.LogsInstanceId, acc.LogsInstanceRegion, name, isActive, dpxlExpression, compressionType)
}
Loading

0 comments on commit 5882507

Please sign in to comment.