Skip to content

Commit 87fb60b

Browse files
committed
Add aws_rdsdata_query data source
1 parent 0594997 commit 87fb60b

File tree

4 files changed

+396
-1
lines changed

4 files changed

+396
-1
lines changed
Lines changed: 168 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,168 @@
1+
// Copyright (c) HashiCorp, Inc.
2+
// SPDX-License-Identifier: MPL-2.0
3+
4+
package rdsdata
5+
6+
import (
7+
"context"
8+
"encoding/json"
9+
10+
"github.com/aws/aws-sdk-go-v2/service/rdsdata"
11+
rdsdatatypes "github.com/aws/aws-sdk-go-v2/service/rdsdata/types"
12+
"github.com/hashicorp/terraform-plugin-framework/datasource"
13+
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
14+
"github.com/hashicorp/terraform-plugin-framework/types"
15+
"github.com/hashicorp/terraform-provider-aws/internal/framework"
16+
"github.com/hashicorp/terraform-provider-aws/names"
17+
)
18+
19+
// @FrameworkDataSource("aws_rdsdata_query", name="Query")
20+
func newDataSourceQuery(context.Context) (datasource.DataSourceWithConfigure, error) {
21+
return &dataSourceQuery{}, nil
22+
}
23+
24+
// dataSourceQuery implements the aws_rdsdata_query data source. The embedded
// framework helper wires the typed model to the schema and provides the
// provider-meta plumbing used by Read.
type dataSourceQuery struct {
	framework.DataSourceWithModel[dataSourceQueryModel]
}
27+
28+
func (d *dataSourceQuery) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
29+
resp.Schema = schema.Schema{
30+
Attributes: map[string]schema.Attribute{
31+
names.AttrID: framework.IDAttribute(),
32+
names.AttrDatabase: schema.StringAttribute{
33+
Optional: true,
34+
},
35+
names.AttrResourceARN: schema.StringAttribute{
36+
Required: true,
37+
},
38+
"secret_arn": schema.StringAttribute{
39+
Required: true,
40+
},
41+
"sql": schema.StringAttribute{
42+
Required: true,
43+
},
44+
"records": schema.StringAttribute{
45+
Computed: true,
46+
},
47+
"number_of_records_updated": schema.Int64Attribute{
48+
Computed: true,
49+
},
50+
},
51+
Blocks: map[string]schema.Block{
52+
names.AttrParameters: schema.ListNestedBlock{
53+
NestedObject: schema.NestedBlockObject{
54+
Attributes: map[string]schema.Attribute{
55+
names.AttrName: schema.StringAttribute{
56+
Required: true,
57+
},
58+
names.AttrValue: schema.StringAttribute{
59+
Required: true,
60+
},
61+
"type_hint": schema.StringAttribute{
62+
Optional: true,
63+
},
64+
},
65+
},
66+
},
67+
},
68+
}
69+
}
70+
71+
// dataSourceQueryModel maps the data source schema to Go values.
type dataSourceQueryModel struct {
	framework.WithRegionModel
	// ID is synthesized in Read as "<resource_arn>:<sql>".
	ID types.String `tfsdk:"id"`
	Database types.String `tfsdk:"database"`
	ResourceARN types.String `tfsdk:"resource_arn"`
	SecretARN types.String `tfsdk:"secret_arn"`
	SQL types.String `tfsdk:"sql"`
	Parameters []dataSourceQueryParameterModel `tfsdk:"parameters"`
	// Records holds the JSON-formatted result set returned by the Data API.
	Records types.String `tfsdk:"records"`
	NumberOfRecordsUpdated types.Int64 `tfsdk:"number_of_records_updated"`
}
82+
83+
// dataSourceQueryParameterModel maps one "parameters" block to Go values.
type dataSourceQueryParameterModel struct {
	Name types.String `tfsdk:"name"`
	Value types.String `tfsdk:"value"`
	// TypeHint, when set, is passed through as the Data API TypeHint
	// (e.g. DECIMAL, JSON, TIMESTAMP).
	TypeHint types.String `tfsdk:"type_hint"`
}
88+
89+
func (d *dataSourceQuery) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
90+
var data dataSourceQueryModel
91+
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
92+
if resp.Diagnostics.HasError() {
93+
return
94+
}
95+
96+
conn := d.Meta().RDSDataClient(ctx)
97+
98+
input := rdsdata.ExecuteStatementInput{
99+
ResourceArn: data.ResourceARN.ValueStringPointer(),
100+
SecretArn: data.SecretARN.ValueStringPointer(),
101+
Sql: data.SQL.ValueStringPointer(),
102+
FormatRecordsAs: rdsdatatypes.RecordsFormatTypeJson,
103+
}
104+
105+
if !data.Database.IsNull() {
106+
input.Database = data.Database.ValueStringPointer()
107+
}
108+
109+
if len(data.Parameters) > 0 {
110+
input.Parameters = expandSQLParameters(data.Parameters)
111+
}
112+
113+
output, err := conn.ExecuteStatement(ctx, &input)
114+
if err != nil {
115+
resp.Diagnostics.AddError("executing RDS Data API statement", err.Error())
116+
return
117+
}
118+
119+
data.ID = types.StringValue(data.ResourceARN.ValueString() + ":" + data.SQL.ValueString())
120+
data.Records = types.StringPointerValue(output.FormattedRecords)
121+
data.NumberOfRecordsUpdated = types.Int64Value(output.NumberOfRecordsUpdated)
122+
123+
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
124+
}
125+
126+
func expandSQLParameters(tfList []dataSourceQueryParameterModel) []rdsdatatypes.SqlParameter {
127+
if len(tfList) == 0 {
128+
return nil
129+
}
130+
131+
var apiObjects []rdsdatatypes.SqlParameter
132+
133+
for _, tfObj := range tfList {
134+
apiObject := rdsdatatypes.SqlParameter{
135+
Name: tfObj.Name.ValueStringPointer(),
136+
}
137+
138+
if !tfObj.TypeHint.IsNull() {
139+
apiObject.TypeHint = rdsdatatypes.TypeHint(tfObj.TypeHint.ValueString())
140+
}
141+
142+
// Convert value to Field type
143+
valueStr := tfObj.Value.ValueString()
144+
var field rdsdatatypes.Field
145+
146+
// Try to parse as JSON first, otherwise treat as string
147+
var jsonValue any
148+
if err := json.Unmarshal([]byte(valueStr), &jsonValue); err == nil {
149+
switch v := jsonValue.(type) {
150+
case string:
151+
field = &rdsdatatypes.FieldMemberStringValue{Value: v}
152+
case float64:
153+
field = &rdsdatatypes.FieldMemberDoubleValue{Value: v}
154+
case bool:
155+
field = &rdsdatatypes.FieldMemberBooleanValue{Value: v}
156+
default:
157+
field = &rdsdatatypes.FieldMemberStringValue{Value: valueStr}
158+
}
159+
} else {
160+
field = &rdsdatatypes.FieldMemberStringValue{Value: valueStr}
161+
}
162+
163+
apiObject.Value = field
164+
apiObjects = append(apiObjects, apiObject)
165+
}
166+
167+
return apiObjects
168+
}
Lines changed: 126 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,126 @@
1+
// Copyright (c) HashiCorp, Inc.
2+
// SPDX-License-Identifier: MPL-2.0
3+
4+
package rdsdata_test
5+
6+
import (
7+
"fmt"
8+
"testing"
9+
10+
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
11+
"github.com/hashicorp/terraform-provider-aws/internal/acctest"
12+
"github.com/hashicorp/terraform-provider-aws/names"
13+
)
14+
15+
func TestAccRDSDataQueryDataSource_basic(t *testing.T) {
16+
ctx := acctest.Context(t)
17+
dataSourceName := "data.aws_rdsdata_query.test"
18+
rName := acctest.RandomWithPrefix(t, acctest.ResourcePrefix)
19+
20+
resource.ParallelTest(t, resource.TestCase{
21+
PreCheck: func() { acctest.PreCheck(ctx, t) },
22+
ErrorCheck: acctest.ErrorCheck(t, names.RDSServiceID),
23+
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories,
24+
Steps: []resource.TestStep{
25+
{
26+
Config: testAccQueryDataSourceConfig_basic(rName),
27+
Check: resource.ComposeTestCheckFunc(
28+
resource.TestCheckResourceAttrSet(dataSourceName, "records"),
29+
resource.TestCheckResourceAttr(dataSourceName, "sql", "SELECT SCHEMA_NAME FROM information_schema.SCHEMATA LIMIT 1"),
30+
),
31+
},
32+
},
33+
})
34+
}
35+
36+
func TestAccRDSDataQueryDataSource_withParameters(t *testing.T) {
37+
ctx := acctest.Context(t)
38+
dataSourceName := "data.aws_rdsdata_query.test"
39+
rName := acctest.RandomWithPrefix(t, acctest.ResourcePrefix)
40+
41+
resource.ParallelTest(t, resource.TestCase{
42+
PreCheck: func() { acctest.PreCheck(ctx, t) },
43+
ErrorCheck: acctest.ErrorCheck(t, names.RDSServiceID),
44+
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories,
45+
Steps: []resource.TestStep{
46+
{
47+
Config: testAccQueryDataSourceConfig_withParameters(rName),
48+
Check: resource.ComposeTestCheckFunc(
49+
resource.TestCheckResourceAttrSet(dataSourceName, "records"),
50+
resource.TestCheckResourceAttr(dataSourceName, "sql", "SELECT :param1 as test_column"),
51+
resource.TestCheckResourceAttr(dataSourceName, "parameters.#", "1"),
52+
resource.TestCheckResourceAttr(dataSourceName, "parameters.0.name", "param1"),
53+
resource.TestCheckResourceAttr(dataSourceName, "parameters.0.value", "test_value"),
54+
),
55+
},
56+
},
57+
})
58+
}
59+
60+
// testAccQueryDataSourceConfig_basic layers a minimal aws_rdsdata_query over
// the base cluster configuration; depends_on ensures the serverless instance
// exists before the statement is executed.
func testAccQueryDataSourceConfig_basic(rName string) string {
	return acctest.ConfigCompose(testAccQueryDataSourceConfig_base(rName), `
data "aws_rdsdata_query" "test" {
  depends_on = [aws_rds_cluster_instance.test]
  resource_arn = aws_rds_cluster.test.arn
  secret_arn = aws_secretsmanager_secret.test.arn
  sql = "SELECT SCHEMA_NAME FROM information_schema.SCHEMATA LIMIT 1"
}
`)
}
70+
71+
// testAccQueryDataSourceConfig_withParameters layers an aws_rdsdata_query
// with a single named parameter over the base cluster configuration.
func testAccQueryDataSourceConfig_withParameters(rName string) string {
	return acctest.ConfigCompose(testAccQueryDataSourceConfig_base(rName), `
data "aws_rdsdata_query" "test" {
  depends_on = [aws_rds_cluster_instance.test]
  resource_arn = aws_rds_cluster.test.arn
  secret_arn = aws_secretsmanager_secret.test.arn
  sql = "SELECT :param1 as test_column"

  parameters {
    name = "param1"
    value = "test_value"
  }
}
`)
}
86+
87+
// testAccQueryDataSourceConfig_base provisions an Aurora MySQL Serverless v2
// cluster with the Data API (enable_http_endpoint) turned on, plus a Secrets
// Manager secret holding the master credentials.
// NOTE(review): the data source references aws_secretsmanager_secret.test.arn,
// but nothing forces aws_secretsmanager_secret_version.test to be created
// first — confirm the query cannot run before the secret has a value.
func testAccQueryDataSourceConfig_base(rName string) string {
	return fmt.Sprintf(`
resource "aws_rds_cluster" "test" {
  cluster_identifier = %[1]q
  engine = "aurora-mysql"
  database_name = "test"
  master_username = "username"
  master_password = "mustbeeightcharacters"
  backup_retention_period = 7
  preferred_backup_window = "07:00-09:00"
  preferred_maintenance_window = "tue:04:00-tue:04:30"
  skip_final_snapshot = true
  enable_http_endpoint = true

  serverlessv2_scaling_configuration {
    max_capacity = 8
    min_capacity = 0.5
  }
}

resource "aws_rds_cluster_instance" "test" {
  cluster_identifier = aws_rds_cluster.test.id
  instance_class = "db.serverless"
  engine = aws_rds_cluster.test.engine
  engine_version = aws_rds_cluster.test.engine_version
}

resource "aws_secretsmanager_secret" "test" {
  name = %[1]q
}

resource "aws_secretsmanager_secret_version" "test" {
  secret_id = aws_secretsmanager_secret.test.id
  secret_string = jsonencode({
    username = aws_rds_cluster.test.master_username
    password = aws_rds_cluster.test.master_password
  })
}
`, rName)
}

internal/service/rdsdata/service_package_gen.go

Lines changed: 9 additions & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)