@@ -23,11 +23,6 @@ func (r resourceDbQueryLoggingType) GetSchema(ctx context.Context) (tfsdk.Schema
    return tfsdk.Schema{
        Description: `The resource to manage DB query logging configuration for a cluster in YugabyteDB Aeon.`,
        Attributes: map[string]tfsdk.Attribute{
-           "cluster_name": {
-               Description: "Name of the cluster with which this DB query logging config will be associated.",
-               Type: types.StringType,
-               Required: true,
-           },
            "integration_name": {
                Description: "Name of the integration for this DB query logging configuration.",
                Type: types.StringType,
@@ -46,10 +41,10 @@ func (r resourceDbQueryLoggingType) GetSchema(ctx context.Context) (tfsdk.Schema
            "cluster_id": {
                Description: "ID of the cluster with which this DB query logging config will be associated.",
                Type: types.StringType,
-               Computed: true,
+               Required: true,
            },
            "config_id": {
-               Description: "ID of the DB query logging configuration.",
+               Description: "ID of the DB query logging configuration. Created automatically when enabling DB query logs.",
                Type: types.StringType,
                Computed: true,
            },
@@ -60,8 +55,7 @@ func (r resourceDbQueryLoggingType) GetSchema(ctx context.Context) (tfsdk.Schema
            },
            "log_config": {
                Description: "The Log config.",
-               Optional: true,
-               Computed: true,
+               Required: true,
                Attributes: tfsdk.SingleNestedAttributes(map[string]tfsdk.Attribute{
                    "log_min_duration_statement": {
                        Description: "Duration(in ms) of each completed statement to be logged if the statement ran for at least the specified amount of time.",
@@ -138,15 +132,23 @@ type resourceDbQueryLogging struct {

func getConfigFromPlan(ctx context.Context, plan tfsdk.Plan, config *DbQueryLoggingConfig) diag.Diagnostics {
    var diags diag.Diagnostics
-   diags.Append(plan.GetAttribute(ctx, path.Root("cluster_name"), &config.ClusterName)...)
+   diags.Append(plan.GetAttribute(ctx, path.Root("cluster_id"), &config.ClusterID)...)
    diags.Append(plan.GetAttribute(ctx, path.Root("integration_name"), &config.IntegrationName)...)
    diags.Append(plan.GetAttribute(ctx, path.Root("log_config"), &config.LogConfig)...)
    return diags
}

-func buildDbQueryLoggingSpec(config DbQueryLoggingConfig, integrationId string) (*openapiclient.PgLogExporterConfigSpec, error) {
-   // Start with a default log config
-   newLogConfig := openapiclient.NewPgLogExportConfigWithDefaults()
+func getConfigFromState(ctx context.Context, state tfsdk.State, config *DbQueryLoggingConfig) {
+   state.GetAttribute(ctx, path.Root("account_id"), &config.AccountID)
+   state.GetAttribute(ctx, path.Root("project_id"), &config.ProjectID)
+   state.GetAttribute(ctx, path.Root("cluster_id"), &config.ClusterID)
+   state.GetAttribute(ctx, path.Root("integration_name"), &config.IntegrationName)
+   state.GetAttribute(ctx, path.Root("config_id"), &config.ConfigID)
+   state.GetAttribute(ctx, path.Root("log_config"), &config.LogConfig)
+}
+
+func buildDbQueryLoggingSpec(config DbQueryLoggingConfig, integrationId string, exportConfig *openapiclient.PgLogExportConfig) (*openapiclient.PgLogExporterConfigSpec, error) {
+   newLogConfig := *exportConfig

    // Update log config params, that are provided in tf file
    if config.LogConfig != nil {
@@ -198,27 +204,48 @@ func buildDbQueryLoggingSpec(config DbQueryLoggingConfig, integrationId string)

    // Return the new PgLogExporterConfigSpec
    return &openapiclient.PgLogExporterConfigSpec{
-       ExportConfig: *newLogConfig,
+       ExportConfig: newLogConfig,
        ExporterId: integrationId,
    }, nil
}

-// Read latest state/config of a resource from Backend and convert it to model
-func resourceRead(ctx context.Context, accountId string, projectId string, clusterId string,
-   clusterName string, integrationName string,
-   apiClient *openapiclient.APIClient) (dbQueryLoggingConfig DbQueryLoggingConfig, readOK bool, errMsg string) {
-
-   // fetch log config from backend
+func getPgLogExporterConfig(ctx context.Context, accountId string, projectId string, clusterId string, apiClient *openapiclient.APIClient) (conf *openapiclient.PgLogExporterConfigData, ok bool, errMsg string) {
    specList, _, err := apiClient.ClusterApi.ListPgLogExporterConfigs(ctx, accountId, projectId, clusterId).Execute()
    if err != nil {
-       return dbQueryLoggingConfig, false, GetApiErrorDetails(err)
+       return nil, false, GetApiErrorDetails(err)
    }

-   if len(specList.GetData()) == 0 {
-       return dbQueryLoggingConfig, false, fmt.Sprintf("No DB query logging config found for cluster %s", clusterName)
+   if len(specList.GetData()) < 1 {
+       return nil, false, "no DB query logging config found for the cluster"
    }
+   return &specList.Data[0], true, ""
+}

-   spec := specList.Data[0]
+func getIntegrationId(ctx context.Context, accountId string, projectId string, integrationName string, apiClient *openapiclient.APIClient) (data *openapiclient.TelemetryProviderData, ok bool, errMsg string) {
+   integrationConfig, _, err := apiClient.TelemetryProviderApi.
+       ListTelemetryProviders(ctx, accountId, projectId).
+       Name(integrationName).
+       Execute()
+   if err != nil {
+       return data, false, GetApiErrorDetails(err)
+   }
+
+   if len(integrationConfig.GetData()) < 1 {
+       errMsg := fmt.Sprintf("Integration %s not found", integrationName)
+       return nil, false, errMsg
+   }
+   return &integrationConfig.GetData()[0], true, ""
+}
+
+// Read latest state/config of a resource from Backend and convert it to model
+func resourceRead(ctx context.Context, accountId string, projectId string, clusterId string,
+   integrationName string,
+   apiClient *openapiclient.APIClient) (dbQueryLoggingConfig DbQueryLoggingConfig, readOK bool, errMsg string) {
+
+   spec, ok, errMsg := getPgLogExporterConfig(ctx, accountId, projectId, clusterId, apiClient)
+   if !ok {
+       return dbQueryLoggingConfig, false, errMsg
+   }
    exportConfig := spec.Spec.ExportConfig

    // Initialize the LogConfig object from PgLogExportConfig
@@ -238,7 +265,6 @@ func resourceRead(ctx context.Context, accountId string, projectId string, clust
        AccountID: types.String{Value: accountId},
        ProjectID: types.String{Value: projectId},
        ClusterID: types.String{Value: clusterId},
-       ClusterName: types.String{Value: clusterName},
        IntegrationName: types.String{Value: integrationName},
        State: types.String{Value: string(spec.Info.State)},
        ConfigID: types.String{Value: spec.Info.Id},
@@ -248,27 +274,16 @@ func resourceRead(ctx context.Context, accountId string, projectId string, clust
    return dbQueryLoggingConfig, true, ""
}

-func getConfigFromState(ctx context.Context, state tfsdk.State, config *DbQueryLoggingConfig) {
-   state.GetAttribute(ctx, path.Root("account_id"), &config.AccountID)
-   state.GetAttribute(ctx, path.Root("project_id"), &config.ProjectID)
-   state.GetAttribute(ctx, path.Root("cluster_name"), &config.ClusterName)
-   state.GetAttribute(ctx, path.Root("cluster_id"), &config.ClusterID)
-   state.GetAttribute(ctx, path.Root("integration_name"), &config.IntegrationName)
-   state.GetAttribute(ctx, path.Root("config_id"), &config.ConfigID)
-}
-
func (r resourceDbQueryLogging) Read(ctx context.Context, req tfsdk.ReadResourceRequest, resp *tfsdk.ReadResourceResponse) {
    var state DbQueryLoggingConfig
-
    getConfigFromState(ctx, req.State, &state)
    apiClient := r.p.client
    accountId := state.AccountID.Value
    projectId := state.ProjectID.Value
    clusterId := state.ClusterID.Value
-   clusterName := state.ClusterName.Value
    integrationName := state.IntegrationName.Value

-   dbqlConfig, readOK, message := resourceRead(ctx, accountId, projectId, clusterId, clusterName, integrationName, apiClient)
+   dbqlConfig, readOK, message := resourceRead(ctx, accountId, projectId, clusterId, integrationName, apiClient)
    if !readOK {
        resp.Diagnostics.AddError("Unable to read the state of Db Query log configuration associated with the cluster", message)
        return
@@ -282,17 +297,134 @@ func (r resourceDbQueryLogging) Read(ctx context.Context, req tfsdk.ReadResource
}

func (r resourceDbQueryLogging) Update(ctx context.Context, req tfsdk.UpdateResourceRequest, resp *tfsdk.UpdateResourceResponse) {
-   resp.Diagnostics.AddError(
-       "Unsupported Operation",
-       "Update is not currently supported.",
-   )
+   var planConfig DbQueryLoggingConfig
+   resp.Diagnostics.Append(getConfigFromPlan(ctx, req.Plan, &planConfig)...)
+   if resp.Diagnostics.HasError() {
+       tflog.Debug(ctx, "Error while getting the plan for the Db query logging config")
+       return
+   }
+   integrationName := planConfig.IntegrationName.Value
+
+   apiClient := r.p.client
+   var stateConfig DbQueryLoggingConfig
+   getConfigFromState(ctx, req.State, &stateConfig)
+   accountId := stateConfig.AccountID.Value
+   projectId := stateConfig.ProjectID.Value
+   clusterId := stateConfig.ClusterID.Value
+   configId := stateConfig.ConfigID.Value
+
+   if planConfig.ClusterID != stateConfig.ClusterID {
+       errMsg := "Field cluster_id cannot be changed after resource creation"
+       resp.Diagnostics.AddError(errMsg, errMsg)
+       return
+   }
+
+   spec, ok, errMsg := getPgLogExporterConfig(ctx, accountId, projectId, clusterId, apiClient)
+   if !ok {
+       resp.Diagnostics.AddError("Unable to fetch DB query logging config", errMsg)
+       return
+   }
+
+   integrationId := ""
+   if planConfig.IntegrationName != stateConfig.IntegrationName {
+       integrationConfig, ok, errMsg := getIntegrationId(ctx, accountId, projectId, integrationName, apiClient)
+       if !ok {
+           resp.Diagnostics.AddError("Unable to fetch integration details for: " + integrationName, errMsg)
+           return
+       }
+       integrationId = integrationConfig.Info.Id
+   } else {
+       integrationId = spec.Spec.ExporterId
+   }
+   // Use planConfig provided in tf file to build new API Pg log exporter config spec
+   apiConfigSpec, err := buildDbQueryLoggingSpec(planConfig, integrationId, &spec.Spec.ExportConfig)
+   if err != nil {
+       resp.Diagnostics.AddError("Unable to update DB query logging config", GetApiErrorDetails(err))
+       return
+   }
+
+   _, _, err = apiClient.ClusterApi.UpdatePgLogExporterConfig(ctx, accountId, projectId, clusterId, configId).PgLogExporterConfigSpec(*apiConfigSpec).Execute()
+   if err != nil {
+       resp.Diagnostics.AddError(fmt.Sprintf("Unable to update DB query logging config for cluster %s", clusterId), GetApiErrorDetails(err))
+       return
+   }
+
+   retryPolicy := retry.NewConstant(10 * time.Second)
+   retryPolicy = retry.WithMaxDuration(2400 * time.Second, retryPolicy)
+   err = retry.Do(ctx, retryPolicy, func(ctx context.Context) error {
+       asState, readInfoOK, message := getTaskState(accountId, projectId, clusterId, openapiclient.ENTITYTYPEENUM_CLUSTER, openapiclient.TASKTYPEENUM_EDIT_DATABASE_QUERY_LOGGING, apiClient, ctx)
+       if readInfoOK {
+           if asState == string(openapiclient.TASKACTIONSTATEENUM_SUCCEEDED) {
+               return nil
+           }
+           if asState == string(openapiclient.TASKACTIONSTATEENUM_FAILED) {
+               return fmt.Errorf("failed to update DB query log config")
+           }
+       } else {
+           return retry.RetryableError(errors.New("unable to check DB query log config update status: " + message))
+       }
+       return retry.RetryableError(errors.New("DB query log config is being updated"))
+   })
+
+   if err != nil {
+       errorSummary := fmt.Sprintf("Unable to update DB query log config for cluster: %s", clusterId)
+       resp.Diagnostics.AddError(errorSummary, "The operation timed out waiting for DB query log config update operation.")
+       return
+   }
+
+   planConfig.ConfigID.Value = configId
+
+   dbqlConfig, readOK, readErrMsg := resourceRead(ctx, accountId, projectId, clusterId, integrationName, apiClient)
+   if !readOK {
+       resp.Diagnostics.AddError("Unable to read the state of Db Query log config", readErrMsg)
+       return
+   }
+
+   diags := resp.State.Set(ctx, &dbqlConfig)
+   resp.Diagnostics.Append(diags...)
+   if resp.Diagnostics.HasError() {
+       return
+   }
}

func (r resourceDbQueryLogging) Delete(ctx context.Context, req tfsdk.DeleteResourceRequest, resp *tfsdk.DeleteResourceResponse) {
-   resp.Diagnostics.AddError(
-       "Unsupported Operation",
-       "Delete is not currently supported.",
-   )
+   apiClient := r.p.client
+   var state DbQueryLoggingConfig
+   getConfigFromState(ctx, req.State, &state)
+   accountId := state.AccountID.Value
+   projectId := state.ProjectID.Value
+   clusterId := state.ClusterID.Value
+   configId := state.ConfigID.Value
+
+   _, err := apiClient.ClusterApi.RemovePgLogExporterConfig(ctx, accountId, projectId, clusterId, configId).Execute()
+   if err != nil {
+       resp.Diagnostics.AddError(fmt.Sprintf("Unable to remove DB query logging config for cluster: %s", clusterId), GetApiErrorDetails(err))
+       return
+   }
+
+   retryPolicy := retry.NewConstant(10 * time.Second)
+   retryPolicy = retry.WithMaxDuration(2400 * time.Second, retryPolicy)
+   err = retry.Do(ctx, retryPolicy, func(ctx context.Context) error {
+       asState, readInfoOK, message := getTaskState(accountId, projectId, clusterId, openapiclient.ENTITYTYPEENUM_CLUSTER, openapiclient.TASKTYPEENUM_DISABLE_DATABASE_QUERY_LOGGING, apiClient, ctx)
+       if readInfoOK {
+           if asState == string(openapiclient.TASKACTIONSTATEENUM_SUCCEEDED) {
+               return nil
+           }
+           if asState == string(openapiclient.TASKACTIONSTATEENUM_FAILED) {
+               return ErrFailedTask
+           }
+       } else {
+           return retry.RetryableError(errors.New("Unable to check status for DB query logging removal task: " + message))
+       }
+       return retry.RetryableError(errors.New("DB Query log configuration is being removed from the cluster"))
+   })
+
+   if err != nil {
+       resp.Diagnostics.AddError("Unable to remove Db Logging config from the cluster", "The operation timed out waiting for DB Query Logging removal to complete.")
+       return
+   }
+
+   resp.State.RemoveResource(ctx)
}

func (r resourceDbQueryLogging) ImportState(ctx context.Context, req tfsdk.ImportResourceStateRequest, resp *tfsdk.ImportResourceStateResponse) {
@@ -331,38 +463,21 @@ func (r resourceDbQueryLogging) Create(ctx context.Context, req tfsdk.CreateReso
        return
    }

-   var clusterId string
+   var clusterId = config.ClusterID.Value
    var integrationId string

-   clusterName := config.ClusterName.Value
    integrationName := config.IntegrationName.Value
-   clusterData, err := GetClusterByNameorID(accountId, projectId, "", clusterName, apiClient)
-   if err != nil {
-       resp.Diagnostics.AddError("Unable to fetch cluster details for cluster: " + clusterName, GetApiErrorDetails(err))
-       return
-   }
-   clusterId = clusterData.Info.Id

-   integrationConfig, _, err := apiClient.TelemetryProviderApi.
-       ListTelemetryProviders(context.Background(), accountId, projectId).
-       Name(integrationName).
-       Execute()
-
-   if err != nil {
-       resp.Diagnostics.AddError("Unable to fetch integration details for integration: " + integrationName, GetApiErrorDetails(err))
-       return
-   }
-
-   if len(integrationConfig.GetData()) < 1 {
-       message := fmt.Sprintf("Integration %s not found", integrationName)
-       resp.Diagnostics.AddError(message, message)
+   integrationConfig, ok, errMsg := getIntegrationId(ctx, accountId, projectId, integrationName, apiClient)
+   if !ok {
+       resp.Diagnostics.AddError("Unable to fetch integration details for: " + integrationName, errMsg)
        return
    }
-   integrationId = integrationConfig.GetData()[0].Info.Id
+   integrationId = integrationConfig.Info.Id

-   dbQueryLoggingConfigSpec, err := buildDbQueryLoggingSpec(config, integrationId)
+   dbQueryLoggingConfigSpec, err := buildDbQueryLoggingSpec(config, integrationId, openapiclient.NewPgLogExportConfigWithDefaults())
    if err != nil {
-       resp.Diagnostics.AddWarning("Unable to build DB query logging config spec", GetApiErrorDetails(err))
+       tflog.Warn(ctx, "Unable to build DB query logging config spec" + GetApiErrorDetails(err))
        resp.Diagnostics.AddError("Encountered error while enabling DB Query Logging", GetApiErrorDetails(err))
        return
    }
@@ -401,7 +516,7 @@ func (r resourceDbQueryLogging) Create(ctx context.Context, req tfsdk.CreateReso
    }

    dbQueryLoggingConfig, readOK, readErrMsg := resourceRead(ctx, accountId, projectId,
-       clusterId, clusterName, integrationName, apiClient)
+       clusterId, integrationName, apiClient)
    if !readOK {
        resp.Diagnostics.AddError("Unable to read the state of Db Query log config for the cluster", readErrMsg)
        return
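
Both the Update and Delete handlers in this diff wait for the backend task to settle with the same constant-interval polling loop. The sketch below isolates that pattern; it assumes the retry package used above is github.com/sethvargo/go-retry (its NewConstant, WithMaxDuration, Do, and RetryableError calls match that API), and checkTaskDone is a hypothetical stand-in for the provider's getTaskState helper.

package main

import (
    "context"
    "errors"
    "fmt"
    "time"

    "github.com/sethvargo/go-retry"
)

// checkTaskDone is a hypothetical stand-in for getTaskState: it reports whether
// the backend task has finished and whether it succeeded.
func checkTaskDone(ctx context.Context) (done bool, succeeded bool, err error) {
    // A real implementation would query the task-status API here.
    return true, true, nil
}

func main() {
    ctx := context.Background()

    // Same shape as the resource code above: poll every 10 seconds,
    // give up after 2400 seconds (40 minutes).
    policy := retry.WithMaxDuration(2400*time.Second, retry.NewConstant(10*time.Second))

    err := retry.Do(ctx, policy, func(ctx context.Context) error {
        done, succeeded, err := checkTaskDone(ctx)
        if err != nil {
            // A failed status check is treated as transient and retried.
            return retry.RetryableError(fmt.Errorf("unable to check task status: %w", err))
        }
        if !done {
            // Still in progress: keep polling until the max duration elapses.
            return retry.RetryableError(errors.New("task is still running"))
        }
        if !succeeded {
            // A plain (non-retryable) error stops the loop immediately.
            return errors.New("task failed")
        }
        return nil
    })
    if err != nil {
        fmt.Println("wait failed:", err)
    }
}

Returning a plain error from the callback is what lets the handlers above fail fast when the task reports a FAILED state instead of polling until the timeout.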