diff --git a/.ci/.semgrep-service-name0.yml b/.ci/.semgrep-service-name0.yml index c87b94b7ce5a..cf1f90e36d69 100644 --- a/.ci/.semgrep-service-name0.yml +++ b/.ci/.semgrep-service-name0.yml @@ -4451,3 +4451,31 @@ rules: - pattern-not-regex: "^TestAccConfigService" - pattern-regex: ^TestAcc.* severity: WARNING + - id: configservice-in-const-name + languages: + - go + message: Do not use "ConfigService" in const name inside configservice package + paths: + include: + - "/internal/service/configservice" + patterns: + - pattern: const $NAME = ... + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-regex: "(?i)ConfigService" + severity: WARNING + - id: configservice-in-var-name + languages: + - go + message: Do not use "ConfigService" in var name inside configservice package + paths: + include: + - "/internal/service/configservice" + patterns: + - pattern: var $NAME = ... + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-regex: "(?i)ConfigService" + severity: WARNING diff --git a/.ci/.semgrep-service-name1.yml b/.ci/.semgrep-service-name1.yml index dd4188e2c05f..0b707e13aa69 100644 --- a/.ci/.semgrep-service-name1.yml +++ b/.ci/.semgrep-service-name1.yml @@ -1,33 +1,5 @@ # Generated by internal/generate/servicesemgrep/main.go; DO NOT EDIT. rules: - - id: configservice-in-const-name - languages: - - go - message: Do not use "ConfigService" in const name inside configservice package - paths: - include: - - "/internal/service/configservice" - patterns: - - pattern: const $NAME = ... - - metavariable-pattern: - metavariable: $NAME - patterns: - - pattern-regex: "(?i)ConfigService" - severity: WARNING - - id: configservice-in-var-name - languages: - - go - message: Do not use "ConfigService" in var name inside configservice package - paths: - include: - - "/internal/service/configservice" - patterns: - - pattern: var $NAME = ... - - metavariable-pattern: - metavariable: $NAME - patterns: - - pattern-regex: "(?i)ConfigService" - severity: WARNING - id: connect-in-func-name languages: - go @@ -4445,3 +4417,61 @@ rules: - focus-metavariable: $NAME - pattern-not: func $NAME($T *testing.T) severity: WARNING + - id: iot-in-test-name + languages: + - go + message: Include "IoT" in test name + paths: + include: + - "/internal/service/iot/*_test.go" + patterns: + - pattern: func $NAME( ... ) + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-not-regex: "^TestAccIoT" + - pattern-regex: ^TestAcc.* + severity: WARNING + - id: iot-in-const-name + languages: + - go + message: Do not use "IoT" in const name inside iot package + paths: + include: + - "/internal/service/iot" + patterns: + - pattern: const $NAME = ... + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-regex: "(?i)IoT" + severity: WARNING + - id: iot-in-var-name + languages: + - go + message: Do not use "IoT" in var name inside iot package + paths: + include: + - "/internal/service/iot" + patterns: + - pattern: var $NAME = ... + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-regex: "(?i)IoT" + severity: WARNING + - id: ipam-in-test-name + languages: + - go + message: Include "IPAM" in test name + paths: + include: + - "/internal/service/ec2/ipam_*_test.go" + patterns: + - pattern: func $NAME( ... 
) + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-not-regex: "^TestAccIPAM" + - pattern-regex: ^TestAcc.* + severity: WARNING diff --git a/.ci/.semgrep-service-name2.yml b/.ci/.semgrep-service-name2.yml index 5819dadca7e8..8849b641e634 100644 --- a/.ci/.semgrep-service-name2.yml +++ b/.ci/.semgrep-service-name2.yml @@ -1,63 +1,5 @@ # Generated by internal/generate/servicesemgrep/main.go; DO NOT EDIT. rules: - - id: iot-in-test-name - languages: - - go - message: Include "IoT" in test name - paths: - include: - - "/internal/service/iot/*_test.go" - patterns: - - pattern: func $NAME( ... ) - - metavariable-pattern: - metavariable: $NAME - patterns: - - pattern-not-regex: "^TestAccIoT" - - pattern-regex: ^TestAcc.* - severity: WARNING - - id: iot-in-const-name - languages: - - go - message: Do not use "IoT" in const name inside iot package - paths: - include: - - "/internal/service/iot" - patterns: - - pattern: const $NAME = ... - - metavariable-pattern: - metavariable: $NAME - patterns: - - pattern-regex: "(?i)IoT" - severity: WARNING - - id: iot-in-var-name - languages: - - go - message: Do not use "IoT" in var name inside iot package - paths: - include: - - "/internal/service/iot" - patterns: - - pattern: var $NAME = ... - - metavariable-pattern: - metavariable: $NAME - patterns: - - pattern-regex: "(?i)IoT" - severity: WARNING - - id: ipam-in-test-name - languages: - - go - message: Include "IPAM" in test name - paths: - include: - - "/internal/service/ec2/ipam_*_test.go" - patterns: - - pattern: func $NAME( ... ) - - metavariable-pattern: - metavariable: $NAME - patterns: - - pattern-not-regex: "^TestAccIPAM" - - pattern-regex: ^TestAcc.* - severity: WARNING - id: ivs-in-func-name languages: - go @@ -4413,49 +4355,128 @@ rules: patterns: - pattern-regex: "(?i)RDS" severity: WARNING - - id: recyclebin-in-func-name + - id: rdsdata-in-func-name languages: - go - message: Do not use "recyclebin" in func name inside rbin package + message: Do not use "RDSData" in func name inside rdsdata package paths: include: - - "/internal/service/rbin" + - "/internal/service/rdsdata" exclude: - - "/internal/service/rbin/list_pages_gen.go" + - "/internal/service/rdsdata/list_pages_gen.go" patterns: - pattern: func $NAME( ... ) - metavariable-pattern: metavariable: $NAME patterns: - - pattern-regex: "(?i)recyclebin" + - pattern-regex: "(?i)RDSData" - focus-metavariable: $NAME - pattern-not: func $NAME($T *testing.T) severity: WARNING - - id: recyclebin-in-const-name + - id: rdsdata-in-test-name languages: - go - message: Do not use "recyclebin" in const name inside rbin package + message: Include "RDSData" in test name paths: include: - - "/internal/service/rbin" + - "/internal/service/rdsdata/*_test.go" + patterns: + - pattern: func $NAME( ... ) + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-not-regex: "^TestAccRDSData" + - pattern-regex: ^TestAcc.* + severity: WARNING + - id: rdsdata-in-const-name + languages: + - go + message: Do not use "RDSData" in const name inside rdsdata package + paths: + include: + - "/internal/service/rdsdata" patterns: - pattern: const $NAME = ... 
- metavariable-pattern: metavariable: $NAME patterns: - - pattern-regex: "(?i)recyclebin" + - pattern-regex: "(?i)RDSData" severity: WARNING - - id: recyclebin-in-var-name + - id: rdsdata-in-var-name languages: - go - message: Do not use "recyclebin" in var name inside rbin package + message: Do not use "RDSData" in var name inside rdsdata package paths: include: - - "/internal/service/rbin" + - "/internal/service/rdsdata" patterns: - pattern: var $NAME = ... + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-regex: "(?i)RDSData" + severity: WARNING + - id: rdsdataservice-in-func-name + languages: + - go + message: Do not use "rdsdataservice" in func name inside rdsdata package + paths: + include: + - "/internal/service/rdsdata" + exclude: + - "/internal/service/rdsdata/list_pages_gen.go" + patterns: + - pattern: func $NAME( ... ) + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-regex: "(?i)rdsdataservice" + - focus-metavariable: $NAME + - pattern-not: func $NAME($T *testing.T) + severity: WARNING + - id: rdsdataservice-in-const-name + languages: + - go + message: Do not use "rdsdataservice" in const name inside rdsdata package + paths: + include: + - "/internal/service/rdsdata" + patterns: + - pattern: const $NAME = ... + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-regex: "(?i)rdsdataservice" + severity: WARNING + - id: rdsdataservice-in-var-name + languages: + - go + message: Do not use "rdsdataservice" in var name inside rdsdata package + paths: + include: + - "/internal/service/rdsdata" + patterns: + - pattern: var $NAME = ... + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-regex: "(?i)rdsdataservice" + severity: WARNING + - id: recyclebin-in-func-name + languages: + - go + message: Do not use "recyclebin" in func name inside rbin package + paths: + include: + - "/internal/service/rbin" + exclude: + - "/internal/service/rbin/list_pages_gen.go" + patterns: + - pattern: func $NAME( ... ) - metavariable-pattern: metavariable: $NAME patterns: - pattern-regex: "(?i)recyclebin" + - focus-metavariable: $NAME + - pattern-not: func $NAME($T *testing.T) severity: WARNING diff --git a/.ci/.semgrep-service-name3.yml b/.ci/.semgrep-service-name3.yml index 833fcb162182..46600a3d14d7 100644 --- a/.ci/.semgrep-service-name3.yml +++ b/.ci/.semgrep-service-name3.yml @@ -1,5 +1,33 @@ # Generated by internal/generate/servicesemgrep/main.go; DO NOT EDIT. rules: + - id: recyclebin-in-const-name + languages: + - go + message: Do not use "recyclebin" in const name inside rbin package + paths: + include: + - "/internal/service/rbin" + patterns: + - pattern: const $NAME = ... + - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-regex: "(?i)recyclebin" + severity: WARNING + - id: recyclebin-in-var-name + languages: + - go + message: Do not use "recyclebin" in var name inside rbin package + paths: + include: + - "/internal/service/rbin" + patterns: + - pattern: var $NAME = ... 
+ - metavariable-pattern: + metavariable: $NAME + patterns: + - pattern-regex: "(?i)recyclebin" + severity: WARNING - id: redshift-in-func-name languages: - go diff --git a/.teamcity/components/generated/services_all.kt b/.teamcity/components/generated/services_all.kt index 63a969500adb..99ab6d56d79f 100644 --- a/.teamcity/components/generated/services_all.kt +++ b/.teamcity/components/generated/services_all.kt @@ -190,6 +190,7 @@ val services = mapOf( "ram" to ServiceSpec("RAM (Resource Access Manager)"), "rbin" to ServiceSpec("Recycle Bin (RBin)"), "rds" to ServiceSpec("RDS (Relational Database)", vpcLock = true), + "rdsdata" to ServiceSpec("RDS Data"), "redshift" to ServiceSpec("Redshift", vpcLock = true), "redshiftdata" to ServiceSpec("Redshift Data"), "redshiftserverless" to ServiceSpec("Redshift Serverless"), diff --git a/go.mod b/go.mod index 013d61a84b1b..0d3b42f3f2f2 100644 --- a/go.mod +++ b/go.mod @@ -205,6 +205,7 @@ require ( github.com/aws/aws-sdk-go-v2/service/ram v1.34.6 github.com/aws/aws-sdk-go-v2/service/rbin v1.26.6 github.com/aws/aws-sdk-go-v2/service/rds v1.108.1 + github.com/aws/aws-sdk-go-v2/service/rdsdata v1.32.6 github.com/aws/aws-sdk-go-v2/service/redshift v1.59.0 github.com/aws/aws-sdk-go-v2/service/redshiftdata v1.37.6 github.com/aws/aws-sdk-go-v2/service/redshiftserverless v1.31.8 diff --git a/go.sum b/go.sum index 0659acfcbd97..52e4be4096b9 100644 --- a/go.sum +++ b/go.sum @@ -431,6 +431,8 @@ github.com/aws/aws-sdk-go-v2/service/rbin v1.26.6 h1:wKVcl95mVcHW1rJMsf5SsA9T2zr github.com/aws/aws-sdk-go-v2/service/rbin v1.26.6/go.mod h1:LCbTwbuAosB0UYOB4eMr7CmzwKPaO5ZD+UXEhJ6TPn4= github.com/aws/aws-sdk-go-v2/service/rds v1.108.1 h1:Felh4W55+rsAjcg+EsRUdCWxXXyy3N9UJz4Ej5tLCC4= github.com/aws/aws-sdk-go-v2/service/rds v1.108.1/go.mod h1:VOBL5tbhS7AF0m5YpfwLuRBpb5QVp4EWSPizUr/D6iE= +github.com/aws/aws-sdk-go-v2/service/rdsdata v1.32.6 h1:3E/6Am67lTTCpSS3f0cxT4IDW4THvXArhfvEIhQJGmM= +github.com/aws/aws-sdk-go-v2/service/rdsdata v1.32.6/go.mod h1:ZodWJ7ybwqVRgzlhnO0I86ZUu9S/2QBsMLfHyOCDVoU= github.com/aws/aws-sdk-go-v2/service/redshift v1.59.0 h1:MtE4oUVeljvF2CWPZwzWERizY5uhZV7os1eJC9oA8BI= github.com/aws/aws-sdk-go-v2/service/redshift v1.59.0/go.mod h1:ARgrCFhclWArEevJ/GAn+UBBVc9+f9oFurQlyjx262I= github.com/aws/aws-sdk-go-v2/service/redshiftdata v1.37.6 h1:PC5iIPcOwMMqAocH4fuiyLKbEOKr9t75zhp7yysK0NY= diff --git a/internal/conns/awsclient_gen.go b/internal/conns/awsclient_gen.go index 1782f1943c0f..50d1125a6b18 100644 --- a/internal/conns/awsclient_gen.go +++ b/internal/conns/awsclient_gen.go @@ -193,6 +193,7 @@ import ( "github.com/aws/aws-sdk-go-v2/service/ram" "github.com/aws/aws-sdk-go-v2/service/rbin" "github.com/aws/aws-sdk-go-v2/service/rds" + "github.com/aws/aws-sdk-go-v2/service/rdsdata" "github.com/aws/aws-sdk-go-v2/service/redshift" "github.com/aws/aws-sdk-go-v2/service/redshiftdata" "github.com/aws/aws-sdk-go-v2/service/redshiftserverless" @@ -1019,6 +1020,10 @@ func (c *AWSClient) RDSClient(ctx context.Context) *rds.Client { return errs.Must(client[*rds.Client](ctx, c, names.RDS, make(map[string]any))) } +func (c *AWSClient) RDSDataClient(ctx context.Context) *rdsdata.Client { + return errs.Must(client[*rdsdata.Client](ctx, c, names.RDSData, make(map[string]any))) +} + func (c *AWSClient) RUMClient(ctx context.Context) *rum.Client { return errs.Must(client[*rum.Client](ctx, c, names.RUM, make(map[string]any))) } diff --git a/internal/provider/framework/provider_gen.go b/internal/provider/framework/provider_gen.go index e4a015994cac..0cb1373bd31b 100644 --- 
a/internal/provider/framework/provider_gen.go +++ b/internal/provider/framework/provider_gen.go @@ -1523,6 +1523,18 @@ func endpointsBlock() schema.SetNestedBlock { Description: "Use this to override the default service endpoint URL", }, + // rdsdata + + "rdsdata": schema.StringAttribute{ + Optional: true, + Description: "Use this to override the default service endpoint URL", + }, + + "rdsdataservice": schema.StringAttribute{ + Optional: true, + Description: "Use this to override the default service endpoint URL", + }, + // redshift "redshift": schema.StringAttribute{ diff --git a/internal/provider/sdkv2/provider_gen.go b/internal/provider/sdkv2/provider_gen.go index 3407e1fc6cb1..3ab54e596c00 100644 --- a/internal/provider/sdkv2/provider_gen.go +++ b/internal/provider/sdkv2/provider_gen.go @@ -1759,6 +1759,20 @@ func endpointsSchema() *schema.Schema { Description: "Use this to override the default service endpoint URL", }, + // rdsdata + + "rdsdata": { + Type: schema.TypeString, + Optional: true, + Description: "Use this to override the default service endpoint URL", + }, + + "rdsdataservice": { + Type: schema.TypeString, + Optional: true, + Description: "Use this to override the default service endpoint URL", + }, + // redshift "redshift": { @@ -3100,6 +3114,30 @@ func expandEndpoints(_ context.Context, tfList []any) (map[string]string, diag.D } } + case "rdsdata", "rdsdataservice": + const pkg = "rdsdata" + attrs := []string{"rdsdata", "rdsdataservice"} + for _, v := range attrs { + seen[v] = true + } + count := 0 + for _, attr := range attrs { + if v := tfMap[attr].(string); v != "" { + count++ + } + } + if count > 1 { + diags = append(diags, ConflictingEndpointsWarningDiag(elementPath, attrs...)) + } + if endpoints[pkg] == "" { + for _, attr := range attrs { + if v := tfMap[attr].(string); v != "" { + endpoints[pkg] = v + break + } + } + } + case "redshiftdata", "redshiftdataapiservice": const pkg = "redshiftdata" attrs := []string{"redshiftdata", "redshiftdataapiservice"} diff --git a/internal/provider/sdkv2/service_packages_gen.go b/internal/provider/sdkv2/service_packages_gen.go index 07a6d528f69b..f07ff868612d 100644 --- a/internal/provider/sdkv2/service_packages_gen.go +++ b/internal/provider/sdkv2/service_packages_gen.go @@ -197,6 +197,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/service/ram" "github.com/hashicorp/terraform-provider-aws/internal/service/rbin" "github.com/hashicorp/terraform-provider-aws/internal/service/rds" + "github.com/hashicorp/terraform-provider-aws/internal/service/rdsdata" "github.com/hashicorp/terraform-provider-aws/internal/service/redshift" "github.com/hashicorp/terraform-provider-aws/internal/service/redshiftdata" "github.com/hashicorp/terraform-provider-aws/internal/service/redshiftserverless" @@ -457,6 +458,7 @@ func servicePackages(ctx context.Context) []conns.ServicePackage { ram.ServicePackage(ctx), rbin.ServicePackage(ctx), rds.ServicePackage(ctx), + rdsdata.ServicePackage(ctx), redshift.ServicePackage(ctx), redshiftdata.ServicePackage(ctx), redshiftserverless.ServicePackage(ctx), diff --git a/internal/service/rdsdata/generate.go b/internal/service/rdsdata/generate.go new file mode 100644 index 000000000000..4e0e70f71f07 --- /dev/null +++ b/internal/service/rdsdata/generate.go @@ -0,0 +1,7 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +//go:generate go run ../../generate/servicepackage/main.go +// ONLY generate directives and package declaration! Do not add anything else to this file. 
+ +package rdsdata diff --git a/internal/service/rdsdata/query_data_source.go b/internal/service/rdsdata/query_data_source.go new file mode 100644 index 000000000000..9cac87d1b234 --- /dev/null +++ b/internal/service/rdsdata/query_data_source.go @@ -0,0 +1,168 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package rdsdata + +import ( + "context" + "encoding/json" + + "github.com/aws/aws-sdk-go-v2/service/rdsdata" + rdsdatatypes "github.com/aws/aws-sdk-go-v2/service/rdsdata/types" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-provider-aws/internal/framework" + "github.com/hashicorp/terraform-provider-aws/names" +) + +// @FrameworkDataSource("aws_rdsdata_query", name="Query") +func newDataSourceQuery(context.Context) (datasource.DataSourceWithConfigure, error) { + return &dataSourceQuery{}, nil +} + +type dataSourceQuery struct { + framework.DataSourceWithModel[dataSourceQueryModel] +} + +func (d *dataSourceQuery) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + names.AttrID: framework.IDAttribute(), + names.AttrDatabase: schema.StringAttribute{ + Optional: true, + }, + names.AttrResourceARN: schema.StringAttribute{ + Required: true, + }, + "secret_arn": schema.StringAttribute{ + Required: true, + }, + "sql": schema.StringAttribute{ + Required: true, + }, + "records": schema.StringAttribute{ + Computed: true, + }, + "number_of_records_updated": schema.Int64Attribute{ + Computed: true, + }, + }, + Blocks: map[string]schema.Block{ + names.AttrParameters: schema.ListNestedBlock{ + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + names.AttrName: schema.StringAttribute{ + Required: true, + }, + names.AttrValue: schema.StringAttribute{ + Required: true, + }, + "type_hint": schema.StringAttribute{ + Optional: true, + }, + }, + }, + }, + }, + } +} + +type dataSourceQueryModel struct { + framework.WithRegionModel + ID types.String `tfsdk:"id"` + Database types.String `tfsdk:"database"` + ResourceARN types.String `tfsdk:"resource_arn"` + SecretARN types.String `tfsdk:"secret_arn"` + SQL types.String `tfsdk:"sql"` + Parameters []dataSourceQueryParameterModel `tfsdk:"parameters"` + Records types.String `tfsdk:"records"` + NumberOfRecordsUpdated types.Int64 `tfsdk:"number_of_records_updated"` +} + +type dataSourceQueryParameterModel struct { + Name types.String `tfsdk:"name"` + Value types.String `tfsdk:"value"` + TypeHint types.String `tfsdk:"type_hint"` +} + +func (d *dataSourceQuery) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data dataSourceQueryModel + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) 
+ if resp.Diagnostics.HasError() { + return + } + + conn := d.Meta().RDSDataClient(ctx) + + input := rdsdata.ExecuteStatementInput{ + ResourceArn: data.ResourceARN.ValueStringPointer(), + SecretArn: data.SecretARN.ValueStringPointer(), + Sql: data.SQL.ValueStringPointer(), + FormatRecordsAs: rdsdatatypes.RecordsFormatTypeJson, + } + + if !data.Database.IsNull() { + input.Database = data.Database.ValueStringPointer() + } + + if len(data.Parameters) > 0 { + input.Parameters = expandSQLParameters(data.Parameters) + } + + output, err := conn.ExecuteStatement(ctx, &input) + if err != nil { + resp.Diagnostics.AddError("executing RDS Data API statement", err.Error()) + return + } + + data.ID = types.StringValue(data.ResourceARN.ValueString() + ":" + data.SQL.ValueString()) + data.Records = types.StringPointerValue(output.FormattedRecords) + data.NumberOfRecordsUpdated = types.Int64Value(output.NumberOfRecordsUpdated) + + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func expandSQLParameters(tfList []dataSourceQueryParameterModel) []rdsdatatypes.SqlParameter { + if len(tfList) == 0 { + return nil + } + + var apiObjects []rdsdatatypes.SqlParameter + + for _, tfObj := range tfList { + apiObject := rdsdatatypes.SqlParameter{ + Name: tfObj.Name.ValueStringPointer(), + } + + if !tfObj.TypeHint.IsNull() { + apiObject.TypeHint = rdsdatatypes.TypeHint(tfObj.TypeHint.ValueString()) + } + + // Convert value to Field type + valueStr := tfObj.Value.ValueString() + var field rdsdatatypes.Field + + // Try to parse as JSON first, otherwise treat as string + var jsonValue any + if err := json.Unmarshal([]byte(valueStr), &jsonValue); err == nil { + switch v := jsonValue.(type) { + case string: + field = &rdsdatatypes.FieldMemberStringValue{Value: v} + case float64: + field = &rdsdatatypes.FieldMemberDoubleValue{Value: v} + case bool: + field = &rdsdatatypes.FieldMemberBooleanValue{Value: v} + default: + field = &rdsdatatypes.FieldMemberStringValue{Value: valueStr} + } + } else { + field = &rdsdatatypes.FieldMemberStringValue{Value: valueStr} + } + + apiObject.Value = field + apiObjects = append(apiObjects, apiObject) + } + + return apiObjects +} diff --git a/internal/service/rdsdata/query_data_source_test.go b/internal/service/rdsdata/query_data_source_test.go new file mode 100644 index 000000000000..67344a0f9eee --- /dev/null +++ b/internal/service/rdsdata/query_data_source_test.go @@ -0,0 +1,126 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package rdsdata_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-aws/internal/acctest" + "github.com/hashicorp/terraform-provider-aws/names" +) + +func TestAccRDSDataQueryDataSource_basic(t *testing.T) { + ctx := acctest.Context(t) + dataSourceName := "data.aws_rdsdata_query.test" + rName := acctest.RandomWithPrefix(t, acctest.ResourcePrefix) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.RDSServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccQueryDataSourceConfig_basic(rName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(dataSourceName, "records"), + resource.TestCheckResourceAttr(dataSourceName, "sql", "SELECT SCHEMA_NAME FROM information_schema.SCHEMATA LIMIT 1"), + ), + }, + }, + }) +} + +func TestAccRDSDataQueryDataSource_withParameters(t *testing.T) { + ctx := acctest.Context(t) + dataSourceName := "data.aws_rdsdata_query.test" + rName := acctest.RandomWithPrefix(t, acctest.ResourcePrefix) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.RDSServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccQueryDataSourceConfig_withParameters(rName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(dataSourceName, "records"), + resource.TestCheckResourceAttr(dataSourceName, "sql", "SELECT :param1 as test_column"), + resource.TestCheckResourceAttr(dataSourceName, "parameters.#", "1"), + resource.TestCheckResourceAttr(dataSourceName, "parameters.0.name", "param1"), + resource.TestCheckResourceAttr(dataSourceName, "parameters.0.value", "test_value"), + ), + }, + }, + }) +} + +func testAccQueryDataSourceConfig_basic(rName string) string { + return acctest.ConfigCompose(testAccQueryDataSourceConfig_base(rName), ` +data "aws_rdsdata_query" "test" { + depends_on = [aws_rds_cluster_instance.test] + resource_arn = aws_rds_cluster.test.arn + secret_arn = aws_secretsmanager_secret.test.arn + sql = "SELECT SCHEMA_NAME FROM information_schema.SCHEMATA LIMIT 1" +} +`) +} + +func testAccQueryDataSourceConfig_withParameters(rName string) string { + return acctest.ConfigCompose(testAccQueryDataSourceConfig_base(rName), ` +data "aws_rdsdata_query" "test" { + depends_on = [aws_rds_cluster_instance.test] + resource_arn = aws_rds_cluster.test.arn + secret_arn = aws_secretsmanager_secret.test.arn + sql = "SELECT :param1 as test_column" + + parameters { + name = "param1" + value = "test_value" + } +} +`) +} + +func testAccQueryDataSourceConfig_base(rName string) string { + return fmt.Sprintf(` +resource "aws_rds_cluster" "test" { + cluster_identifier = %[1]q + engine = "aurora-mysql" + database_name = "test" + master_username = "username" + master_password = "mustbeeightcharacters" + backup_retention_period = 7 + preferred_backup_window = "07:00-09:00" + preferred_maintenance_window = "tue:04:00-tue:04:30" + skip_final_snapshot = true + enable_http_endpoint = true + + serverlessv2_scaling_configuration { + max_capacity = 8 + min_capacity = 0.5 + } +} + +resource "aws_rds_cluster_instance" "test" { + cluster_identifier = aws_rds_cluster.test.id + instance_class = "db.serverless" + engine = 
aws_rds_cluster.test.engine + engine_version = aws_rds_cluster.test.engine_version +} + +resource "aws_secretsmanager_secret" "test" { + name = %[1]q +} + +resource "aws_secretsmanager_secret_version" "test" { + secret_id = aws_secretsmanager_secret.test.id + secret_string = jsonencode({ + username = aws_rds_cluster.test.master_username + password = aws_rds_cluster.test.master_password + }) +} +`, rName) +} diff --git a/internal/service/rdsdata/query_resource.go b/internal/service/rdsdata/query_resource.go new file mode 100644 index 000000000000..ad04add2ec84 --- /dev/null +++ b/internal/service/rdsdata/query_resource.go @@ -0,0 +1,154 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package rdsdata + +import ( + "context" + + "github.com/aws/aws-sdk-go-v2/service/rdsdata" + rdsdatatypes "github.com/aws/aws-sdk-go-v2/service/rdsdata/types" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-provider-aws/internal/framework" + "github.com/hashicorp/terraform-provider-aws/names" +) + +// @FrameworkResource("aws_rdsdata_query", name="Query") +func newResourceQuery(context.Context) (resource.ResourceWithConfigure, error) { + return &resourceQuery{}, nil +} + +type resourceQuery struct { + framework.ResourceWithModel[resourceQueryModel] +} + +func (r *resourceQuery) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + names.AttrID: framework.IDAttribute(), + names.AttrDatabase: schema.StringAttribute{ + Optional: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + names.AttrResourceARN: schema.StringAttribute{ + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "secret_arn": schema.StringAttribute{ + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "sql": schema.StringAttribute{ + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "records": schema.StringAttribute{ + Computed: true, + }, + "number_of_records_updated": schema.Int64Attribute{ + Computed: true, + }, + }, + Blocks: map[string]schema.Block{ + names.AttrParameters: schema.ListNestedBlock{ + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + names.AttrName: schema.StringAttribute{ + Required: true, + }, + names.AttrValue: schema.StringAttribute{ + Required: true, + }, + "type_hint": schema.StringAttribute{ + Optional: true, + }, + }, + }, + }, + }, + } +} + +type resourceQueryModel struct { + framework.WithRegionModel + ID types.String `tfsdk:"id"` + Database types.String `tfsdk:"database"` + ResourceARN types.String `tfsdk:"resource_arn"` + SecretARN types.String `tfsdk:"secret_arn"` + SQL types.String `tfsdk:"sql"` + Parameters []resourceQueryParameterModel `tfsdk:"parameters"` + Records types.String `tfsdk:"records"` + NumberOfRecordsUpdated types.Int64 `tfsdk:"number_of_records_updated"` +} + +type resourceQueryParameterModel struct { + Name types.String `tfsdk:"name"` + Value types.String `tfsdk:"value"` + 
TypeHint types.String `tfsdk:"type_hint"` +} + +func (r *resourceQuery) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data resourceQueryModel + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + conn := r.Meta().RDSDataClient(ctx) + + input := rdsdata.ExecuteStatementInput{ + ResourceArn: data.ResourceARN.ValueStringPointer(), + SecretArn: data.SecretARN.ValueStringPointer(), + Sql: data.SQL.ValueStringPointer(), + FormatRecordsAs: rdsdatatypes.RecordsFormatTypeJson, + } + + if !data.Database.IsNull() { + input.Database = data.Database.ValueStringPointer() + } + + if len(data.Parameters) > 0 { + // Convert resource parameter model to data source parameter model for compatibility + var params []dataSourceQueryParameterModel + for _, p := range data.Parameters { + params = append(params, dataSourceQueryParameterModel(p)) + } + input.Parameters = expandSQLParameters(params) + } + + output, err := conn.ExecuteStatement(ctx, &input) + if err != nil { + resp.Diagnostics.AddError("executing RDS Data API statement", err.Error()) + return + } + + data.ID = types.StringValue(data.ResourceARN.ValueString() + ":" + data.SQL.ValueString()) + data.Records = types.StringPointerValue(output.FormattedRecords) + data.NumberOfRecordsUpdated = types.Int64Value(output.NumberOfRecordsUpdated) + + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (r *resourceQuery) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + // No-op: query results are stored in state and don't need to be refreshed +} + +func (r *resourceQuery) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + // No-op: all changes require replacement +} + +func (r *resourceQuery) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + // No-op: no API call needed, just remove from state +} diff --git a/internal/service/rdsdata/query_resource_test.go b/internal/service/rdsdata/query_resource_test.go new file mode 100644 index 000000000000..c40d235815b0 --- /dev/null +++ b/internal/service/rdsdata/query_resource_test.go @@ -0,0 +1,84 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package rdsdata_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-aws/internal/acctest" + "github.com/hashicorp/terraform-provider-aws/names" +) + +func TestAccRDSDataQueryResource_basic(t *testing.T) { + ctx := acctest.Context(t) + rName := acctest.RandomWithPrefix(t, acctest.ResourcePrefix) + resourceName := "aws_rdsdata_query.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.RDSDataServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccQueryResourceConfig_basic(rName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, names.AttrID), + resource.TestCheckResourceAttr(resourceName, "records", "[{\"1\":1}]"), + resource.TestCheckResourceAttr(resourceName, "number_of_records_updated", "0"), + ), + }, + }, + }) +} + +func TestAccRDSDataQueryResource_withParameters(t *testing.T) { + ctx := acctest.Context(t) + rName := acctest.RandomWithPrefix(t, acctest.ResourcePrefix) + resourceName := "aws_rdsdata_query.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.RDSDataServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccQueryResourceConfig_withParameters(rName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, names.AttrID), + resource.TestCheckResourceAttrSet(resourceName, "records"), + resource.TestCheckResourceAttr(resourceName, "number_of_records_updated", "0"), + ), + }, + }, + }) +} + +func testAccQueryResourceConfig_basic(rName string) string { + return acctest.ConfigCompose(testAccQueryDataSourceConfig_base(rName), ` +resource "aws_rdsdata_query" "test" { + depends_on = [aws_rds_cluster_instance.test] + resource_arn = aws_rds_cluster.test.arn + secret_arn = aws_secretsmanager_secret_version.test.arn + sql = "SELECT 1" +} +`) +} + +func testAccQueryResourceConfig_withParameters(rName string) string { + return acctest.ConfigCompose(testAccQueryDataSourceConfig_base(rName), ` +resource "aws_rdsdata_query" "test" { + depends_on = [aws_rds_cluster_instance.test] + resource_arn = aws_rds_cluster.test.arn + secret_arn = aws_secretsmanager_secret_version.test.arn + sql = "SELECT * FROM information_schema.tables WHERE table_name = :table_name" + database = aws_rds_cluster.test.database_name + + parameters { + name = "table_name" + value = "test_table" + } +} +`) +} diff --git a/internal/service/rdsdata/service_endpoint_resolver_gen.go b/internal/service/rdsdata/service_endpoint_resolver_gen.go new file mode 100644 index 000000000000..388a6a7fcca5 --- /dev/null +++ b/internal/service/rdsdata/service_endpoint_resolver_gen.go @@ -0,0 +1,82 @@ +// Code generated by internal/generate/servicepackage/main.go; DO NOT EDIT. 
+
+package rdsdata
+
+import (
+	"context"
+	"fmt"
+	"net"
+
+	"github.com/aws/aws-sdk-go-v2/aws"
+	"github.com/aws/aws-sdk-go-v2/service/rdsdata"
+	smithyendpoints "github.com/aws/smithy-go/endpoints"
+	"github.com/hashicorp/terraform-plugin-log/tflog"
+	"github.com/hashicorp/terraform-provider-aws/internal/errs"
+)
+
+var _ rdsdata.EndpointResolverV2 = resolverV2{}
+
+type resolverV2 struct {
+	defaultResolver rdsdata.EndpointResolverV2
+}
+
+func newEndpointResolverV2() resolverV2 {
+	return resolverV2{
+		defaultResolver: rdsdata.NewDefaultEndpointResolverV2(),
+	}
+}
+
+func (r resolverV2) ResolveEndpoint(ctx context.Context, params rdsdata.EndpointParameters) (endpoint smithyendpoints.Endpoint, err error) {
+	params = params.WithDefaults()
+	useFIPS := aws.ToBool(params.UseFIPS)
+
+	if eps := aws.ToString(params.Endpoint); eps != "" {
+		tflog.Debug(ctx, "setting endpoint", map[string]any{
+			"tf_aws.endpoint": eps,
+		})
+
+		if useFIPS {
+			tflog.Debug(ctx, "endpoint set, ignoring UseFIPSEndpoint setting")
+			params.UseFIPS = aws.Bool(false)
+		}
+
+		return r.defaultResolver.ResolveEndpoint(ctx, params)
+	} else if useFIPS {
+		ctx = tflog.SetField(ctx, "tf_aws.use_fips", useFIPS)
+
+		endpoint, err = r.defaultResolver.ResolveEndpoint(ctx, params)
+		if err != nil {
+			return endpoint, err
+		}
+
+		tflog.Debug(ctx, "endpoint resolved", map[string]any{
+			"tf_aws.endpoint": endpoint.URI.String(),
+		})
+
+		hostname := endpoint.URI.Hostname()
+		_, err = net.LookupHost(hostname)
+		if err != nil {
+			if dnsErr, ok := errs.As[*net.DNSError](err); ok && dnsErr.IsNotFound {
+				tflog.Debug(ctx, "default endpoint host not found, disabling FIPS", map[string]any{
+					"tf_aws.hostname": hostname,
+				})
+				params.UseFIPS = aws.Bool(false)
+			} else {
+				err = fmt.Errorf("looking up rdsdata endpoint %q: %w", hostname, err)
+				return
+			}
+		} else {
+			return endpoint, err
+		}
+	}
+
+	return r.defaultResolver.ResolveEndpoint(ctx, params)
+}
+
+func withBaseEndpoint(endpoint string) func(*rdsdata.Options) {
+	return func(o *rdsdata.Options) {
+		if endpoint != "" {
+			o.BaseEndpoint = aws.String(endpoint)
+		}
+	}
+}
diff --git a/internal/service/rdsdata/service_endpoints_gen_test.go b/internal/service/rdsdata/service_endpoints_gen_test.go
new file mode 100644
index 000000000000..9ad760e1ba1f
--- /dev/null
+++ b/internal/service/rdsdata/service_endpoints_gen_test.go
@@ -0,0 +1,687 @@
+// Code generated by internal/generate/serviceendpointtests/main.go; DO NOT EDIT.
+ +package rdsdata_test + +import ( + "context" + "errors" + "fmt" + "maps" + "net" + "net/url" + "os" + "path/filepath" + "reflect" + "strings" + "testing" + + "github.com/aws/aws-sdk-go-v2/aws" + awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware" + "github.com/aws/aws-sdk-go-v2/service/rdsdata" + "github.com/aws/smithy-go/middleware" + smithyhttp "github.com/aws/smithy-go/transport/http" + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/aws-sdk-go-base/v2/servicemocks" + "github.com/hashicorp/go-cty/cty" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + terraformsdk "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-provider-aws/internal/acctest" + "github.com/hashicorp/terraform-provider-aws/internal/conns" + "github.com/hashicorp/terraform-provider-aws/internal/errs" + "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" + "github.com/hashicorp/terraform-provider-aws/internal/provider/sdkv2" + "github.com/hashicorp/terraform-provider-aws/names" +) + +type endpointTestCase struct { + with []setupFunc + expected caseExpectations +} + +type caseSetup struct { + config map[string]any + configFile configFile + environmentVariables map[string]string +} + +type configFile struct { + baseUrl string + serviceUrl string +} + +type caseExpectations struct { + diags diag.Diagnostics + endpoint string + region string +} + +type apiCallParams struct { + endpoint string + region string +} + +type setupFunc func(setup *caseSetup) + +type callFunc func(ctx context.Context, t *testing.T, meta *conns.AWSClient) apiCallParams + +const ( + packageNameConfigEndpoint = "https://packagename-config.endpoint.test/" + awsServiceEnvvarEndpoint = "https://service-envvar.endpoint.test/" + baseEnvvarEndpoint = "https://base-envvar.endpoint.test/" + serviceConfigFileEndpoint = "https://service-configfile.endpoint.test/" + baseConfigFileEndpoint = "https://base-configfile.endpoint.test/" + + aliasName0ConfigEndpoint = "https://aliasname0-config.endpoint.test/" +) + +const ( + packageName = "rdsdata" + awsEnvVar = "AWS_ENDPOINT_URL_RDS_DATA" + baseEnvVar = "AWS_ENDPOINT_URL" + configParam = "rds_data" + + aliasName0 = "rdsdataservice" +) + +const ( + expectedCallRegion = "us-west-2" //lintignore:AWSAT003 +) + +func TestEndpointConfiguration(t *testing.T) { //nolint:paralleltest // uses t.Setenv + ctx := t.Context() + const providerRegion = "us-west-2" //lintignore:AWSAT003 + const expectedEndpointRegion = providerRegion + + testcases := map[string]endpointTestCase{ + "no config": { + with: []setupFunc{withNoConfig}, + expected: expectDefaultEndpoint(ctx, t, expectedEndpointRegion), + }, + + // Package name endpoint on Config + + "package name endpoint config": { + with: []setupFunc{ + withPackageNameEndpointInConfig, + }, + expected: expectPackageNameConfigEndpoint(), + }, + + "package name endpoint config overrides alias name 0 config": { + with: []setupFunc{ + withPackageNameEndpointInConfig, + withAliasName0EndpointInConfig, + }, + expected: conflictsWith(expectPackageNameConfigEndpoint()), + }, + + "package name endpoint config overrides aws service envvar": { + with: []setupFunc{ + withPackageNameEndpointInConfig, + withAwsEnvVar, + }, + expected: expectPackageNameConfigEndpoint(), + }, + + "package name endpoint config overrides base envvar": { + with: []setupFunc{ + withPackageNameEndpointInConfig, + withBaseEnvVar, + }, + expected: expectPackageNameConfigEndpoint(), + }, + + "package name endpoint config overrides service config file": 
{ + with: []setupFunc{ + withPackageNameEndpointInConfig, + withServiceEndpointInConfigFile, + }, + expected: expectPackageNameConfigEndpoint(), + }, + + "package name endpoint config overrides base config file": { + with: []setupFunc{ + withPackageNameEndpointInConfig, + withBaseEndpointInConfigFile, + }, + expected: expectPackageNameConfigEndpoint(), + }, + + // Alias name 0 endpoint on Config + + "alias name 0 endpoint config": { + with: []setupFunc{ + withAliasName0EndpointInConfig, + }, + expected: expectAliasName0ConfigEndpoint(), + }, + + "alias name 0 endpoint config overrides aws service envvar": { + with: []setupFunc{ + withAliasName0EndpointInConfig, + withAwsEnvVar, + }, + expected: expectAliasName0ConfigEndpoint(), + }, + + "alias name 0 endpoint config overrides base envvar": { + with: []setupFunc{ + withAliasName0EndpointInConfig, + withBaseEnvVar, + }, + expected: expectAliasName0ConfigEndpoint(), + }, + + "alias name 0 endpoint config overrides service config file": { + with: []setupFunc{ + withAliasName0EndpointInConfig, + withServiceEndpointInConfigFile, + }, + expected: expectAliasName0ConfigEndpoint(), + }, + + "alias name 0 endpoint config overrides base config file": { + with: []setupFunc{ + withAliasName0EndpointInConfig, + withBaseEndpointInConfigFile, + }, + expected: expectAliasName0ConfigEndpoint(), + }, + + // Service endpoint in AWS envvar + + "service aws envvar": { + with: []setupFunc{ + withAwsEnvVar, + }, + expected: expectAwsEnvVarEndpoint(), + }, + + "service aws envvar overrides base envvar": { + with: []setupFunc{ + withAwsEnvVar, + withBaseEnvVar, + }, + expected: expectAwsEnvVarEndpoint(), + }, + + "service aws envvar overrides service config file": { + with: []setupFunc{ + withAwsEnvVar, + withServiceEndpointInConfigFile, + }, + expected: expectAwsEnvVarEndpoint(), + }, + + "service aws envvar overrides base config file": { + with: []setupFunc{ + withAwsEnvVar, + withBaseEndpointInConfigFile, + }, + expected: expectAwsEnvVarEndpoint(), + }, + + // Base endpoint in envvar + + "base endpoint envvar": { + with: []setupFunc{ + withBaseEnvVar, + }, + expected: expectBaseEnvVarEndpoint(), + }, + + "base endpoint envvar overrides service config file": { + with: []setupFunc{ + withBaseEnvVar, + withServiceEndpointInConfigFile, + }, + expected: expectBaseEnvVarEndpoint(), + }, + + "base endpoint envvar overrides base config file": { + with: []setupFunc{ + withBaseEnvVar, + withBaseEndpointInConfigFile, + }, + expected: expectBaseEnvVarEndpoint(), + }, + + // Service endpoint in config file + + "service config file": { + with: []setupFunc{ + withServiceEndpointInConfigFile, + }, + expected: expectServiceConfigFileEndpoint(), + }, + + "service config file overrides base config file": { + with: []setupFunc{ + withServiceEndpointInConfigFile, + withBaseEndpointInConfigFile, + }, + expected: expectServiceConfigFileEndpoint(), + }, + + // Base endpoint in config file + + "base endpoint config file": { + with: []setupFunc{ + withBaseEndpointInConfigFile, + }, + expected: expectBaseConfigFileEndpoint(), + }, + + // Use FIPS endpoint on Config + + "use fips config": { + with: []setupFunc{ + withUseFIPSInConfig, + }, + expected: expectDefaultFIPSEndpoint(ctx, t, expectedEndpointRegion), + }, + + "use fips config with package name endpoint config": { + with: []setupFunc{ + withUseFIPSInConfig, + withPackageNameEndpointInConfig, + }, + expected: expectPackageNameConfigEndpoint(), + }, + } + + for name, testcase := range testcases { //nolint:paralleltest // uses t.Setenv 
+ t.Run(name, func(t *testing.T) { + testEndpointCase(ctx, t, providerRegion, testcase, callService) + }) + } +} + +func defaultEndpoint(ctx context.Context, region string) (url.URL, error) { + r := rdsdata.NewDefaultEndpointResolverV2() + + ep, err := r.ResolveEndpoint(ctx, rdsdata.EndpointParameters{ + Region: aws.String(region), + }) + if err != nil { + return url.URL{}, err + } + + if ep.URI.Path == "" { + ep.URI.Path = "/" + } + + return ep.URI, nil +} + +func defaultFIPSEndpoint(ctx context.Context, region string) (url.URL, error) { + r := rdsdata.NewDefaultEndpointResolverV2() + + ep, err := r.ResolveEndpoint(ctx, rdsdata.EndpointParameters{ + Region: aws.String(region), + UseFIPS: aws.Bool(true), + }) + if err != nil { + return url.URL{}, err + } + + if ep.URI.Path == "" { + ep.URI.Path = "/" + } + + return ep.URI, nil +} + +func callService(ctx context.Context, t *testing.T, meta *conns.AWSClient) apiCallParams { + t.Helper() + + client := meta.RDSDataClient(ctx) + + var result apiCallParams + + input := rdsdata.ExecuteStatementInput{ + ResourceArn: aws.String("arn:" + acctest.Partition() + ":rds:" + acctest.Region() + ":" + acctest.Ct12Digit + ":cluster:test"), + SecretArn: aws.String("arn:" + acctest.Partition() + ":secretsmanager:" + acctest.Region() + ":" + acctest.Ct12Digit + ":secret:test"), + Sql: aws.String("SELECT 1"), + } + _, err := client.ExecuteStatement(ctx, &input, + func(opts *rdsdata.Options) { + opts.APIOptions = append(opts.APIOptions, + addRetrieveEndpointURLMiddleware(t, &result.endpoint), + addRetrieveRegionMiddleware(&result.region), + addCancelRequestMiddleware(), + ) + }, + ) + if err == nil { + t.Fatal("Expected an error, got none") + } else if !errors.Is(err, errCancelOperation) { + t.Fatalf("Unexpected error: %s", err) + } + + return result +} + +func withNoConfig(_ *caseSetup) { + // no-op +} + +func withPackageNameEndpointInConfig(setup *caseSetup) { + if _, ok := setup.config[names.AttrEndpoints]; !ok { + setup.config[names.AttrEndpoints] = []any{ + map[string]any{}, + } + } + endpoints := setup.config[names.AttrEndpoints].([]any)[0].(map[string]any) + endpoints[packageName] = packageNameConfigEndpoint +} + +func withAliasName0EndpointInConfig(setup *caseSetup) { + if _, ok := setup.config[names.AttrEndpoints]; !ok { + setup.config[names.AttrEndpoints] = []any{ + map[string]any{}, + } + } + endpoints := setup.config[names.AttrEndpoints].([]any)[0].(map[string]any) + endpoints[aliasName0] = aliasName0ConfigEndpoint +} + +func conflictsWith(e caseExpectations) caseExpectations { + e.diags = append(e.diags, sdkv2.ConflictingEndpointsWarningDiag( + cty.GetAttrPath(names.AttrEndpoints).IndexInt(0), + packageName, + aliasName0, + )) + return e +} + +func withAwsEnvVar(setup *caseSetup) { + setup.environmentVariables[awsEnvVar] = awsServiceEnvvarEndpoint +} + +func withBaseEnvVar(setup *caseSetup) { + setup.environmentVariables[baseEnvVar] = baseEnvvarEndpoint +} + +func withServiceEndpointInConfigFile(setup *caseSetup) { + setup.configFile.serviceUrl = serviceConfigFileEndpoint +} + +func withBaseEndpointInConfigFile(setup *caseSetup) { + setup.configFile.baseUrl = baseConfigFileEndpoint +} + +func withUseFIPSInConfig(setup *caseSetup) { + setup.config["use_fips_endpoint"] = true +} + +func expectDefaultEndpoint(ctx context.Context, t *testing.T, region string) caseExpectations { + t.Helper() + + endpoint, err := defaultEndpoint(ctx, region) + if err != nil { + t.Fatalf("resolving accessanalyzer default endpoint: %s", err) + } + + return caseExpectations{ 
+ endpoint: endpoint.String(), + region: expectedCallRegion, + } +} + +func expectDefaultFIPSEndpoint(ctx context.Context, t *testing.T, region string) caseExpectations { + t.Helper() + + endpoint, err := defaultFIPSEndpoint(ctx, region) + if err != nil { + t.Fatalf("resolving accessanalyzer FIPS endpoint: %s", err) + } + + hostname := endpoint.Hostname() + _, err = net.LookupHost(hostname) + if dnsErr, ok := errs.As[*net.DNSError](err); ok && dnsErr.IsNotFound { + return expectDefaultEndpoint(ctx, t, region) + } else if err != nil { + t.Fatalf("looking up accessanalyzer endpoint %q: %s", hostname, err) + } + + return caseExpectations{ + endpoint: endpoint.String(), + region: expectedCallRegion, + } +} + +func expectPackageNameConfigEndpoint() caseExpectations { + return caseExpectations{ + endpoint: packageNameConfigEndpoint, + region: expectedCallRegion, + } +} + +func expectAliasName0ConfigEndpoint() caseExpectations { + return caseExpectations{ + endpoint: aliasName0ConfigEndpoint, + region: expectedCallRegion, + } +} + +func expectAwsEnvVarEndpoint() caseExpectations { + return caseExpectations{ + endpoint: awsServiceEnvvarEndpoint, + region: expectedCallRegion, + } +} + +func expectBaseEnvVarEndpoint() caseExpectations { + return caseExpectations{ + endpoint: baseEnvvarEndpoint, + region: expectedCallRegion, + } +} + +func expectServiceConfigFileEndpoint() caseExpectations { + return caseExpectations{ + endpoint: serviceConfigFileEndpoint, + region: expectedCallRegion, + } +} + +func expectBaseConfigFileEndpoint() caseExpectations { + return caseExpectations{ + endpoint: baseConfigFileEndpoint, + region: expectedCallRegion, + } +} + +func testEndpointCase(ctx context.Context, t *testing.T, region string, testcase endpointTestCase, callF callFunc) { + t.Helper() + + setup := caseSetup{ + config: map[string]any{}, + environmentVariables: map[string]string{}, + } + + for _, f := range testcase.with { + f(&setup) + } + + config := map[string]any{ + names.AttrAccessKey: servicemocks.MockStaticAccessKey, + names.AttrSecretKey: servicemocks.MockStaticSecretKey, + names.AttrRegion: region, + names.AttrSkipCredentialsValidation: true, + names.AttrSkipRequestingAccountID: true, + } + + maps.Copy(config, setup.config) + + if setup.configFile.baseUrl != "" || setup.configFile.serviceUrl != "" { + config[names.AttrProfile] = "default" + tempDir := t.TempDir() + writeSharedConfigFile(t, &config, tempDir, generateSharedConfigFile(setup.configFile)) + } + + for k, v := range setup.environmentVariables { + t.Setenv(k, v) + } + + p, err := sdkv2.NewProvider(ctx) + if err != nil { + t.Fatal(err) + } + + p.TerraformVersion = "1.0.0" + + expectedDiags := testcase.expected.diags + diags := p.Configure(ctx, terraformsdk.NewResourceConfigRaw(config)) + + if diff := cmp.Diff(diags, expectedDiags, cmp.Comparer(sdkdiag.Comparer)); diff != "" { + t.Errorf("unexpected diagnostics difference: %s", diff) + } + + if diags.HasError() { + return + } + + meta := p.Meta().(*conns.AWSClient) + + callParams := callF(ctx, t, meta) + + if e, a := testcase.expected.endpoint, callParams.endpoint; e != a { + t.Errorf("expected endpoint %q, got %q", e, a) + } + + if e, a := testcase.expected.region, callParams.region; e != a { + t.Errorf("expected region %q, got %q", e, a) + } +} + +func addRetrieveEndpointURLMiddleware(t *testing.T, endpoint *string) func(*middleware.Stack) error { + return func(stack *middleware.Stack) error { + return stack.Finalize.Add( + retrieveEndpointURLMiddleware(t, endpoint), + middleware.After, + ) 
+ } +} + +func retrieveEndpointURLMiddleware(t *testing.T, endpoint *string) middleware.FinalizeMiddleware { + return middleware.FinalizeMiddlewareFunc( + "Test: Retrieve Endpoint", + func(ctx context.Context, in middleware.FinalizeInput, next middleware.FinalizeHandler) (middleware.FinalizeOutput, middleware.Metadata, error) { + t.Helper() + + request, ok := in.Request.(*smithyhttp.Request) + if !ok { + t.Fatalf("Expected *github.com/aws/smithy-go/transport/http.Request, got %s", fullTypeName(in.Request)) + } + + url := request.URL + url.RawQuery = "" + url.Path = "/" + + *endpoint = url.String() + + return next.HandleFinalize(ctx, in) + }) +} + +func addRetrieveRegionMiddleware(region *string) func(*middleware.Stack) error { + return func(stack *middleware.Stack) error { + return stack.Serialize.Add( + retrieveRegionMiddleware(region), + middleware.After, + ) + } +} + +func retrieveRegionMiddleware(region *string) middleware.SerializeMiddleware { + return middleware.SerializeMiddlewareFunc( + "Test: Retrieve Region", + func(ctx context.Context, in middleware.SerializeInput, next middleware.SerializeHandler) (middleware.SerializeOutput, middleware.Metadata, error) { + *region = awsmiddleware.GetRegion(ctx) + + return next.HandleSerialize(ctx, in) + }, + ) +} + +var errCancelOperation = errors.New("Test: Canceling request") + +func addCancelRequestMiddleware() func(*middleware.Stack) error { + return func(stack *middleware.Stack) error { + return stack.Finalize.Add( + cancelRequestMiddleware(), + middleware.After, + ) + } +} + +// cancelRequestMiddleware creates a Smithy middleware that intercepts the request before sending and cancels it +func cancelRequestMiddleware() middleware.FinalizeMiddleware { + return middleware.FinalizeMiddlewareFunc( + "Test: Cancel Requests", + func(_ context.Context, in middleware.FinalizeInput, next middleware.FinalizeHandler) (middleware.FinalizeOutput, middleware.Metadata, error) { + return middleware.FinalizeOutput{}, middleware.Metadata{}, errCancelOperation + }) +} + +func fullTypeName(i any) string { + return fullValueTypeName(reflect.ValueOf(i)) +} + +func fullValueTypeName(v reflect.Value) string { + if v.Kind() == reflect.Ptr { + return "*" + fullValueTypeName(reflect.Indirect(v)) + } + + requestType := v.Type() + return fmt.Sprintf("%s.%s", requestType.PkgPath(), requestType.Name()) +} + +func generateSharedConfigFile(config configFile) string { + var buf strings.Builder + + buf.WriteString(` +[default] +aws_access_key_id = DefaultSharedCredentialsAccessKey +aws_secret_access_key = DefaultSharedCredentialsSecretKey +`) + if config.baseUrl != "" { + fmt.Fprintf(&buf, "endpoint_url = %s\n", config.baseUrl) + } + + if config.serviceUrl != "" { + fmt.Fprintf(&buf, ` +services = endpoint-test + +[services endpoint-test] +%[1]s = + endpoint_url = %[2]s +`, configParam, serviceConfigFileEndpoint) + } + + return buf.String() +} + +func writeSharedConfigFile(t *testing.T, config *map[string]any, tempDir, content string) string { + t.Helper() + + file, err := os.Create(filepath.Join(tempDir, "aws-sdk-go-base-shared-configuration-file")) + if err != nil { + t.Fatalf("creating shared configuration file: %s", err) + } + + _, err = file.WriteString(content) + if err != nil { + t.Fatalf(" writing shared configuration file: %s", err) + } + + if v, ok := (*config)[names.AttrSharedConfigFiles]; !ok { + (*config)[names.AttrSharedConfigFiles] = []any{file.Name()} + } else { + (*config)[names.AttrSharedConfigFiles] = append(v.([]any), file.Name()) + } + + return 
file.Name() +} diff --git a/internal/service/rdsdata/service_package_gen.go b/internal/service/rdsdata/service_package_gen.go new file mode 100644 index 000000000000..28cdc93511c6 --- /dev/null +++ b/internal/service/rdsdata/service_package_gen.go @@ -0,0 +1,102 @@ +// Code generated by internal/generate/servicepackage/main.go; DO NOT EDIT. + +package rdsdata + +import ( + "context" + "unique" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/rdsdata" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/hashicorp/terraform-provider-aws/internal/conns" + inttypes "github.com/hashicorp/terraform-provider-aws/internal/types" + "github.com/hashicorp/terraform-provider-aws/internal/vcr" + "github.com/hashicorp/terraform-provider-aws/names" +) + +type servicePackage struct{} + +func (p *servicePackage) FrameworkDataSources(ctx context.Context) []*inttypes.ServicePackageFrameworkDataSource { + return []*inttypes.ServicePackageFrameworkDataSource{ + { + Factory: newDataSourceQuery, + TypeName: "aws_rdsdata_query", + Name: "Query", + Region: unique.Make(inttypes.ResourceRegionDefault()), + }, + } +} + +func (p *servicePackage) FrameworkResources(ctx context.Context) []*inttypes.ServicePackageFrameworkResource { + return []*inttypes.ServicePackageFrameworkResource{ + { + Factory: newResourceQuery, + TypeName: "aws_rdsdata_query", + Name: "Query", + Region: unique.Make(inttypes.ResourceRegionDefault()), + }, + } +} + +func (p *servicePackage) SDKDataSources(ctx context.Context) []*inttypes.ServicePackageSDKDataSource { + return []*inttypes.ServicePackageSDKDataSource{} +} + +func (p *servicePackage) SDKResources(ctx context.Context) []*inttypes.ServicePackageSDKResource { + return []*inttypes.ServicePackageSDKResource{} +} + +func (p *servicePackage) ServicePackageName() string { + return names.RDSData +} + +// NewClient returns a new AWS SDK for Go v2 client for this service package's AWS API. +func (p *servicePackage) NewClient(ctx context.Context, config map[string]any) (*rdsdata.Client, error) { + cfg := *(config["aws_sdkv2_config"].(*aws.Config)) + optFns := []func(*rdsdata.Options){ + rdsdata.WithEndpointResolverV2(newEndpointResolverV2()), + withBaseEndpoint(config[names.AttrEndpoint].(string)), + func(o *rdsdata.Options) { + if region := config[names.AttrRegion].(string); o.Region != region { + tflog.Info(ctx, "overriding provider-configured AWS API region", map[string]any{ + "service": p.ServicePackageName(), + "original_region": o.Region, + "override_region": region, + }) + o.Region = region + } + }, + func(o *rdsdata.Options) { + if inContext, ok := conns.FromContext(ctx); ok && inContext.VCREnabled() { + tflog.Info(ctx, "overriding retry behavior to immediately return VCR errors") + o.Retryer = conns.AddIsErrorRetryables(cfg.Retryer().(aws.RetryerV2), vcr.InteractionNotFoundRetryableFunc) + } + }, + withExtraOptions(ctx, p, config), + } + + return rdsdata.NewFromConfig(cfg, optFns...), nil +} + +// withExtraOptions returns a functional option that allows this service package to specify extra API client options. +// This option is always called after any generated options. 
+func withExtraOptions(ctx context.Context, sp conns.ServicePackage, config map[string]any) func(*rdsdata.Options) { + if v, ok := sp.(interface { + withExtraOptions(context.Context, map[string]any) []func(*rdsdata.Options) + }); ok { + optFns := v.withExtraOptions(ctx, config) + + return func(o *rdsdata.Options) { + for _, optFn := range optFns { + optFn(o) + } + } + } + + return func(*rdsdata.Options) {} +} + +func ServicePackage(ctx context.Context) conns.ServicePackage { + return &servicePackage{} +} diff --git a/internal/sweep/service_packages_gen_test.go b/internal/sweep/service_packages_gen_test.go index b65cd9455036..0b23d82267af 100644 --- a/internal/sweep/service_packages_gen_test.go +++ b/internal/sweep/service_packages_gen_test.go @@ -197,6 +197,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/service/ram" "github.com/hashicorp/terraform-provider-aws/internal/service/rbin" "github.com/hashicorp/terraform-provider-aws/internal/service/rds" + "github.com/hashicorp/terraform-provider-aws/internal/service/rdsdata" "github.com/hashicorp/terraform-provider-aws/internal/service/redshift" "github.com/hashicorp/terraform-provider-aws/internal/service/redshiftdata" "github.com/hashicorp/terraform-provider-aws/internal/service/redshiftserverless" @@ -457,6 +458,7 @@ func servicePackages(ctx context.Context) []conns.ServicePackage { ram.ServicePackage(ctx), rbin.ServicePackage(ctx), rds.ServicePackage(ctx), + rdsdata.ServicePackage(ctx), redshift.ServicePackage(ctx), redshiftdata.ServicePackage(ctx), redshiftserverless.ServicePackage(ctx), diff --git a/names/consts_gen.go b/names/consts_gen.go index 065cdebdd073..352d8a6cf3c2 100644 --- a/names/consts_gen.go +++ b/names/consts_gen.go @@ -191,6 +191,7 @@ const ( RAM = "ram" RBin = "rbin" RDS = "rds" + RDSData = "rdsdata" RUM = "rum" Redshift = "redshift" RedshiftData = "redshiftdata" @@ -451,6 +452,7 @@ const ( RAMServiceID = "RAM" RBinServiceID = "rbin" RDSServiceID = "RDS" + RDSDataServiceID = "RDS Data" RUMServiceID = "RUM" RedshiftServiceID = "Redshift" RedshiftDataServiceID = "Redshift Data" diff --git a/names/data/names_data.hcl b/names/data/names_data.hcl index 9ec22f8d3924..e5df46558b52 100644 --- a/names/data/names_data.hcl +++ b/names/data/names_data.hcl @@ -6814,6 +6814,11 @@ service "rdsdata" { arn_namespace = "rdsdata" } + endpoint_info { + endpoint_api_call = "ExecuteStatement" + endpoint_api_params = "ResourceArn: aws.String(\"arn:\" + acctest.Partition() + \":rds:\" + acctest.Region() + \":\" + acctest.Ct12Digit + \":cluster:test\"),\n\t\tSecretArn: aws.String(\"arn:\" + acctest.Partition() + \":secretsmanager:\" + acctest.Region() + \":\" + acctest.Ct12Digit + \":secret:test\"),\n\t\tSql: aws.String(\"SELECT 1\")" + } + names { aliases = ["rdsdataservice"] provider_name_upper = "RDSData" @@ -6827,7 +6832,6 @@ service "rdsdata" { provider_package_correct = "rdsdata" doc_prefix = ["rdsdata_"] brand = "Amazon" - not_implemented = true } service "pi" { diff --git a/website/allowed-subcategories.txt b/website/allowed-subcategories.txt index d0fef3789c3b..3858e059be93 100644 --- a/website/allowed-subcategories.txt +++ b/website/allowed-subcategories.txt @@ -190,6 +190,7 @@ QLDB (Quantum Ledger Database) QuickSight RAM (Resource Access Manager) RDS (Relational Database) +RDS Data Recycle Bin (RBin) Redshift Redshift Data diff --git a/website/docs/d/rdsdata_query.html.markdown b/website/docs/d/rdsdata_query.html.markdown new file mode 100644 index 000000000000..e00116fa0a1b --- /dev/null +++ 
b/website/docs/d/rdsdata_query.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "RDS Data" +layout: "aws" +page_title: "AWS: aws_rdsdata_query" +description: |- + Executes SQL queries against RDS clusters using the RDS Data API. +--- + +# Data Source: aws_rdsdata_query + +Executes SQL queries against RDS clusters using the RDS Data API. This data source allows you to run SQL statements and retrieve results in JSON format. + +~> **Note:** This data source is ideal for SELECT queries that need to be executed multiple times during Terraform operations. For one-time operations like DDL statements, INSERT, UPDATE, or DELETE operations, consider using the [`aws_rdsdata_query` resource](/docs/providers/aws/r/rdsdata_query.html) instead. + +## Example Usage + +### Basic Query + +```terraform +data "aws_rdsdata_query" "example" { + resource_arn = aws_rds_cluster.example.arn + secret_arn = aws_secretsmanager_secret.example.arn + sql = "SELECT * FROM users LIMIT 10" +} +``` + +### Query with Parameters + +```terraform +data "aws_rdsdata_query" "example" { + resource_arn = aws_rds_cluster.example.arn + secret_arn = aws_secretsmanager_secret.example.arn + sql = "SELECT * FROM users WHERE status = :status" + database = "myapp" + + parameters { + name = "status" + value = "active" + } +} +``` + +### Query with Multiple Parameters + +```terraform +data "aws_rdsdata_query" "example" { + resource_arn = aws_rds_cluster.example.arn + secret_arn = aws_secretsmanager_secret.example.arn + sql = "SELECT * FROM orders WHERE user_id = :user_id AND created_at > :date" + + parameters { + name = "user_id" + value = "123" + } + + parameters { + name = "date" + value = "2023-01-01" + } +} +``` + +## Argument Reference + +This data source supports the following arguments: + +* `resource_arn` - (Required) The Amazon Resource Name (ARN) of the Aurora Serverless DB cluster. +* `secret_arn` - (Required) The ARN of the secret that enables access to the DB cluster. The secret must contain the database credentials. +* `sql` - (Required) The SQL statement to execute. +* `database` - (Optional) The name of the database to execute the statement against. +* `parameters` - (Optional) Parameters for the SQL statement. See [Parameters](#parameters) below. +* `region` - (Optional) The AWS region where the RDS cluster is located. If not specified, the provider region is used. + +### Parameters + +The `parameters` block supports the following: + +* `name` - (Required) The name of the parameter. +* `value` - (Required) The value of the parameter as a string. +* `type_hint` - (Optional) A hint that specifies the correct object type for the parameter value. Valid values: `DATE`, `DECIMAL`, `JSON`, `TIME`, `TIMESTAMP`, `UUID`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `records` - The records returned by the SQL statement in JSON format. +* `number_of_records_updated` - The number of records updated by the request (for DML statements). + +## Notes + +* This data source requires the Aurora Serverless cluster to have the Data API enabled. +* The secret must be created in AWS Secrets Manager and contain the database credentials in the correct format. +* Results are returned in JSON format when using `SELECT` statements. +* For non-SELECT statements (INSERT, UPDATE, DELETE), the `number_of_records_updated` attribute will contain the count of affected rows. +* The Data API has a 1MB limit for response data. Large result sets may be truncated. 
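+
+### Using `type_hint` and Decoding Results
+
+The following sketch is purely illustrative (the table, column names, and the shape of the decoded JSON are assumptions): it combines the `type_hint` parameter argument described above with Terraform's `jsondecode()` function to consume the JSON-formatted `records` attribute elsewhere in a configuration.
+
+```terraform
+data "aws_rdsdata_query" "recent_orders" {
+  resource_arn = aws_rds_cluster.example.arn
+  secret_arn   = aws_secretsmanager_secret.example.arn
+  database     = "myapp"
+  sql          = "SELECT id, total FROM orders WHERE created_at > :since"
+
+  parameters {
+    name      = "since"
+    value     = "2024-01-01"
+    type_hint = "DATE"
+  }
+}
+
+# Decode the JSON string returned in `records` so other configuration can reference it.
+output "recent_orders" {
+  value = jsondecode(data.aws_rdsdata_query.recent_orders.records)
+}
+```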
diff --git a/website/docs/guides/custom-service-endpoints.html.markdown b/website/docs/guides/custom-service-endpoints.html.markdown
index 9d363d48e057..462ae5ba18da 100644
--- a/website/docs/guides/custom-service-endpoints.html.markdown
+++ b/website/docs/guides/custom-service-endpoints.html.markdown
@@ -272,6 +272,7 @@ provider "aws" {
 |RAM (Resource Access Manager)|`ram`|`AWS_ENDPOINT_URL_RAM`|`ram`|
 |Recycle Bin (RBin)|`rbin`(or `recyclebin`)|`AWS_ENDPOINT_URL_RBIN`|`rbin`|
 |RDS (Relational Database)|`rds`|`AWS_ENDPOINT_URL_RDS`|`rds`|
+|RDS Data|`rdsdata`(or `rdsdataservice`)|`AWS_ENDPOINT_URL_RDS_DATA`|`rds_data`|
 |Redshift|`redshift`|`AWS_ENDPOINT_URL_REDSHIFT`|`redshift`|
 |Redshift Data|`redshiftdata`(or `redshiftdataapiservice`)|`AWS_ENDPOINT_URL_REDSHIFT_DATA`|`redshift_data`|
 |Redshift Serverless|`redshiftserverless`|`AWS_ENDPOINT_URL_REDSHIFT_SERVERLESS`|`redshift_serverless`|
diff --git a/website/docs/r/rdsdata_query.html.markdown b/website/docs/r/rdsdata_query.html.markdown
new file mode 100644
index 000000000000..f09c04e44c87
--- /dev/null
+++ b/website/docs/r/rdsdata_query.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "RDS Data"
+layout: "aws"
+page_title: "AWS: aws_rdsdata_query"
+description: |-
+  Executes a SQL query against an RDS cluster using the RDS Data API.
+---
+
+# Resource: aws_rdsdata_query
+
+Executes a SQL query against an RDS cluster using the RDS Data API. The query is executed once during resource creation, and any change to the query or its parameters forces replacement of the resource.
+
+~> **Note:** For queries that need to be executed multiple times or for retrieving data (SELECT queries), consider using the [`aws_rdsdata_query` data source](/docs/providers/aws/d/rdsdata_query.html) instead. Use this resource for one-time operations like DDL statements, INSERT, UPDATE, or DELETE operations.
+
+## Example Usage
+
+### Basic Usage
+
+```terraform
+resource "aws_rdsdata_query" "example" {
+  resource_arn = aws_rds_cluster.example.arn
+  secret_arn   = aws_secretsmanager_secret.example.arn
+  sql          = "SELECT * FROM users WHERE active = true"
+  database     = "mydb"
+}
+```
+
+### With Parameters
+
+```terraform
+resource "aws_rdsdata_query" "example" {
+  resource_arn = aws_rds_cluster.example.arn
+  secret_arn   = aws_secretsmanager_secret.example.arn
+  sql          = "INSERT INTO users (name, email) VALUES (:name, :email)"
+  database     = "mydb"
+
+  parameters {
+    name  = "name"
+    value = "John Doe"
+  }
+
+  parameters {
+    name  = "email"
+    value = "john@example.com"
+  }
+}
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resource_arn` - (Required) The Amazon Resource Name (ARN) of the RDS cluster.
+* `secret_arn` - (Required) The ARN of the secret that enables access to the DB cluster. The secret must contain the database credentials.
+* `sql` - (Required) The SQL statement to execute.
+* `database` - (Optional) The name of the database to execute the statement against.
+* `parameters` - (Optional) Parameters for the SQL statement. See [Parameters](#parameters) below.
+* `region` - (Optional) The AWS region where the RDS cluster is located. If not specified, the provider region is used.
+
+### Parameters
+
+The `parameters` block supports the following:
+
+* `name` - (Required) The name of the parameter.
+* `value` - (Required) The value of the parameter as a string.
+* `type_hint` - (Optional) A hint that specifies the correct object type for the parameter value. Valid values: `DATE`, `DECIMAL`, `JSON`, `TIME`, `TIMESTAMP`, `UUID`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The resource identifier.
+* `records` - The records returned by the SQL statement in JSON format.
+* `number_of_records_updated` - The number of records updated by the statement.
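+
+## Notes
+
+As the note above says, this resource is best suited to one-time operations such as DDL statements. The following sketch is purely illustrative (the `audit_log` table and its columns are assumptions, not part of any real schema) and shows a simple schema migration expressed as a one-time query:
+
+```terraform
+resource "aws_rdsdata_query" "create_audit_table" {
+  resource_arn = aws_rds_cluster.example.arn
+  secret_arn   = aws_secretsmanager_secret.example.arn
+  database     = "mydb"
+  sql          = "CREATE TABLE IF NOT EXISTS audit_log (id SERIAL PRIMARY KEY, action TEXT NOT NULL, created_at TIMESTAMP DEFAULT now())"
+}
+```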
+
+## Import
+
+You cannot import this resource. The query is executed once at creation time, so there is no remote object to read back into state.