
Commit bafad00

APIT-2476: revert (#477)
1 parent 056ee56 commit bafad00


5 files changed (+5, -80 lines)


CHANGELOG.md

Lines changed: 0 additions & 1 deletion
@@ -4,7 +4,6 @@
 
 **New features:**
 * Added support for the new `latest_offsets` and `latest_offsets_timestamp` attributes of `confluent_flink_statement` [resource](https://registry.terraform.io/providers/confluentinc/confluent/latest/docs/resources/confluent_flink_statement) in a [General Availability lifecycle stage](https://docs.confluent.io/cloud/current/api.html#section/Versioning/API-Lifecycle-Policy).
-* Added support for the new `properties_sensitive` block of `confluent_flink_statement` [resource](https://registry.terraform.io/providers/confluentinc/confluent/latest/docs/resources/confluent_flink_statement) in a [General Availability lifecycle stage](https://docs.confluent.io/cloud/current/api.html#section/Versioning/API-Lifecycle-Policy) to resolve [#397](https://github.com/confluentinc/terraform-provider-confluent/issues/397).
 * Added support for the new `versions` block of `confluent_flink_artifact` [resource](https://registry.terraform.io/providers/confluentinc/confluent/latest/docs/resources/confluent_flink_artifact) and [data source](https://registry.terraform.io/providers/confluentinc/confluent/latest/docs/data-sources/confluent_flink_artifact) in a [General Availability lifecycle stage](https://docs.confluent.io/cloud/current/api.html#section/Versioning/API-Lifecycle-Policy).
 
 **Examples:**

docs/resources/confluent_flink_statement.md

Lines changed: 0 additions & 20 deletions
@@ -80,23 +80,6 @@ resource "confluent_flink_statement" "example" {
 }
 ```
 
-Example of `confluent_flink_statement` that creates a model:
-```
-resource "confluent_flink_statement" "example" {
-  statement  = "CREATE MODEL `vector_encoding` INPUT (input STRING) OUTPUT (vector ARRAY<FLOAT>) WITH( 'TASK' = 'classification','PROVIDER' = 'OPENAI','OPENAI.ENDPOINT' = 'https://api.openai.com/v1/embeddings','OPENAI.API_KEY' = '{{sessionconfig/sql.secrets.openaikey}}');"
-  properties = {
-    "sql.current-catalog"  = var.confluent_environment_display_name
-    "sql.current-database" = var.confluent_kafka_cluster_display_name
-  }
-  properties_sensitive = {
-    "sql.secrets.openaikey" : "***REDACTED***"
-  }
-  lifecycle {
-    prevent_destroy = true
-  }
-}
-```
-
 <!-- schema generated by tfplugindocs -->
 ## Argument Reference
 
@@ -124,9 +107,6 @@ The following arguments are supported:
 - `properties` - (Optional Map) The custom topic settings to set:
   - `name` - (Required String) The setting name, for example, `sql.local-time-zone`.
   - `value` - (Required String) The setting value, for example, `GMT-08:00`.
-- `properties_sensitive` - (Optional Map) Block for sensitive statement properties:
-  - `name` - (Required String) The setting name, for example, `sql.secrets.openaikey`.
-  - `value` - (Required String) The setting value, for example, `s1234`.
 
 - `stopped` - (Optional Boolean) The boolean flag to control whether the running Flink Statement should be stopped. Defaults to `false`. Update it to `true` to stop the statement. Subsequently, update it to `false` to resume the statement.
 
internal/provider/resource_flink_statement.go

Lines changed: 5 additions & 54 deletions
@@ -23,10 +23,8 @@ import (
 	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
-	"github.com/samber/lo"
 	"net/http"
 	"regexp"
-	"strings"
 	"time"
 )
 
@@ -35,7 +33,6 @@ const (
 	paramStatement              = "statement"
 	paramComputePool            = "compute_pool"
 	paramProperties             = "properties"
-	paramPropertiesSensitive    = "properties_sensitive"
 	paramStopped                = "stopped"
 	paramLatestOffsets          = "latest_offsets"
 	paramLatestOffsetsTimestamp = "latest_offsets_timestamp"
@@ -84,16 +81,6 @@ func flinkStatementResource() *schema.Resource {
 				Optional: true,
 				Computed: true,
 			},
-			paramPropertiesSensitive: {
-				Type: schema.TypeMap,
-				Elem: &schema.Schema{
-					Type: schema.TypeString,
-				},
-				Sensitive: true,
-				Optional:  true,
-				Computed:  true,
-				ForceNew:  false,
-			},
 			paramStopped: {
 				Type:     schema.TypeBool,
 				Optional: true,
@@ -164,14 +151,11 @@ func flinkStatementCreate(ctx context.Context, d *schema.ResourceData, meta inte
 	}
 
 	statement := d.Get(paramStatement).(string)
-
-	mergedProperties, sensitiveProperties, _ := extractFlinkProperties(d)
-
-	tflog.Debug(ctx, fmt.Sprintf("SENSITIVE VALUES: %s", sensitiveProperties))
+	properties := convertToStringStringMap(d.Get(paramProperties).(map[string]interface{}))
 
 	spec := fgb.NewSqlV1StatementSpec()
 	spec.SetStatement(statement)
-	spec.SetProperties(mergedProperties)
+	spec.SetProperties(properties)
 	spec.SetComputePoolId(computePoolId)
 	spec.SetPrincipal(principalId)
 
@@ -183,11 +167,6 @@ func flinkStatementCreate(ctx context.Context, d *schema.ResourceData, meta inte
 	if err != nil {
 		return diag.Errorf("error creating Flink Statement: error marshaling %#v to json: %s", createFlinkStatementRequest, createDescriptiveError(err))
 	}
-
-	if err := d.Set(paramPropertiesSensitive, sensitiveProperties); err != nil {
-		return diag.FromErr(createDescriptiveError(err))
-	}
-
 	tflog.Debug(ctx, fmt.Sprintf("Creating new Flink Statement: %s", createFlinkStatementRequestJson))
 
 	createdFlinkStatement, _, err := executeFlinkStatementCreate(flinkRestClient.apiContext(ctx), flinkRestClient, createFlinkStatementRequest)
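With the merge path reverted, the create function reads `properties` directly. `convertToStringStringMap` is an existing helper elsewhere in the provider whose body is not part of this diff; the following is a minimal sketch of what such a conversion plausibly looks like (an assumption for illustration, not the provider's actual code):

```
package provider

// Hypothetical sketch only: the provider's actual convertToStringStringMap
// lives elsewhere in this package and may differ.
func convertToStringStringMap(in map[string]interface{}) map[string]string {
	out := make(map[string]string, len(in))
	for key, value := range in {
		// Values from a schema.TypeMap with a string Elem are strings,
		// but guard the type assertion anyway.
		if s, ok := value.(string); ok {
			out[key] = s
		}
	}
	return out
}
```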
@@ -260,8 +239,8 @@ func flinkStatementUpdate(ctx context.Context, d *schema.ResourceData, meta inte
 	// Updating anything else is not supported at this moment
 	// stopped: false -> true to trigger flinkStatementStop
 	// stopped: true -> false to trigger flinkStatementResume
-	if d.HasChangesExcept(paramStopped, paramPropertiesSensitive) {
-		return diag.Errorf("error updating Flink Statement %q: only %q and %q attribute can be updated for Flink Statement", d.Id(), paramStopped, paramPropertiesSensitive)
+	if d.HasChangeExcept(paramStopped) {
+		return diag.Errorf(`error updating Flink Statement %q: only %q attribute can be updated for Flink Statement, "true" -> "false" to trigger resuming, "false" -> "true" to trigger stopping`, d.Id(), paramStopped)
 	}
 
 	if d.Get(paramStopped).(bool) == false {
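The guard reverts from the variadic `HasChangesExcept` to the single-key `HasChangeExcept`; both are methods on `*schema.ResourceData` in terraform-plugin-sdk v2, and with `properties_sensitive` gone only `stopped` needs to be exempted. A minimal sketch of this update-guard pattern, using a hypothetical resource function name:

```
package provider

import (
	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// Sketch of the update-guard pattern restored above: with "stopped" as the only
// attribute that may change in place, any other planned change is rejected.
func exampleUpdateGuard(d *schema.ResourceData) diag.Diagnostics {
	if d.HasChangeExcept("stopped") {
		return diag.Errorf("only the %q attribute can be updated in place", "stopped")
	}
	return nil
}
```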
@@ -380,7 +359,7 @@ func setFlinkStatementAttributes(d *schema.ResourceData, c *FlinkRestClient, sta
 	if err := d.Set(paramStatement, statement.Spec.GetStatement()); err != nil {
 		return nil, err
 	}
-	if err := d.Set(paramProperties, extractNonsensitiveProperties(statement.Spec.GetProperties())); err != nil {
+	if err := d.Set(paramProperties, statement.Spec.GetProperties()); err != nil {
 		return nil, err
 	}
 	if err := d.Set(paramStopped, statement.Spec.GetStopped()); err != nil {
@@ -418,21 +397,6 @@ func setFlinkStatementAttributes(d *schema.ResourceData, c *FlinkRestClient, sta
 	return d, nil
 }
 
-func extractNonsensitiveProperties(properties map[string]string) map[string]string {
-	nonsensitiveProperties := make(map[string]string)
-
-	for propertiesSettingName, propertiesSettingValue := range properties {
-		// Skip all sensitive config settings since we don't want to store them in TF state
-		isSensitiveSetting := strings.HasPrefix(propertiesSettingName, "sql.secrets")
-		if isSensitiveSetting {
-			continue
-		}
-		nonsensitiveProperties[propertiesSettingName] = propertiesSettingValue
-	}
-
-	return nonsensitiveProperties
-}
-
 func flinkStatementDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
 	tflog.Debug(ctx, fmt.Sprintf("Deleting Flink Statement %q", d.Id()), map[string]interface{}{flinkStatementLoggingKey: d.Id()})
 
@@ -650,19 +614,6 @@ func extractFlinkPrincipalId(client *Client, d *schema.ResourceData, isImportOpe
 	return "", fmt.Errorf("one of provider.flink_principal_id (defaults to FLINK_PRINCIPAL_ID environment variable) or resource.principal.id must be set")
 }
 
-func extractFlinkProperties(d *schema.ResourceData) (map[string]string, map[string]string, map[string]string) {
-	sensitiveProperties := convertToStringStringMap(d.Get(paramPropertiesSensitive).(map[string]interface{}))
-	nonsensitiveProperties := convertToStringStringMap(d.Get(paramProperties).(map[string]interface{}))
-
-	// Merge both configs
-	properties := lo.Assign(
-		nonsensitiveProperties,
-		sensitiveProperties,
-	)
-
-	return properties, sensitiveProperties, nonsensitiveProperties
-}
-
 func createFlinkStatementId(environmentId, computePoolId, statementName string) string {
 	return fmt.Sprintf("%s/%s/%s", environmentId, computePoolId, statementName)
 }
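The deleted `extractFlinkProperties` merged the two property maps with `lo.Assign` from `github.com/samber/lo`, which copies its arguments left to right so keys in later maps win; dropping it is what allows the `samber/lo` import to be removed above. A small self-contained sketch of that merge semantics, with illustrative values:

```
package main

import (
	"fmt"

	"github.com/samber/lo"
)

func main() {
	// Illustrative values only. lo.Assign copies maps left to right,
	// so keys in later maps override keys in earlier ones.
	nonsensitive := map[string]string{"sql.current-catalog": "my-environment"}
	sensitive := map[string]string{"sql.secrets.openaikey": "s1234"}

	merged := lo.Assign(nonsensitive, sensitive)
	fmt.Println(merged)
	// map[sql.current-catalog:my-environment sql.secrets.openaikey:s1234]
}
```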

internal/provider/resource_flink_statement_provider_block_test.go

Lines changed: 0 additions & 2 deletions
@@ -207,7 +207,6 @@ func TestAccFlinkStatementWithEnhancedProviderBlock(t *testing.T) {
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "latest_offsets_timestamp", latestOffsetsTimestampEmptyValueTest),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "properties.%", "1"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, fmt.Sprintf("properties.%s", flinkFirstPropertyKeyTest), flinkFirstPropertyValueTest),
-					resource.TestCheckNoResourceAttr(fullFlinkStatementResourceLabel, "sql.secrets.openaikey"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "credentials.#", "0"),
 					resource.TestCheckNoResourceAttr(fullFlinkStatementResourceLabel, "credentials.0.key"),
 					resource.TestCheckNoResourceAttr(fullFlinkStatementResourceLabel, "credentials.0.secret"),
@@ -235,7 +234,6 @@ func TestAccFlinkStatementWithEnhancedProviderBlock(t *testing.T) {
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "latest_offsets_timestamp", latestOffsetsTimestampStoppedValueTest),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "properties.%", "1"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, fmt.Sprintf("properties.%s", flinkFirstPropertyKeyTest), flinkFirstPropertyValueTest),
-					resource.TestCheckNoResourceAttr(fullFlinkStatementResourceLabel, "sql.secrets.openaikey"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "credentials.#", "0"),
 					resource.TestCheckNoResourceAttr(fullFlinkStatementResourceLabel, "credentials.0.key"),
 					resource.TestCheckNoResourceAttr(fullFlinkStatementResourceLabel, "credentials.0.secret"),

internal/provider/resource_flink_statement_test.go

Lines changed: 0 additions & 3 deletions
@@ -206,7 +206,6 @@ func TestAccFlinkStatement(t *testing.T) {
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "latest_offsets_timestamp", latestOffsetsTimestampEmptyValueTest),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "properties.%", "1"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, fmt.Sprintf("properties.%s", flinkFirstPropertyKeyTest), flinkFirstPropertyValueTest),
-					resource.TestCheckNoResourceAttr(fullFlinkStatementResourceLabel, "sql.secrets.openaikey"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "credentials.#", "1"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "credentials.0.%", "2"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "credentials.0.key", kafkaApiKey),
@@ -270,7 +269,6 @@ func TestAccFlinkStatement(t *testing.T) {
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "latest_offsets_timestamp", latestOffsetsTimestampStoppedValueTest),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "properties.%", "1"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, fmt.Sprintf("properties.%s", flinkFirstPropertyKeyTest), flinkFirstPropertyValueTest),
-					resource.TestCheckNoResourceAttr(fullFlinkStatementResourceLabel, "sql.secrets.openaikey"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "credentials.#", "1"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "credentials.0.%", "2"),
 					resource.TestCheckResourceAttr(fullFlinkStatementResourceLabel, "credentials.0.key", kafkaApiKey),
@@ -329,7 +327,6 @@ func testAccCheckFlinkStatement(confluentCloudBaseUrl, mockServerUrl string) str
 	properties = {
 		"%s" = "%s"
 	}
-
 }
 `, confluentCloudBaseUrl, flinkStatementResourceLabel, kafkaApiKey, kafkaApiSecret, mockServerUrl, flinkPrincipalIdTest,
 		flinkOrganizationIdTest, flinkEnvironmentIdTest, flinkComputePoolIdTest,
