diff --git a/Makefile b/Makefile
index fec7b3dcd..b8779158d 100644
--- a/Makefile
+++ b/Makefile
@@ -174,10 +174,14 @@ sweep-check:
 
 generate: gen-go docs
-
+OLD_SCHEMA ?= .oldSchema.json
+CHANGELOG := PROVIDER_AIVEN_ENABLE_BETA=1 go run ./changelog/...
+
 gen-go:
-	go generate ./...
+	$(CHANGELOG) -save -schema=$(OLD_SCHEMA)
+	go generate ./...; $(MAKE) fmt-imports
+	$(CHANGELOG) -diff -schema=$(OLD_SCHEMA) -changelog=CHANGELOG.md
+	rm $(OLD_SCHEMA)
 
 docs: $(TFPLUGINDOCS)
diff --git a/changelog/differ.go b/changelog/differ.go
new file mode 100644
index 000000000..5ecd70c8e
--- /dev/null
+++ b/changelog/differ.go
@@ -0,0 +1,151 @@
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+	"slices"
+	"strings"
+
+	"github.com/google/go-cmp/cmp"
+	"github.com/samber/lo"
+)
+
+func diffItems(resourceType ResourceType, was, have *Item) (*Diff, error) {
+	// Added or removed
+	if was == nil || have == nil {
+		action := ChangeTypeAdd
+		if have == nil {
+			action = ChangeTypeRemove
+			have = was
+		}
+
+		return &Diff{
+			Type:         action,
+			ResourceType: resourceType,
+			Description:  removeEnum(have.Description),
+			Item:         have,
+		}, nil
+	}
+
+	// Equal items
+	if cmp.Equal(was, have) {
+		return nil, nil
+	}
+
+	// Compare all the fields
+	wasMap, err := toMap(was)
+	if err != nil {
+		return nil, err
+	}
+
+	haveMap, err := toMap(have)
+	if err != nil {
+		return nil, err
+	}
+
+	entries := make([]string, 0)
+	for k, wasValue := range wasMap {
+		haveValue := haveMap[k]
+		if cmp.Equal(wasValue, haveValue) {
+			continue
+		}
+
+		var entry string
+		switch k {
+		case "deprecated":
+			entry = "remove deprecation"
+			if have.Deprecated != "" {
+				entry = fmt.Sprintf("deprecate: %s", have.Deprecated)
+			}
+		case "beta":
+			entry = "marked as beta"
+			if !haveValue.(bool) {
+				entry = "no longer beta"
+			}
+		default:
+			// The rest of the fields get a diff-like entry
+			entry = fmt.Sprintf("%s ~~`%s`~~ -> `%s`", k, strValue(wasValue), strValue(haveValue))
+
+			// Fixes formatting issues
+			entry = strings.ReplaceAll(entry, "``", "`")
+		}
+
+		entries = append(entries, entry)
+	}
+
+	if len(entries) == 0 {
+		return nil, nil
+	}
+
+	return &Diff{
+		Type:         ChangeTypeChange,
+		ResourceType: resourceType,
+		Description:  strings.Join(entries, ", "),
+		Item:         have,
+	}, nil
+}
+
+func diffItemMaps(was, have ItemMap) ([]string, error) {
+	result := make([]string, 0)
+	kinds := []ResourceType{ResourceKind, DataSourceKind}
+	for _, kind := range kinds {
+		wasItems := was[kind]
+		haveItems := have[kind]
+		keys := append(lo.Keys(wasItems), lo.Keys(haveItems)...)
+		slices.Sort(keys)
+
+		var skipPrefix string
+		seen := make(map[string]bool)
+		for _, k := range keys {
+			if seen[k] {
+				continue
+			}
+
+			// Skips duplicate keys
+			seen[k] = true
+
+			// When a resource is added or removed, skip all its fields until the next resource
+			if skipPrefix != "" && strings.HasPrefix(k, skipPrefix) {
+				continue
+			}
+
+			skipPrefix = ""
+			wasVal, wasOk := wasItems[k]
+			haveVal, haveOk := haveItems[k]
+			if wasOk != haveOk {
+				// Resource added or removed, must skip all its fields
+				skipPrefix = k
+			}
+
+			change, err := diffItems(kind, wasVal, haveVal)
+			if err != nil {
+				return nil, fmt.Errorf("failed to compare %s %s: %w", kind, k, err)
+			}
+
+			if change != nil {
+				result = append(result, change.String())
+			}
+		}
+	}
+	return result, nil
+}
+
+func toMap(item *Item) (map[string]any, error) {
+	b, err := json.Marshal(item)
+	if err != nil {
+		return nil, err
+	}
+
+	m := make(map[string]any)
+	err = json.Unmarshal(b, &m)
+	if err != nil {
+		return nil, err
+	}
+
+	m["enum"] = findEnums(item.Description)
+	m["beta"] = hasBeta(item.Description)
+	m["type"] = strValueType(item.Type)
+	m["elemType"] = strValueType(item.ElemType)
+	delete(m, "description") // Descriptions are not compared directly; enums and the beta flag are extracted above
+	return m, err
+}
diff --git a/changelog/differ_test.go b/changelog/differ_test.go
new file mode 100644
index 000000000..cbc518132
--- /dev/null
+++ b/changelog/differ_test.go
@@ -0,0 +1,99 @@
+package main
+
+import (
+	"testing"
+
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestCompare(t *testing.T) {
+	tests := []struct {
+		name     string
+		expect   string
+		kind     ResourceType
+		old, new *Item
+	}{
+		{
+			name:   "change enums",
+			expect: "Change resource `foo` field `bar`: enum ~~`bar`, `baz`~~ -> `foo`, `baz`",
+			kind:   ResourceKind,
+			old: &Item{
+				Type:        schema.TypeString,
+				Path:        "foo.bar",
+				Description: "Foo. The possible values are `bar`, `baz`.",
+			},
+			new: &Item{
+				Type:        schema.TypeString,
+				Path:        "foo.bar",
+				Description: "Foo. The possible values are `foo`, `baz`.",
+			},
+		},
+		{
+			name:   "add resource field",
+			expect: "Add resource `foo` field `bar`: Foo",
+			kind:   ResourceKind,
+			new: &Item{
+				Type:        schema.TypeString,
+				Path:        "foo.bar",
+				Description: "Foo",
+			},
+		},
+		{
+			name:   "remove resource field",
+			expect: "Remove resource `foo` field `bar`: Foo",
+			kind:   ResourceKind,
+			old: &Item{
+				Type:        schema.TypeString,
+				Path:        "foo.bar",
+				Description: "Foo",
+			},
+		},
+		{
+			name:   "remove beta from the field",
+			expect: "Change resource `foo` field `bar`: no longer beta",
+			kind:   ResourceKind,
+			old: &Item{
+				Type:        schema.TypeString,
+				Path:        "foo.bar",
+				Description: "PROVIDER_AIVEN_ENABLE_BETA",
+			},
+			new: &Item{
+				Type:        schema.TypeString,
+				Path:        "foo.bar",
+				Description: "Foo",
+			},
+		},
+		{
+			name:   "add beta resource",
+			expect: "Add resource `foo` _(beta)_: does stuff, PROVIDER_AIVEN_ENABLE_BETA",
+			kind:   ResourceKind,
+			new: &Item{
+				Type:        schema.TypeString,
+				Path:        "foo",
+				Description: "does stuff, PROVIDER_AIVEN_ENABLE_BETA",
+			},
+		},
+		{
+			name:   "change type",
+			expect: "Change resource `foo` field `bar`: type ~~`list`~~ -> `set`",
+			kind:   ResourceKind,
+			old: &Item{
+				Type: schema.TypeList,
+				Path: "foo.bar",
+			},
+			new: &Item{
+				Type: schema.TypeSet,
+				Path: "foo.bar",
+			},
+		},
+	}
+
+	for _, opt := range tests {
+		t.Run(opt.name, func(t *testing.T) {
+			got, err := diffItems(opt.kind, opt.old, opt.new)
+			assert.NoError(t, err)
+			assert.Equal(t, opt.expect, got.String())
+		})
+	}
+}
diff --git a/changelog/main.go b/changelog/main.go
new file mode 100644
index 000000000..067ad1343
--- /dev/null
+++ b/changelog/main.go
@@ -0,0 +1,211 @@
+package main
+
+import (
+	"encoding/json"
+	"flag"
+	"fmt"
+	"log"
+	"os"
+
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+
+	"github.com/aiven/terraform-provider-aiven/internal/plugin/util"
+	"github.com/aiven/terraform-provider-aiven/internal/sdkprovider/provider"
+)
+
+type flags struct {
+	save          bool
+	diff          bool
+	schemaFile    string
+	changelogFile string
+}
+
+func main() {
+	if err := exec(); err != nil {
+		log.Fatal(err)
+	}
+}
+
+func exec() error {
+	if err := checkBetaMode(); err != nil {
+		return err
+	}
+
+	flags, err := parseFlags()
+	if err != nil {
+		return err
+	}
+
+	p, err := loadProvider()
+	if err != nil {
+		return fmt.Errorf("failed to load provider: %w", err)
+	}
+
+	newMap, err := fromProvider(p)
+	if err != nil {
+		return fmt.Errorf("failed to process provider schema: %w", err)
+	}
+
+	if flags.save {
+		return saveSchema(flags.schemaFile, newMap)
+	}
+
+	return processDiff(flags, newMap)
+}
+
+func checkBetaMode() error {
+	if !util.IsBeta() {
+		return fmt.Errorf("please enable beta mode, i.e. set %s=1", util.AivenEnableBeta)
set %s=1", util.AivenEnableBeta) + } + return nil +} + +func parseFlags() (*flags, error) { + f := &flags{} + flag.BoolVar(&f.save, "save", false, "output current schema") + flag.BoolVar(&f.diff, "diff", false, "compare current schema with imported schema") + flag.StringVar(&f.schemaFile, "schema", "", "schema file path (for save/diff)") + flag.StringVar(&f.changelogFile, "changelog", "", "changelog output file path") + flag.Parse() + + if f.save == f.diff { + return nil, fmt.Errorf("either --save or --diff must be set") + } + + if f.diff && f.schemaFile == "" { + return nil, fmt.Errorf("schema file path is required when using --diff") + } + + return f, nil +} + +func loadProvider() (*schema.Provider, error) { + p, err := provider.Provider("dev") + if err != nil { + return nil, err + } + return p, nil +} + +func saveSchema(filePath string, schema ItemMap) error { + if filePath == "" { + return json.NewEncoder(os.Stdout).Encode(&schema) + } + + f, err := os.Create(filePath) + if err != nil { + return fmt.Errorf("failed to create schema file: %w", err) + } + defer f.Close() + + encoder := json.NewEncoder(f) + encoder.SetIndent("", " ") // Pretty print the JSON + return encoder.Encode(&schema) +} + +func processDiff(flags *flags, newMap ItemMap) error { + oldMap, err := loadSchemaFile(flags.schemaFile) + if err != nil { + return err + } + + entries, err := diffItemMaps(oldMap, newMap) + if err != nil { + return fmt.Errorf("failed to generate diff: %w", err) + } + + if flags.changelogFile == "" { + return printEntries(entries) + } + + return writeChangelog(flags.changelogFile, entries) +} + +func loadSchemaFile(path string) (ItemMap, error) { + f, err := os.Open(path) + if err != nil { + return nil, fmt.Errorf("failed to open schema file: %w", err) + } + defer f.Close() + + var oldMap ItemMap + if err := json.NewDecoder(f).Decode(&oldMap); err != nil { + return nil, fmt.Errorf("failed to parse schema file: %w", err) + } + + return oldMap, nil +} + +func printEntries(entries []string) error { + for _, l := range entries { + fmt.Printf("- %s\n", l) + } + return nil +} + +func writeChangelog(_ string, entries []string) error { + // todo: write to file + for _, l := range entries { + fmt.Printf("- %s\n", l) + } + + return nil +} + +func fromProvider(p *schema.Provider) (ItemMap, error) { + // Item names might clash between resources and data sources + // Splits into separate maps + sourceMaps := map[ResourceType]map[string]*schema.Resource{ + ResourceKind: p.ResourcesMap, + DataSourceKind: p.DataSourcesMap, + } + + items := make(ItemMap) + for kind, m := range sourceMaps { + items[kind] = make(map[string]*Item) + for name, r := range m { + res := &Item{ + Name: name, + Path: name, + Description: r.Description, + Type: schema.TypeList, + } + for k, v := range r.Schema { + walked := walkSchema(k, v, res) + for i := range walked { + item := walked[i] + items[kind][item.Path] = item + } + } + } + } + return items, nil +} + +func walkSchema(name string, this *schema.Schema, parent *Item) []*Item { + item := &Item{ + Name: name, + Path: fmt.Sprintf("%s.%s", parent.Path, name), + ForceNew: this.ForceNew, + Optional: this.Optional, + Sensitive: this.Sensitive, + MaxItems: this.MaxItems, + Description: this.Description, + Deprecated: this.Deprecated, + Type: this.Type, + } + + items := []*Item{item} + + // Properties + switch elem := this.Elem.(type) { + case *schema.Schema: + item.ElemType = elem.Type + case *schema.Resource: + for k, child := range elem.Schema { + items = append(items, walkSchema(k, 
child, item)...) + } + } + + return items +} diff --git a/changelog/text.go b/changelog/text.go new file mode 100644 index 000000000..a6466b6e5 --- /dev/null +++ b/changelog/text.go @@ -0,0 +1,87 @@ +package main + +import ( + "fmt" + "regexp" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + "github.com/aiven/terraform-provider-aiven/internal/plugin/util" + "github.com/aiven/terraform-provider-aiven/internal/schemautil/userconfig" +) + +func hasBeta(description string) bool { + return strings.Contains(description, util.AivenEnableBeta) +} + +var reEnum = regexp.MustCompile("(?i)enum: `.+`\\.?\\s*") + +// removeEnum removes enum values from the description to keep it brief +func removeEnum(text string) string { + return reEnum.ReplaceAllString(text, "") +} + +var reCode = regexp.MustCompile("`[^`]+`") + +func findEnums(description string) []string { + parts := strings.Split(description, userconfig.PossibleValuesPrefix) + if len(parts) != 2 { + return nil + } + + return reCode.FindAllString(parts[1], -1) +} + +// strValue formats Go value into humanreadable string +func strValue(src any) string { + switch v := src.(type) { + case string: + return v + case []string: + return strings.Join(v, ", ") + default: + return fmt.Sprintf("%v", v) + } +} + +// strValueType returns the string representation of the schema.ValueType +func strValueType(t schema.ValueType) string { + switch t { + case schema.TypeBool: + return "bool" + case schema.TypeString: + return "string" + case schema.TypeInt: + return "int" + case schema.TypeFloat: + return "float" + case schema.TypeList: + return "list" + case schema.TypeMap: + return "map" + case schema.TypeSet: + return "set" + default: + return "unknown" + } +} + +// shorten shortens the text to the given size. +func shorten(size int, text string) string { + if size < 1 || len(text) <= size { + return text + } + + const sep = ". 
" + brief := "" + chunks := strings.Split(text, sep) + for i := 0; len(brief) <= size && i < len(chunks); i++ { + if i > 0 { + brief += sep + } + brief += chunks[i] + } + + return brief +} diff --git a/changelog/types.go b/changelog/types.go new file mode 100644 index 000000000..26f5fc472 --- /dev/null +++ b/changelog/types.go @@ -0,0 +1,69 @@ +package main + +import ( + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +type ( + ResourceType string + DiffType string +) + +const ( + ResourceKind ResourceType = "resource" + DataSourceKind ResourceType = "datasource" + + ChangeTypeAdd DiffType = "Add" + ChangeTypeRemove DiffType = "Remove" + ChangeTypeChange DiffType = "Change" +) + +type ItemMap map[ResourceType]map[string]*Item + +type Item struct { + Name string `json:"name"` + Path string `json:"path"` + Description string `json:"description"` + ForceNew bool `json:"forceNew"` + Optional bool `json:"optional"` + Sensitive bool `json:"sensitive"` + MaxItems int `json:"maxItems"` + Deprecated string `json:"deprecated"` + Type schema.ValueType `json:"type"` + ElemType schema.ValueType `json:"elemType"` +} + +type Diff struct { + Type DiffType + ResourceType ResourceType + Description string + Item *Item +} + +func (c *Diff) String() string { + // resource name + field name + path := strings.SplitN(c.Item.Path, ".", 2) + + // e.g.: "Add resource `aiven_project`" + msg := fmt.Sprintf("%s %s `%s`", c.Type, c.ResourceType, path[0]) + + // e.g.: "field `project`" + if len(path) > 1 { + msg = fmt.Sprintf("%s field `%s`", msg, path[1]) + } + + // Adds beta if needed + if hasBeta(c.Description) { + msg = fmt.Sprintf("%s _(beta)_", msg) + } + + // Adds description + const maxSize = 120 + + msg += ": " + msg += shorten(maxSize-len(msg), c.Description) + return msg +} diff --git a/docs/data-sources/service_integration.md b/docs/data-sources/service_integration.md index be83e2e00..4a8b531af 100644 --- a/docs/data-sources/service_integration.md +++ b/docs/data-sources/service_integration.md @@ -27,7 +27,7 @@ data "aiven_service_integration" "example_integration" { ### Required - `destination_service_name` (String) Destination service for the integration. -- `integration_type` (String) Type of the service integration. Possible values: `alertmanager`, `autoscaler`, `caching`, `cassandra_cross_service_cluster`, `clickhouse_credentials`, `clickhouse_kafka`, `clickhouse_postgresql`, `dashboard`, `datadog`, `datasource`, `disaster_recovery`, `external_aws_cloudwatch_logs`, `external_aws_cloudwatch_metrics`, `external_elasticsearch_logs`, `external_google_cloud_logging`, `external_opensearch_logs`, `flink`, `flink_external_bigquery`, `flink_external_kafka`, `flink_external_postgresql`, `internal_connectivity`, `jolokia`, `kafka_connect`, `kafka_connect_postgresql`, `kafka_logs`, `kafka_mirrormaker`, `logs`, `m3aggregator`, `m3coordinator`, `metrics`, `opensearch_cross_cluster_replication`, `opensearch_cross_cluster_search`, `prometheus`, `read_replica`, `rsyslog`, `schema_registry_proxy`, `stresstester`, `thanos_distributed_query`, `thanos_migrate`, `thanoscompactor`, `thanosquery`, `thanosruler`, `thanosstore`, `vector`, `vmalert` +- `integration_type` (String) Type of the service integration. 
The possible values are `alertmanager`, `autoscaler`, `caching`, `cassandra_cross_service_cluster`, `clickhouse_credentials`, `clickhouse_kafka`, `clickhouse_postgresql`, `dashboard`, `datadog`, `datasource`, `disaster_recovery`, `external_aws_cloudwatch_logs`, `external_aws_cloudwatch_metrics`, `external_elasticsearch_logs`, `external_google_cloud_logging`, `external_opensearch_logs`, `flink`, `flink_external_bigquery`, `flink_external_kafka`, `flink_external_postgresql`, `internal_connectivity`, `jolokia`, `kafka_connect`, `kafka_connect_postgresql`, `kafka_logs`, `kafka_mirrormaker`, `logs`, `m3aggregator`, `m3coordinator`, `metrics`, `opensearch_cross_cluster_replication`, `opensearch_cross_cluster_search`, `prometheus`, `read_replica`, `rsyslog`, `schema_registry_proxy`, `stresstester`, `thanos_distributed_query`, `thanos_migrate`, `thanoscompactor`, `thanosquery`, `thanosruler`, `thanosstore`, `vector` and `vmalert`. - `project` (String) Project the integration belongs to. - `source_service_name` (String) Source service for the integration (if any) diff --git a/docs/data-sources/service_integration_endpoint.md b/docs/data-sources/service_integration_endpoint.md index 9d2b29714..1129eb506 100644 --- a/docs/data-sources/service_integration_endpoint.md +++ b/docs/data-sources/service_integration_endpoint.md @@ -31,7 +31,7 @@ data "aiven_service_integration_endpoint" "myendpoint" { - `datadog_user_config` (List of Object) Datadog user configurable settings (see [below for nested schema](#nestedatt--datadog_user_config)) - `endpoint_config` (Map of String) Integration endpoint specific backend configuration -- `endpoint_type` (String) Type of the service integration endpoint. Possible values: `autoscaler`, `datadog`, `external_aws_cloudwatch_logs`, `external_aws_cloudwatch_metrics`, `external_aws_s3`, `external_clickhouse`, `external_elasticsearch_logs`, `external_google_cloud_bigquery`, `external_google_cloud_logging`, `external_kafka`, `external_mysql`, `external_opensearch_logs`, `external_postgresql`, `external_prometheus`, `external_redis`, `external_schema_registry`, `external_sumologic_logs`, `jolokia`, `prometheus`, `rsyslog` +- `endpoint_type` (String) Type of the service integration endpoint. The possible values are `autoscaler`, `datadog`, `external_aws_cloudwatch_logs`, `external_aws_cloudwatch_metrics`, `external_aws_s3`, `external_clickhouse`, `external_elasticsearch_logs`, `external_google_cloud_bigquery`, `external_google_cloud_logging`, `external_kafka`, `external_mysql`, `external_opensearch_logs`, `external_postgresql`, `external_prometheus`, `external_redis`, `external_schema_registry`, `external_sumologic_logs`, `jolokia`, `prometheus` and `rsyslog`. 
- `external_aws_cloudwatch_logs_user_config` (List of Object) ExternalAwsCloudwatchLogs user configurable settings (see [below for nested schema](#nestedatt--external_aws_cloudwatch_logs_user_config)) - `external_aws_cloudwatch_metrics_user_config` (List of Object) ExternalAwsCloudwatchMetrics user configurable settings (see [below for nested schema](#nestedatt--external_aws_cloudwatch_metrics_user_config)) - `external_aws_s3_user_config` (List of Object) ExternalAwsS3 user configurable settings (see [below for nested schema](#nestedatt--external_aws_s3_user_config)) diff --git a/docs/resources/service_integration.md b/docs/resources/service_integration.md index b4bde033c..6722e7c92 100644 --- a/docs/resources/service_integration.md +++ b/docs/resources/service_integration.md @@ -28,7 +28,7 @@ resource "aiven_service_integration" "example_integration" { ### Required -- `integration_type` (String) Type of the service integration. Possible values: `alertmanager`, `autoscaler`, `caching`, `cassandra_cross_service_cluster`, `clickhouse_credentials`, `clickhouse_kafka`, `clickhouse_postgresql`, `dashboard`, `datadog`, `datasource`, `disaster_recovery`, `external_aws_cloudwatch_logs`, `external_aws_cloudwatch_metrics`, `external_elasticsearch_logs`, `external_google_cloud_logging`, `external_opensearch_logs`, `flink`, `flink_external_bigquery`, `flink_external_kafka`, `flink_external_postgresql`, `internal_connectivity`, `jolokia`, `kafka_connect`, `kafka_connect_postgresql`, `kafka_logs`, `kafka_mirrormaker`, `logs`, `m3aggregator`, `m3coordinator`, `metrics`, `opensearch_cross_cluster_replication`, `opensearch_cross_cluster_search`, `prometheus`, `read_replica`, `rsyslog`, `schema_registry_proxy`, `stresstester`, `thanos_distributed_query`, `thanos_migrate`, `thanoscompactor`, `thanosquery`, `thanosruler`, `thanosstore`, `vector`, `vmalert` +- `integration_type` (String) Type of the service integration. The possible values are `alertmanager`, `autoscaler`, `caching`, `cassandra_cross_service_cluster`, `clickhouse_credentials`, `clickhouse_kafka`, `clickhouse_postgresql`, `dashboard`, `datadog`, `datasource`, `disaster_recovery`, `external_aws_cloudwatch_logs`, `external_aws_cloudwatch_metrics`, `external_elasticsearch_logs`, `external_google_cloud_logging`, `external_opensearch_logs`, `flink`, `flink_external_bigquery`, `flink_external_kafka`, `flink_external_postgresql`, `internal_connectivity`, `jolokia`, `kafka_connect`, `kafka_connect_postgresql`, `kafka_logs`, `kafka_mirrormaker`, `logs`, `m3aggregator`, `m3coordinator`, `metrics`, `opensearch_cross_cluster_replication`, `opensearch_cross_cluster_search`, `prometheus`, `read_replica`, `rsyslog`, `schema_registry_proxy`, `stresstester`, `thanos_distributed_query`, `thanos_migrate`, `thanoscompactor`, `thanosquery`, `thanosruler`, `thanosstore`, `vector` and `vmalert`. - `project` (String) Project the integration belongs to. ### Optional diff --git a/docs/resources/service_integration_endpoint.md b/docs/resources/service_integration_endpoint.md index 2f71e7fb8..e107e5dc2 100644 --- a/docs/resources/service_integration_endpoint.md +++ b/docs/resources/service_integration_endpoint.md @@ -18,7 +18,7 @@ The Service Integration Endpoint resource allows the creation and management of ### Required - `endpoint_name` (String) Name of the service integration endpoint -- `endpoint_type` (String) Type of the service integration endpoint. 
Possible values: `autoscaler`, `datadog`, `external_aws_cloudwatch_logs`, `external_aws_cloudwatch_metrics`, `external_aws_s3`, `external_clickhouse`, `external_elasticsearch_logs`, `external_google_cloud_bigquery`, `external_google_cloud_logging`, `external_kafka`, `external_mysql`, `external_opensearch_logs`, `external_postgresql`, `external_prometheus`, `external_redis`, `external_schema_registry`, `external_sumologic_logs`, `jolokia`, `prometheus`, `rsyslog` +- `endpoint_type` (String) Type of the service integration endpoint. The possible values are `autoscaler`, `datadog`, `external_aws_cloudwatch_logs`, `external_aws_cloudwatch_metrics`, `external_aws_s3`, `external_clickhouse`, `external_elasticsearch_logs`, `external_google_cloud_bigquery`, `external_google_cloud_logging`, `external_kafka`, `external_mysql`, `external_opensearch_logs`, `external_postgresql`, `external_prometheus`, `external_redis`, `external_schema_registry`, `external_sumologic_logs`, `jolokia`, `prometheus` and `rsyslog`. - `project` (String) Project the service integration endpoint belongs to ### Optional diff --git a/internal/schemautil/common.go b/internal/schemautil/common.go index 0e3f8b958..87f7cfb4a 100644 --- a/internal/schemautil/common.go +++ b/internal/schemautil/common.go @@ -1,7 +1,6 @@ package schemautil import ( - "fmt" "regexp" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -74,13 +73,3 @@ func PointerValueOrDefault[T comparable](v *T, d T) T { } return *v } - -func JoinQuoted[T string | int](elems []T, sep, quote string) (result string) { - for i, v := range elems { - if i != 0 { - result += sep - } - result = fmt.Sprintf("%s%s%v%s", result, quote, v, quote) - } - return result -} diff --git a/internal/schemautil/userconfig/desc.go b/internal/schemautil/userconfig/desc.go index 8cff9a9f4..f1387c4de 100644 --- a/internal/schemautil/userconfig/desc.go +++ b/internal/schemautil/userconfig/desc.go @@ -13,6 +13,7 @@ const ( Resource EntityType = iota // DataSource is a constant that represents the data source entity type. DataSource + PossibleValuesPrefix = "The possible values are " ) // String is a function that returns the string representation of the entity type. 
@@ -162,7 +163,7 @@ the ` + "`PROVIDER_AIVEN_ENABLE_BETA`" + ` environment variable to use the %[1]s if db.withPossibleValues != nil { builder.WriteRune(' ') - builder.WriteString("The possible values are ") + builder.WriteString(PossibleValuesPrefix) for i, value := range db.withPossibleValues { if i > 0 { if i == len(db.withPossibleValues)-1 { diff --git a/internal/sdkprovider/service/serviceintegration/service_integration.go b/internal/sdkprovider/service/serviceintegration/service_integration.go index cfc14c3d4..a56f26b0a 100644 --- a/internal/sdkprovider/service/serviceintegration/service_integration.go +++ b/internal/sdkprovider/service/serviceintegration/service_integration.go @@ -16,6 +16,7 @@ import ( "github.com/aiven/terraform-provider-aiven/internal/common" "github.com/aiven/terraform-provider-aiven/internal/schemautil" + "github.com/aiven/terraform-provider-aiven/internal/schemautil/userconfig" "github.com/aiven/terraform-provider-aiven/internal/schemautil/userconfig/stateupgrader" "github.com/aiven/terraform-provider-aiven/internal/sdkprovider/userconfig/converters" "github.com/aiven/terraform-provider-aiven/internal/sdkprovider/userconfig/serviceintegration" @@ -45,7 +46,7 @@ func aivenServiceIntegrationSchema() map[string]*schema.Schema { Type: schema.TypeString, }, "integration_type": { - Description: "Type of the service integration. Possible values: " + schemautil.JoinQuoted(service.IntegrationTypeChoices(), ", ", "`"), + Description: userconfig.Desc("Type of the service integration").PossibleValuesString(service.IntegrationTypeChoices()...).Build(), ForceNew: true, Required: true, Type: schema.TypeString, diff --git a/internal/sdkprovider/service/serviceintegration/service_integration_endpoint.go b/internal/sdkprovider/service/serviceintegration/service_integration_endpoint.go index deb33bbfc..d05b8d61c 100644 --- a/internal/sdkprovider/service/serviceintegration/service_integration_endpoint.go +++ b/internal/sdkprovider/service/serviceintegration/service_integration_endpoint.go @@ -12,6 +12,7 @@ import ( "github.com/aiven/terraform-provider-aiven/internal/common" "github.com/aiven/terraform-provider-aiven/internal/schemautil" + "github.com/aiven/terraform-provider-aiven/internal/schemautil/userconfig" "github.com/aiven/terraform-provider-aiven/internal/schemautil/userconfig/stateupgrader" "github.com/aiven/terraform-provider-aiven/internal/sdkprovider/userconfig/converters" "github.com/aiven/terraform-provider-aiven/internal/sdkprovider/userconfig/serviceintegrationendpoint" @@ -36,8 +37,7 @@ func aivenServiceIntegrationEndpointSchema() map[string]*schema.Schema { Type: schema.TypeString, }, "endpoint_type": { - Description: "Type of the service integration endpoint. Possible values: " + - schemautil.JoinQuoted(service.EndpointTypeChoices(), ", ", "`"), + Description: userconfig.Desc("Type of the service integration endpoint").PossibleValuesString(service.EndpointTypeChoices()...).Build(), ForceNew: true, Required: true, Type: schema.TypeString,