diff --git a/internal/cli/serverless/dataimport/start/azblob.go b/internal/cli/serverless/dataimport/start/azblob.go
index d55f6cea..1c602290 100644
--- a/internal/cli/serverless/dataimport/start/azblob.go
+++ b/internal/cli/serverless/dataimport/start/azblob.go
@@ -16,7 +16,6 @@ package start

 import (
 	"fmt"
-	"slices"

 	"tidbcloud-cli/internal"
 	"tidbcloud-cli/internal/flag"
@@ -36,18 +35,10 @@ type AzBlobOpts struct {
 	clusterId   string
 }

-func (o AzBlobOpts) SupportedFileTypes() []string {
-	return []string{
-		string(imp.IMPORTFILETYPEENUM_CSV),
-		string(imp.IMPORTFILETYPEENUM_PARQUET),
-		string(imp.IMPORTFILETYPEENUM_SQL),
-		string(imp.IMPORTFILETYPEENUM_AURORA_SNAPSHOT),
-	}
-}
-
 func (o AzBlobOpts) Run(cmd *cobra.Command) error {
 	ctx := cmd.Context()
-	var fileType, uri, sasToken string
+	var uri, sasToken string
+	var fileType imp.ImportFileTypeEnum
 	var authType imp.ImportAzureBlobAuthTypeEnum
 	var format *imp.CSVFormat
 	d, err := o.h.Client()
@@ -91,7 +82,7 @@ func (o AzBlobOpts) Run(cmd *cobra.Command) error {
 		}

 		var fileTypes []interface{}
-		for _, f := range o.SupportedFileTypes() {
+		for _, f := range imp.AllowedImportFileTypeEnumEnumValues {
 			fileTypes = append(fileTypes, f)
 		}
 		model, err = ui.InitialSelectModel(fileTypes, "Choose the source file type:")
@@ -106,9 +97,9 @@ func (o AzBlobOpts) Run(cmd *cobra.Command) error {
 		if m, _ := fileTypeModel.(ui.SelectModel); m.Interrupted {
 			return util.InterruptError
 		}
-		fileType = fileTypeModel.(ui.SelectModel).Choices[fileTypeModel.(ui.SelectModel).Selected].(string)
+		fileType = fileTypeModel.(ui.SelectModel).Choices[fileTypeModel.(ui.SelectModel).Selected].(imp.ImportFileTypeEnum)

-		if fileType == string(imp.IMPORTFILETYPEENUM_CSV) {
+		if fileType == imp.IMPORTFILETYPEENUM_CSV {
 			format, err = getCSVFormat()
 			if err != nil {
 				return err
@@ -116,12 +107,13 @@ func (o AzBlobOpts) Run(cmd *cobra.Command) error {
 		}
 	} else {
 		// non-interactive mode
-		fileType, err = cmd.Flags().GetString(flag.FileType)
+		fileTypeStr, err := cmd.Flags().GetString(flag.FileType)
 		if err != nil {
 			return errors.Trace(err)
 		}
-		if !slices.Contains(o.SupportedFileTypes(), fileType) {
-			return fmt.Errorf("file type \"%s\" is not supported, please use one of %q", fileType, o.SupportedFileTypes())
+		fileType = imp.ImportFileTypeEnum(fileTypeStr)
+		if !fileType.IsValid() {
+			return fmt.Errorf("file type \"%s\" is not supported, please use one of %q", fileTypeStr, imp.AllowedImportFileTypeEnumEnumValues)
 		}

 		uri, err = cmd.Flags().GetString(flag.AzureBlobURI)
@@ -142,7 +134,7 @@ func (o AzBlobOpts) Run(cmd *cobra.Command) error {
 		authType = imp.IMPORTAZUREBLOBAUTHTYPEENUM_SAS_TOKEN

 		// optional flags
-		if fileType == string(imp.IMPORTFILETYPEENUM_CSV) {
+		if fileType == imp.IMPORTFILETYPEENUM_CSV {
 			format, err = getCSVFlagValue(cmd)
 			if err != nil {
 				return errors.Trace(err)
@@ -155,7 +147,7 @@ func (o AzBlobOpts) Run(cmd *cobra.Command) error {
 	source.AzureBlob.AuthType = authType
 	source.AzureBlob.SasToken = &sasToken
 	options := imp.NewImportOptions(imp.ImportFileTypeEnum(fileType))
-	if fileType == string(imp.IMPORTFILETYPEENUM_CSV) {
+	if fileType == imp.IMPORTFILETYPEENUM_CSV {
 		options.CsvFormat = format
 	}
 	body := imp.NewImportServiceCreateImportBody(*options, *source)
diff --git a/internal/cli/serverless/dataimport/start/azblob_test.go b/internal/cli/serverless/dataimport/start/azblob_test.go
index a199777e..64e0a452 100644
--- a/internal/cli/serverless/dataimport/start/azblob_test.go
+++ b/internal/cli/serverless/dataimport/start/azblob_test.go
@@ -138,7 +138,7 @@ func (suite *AzblobImportSuite) TestAzblobImportArgs() {
 		{
 			name: "start import with unsupported data format",
 			args: []string{"--source-type", "AZURE_BLOB", "--cluster-id", clusterID, "--file-type", "yaml"},
-			err:  fmt.Errorf("file type \"yaml\" is not supported, please use one of [\"CSV\" \"PARQUET\" \"SQL\" \"AURORA_SNAPSHOT\"]"),
+			err:  fmt.Errorf("file type \"yaml\" is not supported, please use one of [\"CSV\" \"SQL\" \"AURORA_SNAPSHOT\" \"PARQUET\"]"),
 		},
 		{
 			name: "start import without required cluster id",
diff --git a/internal/cli/serverless/dataimport/start/gcs.go b/internal/cli/serverless/dataimport/start/gcs.go
index 7161eb48..760992b8 100644
--- a/internal/cli/serverless/dataimport/start/gcs.go
+++ b/internal/cli/serverless/dataimport/start/gcs.go
@@ -16,7 +16,6 @@ package start

 import (
 	"fmt"
-	"slices"

 	"tidbcloud-cli/internal"
 	"tidbcloud-cli/internal/flag"
@@ -37,18 +36,10 @@ type GCSOpts struct {
 	clusterId   string
 }

-func (o GCSOpts) SupportedFileTypes() []string {
-	return []string{
-		string(imp.IMPORTFILETYPEENUM_CSV),
-		string(imp.IMPORTFILETYPEENUM_PARQUET),
-		string(imp.IMPORTFILETYPEENUM_SQL),
-		string(imp.IMPORTFILETYPEENUM_AURORA_SNAPSHOT),
-	}
-}
-
 func (o GCSOpts) Run(cmd *cobra.Command) error {
 	ctx := cmd.Context()
-	var fileType, gcsUri, accountKey string
+	var gcsUri, accountKey string
+	var fileType imp.ImportFileTypeEnum
 	var authType imp.ImportGcsAuthTypeEnum
 	var format *imp.CSVFormat
 	d, err := o.h.Client()
@@ -92,7 +83,7 @@ func (o GCSOpts) Run(cmd *cobra.Command) error {
 		}

 		var fileTypes []interface{}
-		for _, f := range o.SupportedFileTypes() {
+		for _, f := range imp.AllowedImportFileTypeEnumEnumValues {
 			fileTypes = append(fileTypes, f)
 		}
 		model, err = ui.InitialSelectModel(fileTypes, "Choose the source file type:")
@@ -107,9 +98,9 @@ func (o GCSOpts) Run(cmd *cobra.Command) error {
 		if m, _ := fileTypeModel.(ui.SelectModel); m.Interrupted {
 			return util.InterruptError
 		}
-		fileType = fileTypeModel.(ui.SelectModel).Choices[fileTypeModel.(ui.SelectModel).Selected].(string)
+		fileType = fileTypeModel.(ui.SelectModel).Choices[fileTypeModel.(ui.SelectModel).Selected].(imp.ImportFileTypeEnum)

-		if fileType == string(imp.IMPORTFILETYPEENUM_CSV) {
+		if fileType == imp.IMPORTFILETYPEENUM_CSV {
 			format, err = getCSVFormat()
 			if err != nil {
 				return err
@@ -117,12 +108,13 @@ func (o GCSOpts) Run(cmd *cobra.Command) error {
 		}
 	} else {
 		// non-interactive mode
-		fileType, err = cmd.Flags().GetString(flag.FileType)
+		fileTypeStr, err := cmd.Flags().GetString(flag.FileType)
 		if err != nil {
 			return errors.Trace(err)
 		}
-		if !slices.Contains(o.SupportedFileTypes(), fileType) {
-			return fmt.Errorf("file type \"%s\" is not supported, please use one of %q", fileType, o.SupportedFileTypes())
+		fileType = imp.ImportFileTypeEnum(fileTypeStr)
+		if !fileType.IsValid() {
+			return fmt.Errorf("file type \"%s\" is not supported, please use one of %q", fileTypeStr, imp.AllowedImportFileTypeEnumEnumValues)
 		}
 		gcsUri, err = cmd.Flags().GetString(flag.GCSURI)
 		if err != nil {
@@ -142,7 +134,7 @@ func (o GCSOpts) Run(cmd *cobra.Command) error {
 		authType = imp.IMPORTGCSAUTHTYPEENUM_SERVICE_ACCOUNT_KEY

 		// optional flags
-		if fileType == string(imp.IMPORTFILETYPEENUM_CSV) {
+		if fileType == imp.IMPORTFILETYPEENUM_CSV {
 			format, err = getCSVFlagValue(cmd)
 			if err != nil {
 				return errors.Trace(err)
@@ -154,7 +146,7 @@ func (o GCSOpts) Run(cmd *cobra.Command) error {
 	source.Gcs = imp.NewGCSSource(gcsUri, authType)
 	source.Gcs.ServiceAccountKey = aws.String(accountKey)
 	options := imp.NewImportOptions(imp.ImportFileTypeEnum(fileType))
-	if fileType == string(imp.IMPORTFILETYPEENUM_CSV) {
+	if fileType == imp.IMPORTFILETYPEENUM_CSV {
 		options.CsvFormat = format
 	}
 	body := imp.NewImportServiceCreateImportBody(*options, *source)
diff --git a/internal/cli/serverless/dataimport/start/gcs_test.go b/internal/cli/serverless/dataimport/start/gcs_test.go
index 42708db0..589c727e 100644
--- a/internal/cli/serverless/dataimport/start/gcs_test.go
+++ b/internal/cli/serverless/dataimport/start/gcs_test.go
@@ -138,7 +138,7 @@ func (suite *GCSImportSuite) TestGCSImportArgs() {
 		{
 			name: "start import with unsupported data format",
 			args: []string{"--source-type", "GCS", "--cluster-id", clusterID, "--file-type", "yaml"},
-			err:  fmt.Errorf("file type \"yaml\" is not supported, please use one of [\"CSV\" \"PARQUET\" \"SQL\" \"AURORA_SNAPSHOT\"]"),
+			err:  fmt.Errorf("file type \"yaml\" is not supported, please use one of [\"CSV\" \"SQL\" \"AURORA_SNAPSHOT\" \"PARQUET\"]"),
 		},
 		{
 			name: "start import without required cluster id",
diff --git a/internal/cli/serverless/dataimport/start/s3.go b/internal/cli/serverless/dataimport/start/s3.go
index ecb632e9..93c9bc6d 100644
--- a/internal/cli/serverless/dataimport/start/s3.go
+++ b/internal/cli/serverless/dataimport/start/s3.go
@@ -16,7 +16,6 @@ package start

 import (
 	"fmt"
-	"slices"

 	"tidbcloud-cli/internal"
 	"tidbcloud-cli/internal/flag"
@@ -35,18 +34,10 @@ type S3Opts struct {
 	clusterId   string
 }

-func (o S3Opts) SupportedFileTypes() []string {
-	return []string{
-		string(imp.IMPORTFILETYPEENUM_CSV),
-		string(imp.IMPORTFILETYPEENUM_PARQUET),
-		string(imp.IMPORTFILETYPEENUM_SQL),
-		string(imp.IMPORTFILETYPEENUM_AURORA_SNAPSHOT),
-	}
-}
-
 func (o S3Opts) Run(cmd *cobra.Command) error {
 	ctx := cmd.Context()
-	var fileType, s3Uri, s3Arn, accessKeyID, secretAccessKey string
+	var s3Uri, s3Arn, accessKeyID, secretAccessKey string
+	var fileType imp.ImportFileTypeEnum
 	var authType imp.ImportS3AuthTypeEnum
 	var format *imp.CSVFormat
 	d, err := o.h.Client()
@@ -108,7 +99,7 @@ func (o S3Opts) Run(cmd *cobra.Command) error {
 		}

 		var fileTypes []interface{}
-		for _, f := range o.SupportedFileTypes() {
+		for _, f := range imp.AllowedImportFileTypeEnumEnumValues {
 			fileTypes = append(fileTypes, f)
 		}
 		model, err = ui.InitialSelectModel(fileTypes, "Choose the source file type:")
@@ -123,9 +114,9 @@ func (o S3Opts) Run(cmd *cobra.Command) error {
 		if m, _ := fileTypeModel.(ui.SelectModel); m.Interrupted {
 			return util.InterruptError
 		}
-		fileType = fileTypeModel.(ui.SelectModel).Choices[fileTypeModel.(ui.SelectModel).Selected].(string)
+		fileType = fileTypeModel.(ui.SelectModel).Choices[fileTypeModel.(ui.SelectModel).Selected].(imp.ImportFileTypeEnum)

-		if fileType == string(imp.IMPORTFILETYPEENUM_CSV) {
+		if fileType == imp.IMPORTFILETYPEENUM_CSV {
 			format, err = getCSVFormat()
 			if err != nil {
 				return err
@@ -133,12 +124,13 @@ func (o S3Opts) Run(cmd *cobra.Command) error {
 		}
 	} else {
 		// non-interactive mode
-		fileType, err = cmd.Flags().GetString(flag.FileType)
+		fileTypeStr, err := cmd.Flags().GetString(flag.FileType)
 		if err != nil {
 			return errors.Trace(err)
 		}
-		if !slices.Contains(o.SupportedFileTypes(), fileType) {
-			return fmt.Errorf("file type \"%s\" is not supported, please use one of %q", fileType, o.SupportedFileTypes())
+		fileType = imp.ImportFileTypeEnum(fileTypeStr)
+		if !fileType.IsValid() {
+			return fmt.Errorf("file type \"%s\" is not supported, please use one of %q", fileTypeStr, imp.AllowedImportFileTypeEnumEnumValues)
 		}
 		s3Uri, err = cmd.Flags().GetString(flag.S3URI)
 		if err != nil {
@@ -149,7 +141,7 @@ func (o S3Opts) Run(cmd *cobra.Command) error {
 		}

 		// optional flags
-		if fileType == string(imp.IMPORTFILETYPEENUM_CSV) {
+		if fileType == imp.IMPORTFILETYPEENUM_CSV {
 			format, err = getCSVFlagValue(cmd)
 			if err != nil {
 				return errors.Trace(err)
@@ -189,7 +181,7 @@ func (o S3Opts) Run(cmd *cobra.Command) error {
 		}
 	}
 	options := imp.NewImportOptions(imp.ImportFileTypeEnum(fileType))
-	if fileType == string(imp.IMPORTFILETYPEENUM_CSV) {
+	if fileType == imp.IMPORTFILETYPEENUM_CSV {
 		options.CsvFormat = format
 	}
 	body := imp.NewImportServiceCreateImportBody(*options, *source)
diff --git a/internal/cli/serverless/dataimport/start/s3_test.go b/internal/cli/serverless/dataimport/start/s3_test.go
index 1f383f5d..7b2d07a6 100644
--- a/internal/cli/serverless/dataimport/start/s3_test.go
+++ b/internal/cli/serverless/dataimport/start/s3_test.go
@@ -138,7 +138,7 @@ func (suite *S3ImportSuite) TestS3ImportArgs() {
 		{
 			name: "start import with unsupported data format",
 			args: []string{"--source-type", "S3", "--cluster-id", clusterID, "--file-type", "yaml"},
-			err:  fmt.Errorf("file type \"yaml\" is not supported, please use one of [\"CSV\" \"PARQUET\" \"SQL\" \"AURORA_SNAPSHOT\"]"),
+			err:  fmt.Errorf("file type \"yaml\" is not supported, please use one of [\"CSV\" \"SQL\" \"AURORA_SNAPSHOT\" \"PARQUET\"]"),
 		},
 		{
 			name: "start import without required cluster id",
diff --git a/internal/cli/serverless/dataimport/start/start.go b/internal/cli/serverless/dataimport/start/start.go
index d68571d8..862f32ec 100644
--- a/internal/cli/serverless/dataimport/start/start.go
+++ b/internal/cli/serverless/dataimport/start/start.go
@@ -66,26 +66,10 @@ var inputDescription = map[string]string{
 	flag.CSVNotNull: "Input the CSV not-null: whether the CSV can contains any NULL value, skip to use default value (false)",
 }

-var sourceTypes = []imp.ImportSourceTypeEnum{
-	imp.IMPORTSOURCETYPEENUM_S3,
-	imp.IMPORTSOURCETYPEENUM_LOCAL,
-	imp.IMPORTSOURCETYPEENUM_GCS,
-	imp.IMPORTSOURCETYPEENUM_AZURE_BLOB,
-}
-
 type StartOpts struct {
 	interactive bool
 }

-func (o StartOpts) SupportedFileTypes() []string {
-	return []string{
-		string(imp.IMPORTFILETYPEENUM_CSV),
-		string(imp.IMPORTFILETYPEENUM_PARQUET),
-		string(imp.IMPORTFILETYPEENUM_SQL),
-		string(imp.IMPORTFILETYPEENUM_AURORA_SNAPSHOT),
-	}
-}
-
 func (o StartOpts) NonInteractiveFlags() []string {
 	return []string{
 		flag.ClusterID,
@@ -249,8 +233,8 @@ func StartCmd(h *internal.Helper) *cobra.Command {
 	}

 	startCmd.Flags().StringP(flag.ClusterID, flag.ClusterIDShort, "", "Cluster ID.")
-	startCmd.Flags().String(flag.SourceType, "LOCAL", fmt.Sprintf("The import source type, one of %q.", sourceTypes))
-	startCmd.Flags().String(flag.FileType, "", fmt.Sprintf("The import file type, one of %q.", opts.SupportedFileTypes()))
+	startCmd.Flags().String(flag.SourceType, "LOCAL", fmt.Sprintf("The import source type, one of %q.", imp.AllowedImportSourceTypeEnumEnumValues))
+	startCmd.Flags().String(flag.FileType, "", fmt.Sprintf("The import file type, one of %q.", imp.AllowedImportFileTypeEnumEnumValues))
 	startCmd.Flags().String(flag.LocalFilePath, "", "The local file path to import.")
 	startCmd.Flags().String(flag.LocalTargetDatabase, "", "Target database to which import data.")
@@ -282,8 +266,8 @@ func StartCmd(h *internal.Helper) *cobra.Command {
 }

 func getSelectedSourceType() (imp.ImportSourceTypeEnum, error) {
-	SourceTypes := make([]interface{}, 0, len(sourceTypes))
-	for _, sourceType := range sourceTypes {
+	SourceTypes := make([]interface{}, 0, len(imp.AllowedImportSourceTypeEnumEnumValues))
+	for _, sourceType := range imp.AllowedImportSourceTypeEnumEnumValues {
 		SourceTypes = append(SourceTypes, sourceType)
 	}
 	model, err := ui.InitialSelectModel(SourceTypes, "Choose import source type:")
diff --git a/internal/cli/serverless/export/create.go b/internal/cli/serverless/export/create.go
index 078011d3..b7e7b5c8 100644
--- a/internal/cli/serverless/export/create.go
+++ b/internal/cli/serverless/export/create.go
@@ -16,7 +16,6 @@ package export

 import (
 	"fmt"
-	"slices"
 	"strings"

 	"tidbcloud-cli/internal/ui"
@@ -35,41 +34,6 @@ import (
 	"tidbcloud-cli/pkg/tidbcloud/v1beta1/serverless/export"
 )

-type TargetType string
-
-const (
-	TargetTypeS3      TargetType = "S3"
-	TargetTypeLOCAL   TargetType = "LOCAL"
-	TargetTypeGCS     TargetType = "GCS"
-	TargetTypeAZBLOB  TargetType = "AZURE_BLOB"
-	TargetTypeUnknown TargetType = "UNKNOWN"
-)
-
-type FileType string
-
-const (
-	FileTypeSQL     FileType = "SQL"
-	FileTypeCSV     FileType = "CSV"
-	FileTypePARQUET FileType = "PARQUET"
-	FileTypeUnknown FileType = "UNKNOWN"
-)
-
-type AuthType string
-
-const (
-	AuthTypeS3AccessKey          AuthType = "S3AccessKey"
-	AuthTypeS3RoleArn            AuthType = "S3RoleArn"
-	AuthTypeGCSServiceAccountKey AuthType = "GCSServiceAccountKey"
-	AuthTypeAzBlobSasToken       AuthType = "AzBlobSasToken"
-)
-
-var (
-	supportedFileType           = []string{string(FileTypeSQL), string(FileTypeCSV), string(FileTypePARQUET)}
-	supportedTargetType         = []string{string(TargetTypeS3), string(TargetTypeLOCAL), string(TargetTypeGCS), string(TargetTypeAZBLOB)}
-	supportedCompression        = []string{"GZIP", "SNAPPY", "ZSTD", "NONE"}
-	supportedParquetCompression = []string{"GZIP", "SNAPPY", "ZSTD", "NONE"}
-)
-
 const (
 	CSVSeparatorDefaultValue = ","
 	CSVDelimiterDefaultValue = "\""
@@ -174,13 +138,16 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 			ctx := cmd.Context()

 			// options
-			var targetType, fileType, compression, clusterId, sql, where string
+			var targetType export.ExportTargetTypeEnum
+			var fileType export.ExportFileTypeEnum
+			var compression export.ExportCompressionTypeEnum
+			var clusterId, sql, where string
 			var patterns []string
 			// csv format
 			var csvSeparator, csvDelimiter, csvNullValue string
 			var csvSkipHeader bool
 			// parquet options
-			var parquetCompression string
+			var parquetCompression export.ExportParquetCompressionTypeEnum
 			// s3
 			var s3URI, accessKeyID, secretAccessKey, s3RoleArn string
 			// gcs
@@ -205,17 +172,16 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 				clusterId = cluster.ID

 				// target
-				selectedTargetType, err := GetSelectedTargetType()
+				targetType, err = GetSelectedTargetType()
 				if err != nil {
 					return err
 				}
-				targetType = string(selectedTargetType)
-				selectedAuthType, err := GetSelectedAuthType(selectedTargetType)
+				selectedAuthType, err := GetSelectedAuthType(targetType)
 				if err != nil {
 					return err
 				}
 				switch selectedAuthType {
-				case AuthTypeS3AccessKey:
+				case string(export.EXPORTS3AUTHTYPEENUM_ACCESS_KEY):
 					inputs := []string{flag.S3URI, flag.S3AccessKeyID, flag.S3SecretAccessKey}
 					textInput, err := ui.InitialInputModel(inputs, inputDescription)
 					if err != nil {
@@ -233,7 +199,7 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 					if secretAccessKey == "" {
 						return errors.New("empty S3 secret access key")
 					}
-				case AuthTypeS3RoleArn:
+				case string(export.EXPORTS3AUTHTYPEENUM_ROLE_ARN):
 					inputs := []string{flag.S3URI, flag.S3RoleArn}
 					textInput, err := ui.InitialInputModel(inputs, inputDescription)
 					if err != nil {
@@ -247,7 +213,7 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 					if s3RoleArn == "" {
 						return errors.New("empty S3 role arn")
 					}
-				case AuthTypeGCSServiceAccountKey:
+				case string(export.EXPORTGCSAUTHTYPEENUM_SERVICE_ACCOUNT_KEY):
 					inputs := []string{flag.GCSURI, flag.GCSServiceAccountKey}
 					textInput, err := ui.InitialInputModel(inputs, inputDescription)
 					if err != nil {
@@ -261,7 +227,7 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 					if gcsServiceAccountKey == "" {
 						return errors.New("empty GCS service account key")
 					}
-				case AuthTypeAzBlobSasToken:
+				case string(export.EXPORTAZUREBLOBAUTHTYPEENUM_SAS_TOKEN):
 					inputs := []string{flag.AzureBlobURI, flag.AzureBlobSASToken}
 					textInput, err := ui.InitialInputModel(inputs, inputDescription)
 					if err != nil {
@@ -312,13 +278,12 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 					}
 				}

-				selectedFileType, err := GetSelectedFileType(filterType)
+				fileType, err = GetSelectedFileType(filterType)
 				if err != nil {
 					return err
 				}
-				fileType = string(selectedFileType)
 				switch fileType {
-				case string(FileTypeCSV):
+				case export.EXPORTFILETYPEENUM_CSV:
 					customCSVFormat := false
 					prompt := &survey.Confirm{
 						Message: "Do you want to customize the CSV format",
@@ -357,7 +322,7 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 					if csvNullValue == "" {
 						csvNullValue = CSVNullValueDefaultValue
 					}
-				case string(FileTypePARQUET):
+				case export.EXPORTFILETYPEENUM_PARQUET:
 					customParquetCompression := false
 					prompt := &survey.Confirm{
 						Message: "Do you want change the default parquet compression algorithm ZSTD",
@@ -380,7 +345,7 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 					}
 				}

-				if fileType != string(FileTypePARQUET) {
+				if fileType != export.EXPORTFILETYPEENUM_PARQUET {
 					changeCompression := false
 					prompt := &survey.Confirm{
 						Message: "Do you want to change the default compression algorithm GZIP",
@@ -408,22 +373,25 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 				if err != nil {
 					return errors.Trace(err)
 				}
-				targetType, err = cmd.Flags().GetString(flag.TargetType)
+				targetTypeStr, err := cmd.Flags().GetString(flag.TargetType)
 				if err != nil {
 					return errors.Trace(err)
 				}
-				if targetType != "" && !slices.Contains(supportedTargetType, strings.ToUpper(targetType)) {
-					return errors.New("unsupported target type: " + targetType)
+				targetType = export.ExportTargetTypeEnum(strings.ToUpper(targetTypeStr))
+				if targetType != "" && !targetType.IsValid() {
+					return errors.New("unsupported target type: " + targetTypeStr)
 				}
-				fileType, err = cmd.Flags().GetString(flag.FileType)
+				fileTypeStr, err := cmd.Flags().GetString(flag.FileType)
 				if err != nil {
 					return errors.Trace(err)
 				}
-				if fileType != "" && !slices.Contains(supportedFileType, strings.ToUpper(fileType)) {
-					return errors.New("unsupported file type: " + fileType)
+				fileType = export.ExportFileTypeEnum(strings.ToUpper(fileTypeStr))
+				if fileType != "" && !fileType.IsValid() {
+					return errors.New("unsupported file type: " + fileTypeStr)
 				}
-				switch strings.ToUpper(targetType) {
-				case string(TargetTypeS3):
+
+				switch targetType {
+				case export.EXPORTTARGETTYPEENUM_S3:
 					s3URI, err = cmd.Flags().GetString(flag.S3URI)
 					if err != nil {
 						return errors.Trace(err)
@@ -446,7 +414,7 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 					if s3RoleArn == "" && (accessKeyID == "" || secretAccessKey == "") {
 						return errors.New("missing S3 auth information, require either role arn or access key id and secret access key")
 					}
-				case string(TargetTypeGCS):
+				case export.EXPORTTARGETTYPEENUM_GCS:
 					gcsURI, err = cmd.Flags().GetString(flag.GCSURI)
 					if err != nil {
 						return errors.Trace(err)
@@ -461,7 +429,7 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 					if gcsServiceAccountKey == "" {
 						return errors.New("GCS service account key is required when target type is GCS")
 					}
-				case string(TargetTypeAZBLOB):
+				case export.EXPORTTARGETTYPEENUM_AZURE_BLOB:
 					azBlobURI, err = cmd.Flags().GetString(flag.AzureBlobURI)
 					if err != nil {
 						return errors.Trace(err)
@@ -478,16 +446,17 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 					}
 				}

-				compression, err = cmd.Flags().GetString(flag.Compression)
+				compressionStr, err := cmd.Flags().GetString(flag.Compression)
 				if err != nil {
 					return errors.Trace(err)
 				}
-				if compression != "" && !slices.Contains(supportedCompression, strings.ToUpper(compression)) {
-					return errors.New("unsupported compression: " + compression)
+				compression = export.ExportCompressionTypeEnum(strings.ToUpper(compressionStr))
+				if compression != "" && !compression.IsValid() {
+					return errors.New("unsupported compression: " + compressionStr)
 				}

-				switch strings.ToUpper(fileType) {
-				case string(FileTypeCSV):
+				switch fileType {
+				case export.EXPORTFILETYPEENUM_CSV:
 					csvSeparator, err = cmd.Flags().GetString(flag.CSVSeparator)
 					if err != nil {
 						return errors.Trace(err)
@@ -507,13 +476,14 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 					if err != nil {
 						return errors.Trace(err)
 					}
-				case string(FileTypePARQUET):
-					parquetCompression, err = cmd.Flags().GetString(flag.ParquetCompression)
+				case export.EXPORTFILETYPEENUM_PARQUET:
+					parquetCompressionStr, err := cmd.Flags().GetString(flag.ParquetCompression)
 					if err != nil {
 						return errors.Trace(err)
 					}
-					if parquetCompression != "" && !slices.Contains(supportedParquetCompression, strings.ToUpper(parquetCompression)) {
-						return errors.New("unsupported parquet compression: " + parquetCompression)
+					parquetCompression = export.ExportParquetCompressionTypeEnum(strings.ToUpper(parquetCompressionStr))
+					if parquetCompression != "" && !parquetCompression.IsValid() {
+						return errors.New("unsupported parquet compression: " + parquetCompressionStr)
 					}
 					if compression != "" {
 						return errors.New("--compression is not supported when file type is parquet, please use --parquet.compression instead")
@@ -558,26 +528,24 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 			}

 			// apply default values
-			if strings.ToUpper(fileType) == string(FileTypePARQUET) {
+			if fileType == export.EXPORTFILETYPEENUM_PARQUET {
 				if parquetCompression == "" {
-					parquetCompression = string(ParquetCompressionDefaultValue)
+					parquetCompression = ParquetCompressionDefaultValue
 				}
 			} else if compression == "" {
-				compression = string(CompressionDefaultValue)
+				compression = CompressionDefaultValue
 			}

 			// build param to create export
-			fileTypeEnum := export.ExportFileTypeEnum(strings.ToUpper(fileType))
-			targetTypeEnum := export.ExportTargetTypeEnum(strings.ToUpper(targetType))
 			params := &export.ExportServiceCreateExportBody{
 				ExportOptions: &export.ExportOptions{
-					FileType: &fileTypeEnum,
+					FileType: &fileType,
 				},
 				Target: &export.ExportTarget{
-					Type: &targetTypeEnum,
+					Type: &targetType,
 				},
 			}
 			// add target
-			switch targetTypeEnum {
+			switch targetType {
 			case export.EXPORTTARGETTYPEENUM_S3:
 				if s3RoleArn != "" {
 					params.Target.S3 = &export.S3Target{
@@ -610,8 +578,7 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 			}
 			// add compression
 			if compression != "" {
-				compressionEnum := export.ExportCompressionTypeEnum(strings.ToUpper(compression))
-				params.ExportOptions.Compression = &compressionEnum
+				params.ExportOptions.Compression = &compression
 			}
 			// add filter
 			if sql != "" {
@@ -628,20 +595,17 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 				}
 			}
 			// add file type
-			switch strings.ToUpper(fileType) {
-			case string(FileTypeCSV):
+			switch fileType {
+			case export.EXPORTFILETYPEENUM_CSV:
 				params.ExportOptions.CsvFormat = &export.ExportOptionsCSVFormat{
 					Separator:  &csvSeparator,
 					Delimiter:  *export.NewNullableString(&csvDelimiter),
 					NullValue:  *export.NewNullableString(&csvNullValue),
 					SkipHeader: &csvSkipHeader,
 				}
-			case string(FileTypePARQUET):
-				if parquetCompression != "" {
-					c := export.ExportParquetCompressionTypeEnum(strings.ToUpper(parquetCompression))
-					params.ExportOptions.ParquetFormat = &export.ExportOptionsParquetFormat{
-						Compression: &c,
-					}
+			case export.EXPORTFILETYPEENUM_PARQUET:
+				params.ExportOptions.ParquetFormat = &export.ExportOptionsParquetFormat{
+					Compression: &parquetCompression,
 				}
 			}
@@ -658,12 +622,12 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 	}

 	createCmd.Flags().StringP(flag.ClusterID, flag.ClusterIDShort, "", "The ID of the cluster, in which the export will be created.")
-	createCmd.Flags().String(flag.FileType, "CSV", "The export file type. One of [\"CSV\" \"SQL\" \"PARQUET\"].")
-	createCmd.Flags().String(flag.TargetType, "LOCAL", "The export target. One of [\"LOCAL\" \"S3\" \"GCS\" \"AZURE_BLOB\"].")
+	createCmd.Flags().String(flag.FileType, "CSV", fmt.Sprintf("The export file type. One of %q.", export.AllowedExportFileTypeEnumEnumValues))
+	createCmd.Flags().String(flag.TargetType, "LOCAL", fmt.Sprintf("The export target. One of %q.", export.AllowedExportTargetTypeEnumEnumValues))
 	createCmd.Flags().String(flag.S3URI, "", "The S3 URI in s3://<bucket>/<path> format. Required when target type is S3.")
 	createCmd.Flags().String(flag.S3AccessKeyID, "", "The access key ID of the S3. You only need to set one of the s3.role-arn and [s3.access-key-id, s3.secret-access-key].")
 	createCmd.Flags().String(flag.S3SecretAccessKey, "", "The secret access key of the S3. You only need to set one of the s3.role-arn and [s3.access-key-id, s3.secret-access-key].")
-	createCmd.Flags().String(flag.Compression, "", "The compression algorithm of the export file. One of [\"GZIP\" \"SNAPPY\" \"ZSTD\" \"NONE\"].")
+	createCmd.Flags().String(flag.Compression, "", fmt.Sprintf("The compression algorithm of the export file. One of %q.", export.AllowedExportCompressionTypeEnumEnumValues))
 	createCmd.Flags().StringSlice(flag.TableFilter, nil, "Specify the exported table(s) with table filter patterns. See https://docs.pingcap.com/tidb/stable/table-filter to learn table filter.")
 	createCmd.Flags().String(flag.TableWhere, "", "Filter the exported table(s) with the where condition.")
 	createCmd.Flags().String(flag.SQL, "", "Filter the exported data with SQL SELECT statement.")
@@ -677,7 +641,7 @@ func CreateCmd(h *internal.Helper) *cobra.Command {
 	createCmd.Flags().String(flag.GCSServiceAccountKey, "", "The base64 encoded service account key of GCS.")
 	createCmd.Flags().String(flag.AzureBlobURI, "", "The Azure Blob URI in azure://<account>.blob.core.windows.net/<container>/<path> format. Required when target type is AZURE_BLOB.")
 	createCmd.Flags().String(flag.AzureBlobSASToken, "", "The SAS token of Azure Blob.")
-	createCmd.Flags().String(flag.ParquetCompression, "ZSTD", "The parquet compression algorithm. One of [\"GZIP\" \"SNAPPY\" \"ZSTD\" \"NONE\"].")
+	createCmd.Flags().String(flag.ParquetCompression, "ZSTD", fmt.Sprintf("The parquet compression algorithm. One of %q.", export.AllowedExportParquetCompressionTypeEnumEnumValues))

 	createCmd.MarkFlagsMutuallyExclusive(flag.TableFilter, flag.SQL)
 	createCmd.MarkFlagsMutuallyExclusive(flag.TableWhere, flag.SQL)
diff --git a/internal/cli/serverless/export/ui.go b/internal/cli/serverless/export/ui.go
index 476315fc..f6d564e2 100644
--- a/internal/cli/serverless/export/ui.go
+++ b/internal/cli/serverless/export/ui.go
@@ -18,6 +18,7 @@ import (
 	"tidbcloud-cli/internal/flag"
 	"tidbcloud-cli/internal/ui"
 	"tidbcloud-cli/internal/util"
+	"tidbcloud-cli/pkg/tidbcloud/v1beta1/serverless/export"

 	tea "github.com/charmbracelet/bubbletea"
 	"github.com/juju/errors"
@@ -41,9 +42,9 @@ var inputDescription = map[string]string{
 	flag.CSVSkipHeader: "Input the CSV skip header: export CSV files of the tables without header. Type `true` to skip header, others will not skip header",
 }

-func GetSelectedParquetCompression() (string, error) {
-	compressions := make([]interface{}, 0, 4)
-	compressions = append(compressions, "SNAPPY", "GZIP", "NONE")
+func GetSelectedParquetCompression() (export.ExportParquetCompressionTypeEnum, error) {
+	compressions := make([]interface{}, 0, 3)
+	compressions = append(compressions, export.EXPORTPARQUETCOMPRESSIONTYPEENUM_SNAPPY, export.EXPORTPARQUETCOMPRESSIONTYPEENUM_GZIP, export.EXPORTPARQUETCOMPRESSIONTYPEENUM_NONE)
 	model, err := ui.InitialSelectModel(compressions, "Choose the parquet compression algorithm:")
 	if err != nil {
 		return "", errors.Trace(err)
@@ -61,47 +62,47 @@ func GetSelectedParquetCompression() (string, error) {
 	if compression == nil {
 		return "", errors.New("no compression algorithm selected")
 	}
-	return compression.(string), nil
+	return compression.(export.ExportParquetCompressionTypeEnum), nil
 }

-func GetSelectedTargetType() (TargetType, error) {
-	targetTypes := make([]interface{}, 0, 2)
-	targetTypes = append(targetTypes, TargetTypeLOCAL, TargetTypeS3, TargetTypeGCS, TargetTypeAZBLOB)
+func GetSelectedTargetType() (export.ExportTargetTypeEnum, error) {
+	targetTypes := make([]interface{}, 0, 4)
+	targetTypes = append(targetTypes, export.EXPORTTARGETTYPEENUM_LOCAL, export.EXPORTTARGETTYPEENUM_S3, export.EXPORTTARGETTYPEENUM_GCS, export.EXPORTTARGETTYPEENUM_AZURE_BLOB)
 	model, err := ui.InitialSelectModel(targetTypes, "Choose the export target:")
 	if err != nil {
-		return TargetTypeUnknown, errors.Trace(err)
+		return "", errors.Trace(err)
 	}
 	p := tea.NewProgram(model)
 	targetTypeModel, err := p.Run()
 	if err != nil {
-		return TargetTypeUnknown, errors.Trace(err)
+		return "", errors.Trace(err)
 	}
 	if m, _ := targetTypeModel.(ui.SelectModel); m.Interrupted {
-		return TargetTypeUnknown, util.InterruptError
+		return "", util.InterruptError
 	}
 	targetType := targetTypeModel.(ui.SelectModel).GetSelectedItem()
 	if targetType == nil {
-		return TargetTypeUnknown, errors.New("no export target selected")
+		return "", errors.New("no export target selected")
 	}
-	return targetType.(TargetType), nil
+	return targetType.(export.ExportTargetTypeEnum), nil
 }

-func GetSelectedAuthType(target TargetType) (_ AuthType, err error) {
+func GetSelectedAuthType(target export.ExportTargetTypeEnum) (_ string, err error) {
 	var model *ui.SelectModel
 	switch target {
-	case TargetTypeS3:
+	case export.EXPORTTARGETTYPEENUM_S3:
 		authTypes := make([]interface{}, 0, 2)
-		authTypes = append(authTypes, AuthTypeS3RoleArn, AuthTypeS3AccessKey)
+		authTypes = append(authTypes, string(export.EXPORTS3AUTHTYPEENUM_ROLE_ARN), string(export.EXPORTS3AUTHTYPEENUM_ACCESS_KEY))
 		model, err = ui.InitialSelectModel(authTypes, "Choose and input the S3 auth:")
 		if err != nil {
 			return "", errors.Trace(err)
 		}
-	case TargetTypeGCS:
-		return AuthTypeGCSServiceAccountKey, nil
-	case TargetTypeAZBLOB:
-		return AuthTypeAzBlobSasToken, nil
-	case TargetTypeLOCAL:
+	case export.EXPORTTARGETTYPEENUM_GCS:
+		return string(export.EXPORTGCSAUTHTYPEENUM_SERVICE_ACCOUNT_KEY), nil
+	case export.EXPORTTARGETTYPEENUM_AZURE_BLOB:
+		return string(export.EXPORTAZUREBLOBAUTHTYPEENUM_SAS_TOKEN), nil
+	case export.EXPORTTARGETTYPEENUM_LOCAL:
 		return "", nil
 	}
 	if model == nil {
@@ -119,19 +120,19 @@ func GetSelectedAuthType(target TargetType) (_ AuthType, err error) {
 	if authType == nil {
 		return "", errors.New("no auth type selected")
 	}
-	return authType.(AuthType), nil
+	return authType.(string), nil
 }

-func GetSelectedFileType(filterType FilterType) (_ FileType, err error) {
+func GetSelectedFileType(filterType FilterType) (_ export.ExportFileTypeEnum, err error) {
 	var model *ui.SelectModel
 	switch filterType {
 	case FilterSQL:
 		fileTypes := make([]interface{}, 0, 2)
-		fileTypes = append(fileTypes, FileTypeCSV, FileTypePARQUET)
+		fileTypes = append(fileTypes, export.EXPORTFILETYPEENUM_CSV, export.EXPORTFILETYPEENUM_PARQUET)
 		model, err = ui.InitialSelectModel(fileTypes, "Choose the exported file type:")
 	default:
 		fileTypes := make([]interface{}, 0, 3)
-		fileTypes = append(fileTypes, FileTypeSQL, FileTypeCSV, FileTypePARQUET)
+		fileTypes = append(fileTypes, export.EXPORTFILETYPEENUM_SQL, export.EXPORTFILETYPEENUM_CSV, export.EXPORTFILETYPEENUM_PARQUET)
 		model, err = ui.InitialSelectModel(fileTypes, "Choose the exported file type:")
 	}
 	if err != nil {
@@ -144,18 +145,18 @@ func GetSelectedFileType(filterType FilterType) (_ FileType, err error) {
 		return "", errors.Trace(err)
 	}
 	if m, _ := fileTypeModel.(ui.SelectModel); m.Interrupted {
-		return FileTypeUnknown, util.InterruptError
+		return "", util.InterruptError
 	}
 	fileType := fileTypeModel.(ui.SelectModel).GetSelectedItem()
 	if fileType == nil {
 		return "", errors.New("no export file type selected")
 	}
-	return fileType.(FileType), nil
+	return fileType.(export.ExportFileTypeEnum), nil
 }

-func GetSelectedCompression() (string, error) {
-	compressions := make([]interface{}, 0, 4)
-	compressions = append(compressions, "SNAPPY", "ZSTD", "NONE")
+func GetSelectedCompression() (export.ExportCompressionTypeEnum, error) {
+	compressions := make([]interface{}, 0, 3)
+	compressions = append(compressions, export.EXPORTCOMPRESSIONTYPEENUM_SNAPPY, export.EXPORTCOMPRESSIONTYPEENUM_ZSTD, export.EXPORTCOMPRESSIONTYPEENUM_NONE)
 	model, err := ui.InitialSelectModel(compressions, "Choose the compression algorithm:")
 	if err != nil {
 		return "", errors.Trace(err)
@@ -173,7 +174,7 @@ func GetSelectedCompression() (string, error) {
 	if compression == nil {
 		return "", errors.New("no compression algorithm selected")
 	}
-	return compression.(string), nil
+	return compression.(export.ExportCompressionTypeEnum), nil
 }

 type FilterType string