diff --git a/internal/cli/serverless/export/cancel.go b/internal/cli/serverless/export/cancel.go index df893443..98f3ad45 100644 --- a/internal/cli/serverless/export/cancel.go +++ b/internal/cli/serverless/export/cancel.go @@ -127,7 +127,7 @@ func CancelCmd(h *internal.Helper) *cobra.Command { if !force { if !h.IOStreams.CanPrompt { - return fmt.Errorf("the terminal doesn't support prompt, please run with --force to delete the branch") + return fmt.Errorf("the terminal doesn't support prompt, please run with --force to cancel the export") } confirmationMessage := fmt.Sprintf("%s %s %s", color.BlueString("Please type"), color.HiBlueString(confirmed), color.BlueString("to confirm:")) diff --git a/internal/cli/serverless/export/cancel_test.go b/internal/cli/serverless/export/cancel_test.go new file mode 100644 index 00000000..e8ee95e7 --- /dev/null +++ b/internal/cli/serverless/export/cancel_test.go @@ -0,0 +1,113 @@ +// Copyright 2024 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package export + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "os" + "testing" + + "tidbcloud-cli/internal" + "tidbcloud-cli/internal/iostream" + "tidbcloud-cli/internal/mock" + "tidbcloud-cli/internal/service/cloud" + "tidbcloud-cli/pkg/tidbcloud/v1beta1/serverless/export" + + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" +) + +type CancelExportSuite struct { + suite.Suite + h *internal.Helper + mockClient *mock.TiDBCloudClient +} + +func (suite *CancelExportSuite) SetupTest() { + if err := os.Setenv("NO_COLOR", "true"); err != nil { + suite.T().Error(err) + } + + var pageSize int64 = 10 + suite.mockClient = new(mock.TiDBCloudClient) + suite.h = &internal.Helper{ + Client: func() (cloud.TiDBCloudClient, error) { + return suite.mockClient, nil + }, + QueryPageSize: pageSize, + IOStreams: iostream.Test(), + } +} + +func (suite *CancelExportSuite) TestCancelExportArgs() { + assert := require.New(suite.T()) + ctx := context.Background() + + body := &export.Export{} + err := json.Unmarshal([]byte(getExportResp), body) + assert.Nil(err) + clusterId := "fake-cluster-id" + exportId := "fake-export-id" + suite.mockClient.On("CancelExport", ctx, clusterId, exportId).Return(body, nil) + + tests := []struct { + name string + args []string + err error + stdoutString string + stderrString string + }{ + { + name: "cancel export success", + args: []string{"-c", clusterId, "-e", exportId, "--force"}, + stdoutString: "export fake-export-id canceled\n", + }, + { + name: "cancel export without force", + args: []string{"-c", clusterId, "-e", exportId}, + err: fmt.Errorf("the terminal doesn't support prompt, please run with --force to cancel the export"), + }, + { + name: "cancel export without required cluster id", + args: []string{"-e", exportId}, + err: fmt.Errorf("required flag(s) \"cluster-id\" not set"), + }, + } + + for _, tt := range tests { + suite.T().Run(tt.name, func(t *testing.T) { + cmd := CancelCmd(suite.h) + 
cmd.SetContext(ctx) + suite.h.IOStreams.Out.(*bytes.Buffer).Reset() + suite.h.IOStreams.Err.(*bytes.Buffer).Reset() + cmd.SetArgs(tt.args) + err = cmd.Execute() + assert.Equal(tt.err, err) + + assert.Equal(tt.stdoutString, suite.h.IOStreams.Out.(*bytes.Buffer).String()) + assert.Equal(tt.stderrString, suite.h.IOStreams.Err.(*bytes.Buffer).String()) + if tt.err == nil { + suite.mockClient.AssertExpectations(suite.T()) + } + }) + } +} + +func TestCancelExportSuite(t *testing.T) { + suite.Run(t, new(CancelExportSuite)) +} diff --git a/internal/cli/serverless/export/create.go b/internal/cli/serverless/export/create.go index 3b93b668..521fb4f0 100644 --- a/internal/cli/serverless/export/create.go +++ b/internal/cli/serverless/export/create.go @@ -72,8 +72,9 @@ const ( CSVSeparatorDefaultValue = "," CSVDelimiterDefaultValue = "\"" CSVNullValueDefaultValue = "\\N" - CompressionDefaultValue = "GZIP" - ParquetCompressionDefaultValue = "ZSTD" + CSVSkipHeaderDefaultValue = false + CompressionDefaultValue = export.EXPORTCOMPRESSIONTYPEENUM_GZIP + ParquetCompressionDefaultValue = export.EXPORTPARQUETCOMPRESSIONTYPEENUM_ZSTD ) type CreateOpts struct { @@ -557,10 +558,10 @@ func CreateCmd(h *internal.Helper) *cobra.Command { // apply default values if strings.ToUpper(fileType) == string(FileTypePARQUET) { if parquetCompression == "" { - parquetCompression = ParquetCompressionDefaultValue + parquetCompression = string(ParquetCompressionDefaultValue) } } else if compression == "" { - compression = CompressionDefaultValue + compression = string(CompressionDefaultValue) } // build param to create export fileTypeEnum := export.ExportFileTypeEnum(strings.ToUpper(fileType)) @@ -668,7 +669,7 @@ func CreateCmd(h *internal.Helper) *cobra.Command { createCmd.Flags().String(flag.CSVDelimiter, CSVDelimiterDefaultValue, "Delimiter of string type variables in CSV files.") createCmd.Flags().String(flag.CSVSeparator, CSVSeparatorDefaultValue, "Separator of each value in CSV files.") 
createCmd.Flags().String(flag.CSVNullValue, CSVNullValueDefaultValue, "Representation of null values in CSV files.") - createCmd.Flags().Bool(flag.CSVSkipHeader, false, "Export CSV files of the tables without header.") + createCmd.Flags().Bool(flag.CSVSkipHeader, CSVSkipHeaderDefaultValue, "Export CSV files of the tables without header.") createCmd.Flags().String(flag.S3RoleArn, "", "The role arn of the S3. You only need to set one of the s3.role-arn and [s3.access-key-id, s3.secret-access-key].") createCmd.Flags().String(flag.GCSURI, "", "The GCS URI in gcs:/// format. Required when target type is GCS.") createCmd.Flags().String(flag.GCSServiceAccountKey, "", "The base64 encoded service account key of GCS.") diff --git a/internal/cli/serverless/export/create_test.go b/internal/cli/serverless/export/create_test.go new file mode 100644 index 00000000..fc824348 --- /dev/null +++ b/internal/cli/serverless/export/create_test.go @@ -0,0 +1,434 @@ +// Copyright 2024 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package export + +import ( + "bytes" + "context" + "errors" + "fmt" + "os" + "testing" + + "tidbcloud-cli/internal" + "tidbcloud-cli/internal/iostream" + "tidbcloud-cli/internal/mock" + "tidbcloud-cli/internal/service/cloud" + "tidbcloud-cli/pkg/tidbcloud/v1beta1/serverless/export" + + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" +) + +type CreateExportSuite struct { + suite.Suite + h *internal.Helper + mockClient *mock.TiDBCloudClient +} + +func (suite *CreateExportSuite) SetupTest() { + if err := os.Setenv("NO_COLOR", "true"); err != nil { + suite.T().Error(err) + } + + var pageSize int64 = 10 + suite.mockClient = new(mock.TiDBCloudClient) + suite.h = &internal.Helper{ + Client: func() (cloud.TiDBCloudClient, error) { + return suite.mockClient, nil + }, + QueryPageSize: pageSize, + IOStreams: iostream.Test(), + } +} + +func (suite *CreateExportSuite) TestCreateExportToLocal() { + ctx := context.Background() + + clusterId := "fake-cluster-id" + exportId := "fake-export-id" + + suite.mockClient.On("CreateExport", ctx, clusterId, getDefaultCreateExportBody()). 
+ Return(&export.Export{ + ExportId: &exportId, + }, nil) + + tests := []Test{ + { + name: "export all data to local with force", + args: []string{"-c", clusterId, "--force"}, + stdoutString: fmt.Sprintf("export %s is running now\n", exportId), + }, + { + name: "export all data to local without force", + args: []string{"-c", clusterId}, + err: errors.New("the terminal doesn't support prompt, please run with --force to create export"), + }, + } + suite.AssertTest(ctx, tests) +} + +func (suite *CreateExportSuite) TestCreateExportToS3WithRoleArn() { + ctx := context.Background() + + clusterId := "fake-cluster-id" + exportId := "fake-export-id" + targetType := export.EXPORTTARGETTYPEENUM_S3 + uri := "s3://fake-bucket/fake-prefix" + roleArn := "arn:aws:iam::123456789012:role/service-role/AmazonS3FullAccess" + + body := getDefaultCreateExportBody() + body.Target = &export.ExportTarget{ + Type: &targetType, + S3: &export.S3Target{ + Uri: &uri, + AuthType: export.EXPORTS3AUTHTYPEENUM_ROLE_ARN, + RoleArn: &roleArn, + }, + } + suite.mockClient.On("CreateExport", ctx, clusterId, body). 
+ Return(&export.Export{ + ExportId: &exportId, + }, nil) + + tests := []Test{ + { + name: "export all data to s3 using role arn", + args: []string{"-c", clusterId, "--target-type", "S3", "--s3.uri", uri, "--s3.role-arn", roleArn, "--force"}, + stdoutString: fmt.Sprintf("export %s is running now\n", exportId), + }, + { + name: "export all data to s3 without uri", + args: []string{"-c", clusterId, "--target-type", "S3", "--s3.role-arn", roleArn, "--force"}, + err: errors.New("S3 URI is required when target type is S3"), + }, + { + name: "export all data to s3 without auth", + args: []string{"-c", clusterId, "--target-type", "S3", "--s3.uri", uri, "--force"}, + err: errors.New("missing S3 auth information, require either role arn or access key id and secret access key"), + }, + } + + suite.AssertTest(ctx, tests) +} + +func (suite *CreateExportSuite) TestCreateExportToS3WithAccessKey() { + ctx := context.Background() + + clusterId := "fake-cluster-id" + exportId := "fake-export-id" + targetType := export.EXPORTTARGETTYPEENUM_S3 + uri := "s3://fake-bucket/fake-prefix" + accessKeyId := "fake-id" + secretAccess := "fake-secret" + + body := getDefaultCreateExportBody() + body.Target = &export.ExportTarget{ + Type: &targetType, + S3: &export.S3Target{ + Uri: &uri, + AuthType: export.EXPORTS3AUTHTYPEENUM_ACCESS_KEY, + AccessKey: &export.S3TargetAccessKey{ + Id: accessKeyId, + Secret: secretAccess, + }, + }, + } + suite.mockClient.On("CreateExport", ctx, clusterId, body). 
+ Return(&export.Export{ + ExportId: &exportId, + }, nil) + + tests := []Test{ + { + name: "export all data to s3 using access key", + args: []string{"-c", clusterId, "--target-type", "S3", "--s3.uri", uri, "--s3.access-key-id", accessKeyId, "--s3.secret-access-key", secretAccess, "--force"}, + stdoutString: fmt.Sprintf("export %s is running now\n", exportId), + }, + } + suite.AssertTest(ctx, tests) +} + +func (suite *CreateExportSuite) TestCreateExportToGCS() { + ctx := context.Background() + + clusterId := "fake-cluster-id" + exportId := "fake-export-id" + targetType := export.EXPORTTARGETTYPEENUM_GCS + uri := "s3://fake-bucket/fake-prefix" + serviceAccountKey := "fake-service-account-key" + + body := getDefaultCreateExportBody() + body.Target = &export.ExportTarget{ + Type: &targetType, + Gcs: &export.GCSTarget{ + Uri: uri, + AuthType: export.EXPORTGCSAUTHTYPEENUM_SERVICE_ACCOUNT_KEY, + ServiceAccountKey: &serviceAccountKey, + }, + } + suite.mockClient.On("CreateExport", ctx, clusterId, body). 
+ Return(&export.Export{ + ExportId: &exportId, + }, nil) + + tests := []Test{ + { + name: "export all data to gcs", + args: []string{"-c", clusterId, "--target-type", "GCS", "--gcs.uri", uri, "--gcs.service-account-key", serviceAccountKey, "--force"}, + stdoutString: fmt.Sprintf("export %s is running now\n", exportId), + }, + { + name: "export all data to gcs without auth", + args: []string{"-c", clusterId, "--target-type", "GCS", "--gcs.uri", uri, "--force"}, + err: errors.New("GCS service account key is required when target type is GCS"), + }, + { + name: "export all data to gcs without uri", + args: []string{"-c", clusterId, "--target-type", "GCS", "--gcs.service-account-key", serviceAccountKey, "--force"}, + err: errors.New("GCS URI is required when target type is GCS"), + }, + } + suite.AssertTest(ctx, tests) +} + +func (suite *CreateExportSuite) TestCreateExportToAzure() { + ctx := context.Background() + + clusterId := "fake-cluster-id" + exportId := "fake-export-id" + targetType := export.EXPORTTARGETTYPEENUM_AZURE_BLOB + uri := "s3://fake-bucket/fake-prefix" + sasToken := "fake-sas-token" + + body := getDefaultCreateExportBody() + body.Target = &export.ExportTarget{ + Type: &targetType, + AzureBlob: &export.AzureBlobTarget{ + Uri: uri, + AuthType: export.EXPORTAZUREBLOBAUTHTYPEENUM_SAS_TOKEN, + SasToken: &sasToken, + }, + } + suite.mockClient.On("CreateExport", ctx, clusterId, body). 
+ Return(&export.Export{ + ExportId: &exportId, + }, nil) + + tests := []Test{ + { + name: "export all data to azure blob", + args: []string{"-c", clusterId, "--target-type", "AZURE_BLOB", "--azblob.uri", uri, "--azblob.sas-token", sasToken, "--force"}, + stdoutString: fmt.Sprintf("export %s is running now\n", exportId), + }, + { + name: "export all data to azure blob without auth", + args: []string{"-c", clusterId, "--target-type", "AZURE_BLOB", "--azblob.uri", uri, "--force"}, + err: errors.New("Azure Blob SAS token is required when target type is AZURE_BLOB"), + }, + { + name: "export all data to azure blob without uri", + args: []string{"-c", clusterId, "--target-type", "AZURE_BLOB", "--azblob.sas-token", sasToken, "--force"}, + err: errors.New("Azure Blob URI is required when target type is AZURE_BLOB"), + }, + } + suite.AssertTest(ctx, tests) +} + +func (suite *CreateExportSuite) TestCreateExportWithSQLFile() { + ctx := context.Background() + + clusterId := "fake-cluster-id" + exportId := "fake-export-id" + fileType := export.EXPORTFILETYPEENUM_SQL + + body := getDefaultCreateExportBody() + body.ExportOptions.FileType = &fileType + body.ExportOptions.CsvFormat = nil + + suite.mockClient.On("CreateExport", ctx, clusterId, body). 
+ Return(&export.Export{ + ExportId: &exportId, + }, nil) + + tests := []Test{ + { + name: "export all data to sql file", + args: []string{"-c", clusterId, "--file-type", "SQL", "--force"}, + stdoutString: fmt.Sprintf("export %s is running now\n", exportId), + }, + } + suite.AssertTest(ctx, tests) +} + +func (suite *CreateExportSuite) TestCreateExportWithParquetFile() { + ctx := context.Background() + + clusterId := "fake-cluster-id" + exportId := "fake-export-id" + fileType := export.EXPORTFILETYPEENUM_PARQUET + parquetCompression := export.EXPORTPARQUETCOMPRESSIONTYPEENUM_ZSTD + + body := getDefaultCreateExportBody() + body.ExportOptions.FileType = &fileType + body.ExportOptions.Compression = nil + body.ExportOptions.CsvFormat = nil + body.ExportOptions.ParquetFormat = &export.ExportOptionsParquetFormat{ + Compression: &parquetCompression, + } + + suite.mockClient.On("CreateExport", ctx, clusterId, body). + Return(&export.Export{ + ExportId: &exportId, + }, nil) + + tests := []Test{ + { + name: "export all data to parquet file", + args: []string{"-c", clusterId, "--file-type", "PARQUET", "--force"}, + stdoutString: fmt.Sprintf("export %s is running now\n", exportId), + }, + { + name: "export all data to parquet file with compression", + args: []string{"-c", clusterId, "--file-type", "PARQUET", "--compression", "GZIP", "--force"}, + err: errors.New("--compression is not supported when file type is parquet, please use --parquet.compression instead"), + }, + } + suite.AssertTest(ctx, tests) +} + +func (suite *CreateExportSuite) TestCreateExportWithSQLFilter() { + ctx := context.Background() + + clusterId := "fake-cluster-id" + exportId := "fake-export-id" + sql := "SELECT * FROM db.table WHERE column = 'value'" + + body := getDefaultCreateExportBody() + body.ExportOptions.Filter = &export.ExportOptionsFilter{ + Sql: &sql, + } + + suite.mockClient.On("CreateExport", ctx, clusterId, body). 
+ Return(&export.Export{ + ExportId: &exportId, + }, nil) + + tests := []Test{ + { + name: "export data with sql filter", + args: []string{"-c", clusterId, "--sql", sql}, + stdoutString: fmt.Sprintf("export %s is running now\n", exportId), + }, + } + suite.AssertTest(ctx, tests) +} + +func (suite *CreateExportSuite) TestCreateExportWithTableFilter() { + ctx := context.Background() + + clusterId := "fake-cluster-id" + exportId := "fake-export-id" + where := "column = 'value'" + pattern1 := "db.t\\.able" + pattern2 := "db.table" + + body := getDefaultCreateExportBody() + body.ExportOptions.Filter = &export.ExportOptionsFilter{ + Table: &export.ExportOptionsFilterTable{ + Patterns: []string{pattern1, pattern2}, + Where: &where, + }, + } + + suite.mockClient.On("CreateExport", ctx, clusterId, body). + Return(&export.Export{ + ExportId: &exportId, + }, nil) + + tests := []Test{ + { + name: "export data with table filter", + args: []string{"-c", clusterId, "--where", where, "--filter", pattern1, "--filter", pattern2}, + stdoutString: fmt.Sprintf("export %s is running now\n", exportId), + }, + { + name: "export data with table filter2", + args: []string{"-c", clusterId, "--where", where, "--filter", fmt.Sprintf("%s,%s", pattern1, pattern2)}, + stdoutString: fmt.Sprintf("export %s is running now\n", exportId), + }, + } + suite.AssertTest(ctx, tests) +} + +func getDefaultCreateExportBody() *export.ExportServiceCreateExportBody { + defaultFileType := export.EXPORTFILETYPEENUM_CSV + defaultTargetType := export.EXPORTTARGETTYPEENUM_LOCAL + defaultCompression := export.EXPORTCOMPRESSIONTYPEENUM_GZIP + separatorDefaultValue := "," + delimiterDefaultValue := "\"" + nullValueDefaultValue := "\\N" + skipHeaderDefaultValue := false + return &export.ExportServiceCreateExportBody{ + ExportOptions: &export.ExportOptions{ + FileType: &defaultFileType, + Compression: &defaultCompression, + CsvFormat: &export.ExportOptionsCSVFormat{ + Separator: &separatorDefaultValue, + Delimiter: 
*export.NewNullableString(&delimiterDefaultValue), + NullValue: *export.NewNullableString(&nullValueDefaultValue), + SkipHeader: &skipHeaderDefaultValue, + }, + }, + Target: &export.ExportTarget{ + Type: &defaultTargetType, + }, + } +} + +func TestCreateExportSuite(t *testing.T) { + suite.Run(t, new(CreateExportSuite)) +} + +func (suite *CreateExportSuite) AssertTest(ctx context.Context, tests []Test) { + assert := require.New(suite.T()) + for _, tt := range tests { + suite.T().Run(tt.name, func(t *testing.T) { + cmd := CreateCmd(suite.h) + cmd.SetContext(ctx) + suite.h.IOStreams.Out.(*bytes.Buffer).Reset() + suite.h.IOStreams.Err.(*bytes.Buffer).Reset() + cmd.SetArgs(tt.args) + err := cmd.Execute() + if err != nil { + assert.Equal(tt.err.Error(), err.Error()) + } + + assert.Equal(tt.stdoutString, suite.h.IOStreams.Out.(*bytes.Buffer).String()) + assert.Equal(tt.stderrString, suite.h.IOStreams.Err.(*bytes.Buffer).String()) + if tt.err == nil { + suite.mockClient.AssertExpectations(suite.T()) + } + }) + } +} + +type Test struct { + name string + args []string + err error + stdoutString string + stderrString string +} diff --git a/internal/cli/serverless/export/describe_test.go b/internal/cli/serverless/export/describe_test.go new file mode 100644 index 00000000..5baa9e57 --- /dev/null +++ b/internal/cli/serverless/export/describe_test.go @@ -0,0 +1,151 @@ +// Copyright 2024 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package export + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "os" + "testing" + "tidbcloud-cli/pkg/tidbcloud/v1beta1/serverless/export" + + "tidbcloud-cli/internal" + "tidbcloud-cli/internal/iostream" + "tidbcloud-cli/internal/mock" + "tidbcloud-cli/internal/service/cloud" + + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" +) + +const getExportResp = `{ + "clusterId": "fake-cluster-id", + "completeTime": "2024-09-02T11:31:13Z", + "createTime": "2024-09-02T11:31:01Z", + "createdBy": "apikey-MCTGR3Jv", + "displayName": "SNAPSHOT_2024-09-02T11:30:57Z", + "expireTime": "2024-09-04T11:31:13Z", + "exportId": "fake-export-id", + "exportOptions": { + "compression": "GZIP", + "csvFormat": { + "delimiter": "\"", + "nullValue": "\\N", + "separator": ",", + "skipHeader": false + }, + "database": "", + "fileType": "CSV", + "filter": { + "table": { + "patterns": [ + "test.t1" + ], + "where": "" + } + }, + "table": "" + }, + "name": "clusters/fake-cluster-id/exports/fake-export-id", + "snapshotTime": "2024-09-02T11:30:57.571Z", + "state": "SUCCEEDED", + "target": { + "type": "LOCAL" + }, + "updateTime": "2024-09-02T11:31:39Z" +} +` + +type DescribeExportSuite struct { + suite.Suite + h *internal.Helper + mockClient *mock.TiDBCloudClient +} + +func (suite *DescribeExportSuite) SetupTest() { + if err := os.Setenv("NO_COLOR", "true"); err != nil { + suite.T().Error(err) + } + + var pageSize int64 = 10 + suite.mockClient = new(mock.TiDBCloudClient) + suite.h = &internal.Helper{ + Client: func() (cloud.TiDBCloudClient, error) { + return suite.mockClient, nil + }, + QueryPageSize: pageSize, + IOStreams: iostream.Test(), + } +} + +func (suite *DescribeExportSuite) TestDescribeExportArgs() { + assert := require.New(suite.T()) + ctx := context.Background() + + body := &export.Export{} + err := json.Unmarshal([]byte(getExportResp), body) + assert.Nil(err) + clusterId := "fake-cluster-id" + exportId := "fake-export-id" + 
suite.mockClient.On("GetExport", ctx, clusterId, exportId).Return(body, nil) + + tests := []struct { + name string + args []string + err error + stdoutString string + stderrString string + }{ + { + name: "describe export success", + args: []string{"--cluster-id", clusterId, "--export-id", exportId}, + stdoutString: getExportResp, + }, + { + name: "describe export with shorthand flag", + args: []string{"-c", clusterId, "-e", exportId}, + stdoutString: getExportResp, + }, + { + name: "describe export without required cluster id", + args: []string{"-e", exportId}, + err: fmt.Errorf("required flag(s) \"cluster-id\" not set"), + }, + } + + for _, tt := range tests { + suite.T().Run(tt.name, func(t *testing.T) { + cmd := DescribeCmd(suite.h) + cmd.SetContext(ctx) + suite.h.IOStreams.Out.(*bytes.Buffer).Reset() + suite.h.IOStreams.Err.(*bytes.Buffer).Reset() + cmd.SetArgs(tt.args) + err = cmd.Execute() + assert.Equal(tt.err, err) + + assert.Equal(tt.stdoutString, suite.h.IOStreams.Out.(*bytes.Buffer).String()) + assert.Equal(tt.stderrString, suite.h.IOStreams.Err.(*bytes.Buffer).String()) + if tt.err == nil { + suite.mockClient.AssertExpectations(suite.T()) + } + }) + } +} + +func TestDescribeExportSuite(t *testing.T) { + suite.Run(t, new(DescribeExportSuite)) +} diff --git a/internal/cli/serverless/export/list_test.go b/internal/cli/serverless/export/list_test.go new file mode 100644 index 00000000..8364b2bc --- /dev/null +++ b/internal/cli/serverless/export/list_test.go @@ -0,0 +1,239 @@ +// Copyright 2024 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package export + +import ( + "bytes" + "context" + "encoding/json" + "os" + "strings" + "testing" + + "tidbcloud-cli/internal" + "tidbcloud-cli/internal/iostream" + "tidbcloud-cli/internal/mock" + "tidbcloud-cli/internal/service/cloud" + "tidbcloud-cli/pkg/tidbcloud/v1beta1/serverless/export" + + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" +) + +const listResultStr = `{ + "exports": [ + { + "clusterId": "10289717998856001017", + "completeTime": "2024-09-04T04:45:41Z", + "createTime": "2024-09-04T04:45:27Z", + "createdBy": "apikey-MCTGR3Jv", + "displayName": "SNAPSHOT_2024-09-04T04:45:23Z", + "expireTime": "2024-09-06T04:45:41Z", + "exportId": "exp-q6j5hwy7vzhhfhlx3ilxqti3ay", + "exportOptions": { + "compression": "GZIP", + "database": "*", + "fileType": "SQL", + "table": "*" + }, + "name": "clusters/10289717998856001017/exports/exp-q6j5hwy7vzhhfhlx3ilxqti3ay", + "snapshotTime": "2024-09-04T04:45:23.189Z", + "state": "SUCCEEDED", + "target": { + "type": "LOCAL" + }, + "updateTime": "2024-09-04T04:46:44Z" + } + ], + "totalSize": 1 +} +` + +const listResultMultiPageStr = `{ + "exports": [ + { + "clusterId": "10289717998856001017", + "completeTime": "2024-09-04T04:45:41Z", + "createTime": "2024-09-04T04:45:27Z", + "createdBy": "apikey-MCTGR3Jv", + "displayName": "SNAPSHOT_2024-09-04T04:45:23Z", + "expireTime": "2024-09-06T04:45:41Z", + "exportId": "exp-q6j5hwy7vzhhfhlx3ilxqti3ay", + "exportOptions": { + "compression": "GZIP", + "database": "*", + "fileType": "SQL", + "table": "*" + }, + "name": "clusters/10289717998856001017/exports/exp-q6j5hwy7vzhhfhlx3ilxqti3ay", + "snapshotTime": "2024-09-04T04:45:23.189Z", + "state": "SUCCEEDED", + "target": { + "type": "LOCAL" + }, + "updateTime": "2024-09-04T04:46:44Z" + }, + { + "clusterId": "10289717998856001017", + "completeTime": "2024-09-04T04:45:41Z", + "createTime": 
"2024-09-04T04:45:27Z", + "createdBy": "apikey-MCTGR3Jv", + "displayName": "SNAPSHOT_2024-09-04T04:45:23Z", + "expireTime": "2024-09-06T04:45:41Z", + "exportId": "exp-q6j5hwy7vzhhfhlx3ilxqti3ay", + "exportOptions": { + "compression": "GZIP", + "database": "*", + "fileType": "SQL", + "table": "*" + }, + "name": "clusters/10289717998856001017/exports/exp-q6j5hwy7vzhhfhlx3ilxqti3ay", + "snapshotTime": "2024-09-04T04:45:23.189Z", + "state": "SUCCEEDED", + "target": { + "type": "LOCAL" + }, + "updateTime": "2024-09-04T04:46:44Z" + } + ], + "totalSize": 2 +} +` + +type ListExportsSuite struct { + suite.Suite + h *internal.Helper + mockClient *mock.TiDBCloudClient +} + +func (suite *ListExportsSuite) SetupTest() { + if err := os.Setenv("NO_COLOR", "true"); err != nil { + suite.T().Error(err) + } + + var pageSize int64 = 10 + suite.mockClient = new(mock.TiDBCloudClient) + suite.h = &internal.Helper{ + Client: func() (cloud.TiDBCloudClient, error) { + return suite.mockClient, nil + }, + QueryPageSize: pageSize, + IOStreams: iostream.Test(), + } +} + +func (suite *ListExportsSuite) TestListExportsArgs() { + assert := require.New(suite.T()) + pageSize := int32(suite.h.QueryPageSize) + orderBy := "create_time desc" + ctx := context.Background() + + body := &export.ListExportsResponse{} + err := json.Unmarshal([]byte(listResultStr), body) + assert.Nil(err) + clusterID := "fake-cluster-id" + suite.mockClient.On("ListExports", ctx, clusterID, &pageSize, (*string)(nil), &orderBy).Return(body, nil) + + tests := []struct { + name string + args []string + err error + stdoutString string + stderrString string + }{ + { + name: "list exports with default format(json when without tty)", + args: []string{"--cluster-id", clusterID}, + stdoutString: listResultStr, + }, + { + name: "list exports with output flag", + args: []string{"--cluster-id", clusterID, "-o", "json"}, + stdoutString: listResultStr, + }, + } + + for _, tt := range tests { + suite.T().Run(tt.name, func(t *testing.T) { + 
cmd := ListCmd(suite.h) + cmd.SetContext(ctx) + suite.h.IOStreams.Out.(*bytes.Buffer).Reset() + suite.h.IOStreams.Err.(*bytes.Buffer).Reset() + cmd.SetArgs(tt.args) + err = cmd.Execute() + assert.Equal(tt.err, err) + + assert.Equal(tt.stdoutString, suite.h.IOStreams.Out.(*bytes.Buffer).String()) + assert.Equal(tt.stderrString, suite.h.IOStreams.Err.(*bytes.Buffer).String()) + if tt.err == nil { + suite.mockClient.AssertExpectations(suite.T()) + } + }) + } +} + +func (suite *ListExportsSuite) TestListExportsWithMultiPages() { + assert := require.New(suite.T()) + ctx := context.Background() + // mock first page + pageSize := int32(suite.h.QueryPageSize) + pageToken := "2" + orderBy := "create_time desc" + body := &export.ListExportsResponse{} + err := json.Unmarshal([]byte(strings.ReplaceAll(listResultStr, `"totalSize": 1`, `"totalSize": 2`)), body) + assert.Nil(err) + body.NextPageToken = &pageToken + + clusterID := "fake-cluster-id" + suite.mockClient.On("ListExports", ctx, clusterID, &pageSize, (*string)(nil), &orderBy).Return(body, nil) + + body2 := &export.ListExportsResponse{} + err = json.Unmarshal([]byte(strings.ReplaceAll(listResultStr, `"totalSize": 1`, `"totalSize": 2`)), body2) + assert.Nil(err) + suite.mockClient.On("ListExports", ctx, clusterID, &pageSize, &pageToken, &orderBy).Return(body2, nil) + + cmd := ListCmd(suite.h) + tests := []struct { + name string + args []string + stdoutString string + stderrString string + }{ + { + name: "query with multi pages", + args: []string{"--cluster-id", clusterID, "--output", "json"}, + stdoutString: listResultMultiPageStr, + }, + } + + for _, tt := range tests { + suite.T().Run(tt.name, func(t *testing.T) { + cmd.SetContext(ctx) + suite.h.IOStreams.Out.(*bytes.Buffer).Reset() + suite.h.IOStreams.Err.(*bytes.Buffer).Reset() + cmd.SetArgs(tt.args) + err = cmd.Execute() + assert.Nil(err) + + assert.Equal(tt.stdoutString, suite.h.IOStreams.Out.(*bytes.Buffer).String()) + assert.Equal(tt.stderrString, 
suite.h.IOStreams.Err.(*bytes.Buffer).String()) + suite.mockClient.AssertExpectations(suite.T()) + }) + } +} + +func TestListExportSuite(t *testing.T) { + suite.Run(t, new(ListExportsSuite)) +}