From 96aed59e2dcf139b23579c1357e79c38cf53765c Mon Sep 17 00:00:00 2001 From: andrzejmikolajczakds Date: Fri, 28 Jun 2024 13:12:39 +0200 Subject: [PATCH 1/2] feat add support for different s3 providers Change regarding https://github.com/st-tech/gatling-operator/issues/122 --- ...tling-operator.tech.zozo.com_gatlings.yaml | 2 +- docs/user-guide.md | 55 +++++++++++ pkg/cloudstorages/aws.go | 65 ------------- pkg/cloudstorages/cloudstorage.go | 4 +- pkg/cloudstorages/cloudstorage_test.go | 14 ++- pkg/cloudstorages/s3.go | 91 +++++++++++++++++++ pkg/cloudstorages/{aws_test.go => s3_test.go} | 44 +++++---- 7 files changed, 191 insertions(+), 84 deletions(-) delete mode 100644 pkg/cloudstorages/aws.go create mode 100644 pkg/cloudstorages/s3.go rename pkg/cloudstorages/{aws_test.go => s3_test.go} (78%) diff --git a/config/crd/bases/gatling-operator.tech.zozo.com_gatlings.yaml b/config/crd/bases/gatling-operator.tech.zozo.com_gatlings.yaml index e72e55a..f1ee633 100644 --- a/config/crd/bases/gatling-operator.tech.zozo.com_gatlings.yaml +++ b/config/crd/bases/gatling-operator.tech.zozo.com_gatlings.yaml @@ -176,7 +176,7 @@ spec: type: array provider: description: '(Required) Provider specifies the cloud provider - that will be used. Supported providers: `aws`, `gcp`, and `azure`' + that will be used. Supported providers: `aws`, `s3`, `gcp`, and `azure`' type: string region: description: (Optional) Region Name. 
diff --git a/docs/user-guide.md b/docs/user-guide.md index 27ae18c..e698321 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -22,6 +22,7 @@ - [Parallel Number of Gatling Load Testing](#parallel-number-of-gatling-load-testing) - [Configure Cloud Storage Provider](#configure-cloud-storage-provider) - [Set Amazon S3 as Cloud Storage](#set-amazon-s3-as-cloud-storage) + - [Set Different from Amazon S3 Provider as Cloud Storage](#set-different-from-amazon-s3-provider-as-cloud-storage) - [Set Google Cloud Storage as Cloud Storage](#set-google-cloud-storage-as-cloud-storage) - [Set Azure Blob Storage as Cloud Storage](#set-azure-blob-storage-as-cloud-storage) - [Configure Notification Service Provider](#configure-notification-service-provider) @@ -657,6 +658,60 @@ Here is an IAM policy to attach for Gatling Pod to interact with Amazon S3 bucke - Replace `BUCKET_NAME` above with your bucket name - To know more about the ways to supply rclone with a set of AWS credentials, please check [this](https://rclone.org/s3/#configuration). +#### Set Different from Amazon S3 Provider as Cloud Storage + +This section provides guidance on setting up any cloud storage provider that supports the S3 API. +In this example suppose you want to store Gatling reports to a bucket named `gatling-operator-reports` in OHV's S3 provider, specifically in the `de` region. +You configure each fields in `.spec.cloudStorageSpec` and set `RCLONE_S3_ENDPOINT` env like this: + +```yaml +apiVersion: gatling-operator.tech.zozo.com/v1alpha1 +kind: Gatling +metadata: + name: gatling-sample +spec: + cloudStorageSpec: + provider: "s3" + bucket: "gatling-operator-reports" + region: "de" + env: + - name: RCLONE_S3_ENDPOINT + value: https://s3.de.io.cloud.ovh.net +...omit... +``` + +However, this is not enough. You must supply Gatling Pod (both Gatling Runner Pod and Gatling Reporter Pod) with credentials to access S3 bucket. 
Strictly speaking, [rclone](https://rclone.org/) container in Gatling Pod interacts with S3 bucket, thus you need to supply rclone with credentials. + +Below is an example of how to set S3 credentials via environment variables: + +```yaml +...omit... + cloudStorageSpec: + provider: "s3" + bucket: "gatling-operator-reports" + region: "de" + env: + - name: RCLONE_S3_PROVIDER + value: Other + - name: RCLONE_S3_ACCESS_KEY_ID + valueFrom: + secretKeyRef: + name: s3-keys + key: S3_ACCESS_KEY + - name: RCLONE_S3_SECRET_ACCESS_KEY + valueFrom: + secretKeyRef: + name: s3-keys + key: S3_SECRET_ACCESS + - name: RCLONE_S3_ENDPOINT + value: https://s3.de.io.cloud.ovh.net + - name: RCLONE_S3_REGION + value: de +...omit... +``` + +There are multiple ways to authenticate; for more information, please check [this](https://rclone.org/s3/#configuration). + #### Set Google Cloud Storage as Cloud Storage Suppose that you want to store Gatling reports to a bucket named `gatling-operator-reports` of Google Cloud Storage, you configure each fields in `.spec.cloudStorageSpec` like this: diff --git a/pkg/cloudstorages/aws.go b/pkg/cloudstorages/aws.go deleted file mode 100644 index 019b501..0000000 --- a/pkg/cloudstorages/aws.go +++ /dev/null @@ -1,65 +0,0 @@ -package cloudstorages - -import ( - "fmt" -) - -type AWSCloudStorageProvider struct { - providerName string -} - -func (p *AWSCloudStorageProvider) init(args []EnvVars) { /* do nothing */ } - -func (p *AWSCloudStorageProvider) GetName() string { - return p.providerName -} - -func (p *AWSCloudStorageProvider) GetCloudStoragePath(bucket string, gatlingName string, subDir string) string { - // Format s3:// - return fmt.Sprintf("s3:%s/%s/%s", bucket, gatlingName, subDir) -} - -func (p *AWSCloudStorageProvider) GetCloudStorageReportURL(bucket string, gatlingName string, subDir string) string { - // Format https://.s3.amazonaws.com///index.html - return fmt.Sprintf("https://%s.s3.amazonaws.com/%s/%s/index.html", bucket, gatlingName, subDir) -} - -func (p
*AWSCloudStorageProvider) GetGatlingTransferResultCommand(resultsDirectoryPath string, region string, storagePath string) string { - template := ` -RESULTS_DIR_PATH=%s -rclone config create s3 s3 env_auth=true region %s -while true; do - if [ -f "${RESULTS_DIR_PATH}/FAILED" ]; then - echo "Skip transfering gatling results" - break - fi - if [ -f "${RESULTS_DIR_PATH}/COMPLETED" ]; then - for source in $(find ${RESULTS_DIR_PATH} -type f -name *.log) - do - rclone copyto ${source} --s3-no-check-bucket --s3-env-auth %s/${HOSTNAME}.log - done - break - fi - sleep 1; -done -` - return fmt.Sprintf(template, resultsDirectoryPath, region, storagePath) -} - -func (p *AWSCloudStorageProvider) GetGatlingAggregateResultCommand(resultsDirectoryPath string, region string, storagePath string) string { - template := ` -GATLING_AGGREGATE_DIR=%s -rclone config create s3 s3 env_auth=true region %s -rclone copy --s3-no-check-bucket --s3-env-auth %s ${GATLING_AGGREGATE_DIR} -` - return fmt.Sprintf(template, resultsDirectoryPath, region, storagePath) -} - -func (p *AWSCloudStorageProvider) GetGatlingTransferReportCommand(resultsDirectoryPath string, region string, storagePath string) string { - template := ` -GATLING_AGGREGATE_DIR=%s -rclone config create s3 s3 env_auth=true region %s -rclone copy ${GATLING_AGGREGATE_DIR} --exclude "*.log" --s3-no-check-bucket --s3-env-auth %s -` - return fmt.Sprintf(template, resultsDirectoryPath, region, storagePath) -} diff --git a/pkg/cloudstorages/cloudstorage.go b/pkg/cloudstorages/cloudstorage.go index 7fda889..3187fa5 100644 --- a/pkg/cloudstorages/cloudstorage.go +++ b/pkg/cloudstorages/cloudstorage.go @@ -27,7 +27,9 @@ func GetProvider(provider string, args ...EnvVars) *CloudStorageProvider { var csp CloudStorageProvider switch provider { case "aws": - csp = &AWSCloudStorageProvider{providerName: provider} + csp = &S3CloudStorageProvider{providerName: provider} + case "s3": + csp = &S3CloudStorageProvider{providerName: provider} case "gcp": csp 
= &GCPCloudStorageProvider{providerName: provider} case "azure": diff --git a/pkg/cloudstorages/cloudstorage_test.go b/pkg/cloudstorages/cloudstorage_test.go index eff6cdc..ae5056c 100644 --- a/pkg/cloudstorages/cloudstorage_test.go +++ b/pkg/cloudstorages/cloudstorage_test.go @@ -15,7 +15,7 @@ var _ = Describe("GetProvider", func() { provider = "aws" expectedValue = "aws" }) - It("should get a pointer of AWSCloudStorageProvider that has ProviderName field value = aws", func() { + It("should get a pointer of S3CloudStorageProvider that has ProviderName field value = aws", func() { cspp := GetProvider(provider) Expect(cspp).NotTo(BeNil()) Expect((*cspp).GetName()).To(Equal(expectedValue)) @@ -34,6 +34,18 @@ var _ = Describe("GetProvider", func() { }) }) + Context("Provider is s3", func() { + BeforeEach(func() { + provider = "s3" + expectedValue = "s3" + }) + It("should get a pointer of S3CloudStorageProvider that has ProviderName field value = s3", func() { + cspp := GetProvider(provider) + Expect(cspp).NotTo(BeNil()) + Expect((*cspp).GetName()).To(Equal(expectedValue)) + }) + }) + Context("Provider is non-supported one", func() { BeforeEach(func() { provider = "foo" diff --git a/pkg/cloudstorages/s3.go b/pkg/cloudstorages/s3.go new file mode 100644 index 0000000..4cc71b5 --- /dev/null +++ b/pkg/cloudstorages/s3.go @@ -0,0 +1,91 @@ +package cloudstorages + +import ( + "fmt" + "strings" +) + +type S3CloudStorageProvider struct { + providerName string + customS3ProviderHost string +} + +func (p *S3CloudStorageProvider) init(args []EnvVars) { + if len(args) > 0 { + var envs EnvVars = args[0] + for _, env := range envs { + if env.Name == "RCLONE_S3_ENDPOINT" { + p.customS3ProviderHost = p.checkAndRemoveProtocol(env.Value) + break + } + } + } +} + +func (p *S3CloudStorageProvider) checkAndRemoveProtocol(url string) string { + idx := strings.Index(url, "://") + if idx == -1 { + return url + } + return url[idx+3:] +} + +func (p *S3CloudStorageProvider) GetName() string { + 
return p.providerName +} + +func (p *S3CloudStorageProvider) GetCloudStoragePath(bucket string, gatlingName string, subDir string) string { + // Format s3:// + return fmt.Sprintf("s3:%s/%s/%s", bucket, gatlingName, subDir) +} + +func (p *S3CloudStorageProvider) GetCloudStorageReportURL(bucket string, gatlingName string, subDir string) string { + // Format https://.///index.html + defaultS3ProviderHost := "s3.amazonaws.com" + s3ProviderHost := defaultS3ProviderHost + if p.customS3ProviderHost != "" { + s3ProviderHost = p.customS3ProviderHost + } + + return fmt.Sprintf("https://%s.%s/%s/%s/index.html", bucket, s3ProviderHost, gatlingName, subDir) +} + +func (p *S3CloudStorageProvider) GetGatlingTransferResultCommand(resultsDirectoryPath string, region string, storagePath string) string { + template := ` +RESULTS_DIR_PATH=%s +rclone config create s3 s3 env_auth=true region %s +while true; do + if [ -f "${RESULTS_DIR_PATH}/FAILED" ]; then + echo "Skip transfering gatling results" + break + fi + if [ -f "${RESULTS_DIR_PATH}/COMPLETED" ]; then + for source in $(find ${RESULTS_DIR_PATH} -type f -name *.log) + do + rclone copyto ${source} --s3-no-check-bucket --s3-env-auth %s/${HOSTNAME}.log + done + break + fi + sleep 1; +done +` + return fmt.Sprintf(template, resultsDirectoryPath, region, storagePath) +} + +func (p *S3CloudStorageProvider) GetGatlingAggregateResultCommand(resultsDirectoryPath string, region string, storagePath string) string { + template := ` +GATLING_AGGREGATE_DIR=%s +rclone config create s3 s3 env_auth=true region %s +rclone copy --s3-no-check-bucket --s3-env-auth %s ${GATLING_AGGREGATE_DIR} +` + return fmt.Sprintf(template, resultsDirectoryPath, region, storagePath) +} + +func (p *S3CloudStorageProvider) GetGatlingTransferReportCommand(resultsDirectoryPath string, region string, storagePath string) string { + template := ` +GATLING_AGGREGATE_DIR=%s +rclone config create s3 s3 env_auth=true region %s +rclone copy ${GATLING_AGGREGATE_DIR} --exclude 
"*.log" --s3-no-check-bucket --s3-env-auth %s +` + return fmt.Sprintf(template, resultsDirectoryPath, region, storagePath) +} diff --git a/pkg/cloudstorages/aws_test.go b/pkg/cloudstorages/s3_test.go similarity index 78% rename from pkg/cloudstorages/aws_test.go rename to pkg/cloudstorages/s3_test.go index ec0cb8d..960c63c 100644 --- a/pkg/cloudstorages/aws_test.go +++ b/pkg/cloudstorages/s3_test.go @@ -16,7 +16,7 @@ var _ = Describe("GetName", func() { }) Context("Provider is aws", func() { It("should get provider name = aws", func() { - csp := &AWSCloudStorageProvider{providerName: provider} + csp := &S3CloudStorageProvider{providerName: provider} Expect(csp.GetName()).To(Equal(expectedValue)) }) }) @@ -39,7 +39,7 @@ var _ = Describe("GetCloudStoragePath", func() { }) Context("Provider is aws", func() { It("path is aws s3 bucket", func() { - csp := &AWSCloudStorageProvider{providerName: provider} + csp := &S3CloudStorageProvider{providerName: provider} Expect(csp.GetCloudStoragePath(bucket, gatlingName, subDir)).To(Equal(expectedValue)) }) }) @@ -47,25 +47,37 @@ var _ = Describe("GetCloudStoragePath", func() { var _ = Describe("GetCloudStorageReportURL", func() { var ( - provider string - bucket string - gatlingName string - subDir string - expectedValue string + provider string + bucket string + gatlingName string + subDir string ) BeforeEach(func() { - provider = "aws" + provider = "s3" bucket = "testBucket" gatlingName = "testGatling" subDir = "subDir" - expectedValue = "https://testBucket.s3.amazonaws.com/testGatling/subDir/index.html" }) - Context("Provider is aws", func() { - It("path is aws s3 bucket", func() { - csp := &AWSCloudStorageProvider{providerName: provider} - Expect(csp.GetCloudStorageReportURL(bucket, gatlingName, subDir)).To(Equal(expectedValue)) + Context("Provider is s3", func() { + It("path is aws s3 bucket if RCLONE_S3_ENDPOINT not defined", func() { + csp := &S3CloudStorageProvider{providerName: provider} + 
Expect(csp.GetCloudStorageReportURL(bucket, gatlingName, subDir)).To(Equal("https://testBucket.s3.amazonaws.com/testGatling/subDir/index.html")) + }) + + It("path is S3 bucket with custom provider endpoint", func() { + csp := &S3CloudStorageProvider{providerName: provider} + csp.init([]EnvVars{ + { + { + Name: "RCLONE_S3_ENDPOINT", + Value: "https://s3.de.io.cloud.ovh.net", + }, + }, + }) + Expect(csp.GetCloudStorageReportURL(bucket, gatlingName, subDir)).To(Equal("https://testBucket.s3.de.io.cloud.ovh.net/testGatling/subDir/index.html")) }) }) + }) var _ = Describe("GetGatlingTransferResultCommand", func() { @@ -102,7 +114,7 @@ done }) Context("Provider is aws", func() { It("returns commands with s3 rclone config", func() { - csp := &AWSCloudStorageProvider{providerName: provider} + csp := &S3CloudStorageProvider{providerName: provider} Expect(csp.GetGatlingTransferResultCommand(resultsDirectoryPath, region, storagePath)).To(Equal(expectedValue)) }) }) @@ -129,7 +141,7 @@ rclone copy --s3-no-check-bucket --s3-env-auth testStoragePath ${GATLING_AGGREGA }) Context("Provider is aws", func() { It("returns commands with s3 rclone config", func() { - csp := &AWSCloudStorageProvider{providerName: provider} + csp := &S3CloudStorageProvider{providerName: provider} Expect(csp.GetGatlingAggregateResultCommand(resultsDirectoryPath, region, storagePath)).To(Equal(expectedValue)) }) }) @@ -156,7 +168,7 @@ rclone copy ${GATLING_AGGREGATE_DIR} --exclude "*.log" --s3-no-check-bucket --s3 }) Context("Provider is aws", func() { It("returns commands with s3 rclone config", func() { - csp := &AWSCloudStorageProvider{providerName: provider} + csp := &S3CloudStorageProvider{providerName: provider} Expect(csp.GetGatlingTransferReportCommand(resultsDirectoryPath, region, storagePath)).To(Equal(expectedValue)) }) }) From ecb405d7d070cd854ad633a196a5c39469b340ac Mon Sep 17 00:00:00 2001 From: andrzejmikolajczakds Date: Wed, 3 Jul 2024 12:49:06 +0200 Subject: [PATCH 2/2] feat add support for 
different s3 providers Docs improvements --- docs/user-guide.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index e698321..3d2262a 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -22,7 +22,7 @@ - [Parallel Number of Gatling Load Testing](#parallel-number-of-gatling-load-testing) - [Configure Cloud Storage Provider](#configure-cloud-storage-provider) - [Set Amazon S3 as Cloud Storage](#set-amazon-s3-as-cloud-storage) - - [Set Different from Amazon S3 Provider as Cloud Storage](#set-different-from-amazon-s3-provider-as-cloud-storage) + - [Set S3 as Cloud Storage](#set-s3-as-cloud-storage) - [Set Google Cloud Storage as Cloud Storage](#set-google-cloud-storage-as-cloud-storage) - [Set Azure Blob Storage as Cloud Storage](#set-azure-blob-storage-as-cloud-storage) - [Configure Notification Service Provider](#configure-notification-service-provider) @@ -658,10 +658,10 @@ Here is an IAM policy to attach for Gatling Pod to interact with Amazon S3 bucke - Replace `BUCKET_NAME` above with your bucket name - To know more about the ways to supply rclone with a set of AWS credentials, please check [this](https://rclone.org/s3/#configuration). -#### Set Different from Amazon S3 Provider as Cloud Storage +#### Set S3 as Cloud Storage This section provides guidance on setting up any cloud storage provider that supports the S3 API. -In this example suppose you want to store Gatling reports to a bucket named `gatling-operator-reports` in OHV's S3 provider, specifically in the `de` region. +In this example suppose you want to store Gatling reports to a bucket named `gatling-operator-reports` in OVH's S3 provider, specifically in the `de` region. You configure each fields in `.spec.cloudStorageSpec` and set `RCLONE_S3_ENDPOINT` env like this: ```yaml