diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4de1235..8f47504 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,14 +2,14 @@ on: workflow_call jobs: build-and-test: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Go - uses: actions/setup-go@v2 + uses: actions/setup-go@v3 with: - go-version: 1.18 + go-version: 1.19 - name: Build run: go build -v ./... diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a2ce5d2..81a6ee4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,23 +3,23 @@ on: workflow_call jobs: release: name: Compile and Release Binary - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Set up Go - uses: actions/setup-go@v2 + uses: actions/setup-go@v3 with: - go-version: 1.18 + go-version: 1.19 - name: Run GoReleaser uses: goreleaser/goreleaser-action@v2 with: distribution: goreleaser version: latest - args: release --rm-dist + args: release -f .goreleaser.yml --rm-dist env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 186ea98..c94f3a7 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,2 @@ -.idea/ -bin/ -app-config.yml \ No newline at end of file +# app config +app.yml \ No newline at end of file diff --git a/.goreleaser.yml b/.goreleaser.yml new file mode 100644 index 0000000..3717735 --- /dev/null +++ b/.goreleaser.yml @@ -0,0 +1,16 @@ +builds: + - goos: + - linux + - windows + - darwin + goarch: + - amd64 + - arm64 +archives: + - format_overrides: + - goos: windows + format: zip + files: + - LICENSE + - README.md + - app.yml.example \ No newline at end of file diff --git a/LICENSE b/LICENSE index deb50f8..c6b4521 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2022 #MDanial (NzK) +Copyright (c) 2023 M Danial (NzK) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/Makefile b/Makefile deleted file mode 100644 index 412d6a0..0000000 --- a/Makefile +++ /dev/null @@ -1,2 +0,0 @@ -bd: - go build -o bin/cron-upload main.go \ No newline at end of file diff --git a/README.md b/README.md index 2a1a017..c422d49 100644 --- a/README.md +++ b/README.md @@ -1,61 +1,57 @@ # Cronjob App for Upload to Cloud Provider -Little app that upload files in any readable local directories to folders in Cloud Provider. Mainly used conjunction -with [cron-backup](https://github.com/mdanialr/cron-backup). -App [cron-backup](https://github.com/mdanialr/cron-backup) for archiving local files and database, then this app -for uploading them to Cloud Provider. +CLI app to upload files in any readable local directories to Cloud Provider. + +Mainly used in conjunction with [cron-backup](https://github.com/mdanialr/cron-backup). +The [cron-backup](https://github.com/mdanialr/cron-backup) is for archiving local files and database dump, then this app is for uploading them to Cloud Provider. # Features * Upload any readable local directories as many as possible. (_make sure your cloud provider's capacity is sufficient_). * Automatically create folders (**that doesn't exist yet**) in cloud provider based on config file. -* Option to delete files in cloud provider that exceed the maximum number of days. 
+* Option to delete files in the cloud provider that exceed a configurable maximum number of days (retain).
+* Option to set the number of workers for the upload and/or delete jobs.
+* Option to set the chunk size (in bytes) used when uploading a file.
 # How to Use
 1. Download the latest binary file from Releases.
-2. Make directory where the download and extracted binary file will reside. We will use bin directory as example.
-```bash
-mkdir bin
-```
-3. Make sure the binary file is executable.
-```bash
-chmod u+x bin/cron-upload
-```
-4. Create configuration file.
-```bash
-touch app-config.yml
-```
-5. Fill in config file as needed. You can check app-config.yml.example in this repo for reference.
-6. Prepare required files. See below.
-7. Execute the binary file from directory where this config file exist, otherwise you will get error config file is not found.
-8. Execute with `-refresh` params first to get refresh token then, exchange it with access token with `-init` params.
-```bash
-./bin/cron-upload -refresh -drive
-./bin/cron-upload -init -drive
-./bin/cron-upload -drive
-```
-9. Check logs file for any error. Maybe required fields are empty, etc.
-10. __Repeat__ step __no.8__ for every 6 days. Because `refresh token` lifetime is just 7 days so make sure to renew them before get expired.
-11. (optional but recommended) Create a cronjob to run this app.
-> Example
-```bash
-@daily cd /full/path/to/cron-upload && ./bin/cron-upload -drive
-```
+2. Extract the downloaded archive and make sure the binary is executable.
+   ```bash
+   tar -xzf cron-upload....tar.gz
+   chmod u+x cron-upload
+   ```
+3. Create the configuration file from the template.
+   ```bash
+   cp app.yml.example app.yml
+   ```
+4. Edit the app config file as needed. You can check the template for an explanation of each field.
+5. Run a test to check whether there is any error in the app config file.
+   ```bash
+   ./cron-upload -test
+   ```
+6. Check the log file for any errors, e.g. failures to upload or delete files.
+7. Create a cronjob to run this app. (*optional but recommended*)
+
+   **Example**:
+   ```bash
+   @daily cd /full/path/to/cron-upload && ./cron-upload -log file
+   ```
 
-# Prepare Required Files (Google Drive)
-1. Create OAuth client with 'Desktop Client'.
-2. Download credential.json file.
-3. Write the path where credential.json file reside to app-config in `provider.cred` segment.
-4. Make sure credential.json file **readable** & **accessible** by this app.
-5. You're good to go.
+# Supported Cloud Provider
+Currently only Google Drive is supported as the cloud provider.
+## Google Drive
+1. Create a Google Service Account and download its credential in JSON format. You can follow this awesome [tutorial](https://www.labnol.org/google-api-service-account-220404);
+   following it up to step [#4](https://www.labnol.org/google-api-service-account-220404#4-share-a-drive-folder) is sufficient. Use the shared folder's name as `root` in the app config file.
+2. Put the full path of the downloaded credential file into the app config, like so.
+   ```yml
+   provider:
+     name: drive
+     cred: /full/path/to/credential.json
+   ```
 # Arguments
-* `-refresh`: if used with provider argument (e.g. `-drive`), renew or init refresh token.
-* `-init`: if used with provider argument (e.g. `-drive`), retrieve token for authentication against Google Drive provider.
-* `-drive`: do the upload job using Google Drive provider.
-
-# Under the Hood
-1. `./bin/cron-upload -refresh -drive`. 
this will exchange credential.json file for authorization code and create json file that defined in app-config file that contain refresh token before exchange it for access token.
-2. `./bin/cron-upload -init -drive`. this will exchange refresh token for access token and create ...token.json file that contain access token.
-3. `./bin/cron-upload -drive`. this will do the upload job sequentially and automatically renew access token in ...token.json file if expired.
+* `-path`: set where to find the config file. Defaults to the current directory where the binary is run.
+* `-log`: set where to write the log. Defaults to stdout.
+  Use `-log file` to write the log to a file in the directory set in the config file.
+* `-test`: run a set of checks: validate the config file, then try to create a folder and upload & delete files in the cloud provider, and report any error.
 
 # License
 This project is licensed under the **MIT License** - see the [LICENSE](LICENSE "LICENSE") file for details.
diff --git a/app-config.yml.example b/app-config.yml.example
deleted file mode 100644
index 9c8db30..0000000
--- a/app-config.yml.example
+++ /dev/null
@@ -1,16 +0,0 @@
-log: # log directory where log files of this app is written
-root_folder: # the name of the root folder where all these files would be uploaded into
-provider:
-  name: # the name of the provider. currently only support 'drive' which is Google Drive
-  auth: # the full path of file where auth.json for credentials against Google Drive api stored and used by this app
-  cred: # the full path of ile where credential.json oauth client reside
-  token: # the full path of directory where temporary token to authenticate is stored
-upload:
-  - folder:
-      name: # a folder tree that would be created in cloud provider
-      path: # a full path of directory where these files would be uploaded to cloud provider
-      retain_days: # a number of days when files in cloud provider before got deleted
-  - folder:
-      name: # a folder tree that would be created in cloud provider
-      path: # a full path of directory where these files would be uploaded to cloud provider
-      retain_days: # a number of days when files in cloud provider before got deleted
\ No newline at end of file
diff --git a/app.yml.example b/app.yml.example
new file mode 100644
index 0000000..68f5201
--- /dev/null
+++ b/app.yml.example
@@ -0,0 +1,15 @@
+root: Backup VPS # required. the name of the root folder in the cloud provider
+log: /full/path/to/log/cron-upload/ # log directory where the log file will be written
+retain: 30 # duration in minutes to decide whether the data in the cloud should be deleted. default is set to 6 days (8640)
+worker: 2 # positive number of workers that will be spawned. default is set to 2
+chunk: # the chunk size of the uploaded files. default is 0, which means no chunking and each file is uploaded at once
+provider:
+  name: drive # the name of the provider. currently only 'drive' (Google Drive) is supported
+  cred: /full/path/to/credential.json # the full path of the credential for the chosen provider in json format
+upload:
+  - name: some-app-name # required. unique. directory name in the provider, will be created if it does not exist yet
+    path: /full/path/to/app/dir # required. 
the app directory that will be archived + retain: 4 # default is set to follow retain field in the root + - name: some-another-app-name + path: /full/path/to/another/app/dir + retain: diff --git a/go.mod b/go.mod index 2320393..0b8b5a1 100644 --- a/go.mod +++ b/go.mod @@ -1,27 +1,45 @@ module github.com/mdanialr/cron-upload -go 1.18 +go 1.19 require ( - github.com/stretchr/testify v1.7.1 - golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 - google.golang.org/api v0.77.0 - gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c + github.com/spf13/viper v1.14.0 + github.com/stretchr/testify v1.8.1 + golang.org/x/exp v0.0.0-20221230185412-738e83a70c30 + golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783 + google.golang.org/api v0.107.0 ) require ( - cloud.google.com/go/compute v1.6.0 // indirect - github.com/davecgh/go-spew v1.1.0 // indirect - github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect + cloud.google.com/go/compute v1.14.0 // indirect + cloud.google.com/go/compute/metadata v0.2.3 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.2 // indirect - github.com/googleapis/gax-go/v2 v2.3.0 // indirect + github.com/google/uuid v1.3.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.2.1 // indirect + github.com/googleapis/gax-go/v2 v2.7.0 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/magiconair/properties v1.8.6 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/pelletier/go-toml v1.9.5 // indirect + github.com/pelletier/go-toml/v2 v2.0.5 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - go.opencensus.io v0.23.0 // indirect - golang.org/x/net v0.0.0-20220412020605-290c469a71a5 // indirect - golang.org/x/sys v0.0.0-20220412211240-33da011f77ad // indirect - golang.org/x/text v0.3.7 // indirect + github.com/spf13/afero v1.9.2 // indirect + github.com/spf13/cast v1.5.0 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect + github.com/spf13/pflag v1.0.5 // indirect + github.com/subosito/gotenv v1.4.1 // indirect + go.opencensus.io v0.24.0 // indirect + golang.org/x/net v0.0.0-20221014081412-f15817d10f9b // indirect + golang.org/x/sys v0.1.0 // indirect + golang.org/x/text v0.5.0 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4 // indirect - google.golang.org/grpc v1.45.0 // indirect - google.golang.org/protobuf v1.28.0 // indirect + google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef // indirect + google.golang.org/grpc v1.51.0 // indirect + google.golang.org/protobuf v1.28.1 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index bc8b93a..a4c5a1b 100644 --- a/go.sum +++ b/go.sum @@ -3,6 +3,7 @@ cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMT cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= cloud.google.com/go 
v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= @@ -15,32 +16,21 @@ cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOY cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= -cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= -cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= -cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= -cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= -cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= -cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= -cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= -cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= -cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= -cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= -cloud.google.com/go v0.100.2 h1:t9Iw5QH5v4XtlEQaCtUY7x6sCABps8sW0acw7e2WQ6Y= -cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go v0.105.0 h1:DNtEKRBAAzeS4KyIory52wWHuClNaXJ5x1F7xa4q+5Y= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= -cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= -cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= -cloud.google.com/go/compute v1.6.0 h1:XdQIN5mdPTSBVwSIVDuY5e8ZzVAccsHvD3qTEz4zIps= -cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= +cloud.google.com/go/compute v1.14.0 h1:hfm2+FfxVmnRlh6LpB7cg1ZNU+5edAHmW679JePztk0= +cloud.google.com/go/compute v1.14.0/go.mod h1:YfLtxrj9sU4Yxv+sXzZkyPjEyPBZfXHUvjxega5vAdo= +cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= +cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/longrunning v0.3.0 h1:NjljC+FYPV3uh5/OwWT6pVU+doBqMg2x/rZlE+CamDs= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod 
h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -50,14 +40,11 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -65,31 +52,27 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= -github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane 
v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= -github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= @@ -97,8 +80,6 @@ github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= -github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -114,10 +95,8 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= 
-github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -131,13 +110,10 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= -github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -147,65 +123,90 @@ github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= 
+github.com/googleapis/enterprise-certificate-proxy v0.2.1 h1:RY7tHKZcRlk788d5WSo/e83gOyyy742E8GSs771ySpg= +github.com/googleapis/enterprise-certificate-proxy v0.2.1/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= -github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= -github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= -github.com/googleapis/gax-go/v2 v2.3.0 h1:nRJtk3y8Fm770D42QV6T90ZnvFZyk7agSo3Q+Z9p3WI= -github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= -github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/googleapis/gax-go/v2 v2.7.0 h1:IcsPKeInNvYi7eqSaDjiZqDDKu5rsmunY0Y1YupQSSQ= +github.com/googleapis/gax-go/v2 v2.7.0/go.mod h1:TEop28CZZQ2y+c0VxMUmu1lV+fQx57QpBWsYpwqHJx8= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= +github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.0.5 h1:ipoSadvV8oGUjnUbMub59IDPPwfxF694nG/jwbMiyQg= +github.com/pelletier/go-toml/v2 v2.0.5/go.mod 
h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k= +github.com/spf13/afero v1.9.2 h1:j49Hj62F0n+DaZ1dDCvhABaPNSGNkt32oRFxI33IEMw= +github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= +github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= +github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= +github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.14.0 h1:Rg7d3Lo706X9tHsJMUjdiwMpHB7W8WnSVOssIY+JElU= +github.com/spf13/viper v1.14.0/go.mod h1:WT//axPky3FdvXHzGw33dNdXXXfFQqmEalje+egj8As= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs= +github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.opencensus.io 
v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= -go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= -go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -216,6 +217,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20221230185412-738e83a70c30 h1:m9O6OTJ627iFnN2JIWfdqlZCzneRO6EEBsHXI25P8ws= +golang.org/x/exp v0.0.0-20221230185412-738e83a70c30/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -229,7 +232,6 @@ golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRu golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod 
h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= @@ -240,7 +242,6 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -271,16 +272,10 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220412020605-290c469a71a5 h1:bRb386wvrE+oBNdF1d/Xh9mQrfQ4ecYhW5qJ5GvTGT4= -golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20221014081412-f15817d10f9b h1:tvrvnPFcdzp294diPnrdZZZ8XUt2Tyj7svb7X52iDuU= +golang.org/x/net v0.0.0-20221014081412-f15817d10f9b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -290,17 +285,8 @@ golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 h1:OSnWWcOd/CtWQC2cYSBgbTSJv3ciqd8r54ySIW2y3RE= -golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783 h1:nt+Q6cXKz4MosCSpnbMtqiQ8Oz0pxTef2B4Vca2lvfk= +golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -311,7 +297,6 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -343,42 +328,21 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220412211240-33da011f77ad h1:ntjMns5wyP/fN65tdBD4g8J5w8n015+iIIs9rtjXkY0= -golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7 
h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM= +golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -427,18 +391,12 @@ golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U= -golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -458,24 +416,8 @@ google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz513 google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= -google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= -google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= -google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= -google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= -google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= -google.golang.org/api v0.55.0/go.mod 
h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= -google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= -google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= -google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= -google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= -google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= -google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= -google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= -google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= -google.golang.org/api v0.77.0 h1:msijLTxwkJ7Jub5tv9KBVCKtHOQwnvnvkX7ErFFCVxY= -google.golang.org/api v0.77.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= +google.golang.org/api v0.107.0 h1:I2SlFjD8ZWabaIFOfeEDg3pf0BHJDh6iYQ1ic3Yu/UU= +google.golang.org/api v0.107.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -507,7 +449,6 @@ google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= @@ -519,42 +460,10 @@ google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= -google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= 
-google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= -google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= -google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= -google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4 h1:myaecH64R0bIEDjNORIel4iXubqzaHU1K2z8ajBwWcM= -google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= 
+google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef h1:uQ2vjV/sHTsWSqdKeLqmwitzgvjMl7o4IdtHwUDXSJY= +google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -568,23 +477,11 @@ google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3Iji google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M= -google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= -google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= +google.golang.org/grpc v1.51.0 h1:E1eGv1FTqoLIdnBCZufiSHgKjlqG6fKFf6pPWtMTh8U= +google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -597,17 +494,20 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= 
-google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/internal/config/config.go b/internal/config/config.go deleted file mode 100644 index fe7b0cd..0000000 --- a/internal/config/config.go +++ /dev/null @@ -1,85 +0,0 @@ -package config - -import ( - "bytes" - "fmt" - "io" - "os" - "strings" - - "gopkg.in/yaml.v3" -) - -// Model holds all data from config file. -type Model struct { - LogDir string `yaml:"log"` // Where info & error log for this app is written. - LogFile *os.File // File instance that would be using by logger to write into. - Provider Provider `yaml:"provider"` // detail about which provider is used. - Upload Upload `yaml:"upload"` // detail about folders that would be uploaded. - RootFolder string `yaml:"root_folder"` // all Upload folders would be created inside this root folder. -} - -// NewConfig read io.Reader then map and load the value to the returned Model. -func NewConfig(fileBuf io.Reader) (mod *Model, err error) { - buf := new(bytes.Buffer) - - if _, err := buf.ReadFrom(fileBuf); err != nil { - return mod, fmt.Errorf("failed to read from file buffer: %v", err) - } - - if err := yaml.Unmarshal(buf.Bytes(), &mod); err != nil { - return mod, fmt.Errorf("failed to unmarshal: %v", err) - } - - return -} - -// Sanitization check and sanitize config Model's instance. 
-func (m *Model) Sanitization() error { - if m.LogDir == "" { - m.LogDir = "/tmp/" - } - if !strings.HasPrefix(m.LogDir, "/") { - m.LogDir = "/" + m.LogDir - } - if !strings.HasSuffix(m.LogDir, "/") { - m.LogDir += "/" - } - - if m.Provider.Name == "" { - return fmt.Errorf("`provider.name` field is required") - } - if m.Provider.Name == "drive" { - if m.Provider.Auth == "" { - return fmt.Errorf("`provider.auth` field is required") - } - if m.Provider.Cred == "" { - return fmt.Errorf("`provider.cred` is required") - } - - // if provided then: - if m.Provider.Token != "" { - // make sure has leading and trailing slash - if !strings.HasPrefix(m.Provider.Token, "/") { - m.Provider.Token = "/" + m.Provider.Token - } - if !strings.HasSuffix(m.Provider.Token, "/") { - m.Provider.Token += "/" - } - } - // if not provided then use this default value - if m.Provider.Token == "" { - m.Provider.Token = "/tmp/cron-upload-token.json" - } - // append file name if it does not have one yet - if !strings.HasSuffix(m.Provider.Token, "cron-upload-token.json") { - m.Provider.Token += "cron-upload-token.json" - } - } - - if m.RootFolder == "" { - m.RootFolder = "Cron-Backups" - } - - return nil -} diff --git a/internal/config/config_test.go b/internal/config/config_test.go deleted file mode 100644 index 9d52b8b..0000000 --- a/internal/config/config_test.go +++ /dev/null @@ -1,116 +0,0 @@ -package config - -import ( - "bytes" - "fmt" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// fakeReader just fake type to satisfies io.Reader interfaces so it could -// trigger error buffer read from. -type fakeReader struct{} - -func (_ fakeReader) Read(_ []byte) (_ int, _ error) { - return 0, fmt.Errorf("this should trigger error in test") -} - -func TestNewConfig(t *testing.T) { - fakeConfigFile := `log: /tmp/cron-upload-log` - buf := bytes.NewBufferString(fakeConfigFile) - t.Run("Should pass when using valid value", func(t *testing.T) { - mod, err := NewConfig(buf) - require.NoError(t, err) - - assert.Equal(t, "/tmp/cron-upload-log", mod.LogDir) - }) - - fakeConfigFile = `provider: 101` - buf = bytes.NewBufferString(fakeConfigFile) - t.Run("Should error when using mismatch type in yaml unmarshalling", func(t *testing.T) { - _, err := NewConfig(buf) - require.Error(t, err) - }) - - t.Run("Injecting fake reader should be error in buffer read from", func(t *testing.T) { - _, err := NewConfig(fakeReader{}) - require.Error(t, err) - }) -} - -func TestModel_Sanitization(t *testing.T) { - testCases := []struct { - name string - sample Model - expect Model - wantErr bool - }{ - { - name: "Should has default of '/tmp/' for log dir if not provided", - sample: Model{Provider: Provider{Name: "cloud"}}, - expect: Model{LogDir: "/tmp/", Provider: Provider{Name: "cloud"}, RootFolder: "Cron-Backups"}, - }, - { - name: "Log dir should has prefix and trailing slash '/'", - sample: Model{LogDir: "log/dir", Provider: Provider{Name: "cloud"}}, - expect: Model{LogDir: "/log/dir/", Provider: Provider{Name: "cloud"}, RootFolder: "Cron-Backups"}, - }, - { - name: "Should has default value of '1' for max worker if not provided", - sample: Model{Provider: Provider{Name: "cloud"}}, - expect: Model{LogDir: "/tmp/", Provider: Provider{Name: "cloud"}, RootFolder: "Cron-Backups"}, - }, - { - name: "Should error if required `provider's fields` not provided", - sample: Model{}, - wantErr: true, - }, - { - name: "Should error if `provider.name` is 'drive' but `auth` field is not provided", - sample: 
Model{Provider: Provider{Name: "drive"}}, - wantErr: true, - }, - { - name: "Should error if `provider.name` is 'drive' but `cred` field is not provided", - sample: Model{Provider: Provider{Name: "drive"}}, - wantErr: true, - }, - { - name: "Should has leading and trailing slash for `auth` field if `provider.name` is 'drive'", - sample: Model{Provider: Provider{Name: "drive", Auth: "auth", Token: "dir", Cred: "cred"}}, - expect: Model{LogDir: "/tmp/", RootFolder: "Cron-Backups", - Provider: Provider{Name: "drive", Auth: "auth", Token: "/dir/cron-upload-token.json", Cred: "cred"}, - }, - }, - { - name: "Should has default value of '/tmp/cron-upload-token.json' if `provider.name` is 'drive' and `token` field is not provided", - sample: Model{Provider: Provider{Name: "drive", Auth: "auth", Cred: "cred"}}, - expect: Model{LogDir: "/tmp/", RootFolder: "Cron-Backups", - Provider: Provider{Name: "drive", Auth: "auth", Token: "/tmp/cron-upload-token.json", Cred: "cred"}, - }, - }, - { - name: "Should has default value of 'Cron-Backups' if `root_folder` field is not provided", - sample: Model{Provider: Provider{Name: "cloud"}}, - expect: Model{LogDir: "/tmp/", RootFolder: "Cron-Backups", - Provider: Provider{Name: "cloud"}, - }, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - err := tc.sample.Sanitization() - - switch tc.wantErr { - case false: - require.NoError(t, err) - assert.Equal(t, tc.expect, tc.sample) - case true: - require.Error(t, err) - } - }) - } -} diff --git a/internal/config/provider.go b/internal/config/provider.go deleted file mode 100644 index 4764726..0000000 --- a/internal/config/provider.go +++ /dev/null @@ -1,9 +0,0 @@ -package config - -// Provider holds data about provider that would be used to upload files. -type Provider struct { - Name string `yaml:"name"` // the name of the provider. support only 'drive'. - Auth string `yaml:"auth"` // the file path for authentication to the provider. - Token string `yaml:"token"` // a directory where temporary token file for provider 'drive' is stored. - Cred string `yaml:"cred"` // a json file path for exchanging authorization code for new refresh token if expired. -} diff --git a/internal/config/upload.go b/internal/config/upload.go deleted file mode 100644 index ba0c998..0000000 --- a/internal/config/upload.go +++ /dev/null @@ -1,43 +0,0 @@ -package config - -import ( - "fmt" - "strings" -) - -// Upload holds actual data about which path that would be uploaded and what the folder's name in provider. -type Upload []struct { - Folders Folder `yaml:"folder"` // all folders. -} - -// Folder holds detail folders that would be uploaded to provider. -type Folder struct { - Name string `yaml:"name"` // the name of the folder that would be created (if not exist) in provider. - Path string `yaml:"path"` // actual full path where target files would be uploaded to provider. - Retain uint `yaml:"retain_days"` // number of days before files in Cloud Provider's folder deleted. -} - -// Sanitization sanitize every single folder. -func (f *Folder) Sanitization() error { - if f.Path == "" { - return fmt.Errorf("`path` field is required") - } - if f.Name == "" { - f.Name = f.Path - } - - f.Name = strings.TrimLeft(f.Name, "/") - f.Name = strings.TrimRight(f.Name, "/") - - return nil -} - -// Sanitization sanitize all folders one by one. 
-func (u *Upload) Sanitization() error { - for i, uu := range *u { - if err := uu.Folders.Sanitization(); err != nil { - return fmt.Errorf("failed to sanitizing #%d folder in config: %s", i, err) - } - } - return nil -} diff --git a/internal/config/upload_test.go b/internal/config/upload_test.go deleted file mode 100644 index 563b00f..0000000 --- a/internal/config/upload_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package config - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestFolder_Sanitization(t *testing.T) { - testCases := []struct { - name string - sample Folder - expect Folder - wantErr bool - }{ - { - name: "Should be failed when not providing required `path` field", - sample: Folder{Name: "test/dir"}, - wantErr: true, - }, - { - name: "Should has same value as `path` field if `name` field is not provided", - sample: Folder{Path: "/full/path/upload/folders"}, - expect: Folder{Path: "/full/path/upload/folders", Name: "full/path/upload/folders", Retain: uint(0)}, - }, - { - name: "Should pass when providing required field", - sample: Folder{Name: "/test/dir/", Path: "full/path/upload/folders"}, - expect: Folder{Name: "test/dir", Path: "full/path/upload/folders", Retain: uint(0)}, - }, - { - name: "Default value of `retain_days` field should be '0' if not provided", - sample: Folder{Name: "/app", Path: "/path/folders"}, - expect: Folder{Name: "app", Path: "/path/folders", Retain: uint(0)}, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - err := tc.sample.Sanitization() - - switch tc.wantErr { - case true: - require.Error(t, err) - case false: - require.NoError(t, err) - assert.Equal(t, tc.expect, tc.sample) - } - }) - } -} - -func TestUpload_Sanitization(t *testing.T) { - testCases := []struct { - name string - sample Upload - expect Upload - wantErr bool - }{ - { - name: "Should error if not providing required `path` field", - sample: Upload{ - {Folders: Folder{}}, - }, - wantErr: true, - }, - { - name: "Should pass if providing minimum required `path` field", - sample: Upload{ - {Folders: Folder{Path: "/full/path/folders"}}, - }, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - err := tc.sample.Sanitization() - - switch tc.wantErr { - case true: - require.Error(t, err) - case false: - require.NoError(t, err) - } - }) - } -} diff --git a/internal/logger/writer.go b/internal/logger/writer.go deleted file mode 100644 index 3656bee..0000000 --- a/internal/logger/writer.go +++ /dev/null @@ -1,29 +0,0 @@ -package logger - -import ( - "fmt" - "log" - "os" - - "github.com/mdanialr/cron-upload/internal/config" -) - -var ( - // InfL info level logger instance that would be used throughout all this app. - InfL *log.Logger - // ErrL error level logger instance that would be used throughout all this app. - ErrL *log.Logger -) - -// InitLogger init and setup log file to write internal logger for this app. 
-func InitLogger(conf *config.Model) error { - fl, err := os.OpenFile(conf.LogDir+"cron-upload-log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0770) - if err != nil { - return fmt.Errorf("failed to open|create log file: %v\n", err) - } - - InfL = log.New(fl, "[INFO] ", log.Ldate|log.Ltime) - ErrL = log.New(fl, "[ERROR] ", log.Ldate|log.Ltime|log.Lshortfile) - - return nil -} diff --git a/internal/logger/writer_test.go b/internal/logger/writer_test.go deleted file mode 100644 index eb4b887..0000000 --- a/internal/logger/writer_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package logger - -import ( - "testing" - - "github.com/mdanialr/cron-upload/internal/config" - "github.com/stretchr/testify/require" -) - -func TestInitLogger(t *testing.T) { - testCases := []struct { - name string - sampleConf config.Model - wantErr bool - }{ - { - name: "Should be pass when using valid log dir", - sampleConf: config.Model{Provider: config.Provider{Name: "cloud"}}, - }, - { - name: "Should be failed when using fake log dir", - sampleConf: config.Model{Provider: config.Provider{Name: "cloud"}, LogDir: "/fake/log/dir"}, - wantErr: true, - }, - } - - for _, tt := range testCases { - t.Run(tt.name, func(t *testing.T) { - err := tt.sampleConf.Sanitization() - - switch tt.wantErr { - case false: - require.NoError(t, err) - require.NoError(t, InitLogger(&tt.sampleConf)) - case true: - require.Error(t, InitLogger(&tt.sampleConf)) - } - }) - } -} diff --git a/internal/provider/gdrive/constant.go b/internal/provider/gdrive/constant.go index 83ba22c..ac0601d 100644 --- a/internal/provider/gdrive/constant.go +++ b/internal/provider/gdrive/constant.go @@ -1,19 +1,20 @@ package gdrive const ( - MIMEShortcut = "application/vnd.google-apps.shortcut" // MIMEType of the file for shortcut which unique only to Google Drive - MIMEFile = "application/vnd.google-apps.file" // MIMEType of the file for file which unique only to Google Drive - MIMEFolder = "application/vnd.google-apps.folder" // MIMEType of the file for folder which unique only to Google Drive - MIMEDocs = "application/vnd.google-apps.document" // MIMEType of the file for Google Docs which unique only to Google Drive - MIMEZip = "application/zip" // MIMEType of the file for zip archive format (.zip) - MIMEPlainText = "text/plain" // MIMEType of the file for plain text format (.txt) - MIMERichText = "application/rtf" // MIMEType of the file for rich text format (.rtf) - MIMEJpeg = "image/jpeg" // MIMEType of the file for image with jpeg format (.jpeg) - MIMEPng = "image/png" // MIMEType of the file for image with png format (.png) - MIMESvg = "image/svg+xml" // MIMEType of the file for image with svg format (.svg) - FieldId = "id" // id of the file - FieldName = "name" // name of the file - FieldMIME = "mimeType" // a field to define what mime type would be used - FieldDescription = "description" // description of the file - FieldParents = "parents" // parents of the file, which is the folder's id where this file is located + MIMEShortcut = "application/vnd.google-apps.shortcut" // MIMEShortcut MIMEType of the file for shortcut which unique only to Google Drive + MIMEFile = "application/vnd.google-apps.file" // MIMEFile MIMEType of the file for file which unique only to Google Drive + MIMEFolder = "application/vnd.google-apps.folder" // MIMEFolder MIMEType of the file for folder which unique only to Google Drive + MIMEDocs = "application/vnd.google-apps.document" // MIMEDocs MIMEType of the file for Google Docs which unique only to Google Drive + MIMEZip = "application/zip" 
// MIMEZip MIMEType of the file for zip archive format (.zip) + MIMEPlainText = "text/plain" // MIMEPlainText MIMEType of the file for plain text format (.txt) + MIMERichText = "application/rtf" // MIMERichText MIMEType of the file for rich text format (.rtf) + MIMEJpeg = "image/jpeg" // MIMEJpeg MIMEType of the file for image with jpeg format (.jpeg) + MIMEPng = "image/png" // MIMEPng MIMEType of the file for image with png format (.png) + MIMESvg = "image/svg+xml" // MIMESvg MIMEType of the file for image with svg format (.svg) + FieldId = "id" // FieldId id of the file + FieldName = "name" // FieldName name of the file + FieldMIME = "mimeType" // FieldMIME define what mime type is used + FieldDescription = "description" // FieldDescription description of the file + FieldParents = "parents" // FieldParents parents of the file, which is the folder's id where this file is located + FieldCreatedAt = "createdTime" // FieldCreatedAt the time at which the file was created ) diff --git a/internal/provider/gdrive/example.go b/internal/provider/gdrive/example.go deleted file mode 100644 index 4344679..0000000 --- a/internal/provider/gdrive/example.go +++ /dev/null @@ -1,119 +0,0 @@ -package gdrive - -// LISTING ALL FILES and FOLDERS within given fileId (which is the id of a folder). -//list, _ := dr.Files.List().Q("'1Zl6s95uByQGAplNbtjVgbeXeE6x48vqL' in parents").Do() -//for _, fl := range list.Files { -// fmt.Println("Id:", fl.Id) -// -// b, _ := fl.MarshalJSON() -// var out bytes.Buffer -// json.Indent(&out, b, "", "\t") -// fmt.Println(out.String() + "\n") -//} - -// LISTING ALL FILES (only) within a folder from the given id. retrieve only non-folder data. -//const id = "1I0Ou4Uu0S_RhIdR2RYjPXiBa2TyppLzn" -//query := fmt.Sprintf("'%s' in parents and %s != '%s'", id, gdrive.FieldMIME, gdrive.MIMEFolder) -//list, _ := dr.Files.List().Q(query).Do() -//for _, fl := range list.Files { -// b, _ := fl.MarshalJSON() -// var out bytes.Buffer -// json.Indent(&out, b, "", "\t") -// fmt.Println(out.String() + "\n") -//} - -// DELETE a FOLDER -//const id = "1mdGZuCobVn6grxZJmNzbPeXIS9Jk0ztH" -//if err := dr.Files.Delete(id).Do(); err != nil { -// log.Fatalf("failed to delete a file or a folder with id: %s and error: %s \n", id, err) -//} - -// CREATE a FOLDER -//fl := &drive.File{Name: "VPS-Backup", MimeType: gdrive.MIMEFolder} -//newFl, err := dr.Files.Create(fl).Do() -//if err != nil { -// log.Fatalln("failed to create a folder:", err) -//} -//b, _ := newFl.MarshalJSON() -//var out bytes.Buffer -//json.Indent(&out, b, "", "\t") -//fmt.Println(out.String() + "\n") - -// GET a FOLDER with a NAME of 'VPS-Backup' -//q := fmt.Sprintf("mimeType = '%s' and name = '%s'", gdrive.MIMEFolder, "VPS-Backup") -//folder, err := dr.Files.List().Q(q).Do() -//if err != nil { -// log.Fatalln("failed to query for a folder with a name VPS-Backup:", err) -//} -//// List the result only if the length is not zero -//if len(folder.Files) > 0 { -// b, _ := folder.Files[0].MarshalJSON() -// var out bytes.Buffer -// json.Indent(&out, b, "", "\t") -// fmt.Println(out.String() + "\n") -// fmt.Println("ID:", folder.Files[0].Id) -//} - -// QUERY using GIVEN MIME TYPE & has GIVEN String name -//q := fmt.Sprintf("%s = '%s' and name contains '%s'", gdrive.FieldMIME, gdrive.MIMEPlainText, "newInitServ") -//folder, err := dr.Files.List().Q(q).Do() -//if err != nil { -// log.Fatalln("failed to query for a folder with a name VPS-Backup:", err) -//} -//// List the result only if the length is not zero -//if len(folder.Files) > 0 { -// b, _ := 
folder.Files[0].MarshalJSON() -// var out bytes.Buffer -// json.Indent(&out, b, "", "\t") -// fmt.Println(out.String() + "\n") -// fmt.Println("ID:", folder.Files[0].Id) -//} - -// RETRIEVE DETAIL OF THE FILE or FOLDER for GIVEN id -//const id = "1chTTpkL9n0Vsru_NdNl7fBxOuX1gwY9F" -//qFile, err := dr.Files.Get(id).Fields( -// gdrive.FieldId, -// gdrive.FieldMIME, -// gdrive.FieldName, -// gdrive.FieldParents, -//).Do() -//if err != nil { -// log.Fatalf("failed to retrieve file with id: %s and error: %s\n", id, err) -//} -//b, _ := qFile.MarshalJSON() -//var out bytes.Buffer -//json.Indent(&out, b, "", "\t") -//fmt.Println(out.String() + "\n") -//fmt.Println("ID:", qFile.Id) - -// UPLOAD a FILE IN THE DESIGNATED FOLDER which is VPS-Backup (1Cyo9qlbW7zzVTMJ7qk9E5n9ojwK--WT1) -//const filePath = "/home/user/ssh-port-forwarding.zip" -//flInstance, err := os.Open(filePath) -//if err != nil { -// log.Fatalf("failed to open file: %s with error: %s\n", filePath, err) -//} -//defer flInstance.Close() -//fl := &drive.File{ -// Parents: []string{"1Cyo9qlbW7zzVTMJ7qk9E5n9ojwK--WT1"}, -// MimeType: gdrive.MIMEZip, // this field is optional if the filename already has extension -// Name: filepath.Base(flInstance.Name()), -//} -//uploadFl, err := dr.Files.Create(fl).Media(flInstance).Fields( -// gdrive.FieldId, -// gdrive.FieldMIME, -// gdrive.FieldName, -// gdrive.FieldParents, -//).Do() -//if err != nil { -// log.Fatalf("failed to upload file: %s with error: %s\n", uploadFl.Name, err) -//} -//b, _ := uploadFl.MarshalJSON() -//var out bytes.Buffer -//json.Indent(&out, b, "", "\t") -//fmt.Println(out.String() + "\n") -//fmt.Println("ID:", uploadFl.Id) - -// EMPTY TRASH -//if err := dr.Files.EmptyTrash().Do(); err != nil { -// log.Fatalln("failed tp emptying trash:", err) -//} diff --git a/internal/provider/gdrive/gdrive.go b/internal/provider/gdrive/gdrive.go deleted file mode 100644 index d76e411..0000000 --- a/internal/provider/gdrive/gdrive.go +++ /dev/null @@ -1,233 +0,0 @@ -package gdrive - -import ( - "context" - "encoding/json" - "fmt" - "math" - "net/http" - "os" - "path/filepath" - "strings" - "time" - - "github.com/mdanialr/cron-upload/internal/config" - "github.com/mdanialr/cron-upload/internal/provider/gdrive/token" - "github.com/mdanialr/cron-upload/internal/scan" - "github.com/mdanialr/cron-upload/internal/service" - "golang.org/x/oauth2" - "google.golang.org/api/drive/v3" - "google.golang.org/api/option" -) - -// GoogleDrive run the job which is upload all files to Google Drive provider. -func GoogleDrive(conf *config.Model) error { - oAuthConfig := &oauth2.Config{} - ctx := context.Background() - - tok, err := token.LoadToken(conf.Provider.Token) - if err != nil { - return fmt.Errorf("failed to read token.json: %s\n", err) - } - client := oAuthConfig.Client(ctx, tok) - - dr, err := drive.NewService(ctx, option.WithHTTPClient(client)) - if err != nil { - return fmt.Errorf("failed to create new drive service instance: %s\n", err) - } - - // Check if refresh token not expired yet. - if err := token.CheckRefreshValidity(dr); err != nil { - return fmt.Errorf("error: %s. Please do run with params '-refresh' first then run this command again", err) - } - - if err := token.CheckTokenValidity(dr); err != nil { - // 1. Prepare NewToken instance - newTokenI := token.NewToken{} - - // 2. 
Read auth.json and inject their values to NewToken instance - b, err := os.ReadFile(conf.Provider.Auth) - if err != nil { - return fmt.Errorf("failed to read auth.json file: %s\n", err) - } - if err := json.Unmarshal(b, &newTokenI); err != nil { - return fmt.Errorf("failed to binding auth.json to NewToken model: %s\n", err) - } - - cl := &http.Client{} - newToken, err := newTokenI.RenewToken(cl) - if err != nil { - return fmt.Errorf("failed to get new token: %s\n", err) - } - - // 3. Delete old token.json file - os.Remove(conf.Provider.Token) - // 4. Save new token to token.json file - if err := token.SaveToken(conf.Provider.Token, newToken); err != nil { - return fmt.Errorf("failed to save new oauth2.Token instance to token.json file: %s\n", err) - } - } - - // In case after renewing token, we need to reload new token - tok, err = token.LoadToken(conf.Provider.Token) - if err != nil { - return fmt.Errorf("failed to read token.json: %s\n", err) - } - client = oAuthConfig.Client(ctx, tok) - - dr, err = drive.NewService(ctx, option.WithHTTPClient(client)) - if err != nil { - return fmt.Errorf("failed to create new drive service instance: %s\n", err) - } - - // START- - var ( - rootIdFolder = "" - currentParentIdFolder = "" - ) - - // Note: make sure to empty trash first. Otherwise, root folder could never be able to created - if err := dr.Files.EmptyTrash().Do(); err != nil { - return fmt.Errorf("failed to empty trash: %s\n", err) - } - - // 1. Search RootFolder in MyDrive - q := fmt.Sprintf("mimeType = '%s' and name = '%s'", MIMEFolder, conf.RootFolder) - folder, err := dr.Files.List().Q(q).Do() - if err != nil { - return fmt.Errorf("failed to query for a root folder with a name %s: %s\n", conf.RootFolder, err) - } - if len(folder.Files) > 0 { - rootIdFolder = folder.Files[0].Id - } - // 2. If not exist yet, then create it - if len(folder.Files) <= 0 { - fl := &drive.File{Name: conf.RootFolder, MimeType: MIMEFolder} - newFl, err := dr.Files.Create(fl).Do() - if err != nil { - return fmt.Errorf("failed to create root folder: %s with error: %s\n", conf.RootFolder, err) - } - rootIdFolder = newFl.Id - } - - // 3. Loop through all folders in upload section - for _, up := range conf.Upload { - up.Folders.Sanitization() - currentParentIdFolder = rootIdFolder - - // 4. Loop through all folder tree in Folders.Name - foldersToBeChecked := strings.Split(up.Folders.Name, "/") - lastFolderTree := foldersToBeChecked[len(foldersToBeChecked)-1] - for _, folderCheck := range foldersToBeChecked { - - // 5. Search this folder's name starting from RootFolder as parent folder - q := fmt.Sprintf( - "mimeType = '%s' and name = '%s' and '%s' in parents", - MIMEFolder, folderCheck, currentParentIdFolder, - ) - folder, err := dr.Files.List().Q(q).Do() - if err != nil { - return fmt.Errorf("failed to query for a folder with a name: %s under a parent folder's id: %s\n", - folderCheck, currentParentIdFolder, - ) - } - // If found then change the current parent folder to this folder's id - if len(folder.Files) > 0 { - currentParentIdFolder = folder.Files[0].Id - } - - // 6. If not found or not exist yet, then create it and change the current parent folder to this folder's id - if len(folder.Files) <= 0 { - fl := &drive.File{Name: folderCheck, MimeType: MIMEFolder, - Parents: []string{currentParentIdFolder}, - } - newFl, err := dr.Files.Create(fl).Do() - if err != nil { - return fmt.Errorf("failed to create a folder: %s\n", err) - } - currentParentIdFolder = newFl.Id - } - - // 7. 
If we reach the last in folder tree then upload all files to this folder - if folderCheck == lastFolderTree { - allFiles, err := scan.Files(up.Folders.Path) - if err != nil { - return fmt.Errorf("failed to scan and read folder path: %s with error: %s\n", up.Folders.Path, err) - } - - if len(allFiles) > 0 { - var soonToBeDeletedFiles []string - // 8. Before uploading. Remember to take notes all files (id) that reside in this folder that - // fulfill the requirements to be deleted. like, maybe already pass the date. BUT do this only if - // retain_days of this section is NOT 0. - if up.Folders.Retain > 0 { - query := fmt.Sprintf("'%s' in parents and %s != '%s'", - currentParentIdFolder, FieldMIME, MIMEFolder, - ) - list, _ := dr.Files.List().Q(query).Fields( - "files(id)", - "files(createdTime)", - //"files(name)", - ).Do() - for _, fl := range list.Files { - t, err := time.Parse(time.RFC3339, fl.CreatedTime) - if err != nil { - return fmt.Errorf("failed to parse createdTime of file %s: %s\n", fl.Id, err) - } - sinceCreate := math.Round(time.Since(t).Hours()) - if uint(sinceCreate) > ((up.Folders.Retain * 24) - 1) { - soonToBeDeletedFiles = append(soonToBeDeletedFiles, fl.Id) - } - } - } - - // 9. Index all files in Google Drive then compare it to soon to be uploaded files, ONLY upload - // files that do not exist yet. - var listOfFilesInCloud []string - query := fmt.Sprintf("'%s' in parents and %s != '%s'", - currentParentIdFolder, FieldMIME, MIMEFolder, - ) - list, _ := dr.Files.List().Q(query).Fields("files(name)").Do() - for _, fl := range list.Files { - listOfFilesInCloud = append(listOfFilesInCloud, fl.Name) - } - - // Then we are ready to upload the files to Google Drive's folder - for _, fl := range allFiles { - flInstance, err := os.Open(fl) - if err != nil { - return fmt.Errorf("failed to open file: %s with error: %s\n", fl, err) - } - defer flInstance.Close() - - // If this filename already exist in cloud then do not upload this file. ONLY upload filename - // that does not exist in the cloud yet. - fileName := filepath.Base(flInstance.Name()) - if !service.Contains(listOfFilesInCloud, fileName) { - fl := &drive.File{ - Parents: []string{currentParentIdFolder}, - Name: fileName, - } - uploadFl, err := dr.Files.Create(fl).Media(flInstance).Fields( - FieldId, FieldMIME, - FieldName, FieldParents, - ).Do() - if err != nil { - return fmt.Errorf("failed to upload file: %s with error: %s\n", uploadFl.Name, err) - } - } - } - - // 10. Lastly, delete all soon to be deleted files using their id - for _, filesToDelete := range soonToBeDeletedFiles { - if err := dr.Files.Delete(filesToDelete).Do(); err != nil { - return fmt.Errorf("failed to delete a file or a folder with id: %s and error: %s\n", filesToDelete, err) - } - } - } - } - } - } - // END- - return nil -} diff --git a/internal/provider/gdrive/google_drive.go b/internal/provider/gdrive/google_drive.go new file mode 100644 index 0000000..22c1755 --- /dev/null +++ b/internal/provider/gdrive/google_drive.go @@ -0,0 +1,115 @@ +package gdrive + +import ( + "fmt" + + "github.com/mdanialr/cron-upload/internal/provider" + "google.golang.org/api/drive/v3" + "google.golang.org/api/googleapi" +) + +// NewGoogleDriveProvider return provider that use Google Drive as the cloud provider. 
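The deleted gdrive.go above folded the `retain_days` check straight into the upload walk by parsing each file's `createdTime` and comparing hours. With the pieces the replacement provider below exposes (GetFiles, Delete, and the `createdTime` value carried on each Payload), the same idea can live in a small standalone helper. The sketch below is only an illustration: `cleanOld` is a hypothetical name, the `provider.Cloud` interface itself is defined outside this section, and the cutoff arithmetic mirrors the old retain logic rather than reproducing it exactly.

```go
package cleanup

import (
	"time"

	"github.com/mdanialr/cron-upload/internal/provider"
)

// cleanOld is a hypothetical helper (not part of this diff) that deletes every
// file in the given folder whose createdTime is older than retainDays days.
func cleanOld(cloud provider.Cloud, folderId string, retainDays uint) error {
	files, err := cloud.GetFiles(folderId)
	if err != nil {
		return err
	}
	cutoff := time.Now().AddDate(0, 0, -int(retainDays))
	for _, f := range files {
		// CreatedAt is filled from Drive's createdTime field, which is RFC 3339
		created, err := time.Parse(time.RFC3339, f.CreatedAt)
		if err != nil {
			return err
		}
		if created.Before(cutoff) {
			if err := cloud.Delete(f.Id); err != nil {
				return err
			}
		}
	}
	return nil
}
```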
+func NewGoogleDriveProvider(svc *drive.FilesService) provider.Cloud { + return &googleDrive{ + svc: svc, + } +} + +type googleDrive struct { + svc *drive.FilesService +} + +func (g *googleDrive) GetFolders(parent ...string) ([]*provider.Payload, error) { + query := fmt.Sprintf("%s = '%s'", FieldMIME, MIMEFolder) + // add first parent as query if provided + if len(parent) > 0 { + query = fmt.Sprintf("%s and '%s' in parents", query, parent[0]) + } + + return g.queryList(query) +} + +func (g *googleDrive) CreateFolder(name string, parent ...string) (string, error) { + folder := &drive.File{ + Name: name, + MimeType: MIMEFolder, + } + // append the first parent if provided + if len(parent) > 0 { + folder.Parents = append(folder.Parents, parent[0]) + } + cr, err := g.svc.Create(folder).Do() + if err != nil { + return "", fmt.Errorf("failed to create a folder with name '%s': %s", name, err) + } + + return cr.Id, nil +} + +func (g *googleDrive) GetFiles(folderId string) ([]*provider.Payload, error) { + query := fmt.Sprintf("%s != '%s' and '%s' in parents", FieldMIME, MIMEFolder, folderId) + return g.queryList(query) +} + +func (g *googleDrive) UploadFile(payload *provider.Payload, chunkSize ...int) (*provider.Payload, error) { + defer payload.File.Close() + var uploadChunkSize int + var resPayload provider.Payload + + // set chunk size if provided + if len(chunkSize) > 0 { + uploadChunkSize = chunkSize[0] + } + + fl := drive.File{ + Name: payload.Name, + Parents: payload.Parent, + } + newFl, err := g.svc.Create(&fl).Media(payload.File, googleapi.ChunkSize(uploadChunkSize)).Do() + if err != nil { + return nil, fmt.Errorf("failed to upload file with name '%s': %s", payload.Name, err) + } + + resPayload.Id = newFl.Id + resPayload.Name = newFl.Name + resPayload.Parent = payload.Parent + + return &resPayload, nil +} + +func (g *googleDrive) Delete(id string) error { + if err := g.svc.Delete(id).Do(); err != nil { + return fmt.Errorf("failed to delete a file/folder with id '%s': %s", id, err) + } + return nil +} + +// queryList helper to do List API call using the provided string as the query. +func (g *googleDrive) queryList(query string) ([]*provider.Payload, error) { + ls, err := g.svc.List().Fields( + toField("files/"+FieldId), + toField("files/"+FieldName), + toField("files/"+FieldParents), + toField("files/"+FieldCreatedAt), + ).Q(query).Do() + if err != nil { + return nil, fmt.Errorf("failed to query for folder: %s", err) + } + + if ls != nil { + if len(ls.Files) > 0 { + var res []*provider.Payload + for _, fl := range ls.Files { + res = append(res, &provider.Payload{ + Id: fl.Id, + Name: fl.Name, + Parent: fl.Parents, + CreatedAt: fl.CreatedTime, + }) + } + return res, nil + } + } + + return nil, nil +} diff --git a/internal/provider/gdrive/helper.go b/internal/provider/gdrive/helper.go new file mode 100644 index 0000000..c8326b0 --- /dev/null +++ b/internal/provider/gdrive/helper.go @@ -0,0 +1,8 @@ +package gdrive + +import "google.golang.org/api/googleapi" + +// toField convert the given string type to googleapi.Field type. 
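Taken together with the new `Init` constructor and `provider.NewWithFile` helper introduced further down in this diff, the implementation above can be wired roughly as follows. This is only a sketch: the credential path, folder name, file path, and chunk size are placeholders, and the `provider.Cloud` interface plus the app's real entry point are not part of this section.

```go
package main

import (
	"log"

	"github.com/mdanialr/cron-upload/internal/provider"
	"github.com/mdanialr/cron-upload/internal/provider/gdrive"
)

func main() {
	// build the Drive files service from the service account credential path
	// (the same path the app config's `provider.cred` points to)
	svc, err := gdrive.Init("/full/path/to/credential.json")
	if err != nil {
		log.Fatalln(err)
	}
	cloud := gdrive.NewGoogleDriveProvider(svc)

	// create a folder in the cloud provider and remember its id
	folderId, err := cloud.CreateFolder("Cron-Backups")
	if err != nil {
		log.Fatalln(err)
	}

	// wrap a local file into a Payload, then upload it into that folder
	// using an illustrative 8 MiB chunk size
	payload, err := provider.NewWithFile("/path/to/backup.tar.gz", folderId)
	if err != nil {
		log.Fatalln(err)
	}
	if _, err := cloud.UploadFile(payload, 8*1024*1024); err != nil {
		log.Fatalln(err)
	}
}
```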
+func toField(field string) googleapi.Field { + return googleapi.Field(field) +} diff --git a/internal/provider/gdrive/init.go b/internal/provider/gdrive/init.go index 66da044..c668458 100644 --- a/internal/provider/gdrive/init.go +++ b/internal/provider/gdrive/init.go @@ -1,40 +1,35 @@ package gdrive import ( - "encoding/json" + "context" "fmt" - "net/http" + "log" "os" - "github.com/mdanialr/cron-upload/internal/config" - "github.com/mdanialr/cron-upload/internal/provider/gdrive/token" + "golang.org/x/oauth2" + "golang.org/x/oauth2/google" + "google.golang.org/api/drive/v3" + "google.golang.org/api/option" ) -// InitToken initialize token using auth.json file to retrieve token.json for authentication. -func InitToken(conf *config.Model, client *http.Client) error { - // 1. Prepare NewToken instance - newTokenI := token.NewToken{} - - // 2. Read auth.json and inject their values to NewToken instance - b, err := os.ReadFile(conf.Provider.Auth) +// Init return ready to use Google Apis client that use the given service +// account token path as the credential. +func Init(serviceTokenPath string) (*drive.FilesService, error) { + // init context and read the given token filepath at once + ctx := context.Background() + tk, _ := os.ReadFile(serviceTokenPath) + // create new Google Api credential based on the above token + token, err := google.CredentialsFromJSON(ctx, tk, drive.DriveScope) if err != nil { - return fmt.Errorf("failed to read auth.json file in: %s with error: %s\n", conf.Provider.Auth, err) - } - if err := json.Unmarshal(b, &newTokenI); err != nil { - return fmt.Errorf("failed to binding auth.json to NewToken model: %s\n", err) + return nil, fmt.Errorf("failed to init Google Drive client: %s", err) } - - newToken, err := newTokenI.RenewToken(client) + // create new http client along with the oauth token for Google Api call + cl := oauth2.NewClient(ctx, token.TokenSource) + // create new Google Drive client service + svc, err := drive.NewService(ctx, option.WithHTTPClient(cl)) if err != nil { - return fmt.Errorf("failed to renew token: %s\n", err) + log.Fatalln("failed to create drive service instance:", err) } - - // 3. Delete old token.json file - os.Remove(conf.Provider.Token) - // 4. 
Save newly retrieved token to token.json file - if err := token.SaveToken(conf.Provider.Token, newToken); err != nil { - return fmt.Errorf("failed to save new oauth2.Token instance to token.json file in: %s with error: %s\n", conf.Provider.Token, err) - } - - return nil + // return just the file service instead of drive service + return drive.NewFilesService(svc), nil } diff --git a/internal/provider/gdrive/init_test.go b/internal/provider/gdrive/init_test.go deleted file mode 100644 index 5721894..0000000 --- a/internal/provider/gdrive/init_test.go +++ /dev/null @@ -1,139 +0,0 @@ -package gdrive - -import ( - "encoding/json" - "github.com/mdanialr/cron-upload/internal/config" - "github.com/mdanialr/cron-upload/internal/provider/gdrive/token" - "github.com/stretchr/testify/require" - "net/http" - "net/http/httptest" - "os" - "testing" -) - -func TestInitToken(t *testing.T) { - // START PREPARE FAKE SERVER to MIMIC GOOGLE APIS SERVER - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - resp := struct { - AccessToken string `json:"access_token"` - TokenType string `json:"token_type"` - Expiry string `json:"expiry"` - }{AccessToken: "access", TokenType: "Bearer", Expiry: "0001-01-01T00:00:00Z"} - - js, _ := json.Marshal(&resp) - - w.WriteHeader(http.StatusOK) - w.Write(js) - })) - // DONE PREPARE FAKE SERVER to MIMIC GOOGLE APIS SERVER - cl := &http.Client{} - - t.Run("Should fail when using invalid or non exist auth.json file", func(t *testing.T) { - mod := &config.Model{Provider: config.Provider{Name: "cloud", Auth: "/fake/dir/auth.json"}} - - err := InitToken(mod, cl) - require.Error(t, err) - }) - - t.Run("Should fail when using valid json file but with mismatch structure", func(t *testing.T) { - // START PREPARE FAKE JSON FILE - fakeJsonFile := ` -{ - "key": "value" -} -` - fakeJsonPath := "/tmp/fake-auth.json" - f, err := os.OpenFile(fakeJsonPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600) - require.NoError(t, err) - defer f.Close() - json.NewEncoder(f).Encode(fakeJsonFile) - // DONE PREPARE FAKE JSON FILE - - mod := &config.Model{Provider: config.Provider{Name: "cloud", Auth: fakeJsonPath}} - - err = InitToken(mod, cl) - require.Error(t, err) - - os.Remove(fakeJsonPath) - }) - - t.Run("Should fail when using invalid or fake server host", func(t *testing.T) { - // START PREPARE FAKE JSON FILE - fakeJsonFile := token.NewToken{ - RefreshToken: "token", - ClientID: "client", - ClientSecret: "secret", - TokenUrl: "http://localhost", - } - fakeJsonPath := "/tmp/fake-auth.json" - f, err := os.OpenFile(fakeJsonPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600) - require.NoError(t, err) - defer f.Close() - json.NewEncoder(f).Encode(fakeJsonFile) - // DONE PREPARE FAKE JSON FILE - - mod := &config.Model{Provider: config.Provider{Name: "cloud", Auth: fakeJsonPath}} - - err = InitToken(mod, cl) - require.Error(t, err) - - os.Remove(fakeJsonPath) - }) - - t.Run("Should fail when using not exist file path to save new token file", func(t *testing.T) { - // START PREPARE FAKE JSON FILE - fakeJsonFile := token.NewToken{ - RefreshToken: "token", - ClientID: "client", - ClientSecret: "secret", - TokenUrl: server.URL, - } - fakeJsonPath := "/tmp/fake-auth.json" - f, err := os.OpenFile(fakeJsonPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600) - require.NoError(t, err) - defer f.Close() - json.NewEncoder(f).Encode(fakeJsonFile) - // DONE PREPARE FAKE JSON FILE - - mod := &config.Model{Provider: config.Provider{ - Name: "cloud", Auth: fakeJsonPath, - Token: 
"/fake/path/token.json", - }} - - err = InitToken(mod, server.Client()) - require.Error(t, err) - - os.Remove(fakeJsonPath) - }) - - t.Run("Should pass with valid values for server host, auth file & token file path", func(t *testing.T) { - // START PREPARE FAKE JSON FILE - fakeJsonFile := token.NewToken{ - RefreshToken: "token", - ClientID: "client", - ClientSecret: "secret", - TokenUrl: server.URL, - } - fakeJsonPath := "/tmp/fake-auth.json" - f, err := os.OpenFile(fakeJsonPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600) - require.NoError(t, err) - defer f.Close() - json.NewEncoder(f).Encode(fakeJsonFile) - // DONE PREPARE FAKE JSON FILE - - mod := &config.Model{Provider: config.Provider{ - Name: "cloud", Auth: fakeJsonPath, - Token: "/tmp/fake-token.json", - }} - - err = InitToken(mod, server.Client()) - require.NoError(t, err) - - os.Remove(fakeJsonPath) - os.Remove("/tmp/fake-token.json") - }) - - t.Cleanup(func() { - server.Close() - }) -} diff --git a/internal/provider/gdrive/refresh.go b/internal/provider/gdrive/refresh.go deleted file mode 100644 index 9671125..0000000 --- a/internal/provider/gdrive/refresh.go +++ /dev/null @@ -1,77 +0,0 @@ -package gdrive - -import ( - "context" - "fmt" - "os" - - "github.com/mdanialr/cron-upload/internal/config" - "github.com/mdanialr/cron-upload/internal/provider/gdrive/token" - "github.com/mdanialr/cron-upload/internal/service" - "golang.org/x/oauth2" - "golang.org/x/oauth2/google" - "google.golang.org/api/drive/v2" -) - -// Refresh exchange authorization code for new refresh token and save it to 'config.Provider.Auth'. -func Refresh(conf *config.Model) error { - // 1. Read Provider.Cred file - b, err := os.ReadFile(conf.Provider.Cred) - if err != nil { - return fmt.Errorf("failed to read Provider.Cred file: %s\n", err) - } - oConfig, err := google.ConfigFromJSON(b, drive.DriveScope) - if err != nil { - return fmt.Errorf("failed to create oauth2.Config instance from Provider.Cred file: %s", err) - } - - // 2. Get new token - tok, err := refreshToken(oConfig) - if err != nil { - return fmt.Errorf("failed to get new token from web: %s", err) - } - - // 3. Delete old token.json file - os.Remove(conf.Provider.Token) - // 4. Save newly retrieved token to token.json file - if err := token.SaveToken(conf.Provider.Token, tok); err != nil { - return fmt.Errorf("failed to save new oauth2.Token instance to token.json file in: %s with error: %s\n", conf.Provider.Token, err) - } - - // 5. Prepare NewToken instance then assign new refresh token to instance - newRefreshTokenI := token.NewToken{ - RefreshToken: tok.RefreshToken, - ClientID: oConfig.ClientID, - ClientSecret: oConfig.ClientSecret, - TokenUrl: oConfig.Endpoint.TokenURL, - } - - // 6. Delete old file Provider.Auth file - os.Remove(conf.Provider.Auth) - // 7. Save new Provider.Auth file - if err := token.SaveRefreshToken(conf.Provider.Auth, newRefreshTokenI); err != nil { - return fmt.Errorf("failed to save new Provider.Auth file: %s", err) - } - - return nil -} - -// refreshToken exchange authorization code for new refresh. 
-func refreshToken(config *oauth2.Config) (*oauth2.Token, error) { - msg := make(chan string) - go service.StartHTTPServer(msg) - config.RedirectURL = fmt.Sprintf("http://%s", service.ADDRESS) - - authURL := config.AuthCodeURL("state-token", oauth2.AccessTypeOffline, oauth2.ApprovalForce) - fmt.Printf("Go to the following link in your browser then type the authorization code: \n%v\n", authURL) - - authCode := <-msg - fmt.Println("[INFO] Grabbed authorization code:", authCode) - - tok, err := config.Exchange(context.TODO(), authCode) - if err != nil { - return nil, fmt.Errorf("failed to retrieve token from web: %v", err) - } - - return tok, nil -} diff --git a/internal/provider/gdrive/token/check_refresh_validity.go b/internal/provider/gdrive/token/check_refresh_validity.go deleted file mode 100644 index 431c097..0000000 --- a/internal/provider/gdrive/token/check_refresh_validity.go +++ /dev/null @@ -1,20 +0,0 @@ -package token - -import ( - "fmt" - "strings" - - "google.golang.org/api/drive/v3" -) - -// CheckRefreshValidity just testing using random request to make sure connection to google apis is successful and -// make sure the refresh token is still valid. Only return error if the error contain oauth2. -func CheckRefreshValidity(srv *drive.Service) error { - _, err := srv.About.Get().Do() - if err != nil { - if strings.Contains(err.Error(), "oauth2") { - return fmt.Errorf("expired refresh token: %s\n", strings.TrimSpace(err.Error())) - } - } - return nil -} diff --git a/internal/provider/gdrive/token/check_token_validity.go b/internal/provider/gdrive/token/check_token_validity.go deleted file mode 100644 index 7a84d17..0000000 --- a/internal/provider/gdrive/token/check_token_validity.go +++ /dev/null @@ -1,20 +0,0 @@ -package token - -import ( - "fmt" - "strings" - - "google.golang.org/api/drive/v3" -) - -// CheckTokenValidity just testing using random request to make sure connection to google apis is successful and -// make sure the token is still valid. Only return error if the error contain authError. -func CheckTokenValidity(srv *drive.Service) error { - _, err := srv.About.Get().Do() - if err != nil { - if strings.Contains(err.Error(), "authError") { - return fmt.Errorf("invalid token: %s\n", strings.TrimSpace(err.Error())) - } - } - return nil -} diff --git a/internal/provider/gdrive/token/load_token.go b/internal/provider/gdrive/token/load_token.go deleted file mode 100644 index 4589b7b..0000000 --- a/internal/provider/gdrive/token/load_token.go +++ /dev/null @@ -1,21 +0,0 @@ -package token - -import ( - "encoding/json" - "os" - - "golang.org/x/oauth2" -) - -// LoadToken retrieves an oauth2 token from a local file. 
-func LoadToken(file string) (*oauth2.Token, error) { - f, err := os.Open(file) - if err != nil { - return nil, err - } - defer f.Close() - - var tok *oauth2.Token - err = json.NewDecoder(f).Decode(&tok) - return tok, err -} diff --git a/internal/provider/gdrive/token/load_token_test.go b/internal/provider/gdrive/token/load_token_test.go deleted file mode 100644 index 80cc7b4..0000000 --- a/internal/provider/gdrive/token/load_token_test.go +++ /dev/null @@ -1,31 +0,0 @@ -package token - -import ( - "os" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestLoadToken(t *testing.T) { - const tokenPath = "/tmp/token-test.json" - - // prepare sample file auth.json in /tmp - b := []byte(`{"key":"value"}`) - require.NoError(t, os.WriteFile(tokenPath, b, 0644)) - - t.Run("Should be success because loading token file that exist and has enough permission", func(t *testing.T) { - _, err := LoadToken(tokenPath) - require.NoError(t, err) - }) - - t.Run("Should error because loading token file that does not exist", func(t *testing.T) { - _, err := LoadToken("/fake/path/auth.json") - require.Error(t, err) - }) - - // cleanup and remove /tmp/auth.json file - t.Cleanup(func() { - os.Remove(tokenPath) - }) -} diff --git a/internal/provider/gdrive/token/renew_token.go b/internal/provider/gdrive/token/renew_token.go deleted file mode 100644 index 37cd20f..0000000 --- a/internal/provider/gdrive/token/renew_token.go +++ /dev/null @@ -1,46 +0,0 @@ -package token - -import ( - "encoding/json" - "fmt" - "io" - "net/http" - "net/url" - - "golang.org/x/oauth2" -) - -// NewToken necessary format to request new token. -type NewToken struct { - ClientID string `json:"client_id"` // oauth2 client id. - ClientSecret string `json:"client_secret"` // oauth2 client secret. - TokenUrl string `json:"token_uri"` // token url where the new token request would be sent. Should exist in credentials.json file. - RefreshToken string `json:"refresh"` // refresh token for exchanging new token to google apis. -} - -// RenewToken get new token from google apis. 
-func (n *NewToken) RenewToken(cl *http.Client) (*oauth2.Token, error) { - urlValue := url.Values{ - "client_id": {n.ClientID}, - "client_secret": {n.ClientSecret}, - "refresh_token": {n.RefreshToken}, "grant_type": {"refresh_token"}, - } - - resp, err := cl.PostForm(n.TokenUrl, urlValue) - if err != nil { - return nil, fmt.Errorf("failed sent POST request to renew token: %s\n", err) - } - - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read response body after sent POST request: %s\n", err) - } - defer resp.Body.Close() - - var newToken *oauth2.Token - if err := json.Unmarshal(body, &newToken); err != nil { - return nil, fmt.Errorf("failed to bind response body to oauth2.Token model: %s\n", err) - } - - return newToken, nil -} diff --git a/internal/provider/gdrive/token/renew_token_test.go b/internal/provider/gdrive/token/renew_token_test.go deleted file mode 100644 index 4dbc96f..0000000 --- a/internal/provider/gdrive/token/renew_token_test.go +++ /dev/null @@ -1,75 +0,0 @@ -package token - -import ( - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNewToken_RenewToken(t *testing.T) { - nToken := NewToken{ - TokenUrl: "/apis/token", - ClientID: "client", - ClientSecret: "secret", - RefreshToken: "asd", - } - - // prepare fake server to mimic Google apis - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - req.ParseForm() - - clID := req.PostForm.Get("client_id") - clSecret := req.PostForm.Get("client_secret") - rToken := req.PostForm.Get("refresh_token") - gType := req.PostForm.Get("grant_type") - - assert.Equal(t, nToken.ClientID, clID) - assert.Equal(t, nToken.ClientSecret, clSecret) - assert.Equal(t, nToken.RefreshToken, rToken) - assert.Equal(t, "refresh_token", gType) - - response := struct { - AccessToken string `json:"access_token"` - TokenType string `json:"token_type"` - }{AccessToken: "token", TokenType: "bearer"} - - b, err := json.Marshal(&response) - require.NoError(t, err, "failed to marshal json response") - - rw.WriteHeader(http.StatusOK) - rw.Write(b) - })) - - t.Run("Should error because using wrong or invalid server url", func(t *testing.T) { - _, err := nToken.RenewToken(server.Client()) - require.Error(t, err) - }) - - t.Run("Should pass because using the right server url", func(t *testing.T) { - nToken.TokenUrl = server.URL - _, err := nToken.RenewToken(server.Client()) - require.NoError(t, err) - }) - - // prepare fake server to mimic Google apis - server2 := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - rw.WriteHeader(http.StatusOK) - rw.Header().Set("content-type", "applications/xml") - })) - - t.Run("Should error because the fake server return non json response body", func(t *testing.T) { - nToken.TokenUrl = server2.URL - _, err := nToken.RenewToken(server2.Client()) - require.Error(t, err) - }) - - // cleanup test env - t.Cleanup(func() { - server.Close() - server2.Close() - }) -} diff --git a/internal/provider/gdrive/token/save_refresh.go b/internal/provider/gdrive/token/save_refresh.go deleted file mode 100644 index aaefdf7..0000000 --- a/internal/provider/gdrive/token/save_refresh.go +++ /dev/null @@ -1,18 +0,0 @@ -package token - -import ( - "encoding/json" - "fmt" - "os" -) - -// SaveRefreshToken Saves a Provider.Auth to a file path. 
-func SaveRefreshToken(path string, auth NewToken) error { - f, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600) - if err != nil { - return fmt.Errorf("failed to write Provider.Auth to path: %s\n", err) - } - defer f.Close() - - return json.NewEncoder(f).Encode(auth) -} diff --git a/internal/provider/gdrive/token/save_refresh_test.go b/internal/provider/gdrive/token/save_refresh_test.go deleted file mode 100644 index ddad157..0000000 --- a/internal/provider/gdrive/token/save_refresh_test.go +++ /dev/null @@ -1,48 +0,0 @@ -package token - -import ( - "os" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestSaveRefreshToken(t *testing.T) { - var sample = NewToken{ - TokenUrl: "https://example.com/oauth", RefreshToken: "refresh", - ClientSecret: "secret", ClientID: "client", - } - - testCases := []struct { - name string - samplePath string - wantErr bool - }{ - { - name: "Should pass when using correct and accessible file path", - samplePath: "/tmp/test-credentials.json", - }, - { - name: "Should fail when using invalid and or inaccessible file path", - samplePath: "/fake/path/test-credentials.json", - wantErr: true, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - switch tc.wantErr { - case false: - require.NoError(t, SaveRefreshToken(tc.samplePath, sample)) - case true: - require.Error(t, SaveRefreshToken(tc.samplePath, sample)) - } - }) - } - - t.Cleanup(func() { - for _, tc := range testCases { - os.Remove(tc.samplePath) - } - }) -} diff --git a/internal/provider/gdrive/token/save_token.go b/internal/provider/gdrive/token/save_token.go deleted file mode 100644 index c84ee0a..0000000 --- a/internal/provider/gdrive/token/save_token.go +++ /dev/null @@ -1,21 +0,0 @@ -package token - -import ( - "encoding/json" - "fmt" - "os" - - "golang.org/x/oauth2" -) - -// SaveToken Saves a token to a file path. 
-func SaveToken(path string, token *oauth2.Token) error { - fmt.Println("Saving credential file to:", path) - f, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600) - if err != nil { - return fmt.Errorf("failed to write oauth2.Token to path: %s\n", err) - } - defer f.Close() - - return json.NewEncoder(f).Encode(token) -} diff --git a/internal/provider/gdrive/token/save_token_test.go b/internal/provider/gdrive/token/save_token_test.go deleted file mode 100644 index 5922a3c..0000000 --- a/internal/provider/gdrive/token/save_token_test.go +++ /dev/null @@ -1,48 +0,0 @@ -package token - -import ( - "os" - "testing" - - "github.com/stretchr/testify/require" - "golang.org/x/oauth2" -) - -func TestSaveToken(t *testing.T) { - testCases := []struct { - name string - sample *oauth2.Token - samplePath string - wantErr bool - }{ - { - name: "Should pass because using correct and accessible file path", - sample: &oauth2.Token{RefreshToken: "refresh", AccessToken: "access", TokenType: "bearer"}, - samplePath: "/tmp/auth.json", - }, - { - name: "Should error because using invalid and not enough permission file path", - sample: &oauth2.Token{RefreshToken: "refresh", AccessToken: "access", TokenType: "bearer"}, - samplePath: "/fake/path/auth.json", - wantErr: true, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - switch tc.wantErr { - case false: - require.NoError(t, SaveToken(tc.samplePath, tc.sample)) - case true: - require.Error(t, SaveToken(tc.samplePath, tc.sample)) - } - }) - } - - // cleanup and remove /tmp/auth.json - t.Cleanup(func() { - for _, tc := range testCases { - os.Remove(tc.samplePath) - } - }) -} diff --git a/internal/provider/helper.go b/internal/provider/helper.go new file mode 100644 index 0000000..cce419b --- /dev/null +++ b/internal/provider/helper.go @@ -0,0 +1,52 @@ +package provider + +import ( + "fmt" + "os" + "path/filepath" + "strings" +) + +// LookupRoute recursively lookup parent's name for the given route in the +// payloads stack. +func LookupRoute(payloads []*Payload, route string) string { + for _, payload := range payloads { + firstPart := strings.Split(route, "/")[0] + if payload.Name == firstPart { + if len(payload.Parent) > 0 { + r := LookupRouteName(payloads, payload.Parent[0]) // search for the parent's route name + return LookupRoute(payloads, fmt.Sprintf("%s/%s", r, route)) // recursively search for the parents until it has no parents + } + } + } + return route +} + +// LookupRouteName search for matched id in the payloads stack from the given +// id. +func LookupRouteName(payloads []*Payload, id string) string { + for _, payload := range payloads { + if payload.Id == id { + return payload.Name + } + } + return "" +} + +// NewWithFile return new payload instance along with io.ReadCloser for File +// field from the given filepath. Also add the given parents to Parents field. +// return error if failed to read the given filepath. 
+func NewWithFile(filePath string, parents ...string) (*Payload, error) { + // try open the given filepath and return error if any + flInstance, err := os.Open(filePath) + if err != nil { + return nil, fmt.Errorf("failed to open file path: %s", err) + } + // create new payload along with the file reader + payload := Payload{ + Name: filepath.Base(flInstance.Name()), + Parent: parents, + File: flInstance, + } + return &payload, nil +} diff --git a/internal/provider/helper_test.go b/internal/provider/helper_test.go new file mode 100644 index 0000000..80e311e --- /dev/null +++ b/internal/provider/helper_test.go @@ -0,0 +1,92 @@ +package provider_test + +import ( + "testing" + + "github.com/mdanialr/cron-upload/internal/provider" + "github.com/stretchr/testify/assert" +) + +var samplePayloads = []*provider.Payload{ + { + Id: "12", + Name: "Root", + Parent: []string{}, + }, + { + Id: "2", + Name: "DB", + Parent: []string{"12"}, + }, + { + Id: "3", + Name: "app", + Parent: []string{"12"}, + }, + { + Id: "24", + Name: "sample", + Parent: []string{"3"}, + }, +} + +func TestLookupRoute(t *testing.T) { + testCases := []struct { + name string + sampleRoute string + expect string + }{ + { + name: "Given route 'Root' without parent should return 'Root' without any added route", + sampleRoute: "Root", + expect: "Root", + }, + { + name: "Given route that does not exist yet in the stack should return just like route without parent", + sampleRoute: "Backup", + expect: "Backup", + }, + { + name: "Given route 'DB' that has single parent 'Root' should return 'Root/DB'", + sampleRoute: "DB", + expect: "Root/DB", + }, + { + name: "Given route 'sample' that has nested parent should return 'Root/app/sample'", + sampleRoute: "sample", + expect: "Root/app/sample", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + res := provider.LookupRoute(samplePayloads, tc.sampleRoute) + assert.Equal(t, tc.expect, res) + }) + } +} + +func TestLookupRouteName(t *testing.T) { + testCases := []struct { + name string + sample string + expect string + }{ + { + name: "Given want to search route id '3' and exist in stack should return 'app'", + sample: "3", + expect: "app", + }, + { + name: "Given want to search route id '5' and does not exist in stack should return empty string", + sample: "5", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + res := provider.LookupRouteName(samplePayloads, tc.sample) + assert.Equal(t, tc.expect, res) + }) + } +} diff --git a/internal/provider/pcloud/clear_trash.go b/internal/provider/pcloud/clear_trash.go deleted file mode 100644 index 027abd2..0000000 --- a/internal/provider/pcloud/clear_trash.go +++ /dev/null @@ -1,11 +0,0 @@ -package pcloud - -import "net/url" - -// GetClearTrashUrl generate url that could be used for clearing trash. 
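For readers skimming the diff, here is a brief, hedged sketch of how the new helpers in `internal/provider/helper.go` (added above, and exercised by `helper_test.go`) are intended to be called. The file path and ids are purely illustrative; the removed pCloud code resumes right after this block.

```go
package main

import (
	"fmt"
	"log"

	"github.com/mdanialr/cron-upload/internal/provider"
)

func main() {
	// The same kind of payload stack that GetFolders is expected to return:
	// "Root" has no parent, "DB" lives inside "Root".
	payloads := []*provider.Payload{
		{Id: "12", Name: "Root", Parent: []string{}},
		{Id: "2", Name: "DB", Parent: []string{"12"}},
	}

	// LookupRoute walks the Parent references upwards and prints "Root/DB".
	fmt.Println(provider.LookupRoute(payloads, "DB"))

	// NewWithFile wraps a local file into a Payload ready for UploadFile;
	// "/tmp/backup.tar.gz" is only an illustrative path.
	p, err := provider.NewWithFile("/tmp/backup.tar.gz", "12")
	if err != nil {
		log.Fatalln(err)
	}
	defer p.File.Close()
	fmt.Println(p.Name) // prints: backup.tar.gz
}
```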
-func GetClearTrashUrl(a string) string { - val := url.Values{"auth": {a}, "folderid": {"0"}} - uri := url.URL{Scheme: Scheme, Host: EndPoint, Path: ClearTrash, RawQuery: val.Encode()} - - return uri.String() -} diff --git a/internal/provider/pcloud/clear_trash_test.go b/internal/provider/pcloud/clear_trash_test.go deleted file mode 100644 index f6bfd9d..0000000 --- a/internal/provider/pcloud/clear_trash_test.go +++ /dev/null @@ -1,33 +0,0 @@ -package pcloud - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGetClearTrashUrl(t *testing.T) { - testCases := []struct { - name string - sample string - expect string - }{ - { - name: "Should has only one return's path", - sample: "token", - expect: "https://eapi.pcloud.com/trash_clear?auth=token&folderid=0", - }, - { - name: "Should return url value as expected", - sample: "secret", - expect: "https://eapi.pcloud.com/trash_clear?auth=secret&folderid=0", - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - out := GetClearTrashUrl(tc.sample) - assert.Equal(t, tc.expect, out) - }) - } -} diff --git a/internal/provider/pcloud/create_folder.go b/internal/provider/pcloud/create_folder.go deleted file mode 100644 index db79a40..0000000 --- a/internal/provider/pcloud/create_folder.go +++ /dev/null @@ -1,30 +0,0 @@ -package pcloud - -import ( - "fmt" - "net/url" -) - -// CreateFolderResponse standard response from pCloud API call for creating folder. -type CreateFolderResponse struct { - Result int `json:"result"` // non 0 result is errors. - IsCreated bool `json:"created"` // Whether this folder already created or not before this API call requested. - Metadata CreateFolderMetaResponse `json:"metadata"` // Contains metadata about this folder. -} - -// CreateFolderMetaResponse additional metadata for this folder. -type CreateFolderMetaResponse struct { - Path string `json:"path"` // Full path for this folder. - Name string `json:"name"` // Name of this folder. - Id int `json:"folderid"` // Id of this folder. This is so important for uploading files. -} - -// GetCreateFolderUrl generate url that could be used to create folder from the -// given path value. 
-func GetCreateFolderUrl(a string, p string) string { - val := url.Values{"auth": {a}} - uri := url.URL{Scheme: Scheme, Host: EndPoint, Path: CreateFolder, RawQuery: val.Encode()} - - res := fmt.Sprintf("%s&path=%s", uri.String(), p) - return res -} diff --git a/internal/provider/pcloud/create_folder_test.go b/internal/provider/pcloud/create_folder_test.go deleted file mode 100644 index 39667a9..0000000 --- a/internal/provider/pcloud/create_folder_test.go +++ /dev/null @@ -1,34 +0,0 @@ -package pcloud - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGetCreateFolderUrl(t *testing.T) { - const TOKEN = "token" - testCases := []struct { - name string - sample string - expect string - }{ - { - name: "Should has only one result's path", - sample: "/vps", - expect: "https://eapi.pcloud.com/createfolderifnotexists?auth=token&path=/vps", - }, - { - name: "Should return url as expected", - sample: "/vps/backup", - expect: "https://eapi.pcloud.com/createfolderifnotexists?auth=token&path=/vps/backup", - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - out := GetCreateFolderUrl(TOKEN, tc.sample) - assert.Equal(t, tc.expect, out) - }) - } -} diff --git a/internal/provider/pcloud/delete_file.go b/internal/provider/pcloud/delete_file.go deleted file mode 100644 index e158333..0000000 --- a/internal/provider/pcloud/delete_file.go +++ /dev/null @@ -1,23 +0,0 @@ -package pcloud - -import "net/url" - -// DeleteFileResponse standard response from pCloud API after sending delete file request. -type DeleteFileResponse struct { - Result int `json:"result"` // Should be non-0 value if there are any errors. - Meta DeleteFileMeta `json:"metadata"` // Metadata contain deleted file. -} - -// DeleteFileMeta all metadata contain deleted file. -type DeleteFileMeta struct { - IsDeleted bool `json:"isdeleted"` // Status boolean that should be true if the file successfully deleted. - Name string `json:"name"` // The name of the deleted file. -} - -// GetDeleteFileUrl generate url that could be used to delete a file from the given fileId. 
-func GetDeleteFileUrl(a, fileId string) string { - val := url.Values{"auth": {a}, "fileid": {fileId}} - uri := url.URL{Scheme: Scheme, Host: EndPoint, Path: DeleteFile, RawQuery: val.Encode()} - - return uri.String() -} diff --git a/internal/provider/pcloud/delete_file_test.go b/internal/provider/pcloud/delete_file_test.go deleted file mode 100644 index b476fb8..0000000 --- a/internal/provider/pcloud/delete_file_test.go +++ /dev/null @@ -1,34 +0,0 @@ -package pcloud - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGetDeleteFileUrl(t *testing.T) { - const TOKEN = "token" - testCases := []struct { - name string - sample string - expect string - }{ - { - name: "Should has only one return's path", - sample: "1234567890", - expect: "https://eapi.pcloud.com/deletefile?auth=token&fileid=1234567890", - }, - { - name: "Should return url as expected", - sample: "0987654321", - expect: "https://eapi.pcloud.com/deletefile?auth=token&fileid=0987654321", - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - out := GetDeleteFileUrl(TOKEN, tc.sample) - assert.Equal(t, tc.expect, out) - }) - } -} diff --git a/internal/provider/pcloud/digest.go b/internal/provider/pcloud/digest.go deleted file mode 100644 index d02de28..0000000 --- a/internal/provider/pcloud/digest.go +++ /dev/null @@ -1,14 +0,0 @@ -package pcloud - -import "net/url" - -// DigestResponse holds standard response when requesting digest from pCloud API. -type DigestResponse struct { - Digest string `json:"digest"` // the digest that only valid for 30s. -} - -// GetDigestUrl generate url that could be used to request digest from API. -func GetDigestUrl() string { - val := url.URL{Scheme: Scheme, Host: EndPoint, Path: GetDigest} - return val.String() -} diff --git a/internal/provider/pcloud/digest_test.go b/internal/provider/pcloud/digest_test.go deleted file mode 100644 index bbf990c..0000000 --- a/internal/provider/pcloud/digest_test.go +++ /dev/null @@ -1,14 +0,0 @@ -package pcloud - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGetDigestUrl(t *testing.T) { - t.Run("Should be only one path that always generate same result", func(t *testing.T) { - expect := "https://eapi.pcloud.com/getdigest" - assert.Equal(t, expect, GetDigestUrl()) - }) -} diff --git a/internal/provider/pcloud/login.go b/internal/provider/pcloud/login.go deleted file mode 100644 index 7fee3c9..0000000 --- a/internal/provider/pcloud/login.go +++ /dev/null @@ -1,40 +0,0 @@ -package pcloud - -import ( - "crypto/sha1" - "encoding/hex" - "net/url" - "strings" -) - -// User holds credential for authentication. -type User struct { - Username string // An email. - Password string // Plain password. - Token string // Would be populated later. -} - -// TokenResponse json response from pCloud API after sending request to generate token. -type TokenResponse struct { - Auth string `json:"auth"` // Generated token from pCloud API. -} - -// GenerateTokenUrl Generate url that could be used to get a token for authentication. 
-func (u *User) GenerateTokenUrl(digest string) string { - // https://docs.pcloud.com/methods/intro/authentication.html - // sha1( password + sha1( lowercase of username ) + digest) - userHash := sha1.Sum([]byte(strings.ToLower(u.Username))) - userDig := hex.EncodeToString(userHash[:]) - digHash := sha1.Sum([]byte(u.Password + userDig + digest)) - dig := hex.EncodeToString(digHash[:]) - - val := url.Values{ - "getauth": {"1"}, - "username": {u.Username}, - "digest": {digest}, - "passworddigest": {dig}, - } - uri := url.URL{Scheme: Scheme, Host: EndPoint, Path: Login, RawQuery: val.Encode()} - - return uri.String() -} diff --git a/internal/provider/pcloud/login_test.go b/internal/provider/pcloud/login_test.go deleted file mode 100644 index 0004b1f..0000000 --- a/internal/provider/pcloud/login_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package pcloud - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestUser_GenerateTokenUrl(t *testing.T) { - testCases := []struct { - name string - sample User - digest string - expect string - }{ - { - name: "1# Should pass", - sample: User{Username: "user", Password: "pass"}, - digest: "digest", - expect: "https://eapi.pcloud.com/userinfo?digest=digest&getauth=1&passworddigest=55f3b3ce9973588105765465d3a8b45613d426a3&username=user", - }, - { - name: "2# Should pass", - sample: User{Username: "admin", Password: "admin"}, - digest: "digest", - expect: "https://eapi.pcloud.com/userinfo?digest=digest&getauth=1&passworddigest=37fb256ed24ce6677dc53fdd9aabe412fe35e248&username=admin", - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - out := tc.sample.GenerateTokenUrl(tc.digest) - assert.Equal(t, tc.expect, out) - }) - } -} diff --git a/internal/provider/pcloud/logout.go b/internal/provider/pcloud/logout.go deleted file mode 100644 index 0b8c315..0000000 --- a/internal/provider/pcloud/logout.go +++ /dev/null @@ -1,16 +0,0 @@ -package pcloud - -import "net/url" - -// LogoutResponse json response from pCloud API after sending request to log out and invalidate token. -type LogoutResponse struct { - IsDeleted bool `json:"auth_deleted"` // Determine whether token invalidation was successful or not. -} - -// GetLogoutUrl generate url that could be used to log out meaning remove the given token. 
-func GetLogoutUrl(a string) string { - val := url.Values{"auth": {a}} - uri := url.URL{Scheme: Scheme, Host: EndPoint, Path: Logout, RawQuery: val.Encode()} - - return uri.String() -} diff --git a/internal/provider/pcloud/logout_test.go b/internal/provider/pcloud/logout_test.go deleted file mode 100644 index 7e4a2a4..0000000 --- a/internal/provider/pcloud/logout_test.go +++ /dev/null @@ -1,33 +0,0 @@ -package pcloud - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGetLogoutUrl(t *testing.T) { - testCases := []struct { - name string - sample string - expect string - }{ - { - name: "Should has only one result's path", - sample: "token", - expect: "https://eapi.pcloud.com/logout?auth=token", - }, - { - name: "Should return url that has exact value as expected", - sample: "secret", - expect: "https://eapi.pcloud.com/logout?auth=secret", - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - out := GetLogoutUrl(tc.sample) - assert.Equal(t, tc.expect, out) - }) - } -} diff --git a/internal/provider/pcloud/pcloud.go b/internal/provider/pcloud/pcloud.go deleted file mode 100644 index c034a79..0000000 --- a/internal/provider/pcloud/pcloud.go +++ /dev/null @@ -1,26 +0,0 @@ -package pcloud - -const ( - Scheme = "https" - EndPoint = "eapi.pcloud.com" // Because registered user in Europe region. If in America use api.pcloud.com instead. - GetDigest = "getdigest" // Returns a digest for digest authentication. Digests are valid for 30 seconds. - Login = "userinfo" // Generate token. - Logout = "logout" // Gets a token and invalidates it (Delete a token). - UploadFile = "uploadfile" // Upload a file. - DeleteFile = "deletefile" // Delete a file from the given file id. - TOKENS = "listtokens" // Get a list with the currently active tokens associated with the current user. - CreateFolder = "createfolderifnotexists" // Creates a folder if the folder doesn't exist or returns the existing folder's metadata. - LIST_FOLDERS = "listfolder" // Receive data for a folder. - DELETE_FOLDER = "deletefolder" // Deletes a folder. The folder must be empty. - DELETE_FOLDERS_RECURSIVE = "deletefolderrecursive" // This function deletes files, directories, and removes sharing. Use with extreme care. - ClearTrash = "trash_clear" // Clearing trash. -) - -// StdResponse standard response from pCloud API that always return 'return' that determine -// whether API call is success or failure. Response with non '0' return value are errors. -type StdResponse struct { - Result int `json:"result"` // non 0 result is errors. - DigestResponse // Used by digest. - TokenResponse // Used by login API calls to get token for authentication. - LogoutResponse // Used by logout API calls to invalidate token. -} diff --git a/internal/provider/pcloud/print_quota.go b/internal/provider/pcloud/print_quota.go deleted file mode 100644 index 208d799..0000000 --- a/internal/provider/pcloud/print_quota.go +++ /dev/null @@ -1,20 +0,0 @@ -package pcloud - -import "net/url" - -// QuotaResponse holds standard response from pCloud API to check available & used -// storage quota. -type QuotaResponse struct { - Result int `json:"result"` // non 0 result is errors. - Quota int64 `json:"quota"` // Available quota. In bytes. - UsedQuota int64 `json:"usedquota"` // Used quota. In bytes. -} - -// GetQuotaUrl generate url that could be used to get info regarding available -// and used storage quota. 
-func GetQuotaUrl(a string) string { - val := url.Values{"auth": {a}} - uri := url.URL{Scheme: Scheme, Host: EndPoint, Path: Login, RawQuery: val.Encode()} - - return uri.String() -} diff --git a/internal/provider/pcloud/print_quota_test.go b/internal/provider/pcloud/print_quota_test.go deleted file mode 100644 index 61282ff..0000000 --- a/internal/provider/pcloud/print_quota_test.go +++ /dev/null @@ -1,33 +0,0 @@ -package pcloud - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGetQuotaUrl(t *testing.T) { - testCases := []struct { - name string - sample string - expect string - }{ - { - name: "Should has only one result's path", - sample: "token", - expect: "https://eapi.pcloud.com/userinfo?auth=token", - }, - { - name: "Should return url value as expected", - sample: "secret", - expect: "https://eapi.pcloud.com/userinfo?auth=secret", - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - out := GetQuotaUrl(tc.sample) - assert.Equal(t, tc.expect, out) - }) - } -} diff --git a/internal/provider/pcloud/upload_file.go b/internal/provider/pcloud/upload_file.go deleted file mode 100644 index da54b57..0000000 --- a/internal/provider/pcloud/upload_file.go +++ /dev/null @@ -1,11 +0,0 @@ -package pcloud - -import "net/url" - -// GetUploadFileUrl generate url that could be used to upload a file to pCLoud. -func GetUploadFileUrl(a string, folderId string) string { - val := url.Values{"auth": {a}, "folderid": {folderId}, "nopartial": {"1"}, "renameifexists": {"1"}} - uri := url.URL{Scheme: Scheme, Host: EndPoint, Path: UploadFile, RawQuery: val.Encode()} - - return uri.String() -} diff --git a/internal/provider/pcloud/upload_file_test.go b/internal/provider/pcloud/upload_file_test.go deleted file mode 100644 index 34eaf2d..0000000 --- a/internal/provider/pcloud/upload_file_test.go +++ /dev/null @@ -1,34 +0,0 @@ -package pcloud - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGetUploadFileUrl(t *testing.T) { - const TOKEN = "token" - testCases := []struct { - name string - sample string - expect string - }{ - { - name: "Should has only one return's path", - sample: "1234567890", - expect: "https://eapi.pcloud.com/uploadfile?auth=token&folderid=1234567890&nopartial=1&renameifexists=1", - }, - { - name: "Should return url as expected", - sample: "0987654321", - expect: "https://eapi.pcloud.com/uploadfile?auth=token&folderid=0987654321&nopartial=1&renameifexists=1", - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - out := GetUploadFileUrl(TOKEN, tc.sample) - assert.Equal(t, tc.expect, out) - }) - } -} diff --git a/internal/provider/provider.go b/internal/provider/provider.go new file mode 100644 index 0000000..47881bc --- /dev/null +++ b/internal/provider/provider.go @@ -0,0 +1,65 @@ +package provider + +import ( + "fmt" + "io" + + "golang.org/x/exp/slices" +) + +type cloud string + +var ( + // GoogleDrive one of the supported cloud provider. + GoogleDrive cloud = "drive" + // Support contain all supported cloud providers. + Support []cloud +) + +func init() { + Support = append(Support, GoogleDrive) // support for Google Drive +} + +// Payload common data structure that's used by provider. +type Payload struct { + Name string // Name can be a file or folder name. + Id string // Id can be a file or folder id. + Parent []string // Parent the parent folder where this file/folder is in. 
+ CreatedAt string // CreatedAt the time at which the file was created (RFC-3339 date-time) + File io.ReadCloser // File can be a file reader that can be used to upload. +} + +// Cloud provider interface. Every supported/implemented cloud storage +// provider should use this interface as the guideline. +type Cloud interface { + // GetFolders retrieve all folders from provider. Optionally Use the + // first given parent as the folder's parent. Does not return error + // if no data found but return error if the API call failed. + GetFolders(parent ...string) ([]*Payload, error) + // CreateFolder create new folder by the given name as the folder name + // and create it inside the provided first parent id. Return the id of + // the newly created folder if success. + CreateFolder(name string, parent ...string) (string, error) + // GetFiles retrieve all non-folder data for the given folder id. + // Does not return error if no data found but return error if the + // API call failed. + GetFiles(folderId string) ([]*Payload, error) + // UploadFile upload the given payload and optionally set the upload chunk + // size to reduce memory allocation but may slow down upload duration. + // Name field as the file name, Parent field as the folder id where this + // file will be uploaded, File field is the binary data of the file. Should + // defer close the File field. + UploadFile(payload *Payload, chunkSize ...int) (*Payload, error) + // Delete do delete the given id. id can be either the id of a file + // or folder. + Delete(id string) error +} + +// ValidateSupportedClouds make sure the provided provider name is currently +// supported. +func ValidateSupportedClouds(providerName string) error { + if !slices.Contains(Support, cloud(providerName)) { + return fmt.Errorf("the given provider name is not supported at this moment. currently support: %s", Support) + } + return nil +} diff --git a/internal/provider/provider_test.go b/internal/provider/provider_test.go new file mode 100644 index 0000000..f1dc999 --- /dev/null +++ b/internal/provider/provider_test.go @@ -0,0 +1,40 @@ +package provider_test + +import ( + "testing" + + "github.com/mdanialr/cron-upload/internal/provider" + "github.com/stretchr/testify/assert" +) + +func TestValidateSupportedClouds(t *testing.T) { + testCases := []struct { + name string + sample string + wantErr bool + containErrMsg string + }{ + { + name: "Add support for Google Drive as 'drive'", + sample: "drive", + }, + { + name: "Give error for unsupported cloud provider and contain error message 'is not supported'", + sample: "s3", + wantErr: true, + containErrMsg: "is not supported", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := provider.ValidateSupportedClouds(tc.sample) + if tc.wantErr { + assert.Error(t, err) + assert.ErrorContains(t, err, tc.containErrMsg) + return + } + assert.NoError(t, err) + }) + } +} diff --git a/internal/service/byte_to_any_bit.go b/internal/service/byte_to_any_bit.go deleted file mode 100644 index 7d2bde3..0000000 --- a/internal/service/byte_to_any_bit.go +++ /dev/null @@ -1,22 +0,0 @@ -package service - -import "fmt" - -// BytesToAnyBit convert bytes to any bytes using bit (1024) (Kibibyte, Mebibyte, Gibibyte). 
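The `Cloud` interface added above is the seam the rest of the app is built around. As a hedged illustration (not part of this diff), a minimal in-memory stand-in like the following can satisfy it in unit tests; the removed `internal/service` helpers continue below.

```go
package provider_test

import (
	"fmt"
	"io"

	"github.com/mdanialr/cron-upload/internal/provider"
)

// fakeCloud is a hypothetical in-memory stand-in that satisfies provider.Cloud.
type fakeCloud struct {
	folders []*provider.Payload
	files   map[string][]*provider.Payload // keyed by folder id
}

func (f *fakeCloud) GetFolders(parent ...string) ([]*provider.Payload, error) {
	return f.folders, nil
}

func (f *fakeCloud) CreateFolder(name string, parent ...string) (string, error) {
	id := fmt.Sprintf("folder-%d", len(f.folders)+1)
	f.folders = append(f.folders, &provider.Payload{Id: id, Name: name, Parent: parent})
	return id, nil
}

func (f *fakeCloud) GetFiles(folderId string) ([]*provider.Payload, error) {
	return f.files[folderId], nil
}

func (f *fakeCloud) UploadFile(payload *provider.Payload, chunkSize ...int) (*provider.Payload, error) {
	// drain and close the reader like a real provider would
	if payload.File != nil {
		defer payload.File.Close()
		if _, err := io.Copy(io.Discard, payload.File); err != nil {
			return nil, err
		}
	}
	if f.files == nil {
		f.files = make(map[string][]*provider.Payload)
	}
	parent := ""
	if len(payload.Parent) > 0 {
		parent = payload.Parent[0]
	}
	stored := &provider.Payload{
		Id:     fmt.Sprintf("file-%d", len(f.files[parent])+1),
		Name:   payload.Name,
		Parent: payload.Parent,
	}
	f.files[parent] = append(f.files[parent], stored)
	return stored, nil
}

func (f *fakeCloud) Delete(id string) error { return nil }

// compile-time check that fakeCloud really implements the interface.
var _ provider.Cloud = (*fakeCloud)(nil)
```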
-func BytesToAnyBit(b int64, unit string) (string, error) { - var conversion int64 = 1024 - - switch unit { - case "Kb": - return fmt.Sprintf("%dKb", b/conversion), nil - case "Mb": - conversion = conversion * 1024 - return fmt.Sprintf("%dMb", b/conversion), nil - case "Gb": - conversion = conversion * 1024 - conversion = conversion * 1024 - return fmt.Sprintf("%dGb", b/conversion), nil - } - - return "", fmt.Errorf("unit is not supported") -} diff --git a/internal/service/byte_to_any_bit_test.go b/internal/service/byte_to_any_bit_test.go deleted file mode 100644 index 1e4ff73..0000000 --- a/internal/service/byte_to_any_bit_test.go +++ /dev/null @@ -1,49 +0,0 @@ -package service - -import ( - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "testing" -) - -func TestBytesToAnyBit(t *testing.T) { - testCases := []struct { - name string - sample int64 - unit string - expect string - wantErr bool - }{ - { - name: "Should return 6Gb when using the equivalent of the bytes", - sample: 6442450944, unit: "Gb", expect: "6Gb", - }, - { - name: "Should return 6Mb when using the equivalent of the bytes", - sample: 6291456, unit: "Mb", expect: "6Mb", - }, - { - name: "Should return 6Kb when using the equivalent of the bytes", - sample: 6144, unit: "Kb", expect: "6Kb", - }, - { - name: "Should error when using unit that does not supported", - sample: 1024, unit: "Tb", - wantErr: true, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - out, err := BytesToAnyBit(tc.sample, tc.unit) - - switch tc.wantErr { - case false: - require.NoError(t, err) - assert.Equal(t, tc.expect, out) - case true: - require.Error(t, err) - } - }) - } -} diff --git a/internal/service/contain_string.go b/internal/service/contain_string.go deleted file mode 100644 index 4f3a022..0000000 --- a/internal/service/contain_string.go +++ /dev/null @@ -1,11 +0,0 @@ -package service - -// Contains tells whether b exist in a. -func Contains(a []string, b string) bool { - for _, n := range a { - if b == n { - return true - } - } - return false -} diff --git a/internal/service/contain_string_test.go b/internal/service/contain_string_test.go deleted file mode 100644 index 4108d40..0000000 --- a/internal/service/contain_string_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package service - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestContains(t *testing.T) { - testCases := []struct { - name string - sample []string - check string - expect bool - }{ - { - name: "Should true if `sample` contains string `check`", - sample: []string{"one.zip", "two.txt", "three.mp4"}, - check: "two.txt", - expect: true, - }, - { - name: "Should false if `sample` does not contains string `check`", - sample: []string{"one.zip", "two.txt", "three.mp4"}, - check: "one.txt", - expect: false, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - out := Contains(tc.sample, tc.check) - assert.Equal(t, tc.expect, out) - }) - } -} diff --git a/internal/service/print_pretty_json.go b/internal/service/print_pretty_json.go deleted file mode 100644 index 9640a91..0000000 --- a/internal/service/print_pretty_json.go +++ /dev/null @@ -1,15 +0,0 @@ -package service - -import ( - "bytes" - "encoding/json" -) - -// PrettyJson print prettify json response. 
-func PrettyJson(in []byte) (string, error) { - var prettyJSON bytes.Buffer - if err := json.Indent(&prettyJSON, in, "", " "); err != nil { - return "", err - } - return prettyJSON.String(), nil -} diff --git a/internal/service/print_pretty_json_test.go b/internal/service/print_pretty_json_test.go deleted file mode 100644 index 9744697..0000000 --- a/internal/service/print_pretty_json_test.go +++ /dev/null @@ -1,44 +0,0 @@ -package service - -import ( - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "testing" -) - -func TestPrettyJson(t *testing.T) { - testCases := []struct { - name string - sample []byte - expect string - wantErr bool - }{ - { - name: "Should pass if using valid json string", - sample: []byte(`{"name":"user","admin":"admin"}`), - expect: `{ - "name": "user", - "admin": "admin" -}`, - }, - { - name: "Should error if using invalid json string", - sample: []byte(`{"name":"me`), - wantErr: true, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - out, err := PrettyJson(tc.sample) - - switch tc.wantErr { - case false: - require.NoError(t, err) - assert.Equal(t, tc.expect, out) - case true: - require.Error(t, err) - } - }) - } -} diff --git a/internal/service/server.go b/internal/service/server.go deleted file mode 100644 index 71a756b..0000000 --- a/internal/service/server.go +++ /dev/null @@ -1,19 +0,0 @@ -package service - -import ( - "fmt" - "net/http" -) - -const ADDRESS = "127.0.0.1:9898" - -// StartHTTPServer start new http server to grab given url when exchanging authorization code. -func StartHTTPServer(ch chan<- string) { - http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { - code := r.URL.Query().Get("code") - ch <- code - fmt.Fprint(w, "Now you may close this tab, cron-upload automatically grab the authorization code") - }) - - http.ListenAndServe(ADDRESS, nil) -} diff --git a/internal/worker/v1/v1.go b/internal/worker/v1/v1.go new file mode 100644 index 0000000..6397b89 --- /dev/null +++ b/internal/worker/v1/v1.go @@ -0,0 +1,146 @@ +package worker + +import ( + "path/filepath" + "sync" + "time" + + pv "github.com/mdanialr/cron-upload/internal/provider" + w "github.com/mdanialr/cron-upload/internal/worker" + h "github.com/mdanialr/cron-upload/pkg/helper" + "github.com/mdanialr/cron-upload/pkg/logger" +) + +// NewWorker return new first version worker. +func NewWorker(g *sync.WaitGroup, log logger.Writer, cloud pv.Cloud) w.I { + return &worker{ + g: g, + log: log, + cloud: cloud, + } +} + +// worker the first version of worker implementation, in case there is a new +// way to implement how this app should work in the future. 
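The `NewWorker` constructor above returns a `worker.I` implementation whose methods stream `*provider.Payload` values between each other over channels; the concrete struct and its methods follow immediately below. As a hedged sketch only (it assumes the `Channels` type and `worker.I` interface added later in this diff; the folder id and file path are purely illustrative), one route is driven roughly like this:

```go
package example

import (
	"os"
	"sync"

	"github.com/mdanialr/cron-upload/internal/provider"
	w "github.com/mdanialr/cron-upload/internal/worker"
	workerv1 "github.com/mdanialr/cron-upload/internal/worker/v1"
	"github.com/mdanialr/cron-upload/pkg/logger"
)

// driveRoute sketches the channel wiring for a single route; cloud may be any
// provider.Cloud implementation (e.g. the Google Drive one from main.go).
func driveRoute(cloud provider.Cloud) {
	var wg sync.WaitGroup
	lo := logger.NewFile(os.Stdout)
	lo.Init()

	wk := workerv1.NewWorker(&wg, lo, cloud)

	// ListExpiredFiles feeds unexpired files to In and expired files to Out.
	listExpired := w.Channels{In: make(chan *provider.Payload), Out: make(chan *provider.Payload)}
	// Unexpired files feed ListUnmatchedFiles, whose Out feeds UploadFile.
	listUnmatched := w.Channels{In: listExpired.In, Out: make(chan *provider.Payload)}

	wg.Add(2)
	go wk.DeleteFile(w.Channels{In: listExpired.Out})   // expired files get deleted
	go wk.UploadFile(w.Channels{In: listUnmatched.Out}) // unmatched local files get uploaded

	go wk.ListExpiredFiles(listExpired, "some-folder-id", 8640) // expiry in minutes
	wg.Add(1)
	go wk.ListUnmatchedFiles(listUnmatched, "some-folder-id", "/tmp/backup.tar.gz")

	wg.Wait()
}
```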
+type worker struct { + g *sync.WaitGroup + log logger.Writer + cloud pv.Cloud +} + +func (w *worker) BuildRoutesProvider() map[string]string { + var result = make(map[string]string) + payloads, err := w.cloud.GetFolders() + if err != nil { + w.log.WriteErr(err) + return result + } + for _, payload := range payloads { + route := pv.LookupRoute(payloads, payload.Name) + result[route] = payload.Id + } + return result +} + +func (w *worker) CheckCreateRoute(parentId, name string) (string, error) { + // check whether the given folder name is already exist in folder id + payloads, err := w.cloud.GetFolders(parentId) + if err != nil { + return "", err + } + // return the id if already exists in the cloud provider + for _, payload := range payloads { + if payload.Name == name { + return payload.Id, nil + } + } + // then create new folder + id, err := w.cloud.CreateFolder(name, parentId) + if err != nil { + return "", err + } + return id, nil +} + +func (w *worker) ListExpiredFiles(channels w.Channels, folderId string, expiry uint) { + defer close(channels.In) + defer close(channels.Out) + + payloads, err := w.cloud.GetFiles(folderId) + if err != nil { + w.log.WriteErr("failed to get files in", folderId, "from cloud provider:", err) + return + } + // just skip if there is no files + if len(payloads) < 1 { + return + } + // check files expiry + for _, payload := range payloads { + fmtTime, _ := time.Parse(time.RFC3339, payload.CreatedAt) + if h.ToWib(time.Now()).After(h.ToWib(fmtTime).Add(time.Minute * time.Duration(expiry))) { + // feed expired files + channels.Out <- payload + continue + } + // feed unexpired files + channels.In <- payload + } +} + +func (w *worker) ListUnmatchedFiles(channels w.Channels, folderId string, localFiles ...string) { + defer close(channels.Out) + defer w.g.Done() + + // wait & grab all files in the cloud provider + var fileInCloud []string + for payload := range channels.In { + fileInCloud = append(fileInCloud, payload.Name) + } + // match the local files against files in the cloud + for _, localFile := range localFiles { + var match bool + for _, cloudFile := range fileInCloud { + // don't upload local files that's already exist in the cloud provider + // based on the file name + if filepath.Base(localFile) == cloudFile { + match = true + } + } + if !match { + // if unmatched then feed it to Out channel after successfully opening the file + pay, err := pv.NewWithFile(localFile, folderId) + if err != nil { + w.log.WriteErr(err) + continue + } + channels.Out <- pay + } + } +} + +func (w *worker) DeleteFile(channels w.Channels) { + defer w.g.Done() + + for payload := range channels.In { + w.log.WriteInf(h.LogStart("DELETE", payload.Name)) + // delete for every incoming file id + if err := w.cloud.Delete(payload.Id); err != nil { + w.log.WriteErr(err, "with filename:", payload.Name) + } + w.log.WriteInf(h.LogDone("DELETE", payload.Name)) + } +} + +func (w *worker) UploadFile(channels w.Channels) { + defer w.g.Done() + + for payload := range channels.In { + w.log.WriteInf(h.LogStart("UPLOAD", payload.Name)) + // upload for every incoming file reader + if _, err := w.cloud.UploadFile(payload); err != nil { + w.log.WriteErr(err) + } + w.log.WriteInf(h.LogDone("UPLOAD", payload.Name)) + } +} diff --git a/internal/worker/worker.go b/internal/worker/worker.go new file mode 100644 index 0000000..63c793e --- /dev/null +++ b/internal/worker/worker.go @@ -0,0 +1,39 @@ +package worker + +import "github.com/mdanialr/cron-upload/internal/provider" + +// I every worker implementation 
should follow the function signatures defined here.
+type I interface {
+	// BuildRoutesProvider builds routes from the cloud provider's folder structure
+	// along with the target folder id for each route.
+	BuildRoutesProvider() map[string]string
+	// CheckCreateRoute checks whether the given folder name already exists
+	// inside the parent id, otherwise creates a new one in the cloud provider
+	// using the given parent id as the parent and name as the folder name.
+	// Returns the newly created folder id.
+	CheckCreateRoute(parentId, name string) (string, error)
+	// ListExpiredFiles lists all files inside the given folder id in the
+	// cloud provider, then sends unexpired payloads to the In channel and
+	// payloads older than the given expiry duration (in minutes) to the Out
+	// channel. Both In & Out channels are closed afterwards.
+	ListExpiredFiles(channels Channels, folderId string, expiry uint)
+	// ListUnmatchedFiles matches the incoming payloads from the In channel with
+	// the given local file names, then feeds the unmatched payloads to the Out
+	// channel. May use the In channel from ListExpiredFiles as the feeder of its
+	// In channel. The Out channel is closed afterwards.
+	ListUnmatchedFiles(channels Channels, folderId string, localFiles ...string)
+	// DeleteFile deletes every file id that is fed from the In channel. May use
+	// the Out channel from ListExpiredFiles as the feeder of its In channel.
+	DeleteFile(channels Channels)
+	// UploadFile uploads every payload that is fed from the In channel. May use
+	// the Out channel from ListUnmatchedFiles as the feeder of its In channel.
+	UploadFile(channels Channels)
+}
+
+// Channels contains the channels that are passed between these functions.
+// Used by ListExpiredFiles, ListUnmatchedFiles, DeleteFile & UploadFile.
+type Channels struct {
+	In, Out chan *provider.Payload
+}
diff --git a/main.go b/main.go
index 31704f9..ff56400 100644
--- a/main.go
+++ b/main.go
@@ -1,84 +1,218 @@
package main import ( - "bytes" "flag" + "fmt" + "io" "log" - "net/http" "os" + "strings" + "sync" "time" - "github.com/mdanialr/cron-upload/internal/config" - "github.com/mdanialr/cron-upload/internal/logger" + "github.com/mdanialr/cron-upload/internal/provider" "github.com/mdanialr/cron-upload/internal/provider/gdrive" + w "github.com/mdanialr/cron-upload/internal/worker" + "github.com/mdanialr/cron-upload/internal/worker/v1" + "github.com/mdanialr/cron-upload/pkg/config" + h "github.com/mdanialr/cron-upload/pkg/helper" + "github.com/mdanialr/cron-upload/pkg/logger" + "github.com/mdanialr/cron-upload/pkg/scan" + "github.com/spf13/viper" ) var ( - isDrive bool // whether to use Google Drive as provider or not - isInit bool // initialize token in conjunction with Google Drive provider - isRefresh bool // exchange authorization code for new refresh token - conf *config.Model // global variable that would be used in this main pkg + configPath, logType string + isTest bool ) -func setupFlags() { - flag.BoolVar(&isInit, "init", false, "retrieve token.json by using auth.json for Google Drive provider") - flag.BoolVar(&isDrive, "drive", false, "use Google Drive as provider to upload files") - flag.BoolVar(&isRefresh, "refresh", false, "exchange authorization code for new refresh token") +func init() { + flag.BoolVar(&isTest, "test", false, "test whether there is any error in the config file") + flag.StringVar(&configPath, "path", ".", "locate the app config file.
Default is set to current directory") + flag.StringVar(&logType, "log", "stdout", "use '-log file' to write the logs to a file. Default is set to stdout") flag.Parse() } func main() { timer := time.Now() - setupFlags() - f, err := os.ReadFile("app-config.yml") + // init app config + v, err := config.Init(configPath) if err != nil { - log.Fatalln("failed to read config file:", err) + log.Fatalln("failed to init config:", err) } - - conf, err = config.NewConfig(bytes.NewReader(f)) - if err != nil { - log.Fatalln("failed to create new config instance:", err) + // do some validation first to make sure all required fields are filled + if err = config.Validate(v); err != nil { + log.Fatalln("config file validation is failed:", err) } - if err := conf.Sanitization(); err != nil { - log.Fatalln("failed to sanitize config file, please make sure config file has valid values:", err) + // sanitize config and setup necessary default value + config.Sanitize(v) + // make sure the provided provider name is currently supported + if err = provider.ValidateSupportedClouds(v.GetString("provider.name")); err != nil { + log.Fatalln(err) } - if err := conf.Upload.Sanitization(); err != nil { - log.Fatalln("failed to sanitize upload in config file, please make sure upload section has valid values:", err) + // init logger and choose the log target output + var lo logger.Writer + switch logType { + case "file": + appLogPath := strings.TrimSuffix(v.GetString("log"), "/") + appLogPath = fmt.Sprintf("%s/%s", appLogPath, "app") + appLog, err := os.OpenFile(appLogPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0770) + if err != nil { + log.Fatalln("failed to init logger file for the app:", err) + } + defer appLog.Close() + lo = logger.NewFile(appLog) + default: + lo = logger.NewFile(os.Stdout) } - - // init internal logging - if err := logger.InitLogger(conf); err != nil { - log.Fatalln("failed to initialize internal logging:", err) + // init the log to do some prerequisite preparation + lo.Init() + // run only if not a test + if !isTest { + lo.WriteInf("START job") + lo.WriteInf("") } - - // if running using drive as provider - if isDrive && isInit { - logger.InfL.Println("START initialize token job") - // if init params also included then init token first, before running the Google Drive's Job - cl := &http.Client{} - - if err := gdrive.InitToken(conf, cl); err != nil { - log.Fatalln("failed to initialize token for Google Drive:", err) + // init wait group & cloud provider + var wg sync.WaitGroup + var chosenCloudProvider provider.Cloud + // chose cloud provider based on value in the config file + switch v.GetString("provider.name") { + case string(provider.GoogleDrive): + // init Google Drive cloud provider + gDriveSvc, err := gdrive.Init(v.GetString("provider.cred")) + if err != nil { + lo.WriteErr("failed to init Google Drive:", err) + return } + chosenCloudProvider = gdrive.NewGoogleDriveProvider(gDriveSvc) + default: + // because cloud provider is mandatory, throw error if there is no one provided + lo.WriteErr("no cloud provider is provided") + return } - - // if refresh params also included then exchange authorization code for refresh token - if isDrive && isRefresh { - logger.InfL.Println("START renew refresh token job") - if err := gdrive.Refresh(conf); err != nil { - log.Fatalln("failed to exchange authorization code for refresh token:", err) + // run test if needed + if isTest { + testConfUploads := config.GetUploads(v) + for _, testUpload := range testConfUploads { + lo.WriteInf(h.LogStart("TEST Scan Local 
Dir", testUpload.Path)) + if _, err = scan.FilesAsc(testUpload.Path); err != nil { + lo.WriteErr("failed to scan path:", err) + return + } + lo.WriteInf(h.LogDone("TEST Scan Local Dir", testUpload.Path)) + } + // test to create folder + lo.WriteInf(h.LogStart("TEST Cloud Provider", "")) + createdFolder, err := chosenCloudProvider.CreateFolder("test-cron-upload") + if err != nil { + lo.WriteErr("Failed to create a folder in the cloud provider:", err) + return + } + if createdFolder == "" { + lo.WriteErr("Failed to create a folder in the cloud provider:", err) + return + } + lo.WriteInf(h.LogDone("TEST Cloud Provider", "")) + // upload a dummy file then delete them + lo.WriteInf(h.LogStart("TEST Upload", "")) + dummyFile := io.NopCloser(strings.NewReader("hello world")) + testPayload := provider.Payload{ + Name: "test-cron-upload.txt", + File: dummyFile, + Parent: []string{createdFolder}, + } + createdTestPayload, err := chosenCloudProvider.UploadFile(&testPayload) + if err != nil { + lo.WriteErr("Failed to upload a test file:", err) + return + } + lo.WriteInf(h.LogDone("TEST Upload", "")) + // then delete them + lo.WriteInf(h.LogStart("TEST Delete", "")) + if err = chosenCloudProvider.Delete(createdTestPayload.Id); err != nil { + lo.WriteErr("Failed to delete a test file:", err) } + lo.WriteInf(h.LogDone("TEST Delete", "")) + return } - - // if running using drive as provider - if isDrive && !isInit && !isRefresh { - logger.InfL.Println("START job") - // if init not included then run the job - if err := gdrive.GoogleDrive(conf); err != nil { - logger.ErrL.Println(err) + // init worker with the chosen cloud provider as dependency + newWorker := worker.NewWorker(&wg, lo, chosenCloudProvider) + // listing all available folder routes in the cloud provider + cloudRoutes := newWorker.BuildRoutesProvider() + // listing all routes from config file, then matched it with the routes from cloud provider + for _, upload := range config.GetUploads(v) { + trimmedRoute := strings.Trim(upload.Name, "/\\") // remove any the slice and back-slice + route := fmt.Sprintf("%s/%s", v.GetString("root"), trimmedRoute) + routeId := cloudRoutes[route] + // create new routes in the cloud provider + if routeId == "" { + // use the root as starting point for the parent id + routeId = cloudRoutes[v.GetString("root")] + uploadRoutes := strings.Split(trimmedRoute, "/") + // keep checking and creating the routes until reaching the last part of the route + for _, ro := range uploadRoutes { + currentParentId, err := newWorker.CheckCreateRoute(routeId, ro) + if err != nil { + routeId = "" // mark as empty so it does not trigger doTheJob + lo.WriteErr("failed to check and create route", ro, "from", upload.Name, ":", err) + break + } + routeId = currentParentId + cloudRoutes[route] = currentParentId + } + } + // do the job only if the route id is already known + if routeId != "" { + doTheJob(v, lo, &wg, newWorker, upload, routeId) } } + wg.Wait() // block till all jobs are done + + lo.WriteInf("") + lo.WriteInf("END job in:", time.Since(timer)) +} - logger.InfL.Println("END job in:", time.Since(timer)) +func doTheJob( + v *viper.Viper, + log logger.Writer, + wg *sync.WaitGroup, + worker w.I, + upload config.UploadModel, + routeId string, +) { + // read local files from config + files, err := scan.FilesAsc(upload.Path) + if err != nil { + log.WriteErr("failed to scan path:", err) + return + } + // count retain from config but use retain from root as the default value + retainMin := config.GetRetainExpiry(v, upload.Retain) + // 
init necessary worker channels for this route + listExpired := w.Channels{ + In: make(chan *provider.Payload), + Out: make(chan *provider.Payload), + } + listUnmatched := w.Channels{ + In: listExpired.In, + Out: make(chan *provider.Payload), + } + deleteFile := w.Channels{ + In: listExpired.Out, + } + uploadFile := w.Channels{ + In: listUnmatched.Out, + } + // spawn worker for delete & upload file since they can run independently + for i := uint(1); i < v.GetUint("worker")+1; i++ { + wg.Add(2) + go worker.DeleteFile(deleteFile) + go worker.UploadFile(uploadFile) + } + // one worker for each route is sufficient + go worker.ListExpiredFiles(listExpired, routeId, retainMin) + // because listening to In channel therefor we need to add wait group + wg.Add(1) + go worker.ListUnmatchedFiles(listUnmatched, routeId, files...) } diff --git a/pcloud.go b/pcloud.go deleted file mode 100644 index 72bb060..0000000 --- a/pcloud.go +++ /dev/null @@ -1,273 +0,0 @@ -package main - -import ( - "bytes" - "encoding/json" - "fmt" - "io" - "log" - "mime/multipart" - "net/http" - "os" - "path/filepath" - "strings" - - "github.com/mdanialr/cron-upload/internal/provider/pcloud" - "github.com/mdanialr/cron-upload/internal/service" -) - -func TryGetToken(cl *http.Client) { - // GET DIGEST - res, err := cl.Get(pcloud.GetDigestUrl()) - if err != nil { - log.Fatalln("failed to when sending get digest request to pCloud API:", err) - } - defer res.Body.Close() - - b, err := io.ReadAll(res.Body) - if err != nil { - log.Fatalln("failed reading response body after sending request to get digest:", err) - } - - var jsonDigestResponse pcloud.StdResponse - if err = json.Unmarshal(b, &jsonDigestResponse); err != nil { - log.Fatalln("failed unmarshalling response body to json DigestResponse model:", err) - } - - // GENERATE TOKEN via DIGEST AUTH - if jsonDigestResponse.Result != 0 { - js, _ := service.PrettyJson(b) - fmt.Println(js) - log.Fatalln("response from DigestResponse return non-0 value.") - } - us := pcloud.User{Username: "", Password: ""} - res, err = cl.Get(us.GenerateTokenUrl(jsonDigestResponse.Digest)) - if err != nil { - log.Fatalln("failed when sending request to generate token from pCloud API:", err) - } - defer res.Body.Close() - - b, err = io.ReadAll(res.Body) - if err != nil { - log.Fatalln("failed reading response body after sending request to generate token:", err) - } - - var jsonTokenResponse pcloud.StdResponse - if err = json.Unmarshal(b, &jsonTokenResponse); err != nil { - log.Fatalln("failed unmarshalling response body to json TokenResponse model:", err) - } - - if jsonTokenResponse.Result != 0 { - js, _ := service.PrettyJson(b) - fmt.Println(js) - log.Fatalln("response from TokenResponse return non-0 value.") - } - fmt.Println("TOKEN:", jsonTokenResponse.Auth) -} - -func TryLogout(cl *http.Client) { - // LOGOUT / REMOVE / INVALIDATE TOKEN - //res, err := cl.Get(pcloud.GetLogoutUrl("")) - //if err != nil { - // log.Fatalln("failed to when sending logout request to pCloud API:", err) - //} - //defer res.Body.Close() - // - //b, err := io.ReadAll(res.Body) - //if err != nil { - // log.Fatalln("failed reading response body after sending request to logout:", err) - //} - // - //var jsonLogoutResponse pcloud.StdResponse - //if err = json.Unmarshal(b, &jsonLogoutResponse); err != nil { - // log.Fatalln("failed unmarshalling response body to json LogoutResponse model:", err) - //} - // - //if jsonLogoutResponse.Result != 0 { - // js, _ := service.PrettyJson(b) - // fmt.Println(js) - // log.Fatalln("response from 
LogoutResponse return non-0 value.") - //} - //fmt.Println("Deleted successfully:", jsonLogoutResponse.IsDeleted) -} - -func TryPrintQuota(cl *http.Client, token string) { - // PRINT STORAGE QUOTA - res, err := cl.Get(pcloud.GetQuotaUrl(token)) - if err != nil { - log.Fatalln("failed to when sending userinfo (quota) request to pCloud API:", err) - } - defer res.Body.Close() - - b, err := io.ReadAll(res.Body) - if err != nil { - log.Fatalln("failed reading response body after sending request to userinfo (quota):", err) - } - - var jsonQuotaResponse pcloud.QuotaResponse - if err = json.Unmarshal(b, &jsonQuotaResponse); err != nil { - log.Fatalln("failed unmarshalling response body to json QuotaResponse model:", err) - } - - if jsonQuotaResponse.Result != 0 { - js, _ := service.PrettyJson(b) - fmt.Println(js) - log.Fatalln("response from QuotaResponse return non-0 value.") - } - - aQuota, err := service.BytesToAnyBit(jsonQuotaResponse.Quota, "Gb") - if err != nil { - log.Fatalln("failed to convert bytes to Gibibyte for available quota:", err) - } - uQuota, err := service.BytesToAnyBit(jsonQuotaResponse.UsedQuota, "Mb") - if err != nil { - log.Fatalln("failed to convert bytes to Gibibyte for used quota:", err) - } - fmt.Println("Available:", aQuota) - fmt.Println("Used:", uQuota) -} - -func TryCreateFolder(cl *http.Client, token string) { - const SAMPLE = "vps/backup/db" - // NOTES: should be created one by one. COULD NOT create folders recursively. - // 1# /vps - // 2# /vps/backup - // 3# /vps/backup/db - - folders := strings.Split(SAMPLE, "/") - var tmpFolders string - for _, folder := range folders { - tmpFolders += "/" + folder - fmt.Println("Working on:", tmpFolders) - // CREATE FOLDER one by one - res, err := cl.Get(pcloud.GetCreateFolderUrl(token, tmpFolders)) - if err != nil { - log.Fatalln("failed to when sending create folder request to pCloud API:", err) - } - defer res.Body.Close() - - b, err := io.ReadAll(res.Body) - if err != nil { - log.Fatalln("failed reading response body after sending request to create folder:", err) - } - - var jsonCreateFolderResponse pcloud.CreateFolderResponse - if err = json.Unmarshal(b, &jsonCreateFolderResponse); err != nil { - log.Fatalln("failed unmarshalling response body to json CreateFolderResponse model:", err) - } - - if jsonCreateFolderResponse.Result != 0 { - js, _ := service.PrettyJson(b) - fmt.Println(js) - log.Fatalln("response from CreateFolderResponse return non-0 value.") - } - fmt.Println("Created:", jsonCreateFolderResponse.IsCreated) - fmt.Println("Path:", jsonCreateFolderResponse.Metadata.Path) - fmt.Println("Name:", jsonCreateFolderResponse.Metadata.Name) - fmt.Println("Id:", jsonCreateFolderResponse.Metadata.Id) - } -} - -func TryClearTrash(cl *http.Client, token string) { - // CLEAR TRASH - res, err := cl.Get(pcloud.GetClearTrashUrl(token)) - if err != nil { - log.Fatalln("failed to when sending clear trash request to pCloud API:", err) - } - defer res.Body.Close() - - b, err := io.ReadAll(res.Body) - if err != nil { - log.Fatalln("failed reading response body after sending request to clear trash:", err) - } - - var jsonStdResponse pcloud.StdResponse - if err = json.Unmarshal(b, &jsonStdResponse); err != nil { - log.Fatalln("failed unmarshalling response body to json StdResponse model:", err) - } - - if jsonStdResponse.Result != 0 { - js, _ := service.PrettyJson(b) - fmt.Println(js) - log.Fatalln("response from StdResponse return non-0 value.") - } - fmt.Println("Successfully cleared") -} - -func TryDeleteFile(cl *http.Client, 
token string) { - // DELETE FILE from the given File id and one by one - fileId := []string{"11649279231", "11649279554", "11649279603"} - for _, id := range fileId { - fmt.Println("Working on File id:", id) - - res, err := cl.Get(pcloud.GetDeleteFileUrl(token, id)) - if err != nil { - log.Fatalln("failed to when sending delete file request to pCloud API:", err) - } - defer res.Body.Close() - - b, err := io.ReadAll(res.Body) - if err != nil { - log.Fatalln("failed reading response body after sending request to delete file:", err) - } - - var jsonDeleteFileResponse pcloud.DeleteFileResponse - if err = json.Unmarshal(b, &jsonDeleteFileResponse); err != nil { - log.Fatalln("failed unmarshalling response body to json DeleteFileResponse model:", err) - } - - if jsonDeleteFileResponse.Result != 0 { - js, _ := service.PrettyJson(b) - fmt.Println(js) - log.Fatalln("response from DeleteFileResponse return non-0 value for file id:", id) - } - fmt.Println("Is Deleted:", jsonDeleteFileResponse.Meta.IsDeleted) - fmt.Println("Filename:", jsonDeleteFileResponse.Meta.Name) - } -} - -func TryUploadFile(cl *http.Client, token string, fPath string) { - const folderId = "2586338097" - // PREPARE THE FILE FIRST - fl, err := os.Open(fPath) - if err != nil { - log.Fatalln("failed to open file from filepath:", err) - } - defer fl.Close() - - // PREPARE MULTIPART FORM-DATA - var buf = &bytes.Buffer{} - wr := multipart.NewWriter(buf) - part, err := wr.CreateFormFile("file", filepath.Base(fl.Name())) - if err != nil { - log.Fatalln("failed to create multi part form data from the given file:", err) - } - io.Copy(part, fl) - wr.Close() - - // SEND POST REQUEST TO pCloud API - req, _ := http.NewRequest(http.MethodPost, pcloud.GetUploadFileUrl(token, folderId), buf) - req.Header.Add("content-type", wr.FormDataContentType()) - res, err := cl.Do(req) - if err != nil { - log.Fatalln("failed to when sending userinfo (quota) request to pCloud API:", err) - } - defer res.Body.Close() - - b, err := io.ReadAll(res.Body) - if err != nil { - log.Fatalln("failed reading response body after sending request to userinfo (quota):", err) - } - - var jsonUploadFileResponse pcloud.StdResponse - if err = json.Unmarshal(b, &jsonUploadFileResponse); err != nil { - log.Fatalln("failed unmarshalling response body to json UploadFileResponse model:", err) - } - - if jsonUploadFileResponse.Result != 0 { - js, _ := service.PrettyJson(b) - fmt.Println(js) - log.Fatalln("response from UploadFileResponse return non-0 value.") - } - fmt.Printf("File %s successfully uploaded", fl.Name()) -} diff --git a/pkg/config/config.go b/pkg/config/config.go new file mode 100644 index 0000000..5303515 --- /dev/null +++ b/pkg/config/config.go @@ -0,0 +1,74 @@ +package config + +import ( + "fmt" + + "github.com/spf13/viper" +) + +// UploadModel data structure of the upload data in config file. +type UploadModel struct { + Path string `mapstructure:"path"` + Name string `mapstructure:"name"` + Retain uint `mapstructure:"retain"` +} + +// Init return new viper instance with the given filepath as +// the directory where the config file is. +func Init(filepath string) (*viper.Viper, error) { + v := viper.New() + v.AddConfigPath(filepath) + v.SetConfigName("app") + v.SetConfigType("yaml") + if err := v.ReadInConfig(); err != nil { + return nil, err + } + return v, nil +} + +// Sanitize set default value for necessary fields. 
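The `pkg/config` package introduced here is driven through viper. As a hedged sketch (values are filled in manually instead of reading `app.yml`, and the paths are illustrative), the flow that `main.go` follows looks roughly like this; the `Sanitize` implementation continues directly after this block.

```go
package main

import (
	"fmt"
	"log"

	"github.com/mdanialr/cron-upload/pkg/config"
	"github.com/spf13/viper"
)

func main() {
	// Normally main.go calls config.Init(".") to read ./app.yml; here a viper
	// instance is filled in by hand so the sketch runs without a config file.
	v := viper.New()
	v.Set("root", "Backup")
	v.Set("worker", 2) // main.go validates before sanitizing, so worker needs an explicit value here
	v.Set("provider.name", "drive")
	v.Set("provider.cred", "/full/path/to/credential.json") // illustrative path
	v.Set("upload", []map[string]any{
		{"path": "/var/backup/db", "name": "DB", "retain": 1440},
	})

	if err := config.Validate(v); err != nil {
		log.Fatalln(err)
	}
	config.Sanitize(v) // fills log and retain with their defaults

	for _, up := range config.GetUploads(v) {
		fmt.Println(up.Name, up.Path, config.GetRetainExpiry(v, up.Retain))
	}
}
```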
+func Sanitize(v *viper.Viper) { + v.SetDefault("log", "/tmp") + v.SetDefault("retain", 8640) + v.SetDefault("worker", 2) +} + +// Validate do validate for all required fields. +func Validate(v *viper.Viper) error { + // make sure the root field is not empty + if v.GetString("root") == "" { + return fmt.Errorf("`root` field in config file is required") + } + // make sure the worker has minimum value of 1 + if v.GetInt("worker") < 1 { + return fmt.Errorf("`worker` field should has minimum value of 1. if there is no worker then who on earth that will do the job") + } + // make sure provider name & auth is not empty + if v.GetString("provider.name") == "" { + return fmt.Errorf("`provider.name` field in config file is required") + } + if v.GetString("provider.cred") == "" { + return fmt.Errorf("`provider.cred` field in config file is required") + } + // make sure the chunk size is positive number if provided + if v.GetInt("chunk") < 0 { + return fmt.Errorf("please provide positive number for the upload chunk size in field `chunk`") + } + return nil +} + +// GetUploads return all data that will be uploaded to provider. +func GetUploads(v *viper.Viper) []UploadModel { + var res []UploadModel + v.UnmarshalKey("upload", &res) + return res +} + +// GetRetainExpiry get default retain expiry from config if the given retain +// value is zero. +func GetRetainExpiry(v *viper.Viper, retain uint) uint { + if retain == 0 { + return v.GetUint("retain") + } + return retain +} diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go new file mode 100644 index 0000000..63069d5 --- /dev/null +++ b/pkg/config/config_test.go @@ -0,0 +1,235 @@ +package config_test + +import ( + "testing" + + "github.com/mdanialr/cron-upload/pkg/config" + "github.com/spf13/viper" + "github.com/stretchr/testify/assert" +) + +func TestSanitize(t *testing.T) { + // 1st case for `log` + testCasesLog := []struct { + name string + setup func() *viper.Viper + expect string + }{ + { + name: "Given log is empty should has /tmp as the default value", + setup: func() *viper.Viper { + return viper.New() + }, + expect: "/tmp", + }, + { + name: "Given log is /my/log/path should has /my/log/path as the value", + setup: func() *viper.Viper { + v := viper.New() + v.Set("log", "/my/log/path") + return v + }, + expect: "/my/log/path", + }, + } + for _, tc := range testCasesLog { + t.Run(tc.name, func(t *testing.T) { + conf := tc.setup() + config.Sanitize(conf) + assert.Equal(t, tc.expect, conf.GetString("log")) + }) + } + + // 2nd case for `retain` + testCasesRetain := []struct { + name string + setup func() *viper.Viper + expect uint + }{ + { + name: "Given retain is empty should has 8640 minutes as the default value", + setup: func() *viper.Viper { + return viper.New() + }, + expect: 8640, + }, + { + name: "Given retain is 60 minutes should has 60 minutes as the value", + setup: func() *viper.Viper { + v := viper.New() + v.Set("retain", 60) + return v + }, + expect: 60, + }, + } + for _, tc := range testCasesRetain { + t.Run(tc.name, func(t *testing.T) { + conf := tc.setup() + config.Sanitize(conf) + assert.Equal(t, tc.expect, conf.GetUint("retain")) + }) + } + + // 3rd case for `worker` + testCasesWorker := []struct { + name string + setup func() *viper.Viper + expect uint + }{ + { + name: "Given worker is empty should has 2 workers as the default value", + setup: func() *viper.Viper { + return viper.New() + }, + expect: 2, + }, + { + name: "Given worker is 8 workers should has 8 workers as the value", + setup: func() *viper.Viper { + v 
:= viper.New() + v.Set("worker", 8) + return v + }, + expect: 8, + }, + } + for _, tc := range testCasesWorker { + t.Run(tc.name, func(t *testing.T) { + conf := tc.setup() + config.Sanitize(conf) + assert.Equal(t, tc.expect, conf.GetUint("worker")) + }) + } +} + +func TestValidate(t *testing.T) { + testCases := []struct { + name string + setup func() *viper.Viper + wantErr bool + containErrMsg string + }{ + { + name: "Given root is empty should throw error and contain message `is required`", + setup: func() *viper.Viper { + return viper.New() + }, + wantErr: true, + containErrMsg: "is required", + }, + { + name: "Given root is 'Backup' and worker is '0' should throw error and contain message" + + " `has minimum value of 1`", + setup: func() *viper.Viper { + v := viper.New() + v.Set("root", "Backup") + return v + }, + wantErr: true, + containErrMsg: "has minimum value of 1", + }, + { + name: "Given root is 'Backup', worker is '5' and provider.name is empty should throw error and" + + " contain message `is required`", + setup: func() *viper.Viper { + v := viper.New() + v.Set("root", "Backup") + v.Set("worker", 5) + return v + }, + wantErr: true, + containErrMsg: "is required", + }, + { + name: "Given root is 'Backup', worker is '5', provider.name is 'hi' and provider.cred is empty" + + " should throw error and contain message `is required`", + setup: func() *viper.Viper { + v := viper.New() + v.Set("root", "Backup") + v.Set("worker", 5) + v.Set("provider.name", "hi") + return v + }, + wantErr: true, + containErrMsg: "is required", + }, + { + name: "Given root is 'Backup', worker is '5', provider.name is 'hi', provider.cred is /path/to/cred.json" + + " and chunk is '-1' should throw error and contain message `provide positive number`", + setup: func() *viper.Viper { + v := viper.New() + v.Set("root", "Backup") + v.Set("worker", 5) + v.Set("provider.name", "hi") + v.Set("provider.cred", "/path/to/cred.json") + v.Set("chunk", -1) + return v + }, + wantErr: true, + containErrMsg: "provide positive number", + }, + { + name: "Given root is 'Backup', worker is '5', provider.name is 'hi', provider.cred is /path/to/cred.json" + + " and chunk is empty should has no error", + setup: func() *viper.Viper { + v := viper.New() + v.Set("root", "Backup") + v.Set("worker", 5) + v.Set("provider.name", "hi") + v.Set("provider.cred", "/path/to/cred.json") + return v + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + conf := tc.setup() + err := config.Validate(conf) + if tc.wantErr { + assert.Error(t, err) + assert.ErrorContains(t, err, tc.containErrMsg) + return + } + assert.NoError(t, err) + }) + } +} + +func TestGetRetainExpiry(t *testing.T) { + testCases := []struct { + name string + setup func() *viper.Viper + sample uint + expect uint + }{ + { + name: "Given retain is 0 and viper with retain value of 5 should return 5", + setup: func() *viper.Viper { + v := viper.New() + v.Set("retain", 5) + return v + }, + sample: 0, + expect: 5, + }, + { + name: "Given retain is 4 and viper with retain value of 5 should return 4", + setup: func() *viper.Viper { + v := viper.New() + v.Set("retain", 5) + return v + }, + sample: 4, + expect: 4, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + res := config.GetRetainExpiry(tc.setup(), tc.sample) + assert.Equal(t, tc.expect, res) + }) + } +} diff --git a/pkg/helper/common.go b/pkg/helper/common.go new file mode 100644 index 0000000..e21309d --- /dev/null +++ b/pkg/helper/common.go @@ -0,0 +1,25 @@ 
+package helper
+
+import (
+	"fmt"
+	"time"
+)
+
+// LogStart prepends the [BEGN] tag and the given prefix to the message, then
+// returns the formatted string.
+func LogStart(prefix, message string) string {
+	return fmt.Sprintf("[BEGN] | %s | %s", prefix, message)
+}
+
+// LogDone prepends the [DONE] tag and the given prefix to the message, then
+// returns the formatted string.
+func LogDone(prefix, message string) string {
+	return fmt.Sprintf("[DONE] | %s | %s", prefix, message)
+}
+
+// ToWib converts the given time to Waktu Indonesia Barat (WIB), which is
+// time zone UTC+7.
+func ToWib(t time.Time) time.Time {
+	wib, _ := time.LoadLocation("Asia/Jakarta")
+	return t.In(wib)
+}
diff --git a/pkg/helper/common_test.go b/pkg/helper/common_test.go
new file mode 100644
index 0000000..10a266c
--- /dev/null
+++ b/pkg/helper/common_test.go
@@ -0,0 +1,48 @@
+package helper_test
+
+import (
+	"testing"
+	"time"
+
+	h "github.com/mdanialr/cron-upload/pkg/helper"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestToWib(t *testing.T) {
+	testCases := []struct {
+		name   string
+		setup  func() time.Time
+		expect string
+	}{
+		{
+			name: "Given date 2023-01-15 08:16:25 UTC should return 2023-01-15 15:16:25 WIB",
+			setup: func() time.Time {
+				return time.Date(2023, 01, 15, 8, 16, 25, 0, time.UTC)
+			},
+			expect: "2023-01-15 15:16:25 WIB",
+		},
+		{
+			name: "Given date 2023-01-15 08:16:25 UTC+9 should return 2023-01-15 06:16:25 WIB",
+			setup: func() time.Time {
+				jpn, _ := time.LoadLocation("Asia/Tokyo")
+				return time.Date(2023, 01, 15, 8, 16, 25, 0, jpn)
+			},
+			expect: "2023-01-15 06:16:25 WIB",
+		},
+		{
+			name: "Given date 2023-01-15 00:16:25 UTC+8 should return 2023-01-14 23:16:25 WIB",
+			setup: func() time.Time {
+				sgp, _ := time.LoadLocation("Asia/Singapore")
+				return time.Date(2023, 01, 15, 0, 16, 25, 0, sgp)
+			},
+			expect: "2023-01-14 23:16:25 WIB",
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			formatTime := "2006-01-02 15:04:05 MST"
+			assert.Equal(t, tc.expect, h.ToWib(tc.setup()).Format(formatTime))
+		})
+	}
+}
diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go
new file mode 100644
index 0000000..90cc9ee
--- /dev/null
+++ b/pkg/logger/logger.go
@@ -0,0 +1,11 @@
+package logger
+
+// Writer is the interface for writing log messages.
+type Writer interface {
+	// Init does the necessary setup before calling WriteInf & WriteErr
+	Init()
+	// WriteInf writes a message to the info log
+	WriteInf(...any)
+	// WriteErr writes a message to the error log
+	WriteErr(...any)
+}
diff --git a/pkg/logger/writer.go b/pkg/logger/writer.go
new file mode 100644
index 0000000..a3ae9cb
--- /dev/null
+++ b/pkg/logger/writer.go
@@ -0,0 +1,32 @@
+package logger
+
+import (
+	"io"
+	"log"
+)
+
+// NewFile returns a Writer that writes log messages to the given file.
+func NewFile(file io.Writer) Writer {
+	return &LogFile{
+		file: file,
+	}
+}
+
+// LogFile is a log writer that writes to a file.
+type LogFile struct {
+	file           io.Writer
+	infLog, errLog *log.Logger
+}
+
+func (l *LogFile) Init() {
+	l.infLog = log.New(l.file, "[INF] ", log.Ldate|log.Ltime)
+	l.errLog = log.New(l.file, "[ERR] ", log.Ldate|log.Ltime|log.Lshortfile)
+}
+
+func (l *LogFile) WriteInf(msg ...any) {
+	l.infLog.Println(msg...)
+}
+
+func (l *LogFile) WriteErr(msg ...any) {
+	l.errLog.Println(msg...)
+}
diff --git a/internal/scan/file.go b/pkg/scan/file.go
similarity index 61%
rename from internal/scan/file.go
rename to pkg/scan/file.go
index 024cbb4..cf99149 100644
--- a/internal/scan/file.go
+++ b/pkg/scan/file.go
@@ -2,7 +2,7 @@ package scan
 
 import (
 	"fmt"
-	"io/ioutil"
+	"os"
 	"sort"
 	"time"
 )
@@ -12,20 +12,20 @@ type byDateAsc []sortedFile
 
 func (a byDateAsc) Len() int      { return len(a) }
 func (a byDateAsc) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
-func (a byDateAsc) Less(i, j int) bool { return a[i].date.Before(a[j].date) }
+func (a byDateAsc) Less(i, j int) bool { return a[i].Date.Before(a[j].Date) }
 
 // sortedFile custom struct for sorting purpose only.
 type sortedFile struct {
-	name string    // the name of the file.
-	date time.Time // the latest modified time of the file.
+	Name string    // Name is the name of the file.
+	Date time.Time // Date is the latest modified time of the file.
 }
 
-// Files scan the given directory and return the list of the filename that already
+// FilesAsc scans the given directory and returns the list of file names that are already
 // sorted by date in ascending direction.
-func Files(dir string) (result []string, err error) {
+func FilesAsc(dir string) (result []string, err error) {
 	var sorted []sortedFile
 
-	files, err := ioutil.ReadDir(dir)
+	files, err := os.ReadDir(dir)
 	if err != nil {
 		return []string{}, fmt.Errorf("failed to read the given dir: %s", err)
 	}
@@ -33,16 +33,17 @@
 	for _, fl := range files {
 		// append only if the data is NOT a directory
 		if !fl.IsDir() {
+			info, _ := fl.Info()
 			sorted = append(sorted, sortedFile{
-				fmt.Sprintf("%s/%s", dir, fl.Name()),
-				fl.ModTime(),
+				Name: fmt.Sprintf("%s/%s", dir, fl.Name()),
+				Date: info.ModTime(),
			})
 		}
 	}
 
 	sort.Sort(byDateAsc(sorted))
 	for _, st := range sorted {
-		result = append(result, st.name)
+		result = append(result, st.Name)
 	}
 
 	return result, nil
diff --git a/internal/scan/file_test.go b/pkg/scan/file_test.go
similarity index 83%
rename from internal/scan/file_test.go
rename to pkg/scan/file_test.go
index 14d3b35..350d24c 100644
--- a/internal/scan/file_test.go
+++ b/pkg/scan/file_test.go
@@ -1,8 +1,9 @@
-package scan
+package scan_test
 
 import (
 	"testing"
 
+	"github.com/mdanialr/cron-upload/pkg/scan"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -26,7 +27,7 @@ func TestFiles(t *testing.T) {
 
 	for _, tc := range testCases {
 		t.Run(tc.name, func(t *testing.T) {
-			_, err := Files(tc.sampleDir)
+			_, err := scan.FilesAsc(tc.sampleDir)
 
 			switch tc.wantErr {
 			case true:
@@ -38,13 +39,13 @@
 	}
 
 	t.Run("Scanning testdata directory should has exactly three files", func(t *testing.T) {
-		out, err := Files("testdata")
+		out, err := scan.FilesAsc("testdata")
 		require.NoError(t, err)
 		assert.Equal(t, 3, len(out))
 	})
 
 	t.Run("Scanning testdata directory should ignore directory and indexing only the files", func(t *testing.T) {
-		out, err := Files("testdata")
+		out, err := scan.FilesAsc("testdata")
 		require.NoError(t, err)
 		assert.Equal(t, 3, len(out))
 	})
diff --git a/internal/scan/testdata/directory/dummyfile.txt b/pkg/scan/testdata/directory/dummyfile.txt
similarity index 100%
rename from internal/scan/testdata/directory/dummyfile.txt
rename to pkg/scan/testdata/directory/dummyfile.txt
diff --git a/internal/scan/testdata/dummydoc.docx b/pkg/scan/testdata/dummydoc.docx
similarity index 100%
rename from internal/scan/testdata/dummydoc.docx
rename to pkg/scan/testdata/dummydoc.docx
diff --git a/internal/scan/testdata/dummypdf.pdf 
b/pkg/scan/testdata/dummypdf.pdf similarity index 100% rename from internal/scan/testdata/dummypdf.pdf rename to pkg/scan/testdata/dummypdf.pdf diff --git a/internal/scan/testdata/dummytext.txt b/pkg/scan/testdata/dummytext.txt similarity index 100% rename from internal/scan/testdata/dummytext.txt rename to pkg/scan/testdata/dummytext.txt
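
Note: the diff above only adds the `config`, `helper`, and `logger` packages; the entry point that wires them together is not part of this change. The sketch below is a hypothetical illustration of how a caller might combine them, not code from this diff: the config file name (`app.yml`), the log file name (`cron-upload.log`), and the `UPLOAD` prefix are assumptions made for the example.

```go
package main

import (
	"log"
	"os"
	"path/filepath"
	"time"

	"github.com/mdanialr/cron-upload/pkg/config"
	"github.com/mdanialr/cron-upload/pkg/helper"
	"github.com/mdanialr/cron-upload/pkg/logger"
	"github.com/spf13/viper"
)

func main() {
	// load the app config; the file name here is an assumption for this sketch
	v := viper.New()
	v.SetConfigFile("app.yml")
	if err := v.ReadInConfig(); err != nil {
		log.Fatalln("failed to read config:", err)
	}

	// fill in the defaults first, then make sure all required fields are present
	config.Sanitize(v)
	if err := config.Validate(v); err != nil {
		log.Fatalln("invalid config:", err)
	}

	// the `log` field only holds a directory, so the file name is made up here
	f, err := os.OpenFile(filepath.Join(v.GetString("log"), "cron-upload.log"),
		os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0o644)
	if err != nil {
		log.Fatalln("failed to open log file:", err)
	}
	defer f.Close()

	wr := logger.NewFile(f)
	wr.Init() // must be called before WriteInf/WriteErr

	start := helper.ToWib(time.Now())
	wr.WriteInf(helper.LogStart("UPLOAD", "job started at "+start.Format("2006-01-02 15:04:05 MST")))
	// ... the upload/delete work would go here ...
	wr.WriteInf(helper.LogDone("UPLOAD", "job finished"))
}
```

The only ordering that matters in this sketch is `Sanitize` before `Validate` (so defaults are applied before required fields are checked) and `Init` before any `WriteInf`/`WriteErr` call.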
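Likewise, a minimal sketch of how `scan.FilesAsc` and `config.GetRetainExpiry` could be used together by a caller; the directory path and the retain value set directly on viper are placeholders for illustration only.

```go
package main

import (
	"fmt"
	"log"

	"github.com/mdanialr/cron-upload/pkg/config"
	"github.com/mdanialr/cron-upload/pkg/scan"
	"github.com/spf13/viper"
)

func main() {
	v := viper.New()
	v.Set("retain", 8640) // pretend this came from the app config file

	// FilesAsc returns full paths sorted by modified date ascending,
	// so the oldest archives come first.
	files, err := scan.FilesAsc("/var/backups/archives") // hypothetical directory
	if err != nil {
		log.Fatalln(err)
	}

	retain := config.GetRetainExpiry(v, 0) // 0 falls back to the config default
	fmt.Println("retain expiry:", retain)
	for _, f := range files {
		fmt.Println(f) // e.g. /var/backups/archives/db-2023-01-01.tar.gz
	}
}
```

Because the slice is ordered oldest first, a caller can walk it from the front when deciding which entries fall outside the retain window and which newer ones still need to be uploaded.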