From 8413dad30a26f32915fdf4932e6d6a4f0c49e113 Mon Sep 17 00:00:00 2001 From: tarepan Date: Sat, 23 Mar 2024 07:12:16 +0000 Subject: [PATCH 01/10] =?UTF-8?q?refactor:=20step=E3=81=AE=E5=91=BD?= =?UTF-8?q?=E5=90=8D=E8=A6=8F=E5=89=87=E3=82=92=E7=B5=B1=E4=B8=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-docker.yml | 17 ++- .github/workflows/build.yml | 129 +++++++++++----------- .github/workflows/release-test-docker.yml | 24 ++-- .github/workflows/release-test.yml | 16 +-- .github/workflows/test.yml | 16 +-- .github/workflows/upload-gh-pages.yml | 11 +- 6 files changed, 109 insertions(+), 104 deletions(-) diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml index 424b1646f..032a1511c 100644 --- a/.github/workflows/build-docker.yml +++ b/.github/workflows/build-docker.yml @@ -81,12 +81,13 @@ jobs: platforms: linux/amd64 steps: - - uses: actions/checkout@v4 + - name: Set up the repository + uses: actions/checkout@v4 - - name: Setup QEMU + - name: Set up QEMU uses: docker/setup-qemu-action@v2 - - name: Setup Docker Buildx + - name: Set up Docker Buildx id: buildx uses: docker/setup-buildx-action@v2 @@ -96,15 +97,14 @@ jobs: username: ${{ vars.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - # Download VOICEVOX RESOURCE - - name: Prepare VOICEVOX RESOURCE cache + - name: Set up VOICEVOX RESOURCE cache uses: actions/cache@v3 id: voicevox-resource-cache with: key: voicevox-resource-${{ env.VOICEVOX_RESOURCE_VERSION }} path: download/resource - - name: Checkout VOICEVOX RESOURCE + - name: Set up VOICEVOX RESOURCE repository if: steps.voicevox-resource-cache.outputs.cache-hit != 'true' uses: actions/checkout@v4 with: @@ -112,13 +112,12 @@ jobs: ref: ${{ env.VOICEVOX_RESOURCE_VERSION }} path: download/resource - # Merge VOICEVOX RESOURCE - - name: Merge VOICEVOX RESOURCE + - name: Build VOICEVOX RESOURCE env: DOWNLOAD_RESOURCE_PATH: download/resource run: bash build_util/process_voicevox_resource.bash - - name: Build and Deploy Docker image + - name: Build and Deploy the application Docker image uses: docker/build-push-action@v3 env: IMAGE_TAG: diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 1afa0ee47..5e91df0ec 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -105,30 +105,31 @@ jobs: split: ${{ startsWith(matrix.os, 'macos-') && 'gsplit' || 'split' }} steps: - - name: declare variables + - name: Set up variables id: vars run: | echo "package_name=voicevox_engine-${{ matrix.target }}-${{ needs.config.outputs.version }}" >> "$GITHUB_OUTPUT" - - uses: actions/checkout@v4 + - name: Set up the repository + uses: actions/checkout@v4 # NOTE: The default 'sed' and 'split' of macOS is BSD 'sed' and 'split'. # There is a difference in specification between BSD 'sed' and 'split' and GNU 'sed' and 'split', # so you need to install GNU 'sed' and 'split'. 
- - name: Install GNU sed on macOS + - name: Set up macOS dependencies if: startsWith(matrix.os, 'macos-') run: | brew install gnu-sed coreutils # ONNX Runtime providersとCUDA周りをリンクするために使う - - name: Install patchelf + - name: Set up Ubuntu+ORT dependencies if: startsWith(matrix.os, 'ubuntu-') && endsWith(matrix.target, 'nvidia') run: | sudo apt-get update sudo apt-get install -y patchelf - # Download CUDA - - name: Restore cached CUDA + # CUDA + - name: Set up CUDA from cache if: matrix.cuda_version != '' uses: actions/cache/restore@v3 id: cuda-dll-cache-restore @@ -137,7 +138,7 @@ jobs: key: ${{ matrix.os }}-cuda-dll-${{ matrix.cuda_version }}-v1 path: download/cuda - - name: Setup CUDA + - name: Set up CUDA toolkit if: matrix.cuda_version != '' && steps.cuda-dll-cache-restore.outputs.cache-hit != 'true' uses: Jimver/cuda-toolkit@v0.2.10 id: cuda-toolkit @@ -145,7 +146,7 @@ jobs: method: network cuda: ${{ matrix.cuda_version }} - - name: Extract CUDA Dynamic Libraries + - name: Set up CUDA Dynamic Libraries if: matrix.cuda_version != '' && steps.cuda-dll-cache-restore.outputs.cache-hit != 'true' run: | set -eux @@ -176,19 +177,19 @@ jobs: sudo rm -rf "${CUDA_ROOT}" fi - - name: Save CUDA cache + - name: Set up CUDA cache if: matrix.cuda_version != '' uses: actions/cache/save@v3 with: key: ${{ steps.cuda-dll-cache-restore.outputs.cache-primary-key }} path: download/cuda - # Download cuDNN - - name: Export cuDNN url to calc hash + # cuDNN + - name: Set up cuDNN url file if: matrix.cudnn_url != '' run: echo "${{ matrix.cudnn_url }}" > download/cudnn_url.txt - - name: Restore cached cuDNN + - name: Set up cuDNN from cache if: matrix.cudnn_url != '' uses: actions/cache/restore@v3 id: cudnn-dll-cache-restore @@ -197,7 +198,7 @@ jobs: key: ${{ matrix.os }}-cudnn-dll-${{ hashFiles('download/cudnn_url.txt') }}-v1 path: download/cudnn - - name: Download and extract cuDNN Dynamic Libraries + - name: Set up cuDNN Dynamic Libraries if: matrix.cudnn_url != '' && steps.cudnn-dll-cache-restore.outputs.cache-hit != 'true' run: | set -eux @@ -228,19 +229,19 @@ jobs: rm download/cudnn.tar.xz fi - - name: Save cuDNN cache + - name: Set up cuDNN cache if: matrix.cudnn_url != '' uses: actions/cache/save@v3 with: key: ${{ steps.cudnn-dll-cache-restore.outputs.cache-primary-key }} path: download/cudnn - # Donwload zlib - - name: Export zlib url to calc hash + # zlib + - name: Set up zlib url file if: matrix.zlib_url != '' run: echo "${{ matrix.zlib_url }}" >> download/zlib_url.txt - - name: Restore cached zlib + - name: Set up zlib from cache if: matrix.zlib_url != '' uses: actions/cache/restore@v3 id: zlib-cache-restore @@ -248,7 +249,7 @@ jobs: key: zlib-cache-v1-${{ hashFiles('download/zlib_url.txt') }} path: download/zlib - - name: Download zlib + - name: Set up zlib Dynamic Libraries if: steps.zlib-cache-restore.outputs.cache-hit != 'true' && matrix.zlib_url != '' run: | curl -L "${{ matrix.zlib_url }}" -o download/zlib.zip @@ -260,19 +261,20 @@ jobs: mv download/zlib/dll_${{ matrix.architecture }}/zlibwapi.dll download/zlib/zlibwapi.dll rm -r download/zlib/dll_${{ matrix.architecture }} - - name: Save zlib cache + - name: Set up zlib cache if: matrix.zlib_url != '' uses: actions/cache/save@v3 with: key: ${{ steps.zlib-cache-restore.outputs.cache-primary-key }} path: download/zlib - - name: Setup MSVC + # + - name: Set up Windows dependencies if: startsWith(matrix.os, 'windows-') uses: ilammy/msvc-dev-cmd@v1 # Python install path of windows: C:/hostedtoolcache/windows/Python - - name: Setup Python + - name: 
Set up Python id: setup-python uses: actions/setup-python@v5 with: @@ -280,7 +282,7 @@ jobs: architecture: ${{ matrix.architecture }} cache: pip - - name: Install Python dependencies + - name: Set up Python dependencies run: | python -m pip install -r requirements-dev.txt @@ -306,15 +308,15 @@ jobs: exit "$EXIT_CODE" fi - - name: Create download directory + - name: Set up download directory run: mkdir -p download/ - # Donwload DirectML - - name: Export DirectML url to calc hash + # DirectML + - name: Set up DirectML url file if: endswith(matrix.target, '-directml') run: echo "${{ matrix.directml_url }}" >> download/directml_url.txt - - name: Restore cached DirectML + - name: Set up DirectML from cache if: endswith(matrix.target, '-directml') uses: actions/cache/restore@v3 id: directml-cache-restore @@ -322,7 +324,7 @@ jobs: key: directml-cache-v1-${{ hashFiles('download/directml_url.txt') }} path: download/directml - - name: Download DirectML + - name: Set up DirectML Dynamic Libraries if: steps.directml-cache-restore.outputs.cache-hit != 'true' && endswith(matrix.target, '-directml') run: | curl -L "${{ matrix.directml_url }}" -o download/directml.zip @@ -334,25 +336,25 @@ jobs: mv download/directml/bin/${{ matrix.architecture }}-win/DirectML.dll download/directml/DirectML.dll rm -r download/directml/bin - - name: Save DirectML cache + - name: Set up DirectML cache if: endswith(matrix.target, '-directml') uses: actions/cache/save@v3 with: key: ${{ steps.directml-cache-restore.outputs.cache-primary-key }} path: download/directml - # Download ONNX Runtime - - name: Export ONNX Runtime url to calc hash + # ONNX Runtime (`ORT`) + - name: Set up ORT url file run: echo "${{ matrix.onnxruntime_url }}" > download/onnxruntime_url.txt - - name: Restore cached ONNX Runtime + - name: Set up ORT from cache uses: actions/cache/restore@v3 id: onnxruntime-cache-restore with: key: ${{ matrix.os }}-onnxruntime-${{ hashFiles('download/onnxruntime_url.txt') }}-v1 path: download/onnxruntime - - name: Download ONNX Runtime (Windows) + - name: Set up ORT (Windows) if: steps.onnxruntime-cache-restore.outputs.cache-hit != 'true' && startsWith(matrix.os, 'windows-') run: | curl -L "${{ matrix.onnxruntime_url }}" > download/onnxruntime.zip @@ -370,7 +372,7 @@ jobs: rm download/onnxruntime.zip - - name: Download ONNX Runtime (Mac/Linux) + - name: Set up ORT (Mac/Linux) if: steps.onnxruntime-cache-restore.outputs.cache-hit != 'true' && startsWith(matrix.os, 'windows-') != true run: | curl -L "${{ matrix.onnxruntime_url }}" > download/onnxruntime.tgz @@ -378,21 +380,21 @@ jobs: tar xf "download/onnxruntime.tgz" -C "download/onnxruntime" --strip-components 1 rm download/onnxruntime.tgz - - name: Save ONNX Runtime cache + - name: Set up ORT cache uses: actions/cache/save@v3 with: key: ${{ steps.onnxruntime-cache-restore.outputs.cache-primary-key }} path: download/onnxruntime - # Download VOICEVOX RESOURCE - - name: Prepare VOICEVOX RESOURCE cache + # VOICEVOX RESOURCE (`RESOURCE`) + - name: Set up RESOURCE cache uses: actions/cache@v3 id: voicevox-resource-cache with: key: voicevox-resource-${{ env.VOICEVOX_RESOURCE_VERSION }} path: download/resource - - name: Checkout VOICEVOX RESOURCE + - name: Set up RESOURCE repository if: steps.voicevox-resource-cache.outputs.cache-hit != 'true' uses: actions/checkout@v4 with: @@ -400,21 +402,20 @@ jobs: ref: ${{ env.VOICEVOX_RESOURCE_VERSION }} path: download/resource - # Merge VOICEVOX RESOURCE - - name: Merge VOICEVOX RESOURCE + - name: Build RESOURCE env: 
DOWNLOAD_RESOURCE_PATH: download/resource run: bash build_util/process_voicevox_resource.bash - # Download VOICEVOX Core - - name: Prepare VOICEVOX Core cache + # VOICEVOX CORE (`CORE`) + - name: Set up CORE cache uses: actions/cache@v3 id: voicevox-core-cache with: key: ${{ matrix.os }}-voicevox-core-${{ matrix.voicevox_core_asset_prefix }}-${{ env.VOICEVOX_CORE_VERSION }} path: download/core - - name: Download VOICEVOX Core + - name: Set up CORE if: steps.voicevox-core-cache.outputs.cache-hit != 'true' env: VOICEVOX_CORE_ASSET_NAME: ${{ matrix.voicevox_core_asset_prefix }}-${{ env.VOICEVOX_CORE_VERSION }} @@ -430,14 +431,15 @@ jobs: rm -rf download/${{ env.VOICEVOX_CORE_ASSET_NAME }} rm download/${{ env.VOICEVOX_CORE_ASSET_NAME }}.zip - - name: Generate licenses.json + # Build + - name: Build licenses run: | OUTPUT_LICENSE_JSON_PATH=engine_manifest_assets/dependency_licenses.json \ bash build_util/create_venv_and_generate_licenses.bash # FIXME: VOICEVOX (editor) cannot build without licenses.json cp engine_manifest_assets/dependency_licenses.json licenses.json - - name: Build run.py with PyInstaller + - name: Build VOICEVOX ENGINE run.py run: | set -eux @@ -462,15 +464,18 @@ jobs: LIBONNXRUNTIME_PATH="$LIBONNXRUNTIME_PATH" \ pyinstaller --noconfirm run.spec - - name: Gather DLL dependencies to dist/run/ (Windows) + # Because PyInstaller does not copy dynamic loaded libraries, + # manually move DLL dependencies into `dist/run/` (cache already saved) + + - name: Gather DLL dependencies (Windows) if: startsWith(matrix.os, 'windows-') run: | set -eux - # Move DLL dependencies (cache already saved) - + # (Windows CUDA) if [ -f "download/onnxruntime/lib/onnxruntime_providers_cuda.dll" ]; then - # ONNX Runtime providers (PyInstaller does not copy dynamic loaded libraries) + + # ONNX Runtime providers mv download/onnxruntime/lib/onnxruntime_*.dll dist/run/ # CUDA @@ -487,29 +492,29 @@ jobs: # zlib mv download/zlib/zlibwapi.dll dist/run/ - # Remove source directories to reduce disk usage (already cached) + # Clean source directories (already cached) rm -rf download/onnxruntime rm -rf download/cuda rm -rf download/cudnn rm -rf download/zlib fi + # (Windows DirectML) if [[ ${{ matrix.target }} == *-directml ]]; then + # DirectML mv download/directml/DirectML.dll dist/run/ - # Remove source directory (already cached) + # Clean source directories (already cached) rm -rf download/directml fi - - name: Gather DLL dependencies to dist/run/ (Linux CUDA) + - name: Gather DLL dependencies (Linux CUDA) if: startsWith(matrix.os, 'ubuntu-') && endsWith(matrix.target, 'nvidia') run: | set -eux - # Move DLL dependencies (cache already saved) - - # ONNX Runtime providers (PyInstaller does not copy dynamic loaded libraries) + # ONNX Runtime providers patchelf --set-rpath '$ORIGIN' "$(pwd)/download/onnxruntime/lib"/libonnxruntime_providers_*.so mv download/onnxruntime/lib/libonnxruntime_*.so dist/run/ @@ -524,7 +529,7 @@ jobs: mv download/cudnn/bin/libcudnn.so.* dist/run/ mv download/cudnn/bin/libcudnn_*_infer.so.* dist/run/ - # Remove source directories to reduce disk usage (already cached) + # Clean source directories (already cached) rm -rf download/onnxruntime rm -rf download/cuda rm -rf download/cudnn @@ -548,7 +553,7 @@ jobs: mv dist/run/ "${{ matrix.target }}/" # 7z archives - - name: Create 7z archives + - name: Build the application as 7z archives run: | # Compress to artifact.7z.001, artifact.7z.002, ... 
7z -r -v1900m a "${{ steps.vars.outputs.package_name }}.7z" "${{ matrix.target }}/" @@ -557,7 +562,7 @@ jobs: ls ${{ steps.vars.outputs.package_name }}.7z.* > archives_7z.txt mv archives_7z.txt "${{ steps.vars.outputs.package_name }}.7z.txt" - - name: Upload 7z archives to artifact + - name: Deploy 7z archives to artifact if: github.event.inputs.upload_artifact == 'true' uses: actions/upload-artifact@v3 with: @@ -565,7 +570,7 @@ jobs: path: | ${{ steps.vars.outputs.package_name }}.7z.* - - name: Upload 7z archives to Release assets + - name: Deploy 7z archives to Release assets if: needs.config.outputs.version != '' uses: ncipollo/release-action@v1 with: @@ -577,12 +582,12 @@ jobs: ${{ steps.vars.outputs.package_name }}.7z.* commit: ${{ github.sha }} - - name: Clean 7z archives to reduce disk usage + - name: Clean 7z archives run: | rm -f ${{ steps.vars.outputs.package_name }}.7z.* # VVPP archives - - name: Create VVPP archives + - name: Build the application as VVPP archives run: | # Compress to compressed.zip.001, compressed.zip.002, ... # NOTE: 1000th archive will be "compressed.zip.1000" after "compressed.zip.999". This is unconsidered as an extreme case. @@ -603,7 +608,7 @@ jobs: ls ${{ steps.vars.outputs.package_name }}*.vvppp ${{ steps.vars.outputs.package_name }}.vvpp > archives_vvpp.txt || true mv archives_vvpp.txt "${{ steps.vars.outputs.package_name }}.vvpp.txt" - - name: Upload VVPP archives to artifact + - name: Deploy VVPP archives to artifact if: github.event.inputs.upload_artifact == 'true' uses: actions/upload-artifact@v3 with: @@ -613,7 +618,7 @@ jobs: ${{ steps.vars.outputs.package_name }}*.vvppp ${{ steps.vars.outputs.package_name }}.vvpp.txt - - name: Upload VVPP archives to Release assets + - name: Deploy VVPP archives to Release assets if: needs.config.outputs.version != '' uses: ncipollo/release-action@v1 with: diff --git a/.github/workflows/release-test-docker.yml b/.github/workflows/release-test-docker.yml index d30337798..eb29f9ec6 100644 --- a/.github/workflows/release-test-docker.yml +++ b/.github/workflows/release-test-docker.yml @@ -37,24 +37,20 @@ jobs: - cpu-ubuntu20.04 steps: - - uses: actions/checkout@v4 + - name: Set up the repository + uses: actions/checkout@v4 - # - # Setup Python Environment - # - - uses: actions/setup-python@v5 + - name: Set up Python + uses: actions/setup-python@v5 with: python-version: "3.11.3" cache: pip - - name: Install requirements + - name: Set up Python dependencies run: | pip install -r requirements-test.txt - # - # Setup Docker Environment - # - - name: Declare variables + - name: Set up variables id: docker_vars run: | if [ "${{ matrix.tag }}" != "" ]; then @@ -63,17 +59,17 @@ jobs: echo "image_tag=${{ env.IMAGE_NAME }}:${{ env.VERSION }}" >> "$GITHUB_OUTPUT" fi - - name: Docker pull + - name: Set up the application image run: docker pull "${{ steps.docker_vars.outputs.image_tag }}" - - name: Docker run + - name: Set up the application container run: docker run -d -p 50021:50021 "${{ steps.docker_vars.outputs.image_tag }}" # Docker コンテナが起動してから、レスポンスが返ってくるまで待機する # リトライは10回まで `/version` にアクセスしてレスポンスのステータスコードをチェック # - ステータスコードが `200` の場合は正常終了します # - ステータスコードが `200` 以外の場合は、5秒間スリープしてリトライします - - name: Wait for container to start + - name: Set up the application server by waiting warmup run: | set +e # curlのエラーを無視する @@ -93,5 +89,5 @@ jobs: done exit 1 - - name: Test + - name: Test the application run: python build_util/check_release_build.py --skip_run_process --dist_dir dist/ diff --git a/.github/workflows/release-test.yml 
b/.github/workflows/release-test.yml index ec680ab10..266c7507e 100644 --- a/.github/workflows/release-test.yml +++ b/.github/workflows/release-test.yml @@ -53,20 +53,22 @@ jobs: runs-on: ${{ matrix.os }} steps: - - name: declare variables + - name: Set up variables id: vars run: | echo "release_url=${{ env.REPO_URL }}/releases/download/${{ env.VERSION }}" >> "$GITHUB_OUTPUT" echo "package_name=voicevox_engine-${{ matrix.target }}-${{ env.VERSION }}" >> "$GITHUB_OUTPUT" - - uses: actions/checkout@v4 + - name: Set up the repository + uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + - name: Set up Python + uses: actions/setup-python@v5 with: python-version: "3.11.3" cache: pip - - name: Download + - name: Set up the application run: | mkdir -p download curl -L -o "download/list.txt" "${{ steps.vars.outputs.release_url }}/${{ steps.vars.outputs.package_name }}.7z.txt" @@ -74,13 +76,13 @@ jobs: 7z x "download/$(head -n1 download/list.txt)" mv "${{ matrix.target }}" dist/ - - name: chmod +x + - name: Set up permission if: startsWith(matrix.target, 'linux') || startsWith(matrix.target, 'macos') run: chmod +x dist/run - - name: Install requirements + - name: Set up Python test dependencies run: | pip install -r requirements-test.txt - - name: Test + - name: Test the application run: python build_util/check_release_build.py --dist_dir dist/ diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 18843d492..343b2020d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -20,25 +20,26 @@ jobs: python: ["3.11.3"] steps: - - uses: actions/checkout@v4 + - name: Set up the repository + uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python }} + - name: Set up Python uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} cache: pip - - name: Install dependencies + - name: Set up Python dependencies run: | python -m pip install --upgrade pip setuptools wheel python -m pip install -r requirements-test.txt - - name: Validate poetry.lock + - name: Test poetry.lock run: | poetry lock --no-update git diff --exit-code - - name: Run poetry and check + - name: Test dependency lists run: | poetry export --without-hashes -o requirements.txt.check poetry export --without-hashes --with dev -o requirements-dev.txt.check @@ -54,9 +55,10 @@ jobs: exit 1 fi - - run: pysen run lint + - name: Test format + run: pysen run lint - - name: Run pytest and get coverage + - name: Test codes and Get coverage run: | coverage run --omit=test/* -m pytest diff --git a/.github/workflows/upload-gh-pages.yml b/.github/workflows/upload-gh-pages.yml index 3efc702b4..83d9d59b3 100644 --- a/.github/workflows/upload-gh-pages.yml +++ b/.github/workflows/upload-gh-pages.yml @@ -21,24 +21,25 @@ jobs: upload-doc: runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v4 + - name: Set up the repository + uses: actions/checkout@v4 - - name: Setup Python + - name: Set up Python id: setup-python uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} cache: pip - - name: Install Python dependencies + - name: Set up Python dependencies run: | pip install -r requirements.txt - - name: Make documents + - name: Build documents run: | PYTHONPATH=. 
python build_util/make_docs.py - - name: Deploy to GitHub Pages + - name: Deploy documents to GitHub Pages uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ secrets.GITHUB_TOKEN }} From 34dc05a265ac79082402be6775d1e5822e2cc95e Mon Sep 17 00:00:00 2001 From: tarepan Date: Sat, 23 Mar 2024 07:49:23 +0000 Subject: [PATCH 02/10] =?UTF-8?q?refactor:=20=E3=82=AB=E3=83=90=E3=83=AC?= =?UTF-8?q?=E3=83=83=E3=82=B8=E9=96=A2=E4=BF=82=20workflow=20step=20?= =?UTF-8?q?=E5=91=BD=E5=90=8D=E7=B5=B1=E4=B8=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/test.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 343b2020d..ce04bd6c1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -62,20 +62,20 @@ jobs: run: | coverage run --omit=test/* -m pytest - - name: Submit coverage to Coveralls + - name: Test coverage and Deploy the results to Coveralls if: matrix.os == 'ubuntu-20.04' run: coveralls --service=github env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Create coverage result + - name: Deploy coverage report to local files if: github.event_name == 'pull_request' && matrix.os == 'ubuntu-20.04' run: | mkdir report coverage report > report/report.txt echo ${{ github.event.number }} > report/pr_num.txt - - name: Upload coverage result + - name: Deploy coverage report to GitHub if: github.event_name == 'pull_request' && matrix.os == 'ubuntu-20.04' uses: actions/upload-artifact@v3 with: From f69004a708806f73a342d0688f0c1f97daa9269d Mon Sep 17 00:00:00 2001 From: tarepan Date: Sat, 23 Mar 2024 07:58:55 +0000 Subject: [PATCH 03/10] =?UTF-8?q?fix:=20workflow=20=E3=82=B9=E3=83=86?= =?UTF-8?q?=E3=83=83=E3=83=97=E5=90=8D=E3=81=A8=E5=87=A6=E7=90=86=E3=81=AE?= =?UTF-8?q?=E4=B8=8D=E4=B8=80=E8=87=B4=E3=82=92=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ce04bd6c1..e93939f19 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -58,11 +58,11 @@ jobs: - name: Test format run: pysen run lint - - name: Test codes and Get coverage + - name: Test codes and coverage run: | coverage run --omit=test/* -m pytest - - name: Test coverage and Deploy the results to Coveralls + - name: Deploy coverage results to Coveralls if: matrix.os == 'ubuntu-20.04' run: coveralls --service=github env: From 056c8789aebea0e30f7f571ac7795d8d3a42ef41 Mon Sep 17 00:00:00 2001 From: tarepan Date: Thu, 28 Mar 2024 04:18:14 +0000 Subject: [PATCH 04/10] =?UTF-8?q?fix:=20workflow=20=E3=82=B9=E3=83=86?= =?UTF-8?q?=E3=83=83=E3=83=97=E5=90=8D=E3=81=AB=E5=88=86=E9=A1=9E=20prefix?= =?UTF-8?q?=20=E3=82=92=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-docker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml index 032a1511c..f84db39c6 100644 --- a/.github/workflows/build-docker.yml +++ b/.github/workflows/build-docker.yml @@ -81,7 +81,7 @@ jobs: platforms: linux/amd64 steps: - - name: Set up the repository + - name: [Setup] Check out the repository uses: actions/checkout@v4 - name: Set up QEMU From 416d37e9e8a800568c688a6d759821d047bfa4cb Mon Sep 17 
00:00:00 2001 From: tarepan Date: Thu, 28 Mar 2024 04:19:20 +0000 Subject: [PATCH 05/10] =?UTF-8?q?fix:=20workflow=20=E3=82=B9=E3=83=86?= =?UTF-8?q?=E3=83=83=E3=83=97=E5=90=8D=E3=81=AB=E5=88=86=E9=A1=9E=20prefix?= =?UTF-8?q?=20=E3=82=92=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e93939f19..f6922c2ff 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -20,7 +20,7 @@ jobs: python: ["3.11.3"] steps: - - name: Set up the repository + - name: [Setup] Check out the repository uses: actions/checkout@v4 - name: Set up Python From 2fec5bf1a07b4b287c49cee7e22c5b1a349235fc Mon Sep 17 00:00:00 2001 From: tarepan Date: Thu, 28 Mar 2024 04:21:06 +0000 Subject: [PATCH 06/10] =?UTF-8?q?fix:=20workflow=20=E6=A7=8B=E6=96=87?= =?UTF-8?q?=E3=81=8B=E3=82=89=E3=81=AE=E9=80=B8=E8=84=B1=E3=82=92=E4=BF=AE?= =?UTF-8?q?=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-docker.yml | 2 +- .github/workflows/test.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml index f84db39c6..9fa982f29 100644 --- a/.github/workflows/build-docker.yml +++ b/.github/workflows/build-docker.yml @@ -81,7 +81,7 @@ jobs: platforms: linux/amd64 steps: - - name: [Setup] Check out the repository + - name: Check out the repository uses: actions/checkout@v4 - name: Set up QEMU diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f6922c2ff..2c127969c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -20,7 +20,7 @@ jobs: python: ["3.11.3"] steps: - - name: [Setup] Check out the repository + - name: Check out the repository uses: actions/checkout@v4 - name: Set up Python From 4033ef0be78c810201687c81dc224cc810e186dd Mon Sep 17 00:00:00 2001 From: tarepan Date: Thu, 28 Mar 2024 04:47:01 +0000 Subject: [PATCH 07/10] =?UTF-8?q?fix:=20workflow=20=E3=82=B9=E3=83=86?= =?UTF-8?q?=E3=83=83=E3=83=97=E5=90=8D=E3=81=AB=E5=88=86=E9=A1=9E=20prefix?= =?UTF-8?q?=20=E3=82=92=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-docker.yml | 14 ++-- .github/workflows/build.yml | 96 +++++++++++------------ .github/workflows/release-test-docker.yml | 16 ++-- .github/workflows/release-test.yml | 14 ++-- .github/workflows/test.yml | 18 ++--- .github/workflows/upload-gh-pages.yml | 10 +-- 6 files changed, 84 insertions(+), 84 deletions(-) diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml index 9fa982f29..04d977e1e 100644 --- a/.github/workflows/build-docker.yml +++ b/.github/workflows/build-docker.yml @@ -84,27 +84,27 @@ jobs: - name: Check out the repository uses: actions/checkout@v4 - - name: Set up QEMU + - name: Set up QEMU uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx + - name: Set up Docker Buildx id: buildx uses: docker/setup-buildx-action@v2 - - name: Login to DockerHub + - name: Login to DockerHub uses: docker/login-action@v2 with: username: ${{ vars.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set up VOICEVOX RESOURCE cache + - name: Prepare VOICEVOX RESOURCE cache uses: actions/cache@v3 id: voicevox-resource-cache with: key: 
voicevox-resource-${{ env.VOICEVOX_RESOURCE_VERSION }} path: download/resource - - name: Set up VOICEVOX RESOURCE repository + - name: Check out the VOICEVOX RESOURCE repository if: steps.voicevox-resource-cache.outputs.cache-hit != 'true' uses: actions/checkout@v4 with: @@ -112,12 +112,12 @@ jobs: ref: ${{ env.VOICEVOX_RESOURCE_VERSION }} path: download/resource - - name: Build VOICEVOX RESOURCE + - name: Merge VOICEVOX RESOURCE env: DOWNLOAD_RESOURCE_PATH: download/resource run: bash build_util/process_voicevox_resource.bash - - name: Build and Deploy the application Docker image + - name: Build and Deploy Docker image uses: docker/build-push-action@v3 env: IMAGE_TAG: diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5e91df0ec..2ce3ed35f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -40,7 +40,7 @@ jobs: version: ${{ steps.vars.outputs.version }} version_or_latest: ${{ steps.vars.outputs.version_or_latest }} steps: - - name: declare variables + - name: Declare variables id: vars run: | : # release タグ名, または workflow_dispatch でのバージョン名. リリースでない (push event) 場合は空文字列 @@ -105,31 +105,31 @@ jobs: split: ${{ startsWith(matrix.os, 'macos-') && 'gsplit' || 'split' }} steps: - - name: Set up variables + - name: Declare variables id: vars run: | echo "package_name=voicevox_engine-${{ matrix.target }}-${{ needs.config.outputs.version }}" >> "$GITHUB_OUTPUT" - - name: Set up the repository + - name: Check out the repository uses: actions/checkout@v4 # NOTE: The default 'sed' and 'split' of macOS is BSD 'sed' and 'split'. # There is a difference in specification between BSD 'sed' and 'split' and GNU 'sed' and 'split', # so you need to install GNU 'sed' and 'split'. - - name: Set up macOS dependencies + - name: Install macOS dependencies if: startsWith(matrix.os, 'macos-') run: | brew install gnu-sed coreutils # ONNX Runtime providersとCUDA周りをリンクするために使う - - name: Set up Ubuntu+ORT dependencies + - name: Install Ubuntu+ORT dependencies if: startsWith(matrix.os, 'ubuntu-') && endsWith(matrix.target, 'nvidia') run: | sudo apt-get update sudo apt-get install -y patchelf # CUDA - - name: Set up CUDA from cache + - name: Restore cached CUDA if: matrix.cuda_version != '' uses: actions/cache/restore@v3 id: cuda-dll-cache-restore @@ -138,7 +138,7 @@ jobs: key: ${{ matrix.os }}-cuda-dll-${{ matrix.cuda_version }}-v1 path: download/cuda - - name: Set up CUDA toolkit + - name: Set up CUDA toolkit if: matrix.cuda_version != '' && steps.cuda-dll-cache-restore.outputs.cache-hit != 'true' uses: Jimver/cuda-toolkit@v0.2.10 id: cuda-toolkit @@ -146,7 +146,7 @@ jobs: method: network cuda: ${{ matrix.cuda_version }} - - name: Set up CUDA Dynamic Libraries + - name: Extract CUDA Dynamic Libraries if: matrix.cuda_version != '' && steps.cuda-dll-cache-restore.outputs.cache-hit != 'true' run: | set -eux @@ -177,7 +177,7 @@ jobs: sudo rm -rf "${CUDA_ROOT}" fi - - name: Set up CUDA cache + - name: Save CUDA cache if: matrix.cuda_version != '' uses: actions/cache/save@v3 with: @@ -185,11 +185,11 @@ jobs: path: download/cuda # cuDNN - - name: Set up cuDNN url file + - name: Export cuDNN url to calc hash if: matrix.cudnn_url != '' run: echo "${{ matrix.cudnn_url }}" > download/cudnn_url.txt - - name: Set up cuDNN from cache + - name: Restore cached cuDNN if: matrix.cudnn_url != '' uses: actions/cache/restore@v3 id: cudnn-dll-cache-restore @@ -198,7 +198,7 @@ jobs: key: ${{ matrix.os }}-cudnn-dll-${{ hashFiles('download/cudnn_url.txt') }}-v1 path: download/cudnn - - name: Set 
up cuDNN Dynamic Libraries + - name: Download and extract cuDNN Dynamic Libraries if: matrix.cudnn_url != '' && steps.cudnn-dll-cache-restore.outputs.cache-hit != 'true' run: | set -eux @@ -229,7 +229,7 @@ jobs: rm download/cudnn.tar.xz fi - - name: Set up cuDNN cache + - name: Save cuDNN cache if: matrix.cudnn_url != '' uses: actions/cache/save@v3 with: @@ -237,11 +237,11 @@ jobs: path: download/cudnn # zlib - - name: Set up zlib url file + - name: Export zlib url to calc hash if: matrix.zlib_url != '' run: echo "${{ matrix.zlib_url }}" >> download/zlib_url.txt - - name: Set up zlib from cache + - name: Restore cached zlib if: matrix.zlib_url != '' uses: actions/cache/restore@v3 id: zlib-cache-restore @@ -249,7 +249,7 @@ jobs: key: zlib-cache-v1-${{ hashFiles('download/zlib_url.txt') }} path: download/zlib - - name: Set up zlib Dynamic Libraries + - name: Download zlib dynamic Library if: steps.zlib-cache-restore.outputs.cache-hit != 'true' && matrix.zlib_url != '' run: | curl -L "${{ matrix.zlib_url }}" -o download/zlib.zip @@ -261,7 +261,7 @@ jobs: mv download/zlib/dll_${{ matrix.architecture }}/zlibwapi.dll download/zlib/zlibwapi.dll rm -r download/zlib/dll_${{ matrix.architecture }} - - name: Set up zlib cache + - name: Save zlib cache if: matrix.zlib_url != '' uses: actions/cache/save@v3 with: @@ -269,12 +269,12 @@ jobs: path: download/zlib # - - name: Set up Windows dependencies + - name: Set up Windows dependencies if: startsWith(matrix.os, 'windows-') uses: ilammy/msvc-dev-cmd@v1 # Python install path of windows: C:/hostedtoolcache/windows/Python - - name: Set up Python + - name: Set up Python id: setup-python uses: actions/setup-python@v5 with: @@ -282,7 +282,7 @@ jobs: architecture: ${{ matrix.architecture }} cache: pip - - name: Set up Python dependencies + - name: Install Python dependencies run: | python -m pip install -r requirements-dev.txt @@ -308,15 +308,15 @@ jobs: exit "$EXIT_CODE" fi - - name: Set up download directory + - name: Create download directory run: mkdir -p download/ # DirectML - - name: Set up DirectML url file + - name: Export DirectML url to calc hash if: endswith(matrix.target, '-directml') run: echo "${{ matrix.directml_url }}" >> download/directml_url.txt - - name: Set up DirectML from cache + - name: Restore cached DirectML if: endswith(matrix.target, '-directml') uses: actions/cache/restore@v3 id: directml-cache-restore @@ -324,7 +324,7 @@ jobs: key: directml-cache-v1-${{ hashFiles('download/directml_url.txt') }} path: download/directml - - name: Set up DirectML Dynamic Libraries + - name: Download DirectML dynamic Library if: steps.directml-cache-restore.outputs.cache-hit != 'true' && endswith(matrix.target, '-directml') run: | curl -L "${{ matrix.directml_url }}" -o download/directml.zip @@ -336,7 +336,7 @@ jobs: mv download/directml/bin/${{ matrix.architecture }}-win/DirectML.dll download/directml/DirectML.dll rm -r download/directml/bin - - name: Set up DirectML cache + - name: Save DirectML cache if: endswith(matrix.target, '-directml') uses: actions/cache/save@v3 with: @@ -347,14 +347,14 @@ jobs: - name: Set up ORT url file run: echo "${{ matrix.onnxruntime_url }}" > download/onnxruntime_url.txt - - name: Set up ORT from cache + - name: Export ONNX Runtime url to calc hash uses: actions/cache/restore@v3 id: onnxruntime-cache-restore with: key: ${{ matrix.os }}-onnxruntime-${{ hashFiles('download/onnxruntime_url.txt') }}-v1 path: download/onnxruntime - - name: Set up ORT (Windows) + - name: Download ORT (Windows) if: 
steps.onnxruntime-cache-restore.outputs.cache-hit != 'true' && startsWith(matrix.os, 'windows-') run: | curl -L "${{ matrix.onnxruntime_url }}" > download/onnxruntime.zip @@ -372,7 +372,7 @@ jobs: rm download/onnxruntime.zip - - name: Set up ORT (Mac/Linux) + - name: Download ORT (Mac/Linux) if: steps.onnxruntime-cache-restore.outputs.cache-hit != 'true' && startsWith(matrix.os, 'windows-') != true run: | curl -L "${{ matrix.onnxruntime_url }}" > download/onnxruntime.tgz @@ -380,21 +380,21 @@ jobs: tar xf "download/onnxruntime.tgz" -C "download/onnxruntime" --strip-components 1 rm download/onnxruntime.tgz - - name: Set up ORT cache + - name: Save ORT cache uses: actions/cache/save@v3 with: key: ${{ steps.onnxruntime-cache-restore.outputs.cache-primary-key }} path: download/onnxruntime # VOICEVOX RESOURCE (`RESOURCE`) - - name: Set up RESOURCE cache + - name: Prepare RESOURCE cache uses: actions/cache@v3 id: voicevox-resource-cache with: key: voicevox-resource-${{ env.VOICEVOX_RESOURCE_VERSION }} path: download/resource - - name: Set up RESOURCE repository + - name: Check out RESOURCE repository if: steps.voicevox-resource-cache.outputs.cache-hit != 'true' uses: actions/checkout@v4 with: @@ -402,20 +402,20 @@ jobs: ref: ${{ env.VOICEVOX_RESOURCE_VERSION }} path: download/resource - - name: Build RESOURCE + - name: Merge RESOURCE env: DOWNLOAD_RESOURCE_PATH: download/resource run: bash build_util/process_voicevox_resource.bash # VOICEVOX CORE (`CORE`) - - name: Set up CORE cache + - name: Prepare CORE cache uses: actions/cache@v3 id: voicevox-core-cache with: key: ${{ matrix.os }}-voicevox-core-${{ matrix.voicevox_core_asset_prefix }}-${{ env.VOICEVOX_CORE_VERSION }} path: download/core - - name: Set up CORE + - name: Download CORE if: steps.voicevox-core-cache.outputs.cache-hit != 'true' env: VOICEVOX_CORE_ASSET_NAME: ${{ matrix.voicevox_core_asset_prefix }}-${{ env.VOICEVOX_CORE_VERSION }} @@ -432,14 +432,14 @@ jobs: rm download/${{ env.VOICEVOX_CORE_ASSET_NAME }}.zip # Build - - name: Build licenses + - name: Generate licenses.json run: | OUTPUT_LICENSE_JSON_PATH=engine_manifest_assets/dependency_licenses.json \ bash build_util/create_venv_and_generate_licenses.bash # FIXME: VOICEVOX (editor) cannot build without licenses.json cp engine_manifest_assets/dependency_licenses.json licenses.json - - name: Build VOICEVOX ENGINE run.py + - name: Build VOICEVOX ENGINE run.py run: | set -eux @@ -467,7 +467,7 @@ jobs: # Because PyInstaller does not copy dynamic loaded libraries, # manually move DLL dependencies into `dist/run/` (cache already saved) - - name: Gather DLL dependencies (Windows) + - name: Gather DLL dependencies (Windows) if: startsWith(matrix.os, 'windows-') run: | set -eux @@ -509,7 +509,7 @@ jobs: rm -rf download/directml fi - - name: Gather DLL dependencies (Linux CUDA) + - name: Gather DLL dependencies (Linux CUDA) if: startsWith(matrix.os, 'ubuntu-') && endsWith(matrix.target, 'nvidia') run: | set -eux @@ -534,12 +534,12 @@ jobs: rm -rf download/cuda rm -rf download/cudnn - - name: Set @rpath to @executable_path + - name: Set @rpath to @executable_path if: startsWith(matrix.os, 'macos-') run: | install_name_tool -add_rpath @executable_path/. 
dist/run/run - - name: Code signing + - name: Code signing if: github.event.inputs.code_signing == 'true' && startsWith(matrix.os, 'windows-') run: | bash build_util/codesign.bash "dist/run/run.exe" @@ -548,12 +548,12 @@ jobs: ESIGNERCKA_PASSWORD: ${{ secrets.ESIGNERCKA_PASSWORD }} ESIGNERCKA_TOTP_SECRET: ${{ secrets.ESIGNERCKA_TOTP_SECRET }} - - name: Rename artifact directory to archive + - name: Rename artifact directory to archive run: | mv dist/run/ "${{ matrix.target }}/" # 7z archives - - name: Build the application as 7z archives + - name: Create 7z archives run: | # Compress to artifact.7z.001, artifact.7z.002, ... 7z -r -v1900m a "${{ steps.vars.outputs.package_name }}.7z" "${{ matrix.target }}/" @@ -562,7 +562,7 @@ jobs: ls ${{ steps.vars.outputs.package_name }}.7z.* > archives_7z.txt mv archives_7z.txt "${{ steps.vars.outputs.package_name }}.7z.txt" - - name: Deploy 7z archives to artifact + - name: Upload 7z archives to artifact if: github.event.inputs.upload_artifact == 'true' uses: actions/upload-artifact@v3 with: @@ -570,7 +570,7 @@ jobs: path: | ${{ steps.vars.outputs.package_name }}.7z.* - - name: Deploy 7z archives to Release assets + - name: Upload 7z archives to Release assets if: needs.config.outputs.version != '' uses: ncipollo/release-action@v1 with: @@ -582,12 +582,12 @@ jobs: ${{ steps.vars.outputs.package_name }}.7z.* commit: ${{ github.sha }} - - name: Clean 7z archives + - name: Clean 7z archives run: | rm -f ${{ steps.vars.outputs.package_name }}.7z.* # VVPP archives - - name: Build the application as VVPP archives + - name: Create VVPP archives run: | # Compress to compressed.zip.001, compressed.zip.002, ... # NOTE: 1000th archive will be "compressed.zip.1000" after "compressed.zip.999". This is unconsidered as an extreme case. 
@@ -608,7 +608,7 @@ jobs: ls ${{ steps.vars.outputs.package_name }}*.vvppp ${{ steps.vars.outputs.package_name }}.vvpp > archives_vvpp.txt || true mv archives_vvpp.txt "${{ steps.vars.outputs.package_name }}.vvpp.txt" - - name: Deploy VVPP archives to artifact + - name: Upload VVPP archives to artifact if: github.event.inputs.upload_artifact == 'true' uses: actions/upload-artifact@v3 with: @@ -618,7 +618,7 @@ jobs: ${{ steps.vars.outputs.package_name }}*.vvppp ${{ steps.vars.outputs.package_name }}.vvpp.txt - - name: Deploy VVPP archives to Release assets + - name: Upload VVPP archives to Release assets if: needs.config.outputs.version != '' uses: ncipollo/release-action@v1 with: diff --git a/.github/workflows/release-test-docker.yml b/.github/workflows/release-test-docker.yml index eb29f9ec6..da696d743 100644 --- a/.github/workflows/release-test-docker.yml +++ b/.github/workflows/release-test-docker.yml @@ -37,20 +37,20 @@ jobs: - cpu-ubuntu20.04 steps: - - name: Set up the repository + - name: Check out the repository uses: actions/checkout@v4 - - name: Set up Python + - name: Set up Python uses: actions/setup-python@v5 with: python-version: "3.11.3" cache: pip - - name: Set up Python dependencies + - name: Install Python dependencies run: | pip install -r requirements-test.txt - - name: Set up variables + - name: Declare variables id: docker_vars run: | if [ "${{ matrix.tag }}" != "" ]; then @@ -59,17 +59,17 @@ jobs: echo "image_tag=${{ env.IMAGE_NAME }}:${{ env.VERSION }}" >> "$GITHUB_OUTPUT" fi - - name: Set up the application image + - name: Pull the application image run: docker pull "${{ steps.docker_vars.outputs.image_tag }}" - - name: Set up the application container + - name: Run application container run: docker run -d -p 50021:50021 "${{ steps.docker_vars.outputs.image_tag }}" # Docker コンテナが起動してから、レスポンスが返ってくるまで待機する # リトライは10回まで `/version` にアクセスしてレスポンスのステータスコードをチェック # - ステータスコードが `200` の場合は正常終了します # - ステータスコードが `200` 以外の場合は、5秒間スリープしてリトライします - - name: Set up the application server by waiting warmup + - name: Warm up the application server by waiting run: | set +e # curlのエラーを無視する @@ -89,5 +89,5 @@ jobs: done exit 1 - - name: Test the application + - name: Test the application run: python build_util/check_release_build.py --skip_run_process --dist_dir dist/ diff --git a/.github/workflows/release-test.yml b/.github/workflows/release-test.yml index 266c7507e..03622e79e 100644 --- a/.github/workflows/release-test.yml +++ b/.github/workflows/release-test.yml @@ -53,22 +53,22 @@ jobs: runs-on: ${{ matrix.os }} steps: - - name: Set up variables + - name: Declare variables id: vars run: | echo "release_url=${{ env.REPO_URL }}/releases/download/${{ env.VERSION }}" >> "$GITHUB_OUTPUT" echo "package_name=voicevox_engine-${{ matrix.target }}-${{ env.VERSION }}" >> "$GITHUB_OUTPUT" - - name: Set up the repository + - name: Check out the repository uses: actions/checkout@v4 - - name: Set up Python + - name: Set up Python uses: actions/setup-python@v5 with: python-version: "3.11.3" cache: pip - - name: Set up the application + - name: Download the application run: | mkdir -p download curl -L -o "download/list.txt" "${{ steps.vars.outputs.release_url }}/${{ steps.vars.outputs.package_name }}.7z.txt" @@ -76,13 +76,13 @@ jobs: 7z x "download/$(head -n1 download/list.txt)" mv "${{ matrix.target }}" dist/ - - name: Set up permission + - name: Set up permission if: startsWith(matrix.target, 'linux') || startsWith(matrix.target, 'macos') run: chmod +x dist/run - - name: Set up Python test dependencies 
+ - name: Install Python test dependencies run: | pip install -r requirements-test.txt - - name: Test the application + - name: Test the application run: python build_util/check_release_build.py --dist_dir dist/ diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2c127969c..710a8c98a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -23,23 +23,23 @@ jobs: - name: Check out the repository uses: actions/checkout@v4 - - name: Set up Python + - name: Set up Python uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} cache: pip - - name: Set up Python dependencies + - name: Install Python dependencies run: | python -m pip install --upgrade pip setuptools wheel python -m pip install -r requirements-test.txt - - name: Test poetry.lock + - name: Check poetry.lock run: | poetry lock --no-update git diff --exit-code - - name: Test dependency lists + - name: Check dependency lists run: | poetry export --without-hashes -o requirements.txt.check poetry export --without-hashes --with dev -o requirements-dev.txt.check @@ -55,27 +55,27 @@ jobs: exit 1 fi - - name: Test format + - name: Check format run: pysen run lint - - name: Test codes and coverage + - name: Test codes and coverage run: | coverage run --omit=test/* -m pytest - - name: Deploy coverage results to Coveralls + - name: Submit coverage results to Coveralls if: matrix.os == 'ubuntu-20.04' run: coveralls --service=github env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Deploy coverage report to local files + - name: Create coverage report if: github.event_name == 'pull_request' && matrix.os == 'ubuntu-20.04' run: | mkdir report coverage report > report/report.txt echo ${{ github.event.number }} > report/pr_num.txt - - name: Deploy coverage report to GitHub + - name: Upload coverage report to GitHub if: github.event_name == 'pull_request' && matrix.os == 'ubuntu-20.04' uses: actions/upload-artifact@v3 with: diff --git a/.github/workflows/upload-gh-pages.yml b/.github/workflows/upload-gh-pages.yml index 83d9d59b3..25d86546d 100644 --- a/.github/workflows/upload-gh-pages.yml +++ b/.github/workflows/upload-gh-pages.yml @@ -21,25 +21,25 @@ jobs: upload-doc: runs-on: ubuntu-20.04 steps: - - name: Set up the repository + - name: Check out the repository uses: actions/checkout@v4 - - name: Set up Python + - name: Set up Python id: setup-python uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} cache: pip - - name: Set up Python dependencies + - name: Install Python dependencies run: | pip install -r requirements.txt - - name: Build documents + - name: Make documents run: | PYTHONPATH=. 
python build_util/make_docs.py - - name: Deploy documents to GitHub Pages + - name: Deploy documents to GitHub Pages uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ secrets.GITHUB_TOKEN }} From 006bf0d42d0c255337c145603a08cd3453e5f0f1 Mon Sep 17 00:00:00 2001 From: tarepan Date: Thu, 28 Mar 2024 04:55:23 +0000 Subject: [PATCH 08/10] =?UTF-8?q?fix:=20=E9=96=93=E9=81=95=E3=81=A3?= =?UTF-8?q?=E3=81=9F=20step=20=E8=A7=A3=E8=AA=AC=E3=82=92=E4=BF=AE?= =?UTF-8?q?=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build.yml | 4 ++-- .github/workflows/test.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 2ce3ed35f..8030123b4 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -344,10 +344,10 @@ jobs: path: download/directml # ONNX Runtime (`ORT`) - - name: Set up ORT url file + - name: Export ONNX Runtime url to calc hash run: echo "${{ matrix.onnxruntime_url }}" > download/onnxruntime_url.txt - - name: Export ONNX Runtime url to calc hash + - name: Restore cached ONNX Runtime uses: actions/cache/restore@v3 id: onnxruntime-cache-restore with: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 710a8c98a..09d8d4e81 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -34,7 +34,7 @@ jobs: python -m pip install --upgrade pip setuptools wheel python -m pip install -r requirements-test.txt - - name: Check poetry.lock + - name: Validate poetry.lock run: | poetry lock --no-update git diff --exit-code From 87cee1770495ca46323c7a013ab54e80bf075159 Mon Sep 17 00:00:00 2001 From: tarepan Date: Mon, 15 Apr 2024 07:24:47 +0000 Subject: [PATCH 09/10] =?UTF-8?q?fix:=20step=E5=90=8D=E3=81=A8=E3=82=B3?= =?UTF-8?q?=E3=83=A1=E3=83=B3=E3=83=88=E3=81=AE=E8=A8=98=E8=BF=B0=E3=82=92?= =?UTF-8?q?=E8=AA=BF=E6=95=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-engine-package.yml | 27 ++++++++++----------- .github/workflows/test-engine-container.yml | 8 +++--- .github/workflows/test-engine-package.yml | 4 +-- 3 files changed, 19 insertions(+), 20 deletions(-) diff --git a/.github/workflows/build-engine-package.yml b/.github/workflows/build-engine-package.yml index d14681fd9..7635a0437 100644 --- a/.github/workflows/build-engine-package.yml +++ b/.github/workflows/build-engine-package.yml @@ -122,7 +122,7 @@ jobs: brew install gnu-sed coreutils # ONNX Runtime providersとCUDA周りをリンクするために使う - - name: Install Ubuntu+ORT dependencies + - name: Install Ubuntu+ONNXRuntime dependencies if: startsWith(matrix.os, 'ubuntu-') && endsWith(matrix.target, 'nvidia') run: | sudo apt-get update @@ -273,7 +273,6 @@ jobs: key: ${{ steps.zlib-cache-restore.outputs.cache-primary-key }} path: download/zlib - # - name: Set up Windows dependencies if: startsWith(matrix.os, 'windows-') uses: ilammy/msvc-dev-cmd@v1 @@ -329,7 +328,7 @@ jobs: key: directml-cache-v1-${{ hashFiles('download/directml_url.txt') }} path: download/directml - - name: Download DirectML dynamic Library + - name: Set up DirectML dynamic Library if: steps.directml-cache-restore.outputs.cache-hit != 'true' && endswith(matrix.target, '-directml') run: | curl -L "${{ matrix.directml_url }}" -o download/directml.zip @@ -348,7 +347,7 @@ jobs: key: ${{ steps.directml-cache-restore.outputs.cache-primary-key }} path: download/directml - # ONNX Runtime (`ORT`) + # ONNX Runtime - 
name: Export ONNX Runtime url to calc hash run: echo "${{ matrix.onnxruntime_url }}" > download/onnxruntime_url.txt @@ -359,7 +358,7 @@ jobs: key: ${{ matrix.os }}-onnxruntime-${{ hashFiles('download/onnxruntime_url.txt') }}-v1 path: download/onnxruntime - - name: Download ORT (Windows) + - name: Download ONNX Runtime (Windows) if: steps.onnxruntime-cache-restore.outputs.cache-hit != 'true' && startsWith(matrix.os, 'windows-') run: | curl -L "${{ matrix.onnxruntime_url }}" > download/onnxruntime.zip @@ -379,7 +378,7 @@ jobs: rm download/onnxruntime.zip - - name: Download ORT (Mac/Linux) + - name: Download ONNX Runtime (Mac/Linux) if: steps.onnxruntime-cache-restore.outputs.cache-hit != 'true' && startsWith(matrix.os, 'windows-') != true run: | curl -L "${{ matrix.onnxruntime_url }}" > download/onnxruntime.tgz @@ -387,13 +386,13 @@ jobs: tar xf "download/onnxruntime.tgz" -C "download/onnxruntime" --strip-components 1 rm download/onnxruntime.tgz - - name: Save ORT cache + - name: Save ONNX Runtime cache uses: actions/cache/save@v3 with: key: ${{ steps.onnxruntime-cache-restore.outputs.cache-primary-key }} path: download/onnxruntime - # VOICEVOX RESOURCE (`RESOURCE`) + # VOICEVOX RESOURCE - name: Prepare RESOURCE cache uses: actions/cache@v3 id: voicevox-resource-cache @@ -414,7 +413,7 @@ jobs: DOWNLOAD_RESOURCE_PATH: download/resource run: bash build_util/process_voicevox_resource.bash - # VOICEVOX CORE (`CORE`) + # VOICEVOX CORE - name: Prepare CORE cache uses: actions/cache@v3 id: voicevox-core-cache @@ -483,7 +482,7 @@ jobs: run: | set -eux - # (Windows CUDA) + # Windows CUDA if [ -f "download/onnxruntime/lib/onnxruntime_providers_cuda.dll" ]; then # ONNX Runtime providers @@ -503,14 +502,14 @@ jobs: # zlib mv download/zlib/zlibwapi.dll dist/run/ - # Clean source directories (already cached) + # Clean source directories to reduce disk usage (already cached) rm -rf download/onnxruntime rm -rf download/cuda rm -rf download/cudnn rm -rf download/zlib fi - # (Windows DirectML) + # Windows DirectML # 一度代入して actionlint のエラー回避 (詳細: NOTE 1) TARGET=${{ matrix.target }} if [[ $TARGET == *-directml ]]; then @@ -541,7 +540,7 @@ jobs: mv download/cudnn/bin/libcudnn.so.* dist/run/ mv download/cudnn/bin/libcudnn_*_infer.so.* dist/run/ - # Clean source directories (already cached) + # Clean source directories to reduce disk usage (already cached) rm -rf download/onnxruntime rm -rf download/cuda rm -rf download/cudnn @@ -594,7 +593,7 @@ jobs: ${{ steps.vars.outputs.package_name }}.7z.* commit: ${{ github.sha }} - - name: Clean 7z archives + - name: Clean 7z archives to reduce disk usage run: | rm -f ${{ steps.vars.outputs.package_name }}.7z.* diff --git a/.github/workflows/test-engine-container.yml b/.github/workflows/test-engine-container.yml index da696d743..29f86e31d 100644 --- a/.github/workflows/test-engine-container.yml +++ b/.github/workflows/test-engine-container.yml @@ -59,17 +59,17 @@ jobs: echo "image_tag=${{ env.IMAGE_NAME }}:${{ env.VERSION }}" >> "$GITHUB_OUTPUT" fi - - name: Pull the application image + - name: Pull ENGINE application docker image run: docker pull "${{ steps.docker_vars.outputs.image_tag }}" - - name: Run application container + - name: Run ENGINE application docker container run: docker run -d -p 50021:50021 "${{ steps.docker_vars.outputs.image_tag }}" # Docker コンテナが起動してから、レスポンスが返ってくるまで待機する # リトライは10回まで `/version` にアクセスしてレスポンスのステータスコードをチェック # - ステータスコードが `200` の場合は正常終了します # - ステータスコードが `200` 以外の場合は、5秒間スリープしてリトライします - - name: Warm up the application server by waiting + - 
name: Warm up ENGINE server by waiting run: | set +e # curlのエラーを無視する @@ -89,5 +89,5 @@ jobs: done exit 1 - - name: Test the application + - name: Test ENGINE application docker container run: python build_util/check_release_build.py --skip_run_process --dist_dir dist/ diff --git a/.github/workflows/test-engine-package.yml b/.github/workflows/test-engine-package.yml index 03622e79e..132783f4a 100644 --- a/.github/workflows/test-engine-package.yml +++ b/.github/workflows/test-engine-package.yml @@ -68,7 +68,7 @@ jobs: python-version: "3.11.3" cache: pip - - name: Download the application + - name: Download ENGINE application package run: | mkdir -p download curl -L -o "download/list.txt" "${{ steps.vars.outputs.release_url }}/${{ steps.vars.outputs.package_name }}.7z.txt" @@ -84,5 +84,5 @@ jobs: run: | pip install -r requirements-test.txt - - name: Test the application + - name: Test ENGINE application package run: python build_util/check_release_build.py --dist_dir dist/ From 028a24cfb33c85c94a87e5f7a9f1f1edd548a6ac Mon Sep 17 00:00:00 2001 From: Hiroshiba Kazuyuki Date: Wed, 1 May 2024 16:14:38 +0900 Subject: [PATCH 10/10] =?UTF-8?q?=E5=BE=AE=E8=AA=BF=E6=95=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-engine-package.yml | 6 +++--- .github/workflows/test-engine-package.yml | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-engine-package.yml b/.github/workflows/build-engine-package.yml index 7635a0437..df94c3bf4 100644 --- a/.github/workflows/build-engine-package.yml +++ b/.github/workflows/build-engine-package.yml @@ -116,13 +116,13 @@ jobs: # NOTE: The default 'sed' and 'split' of macOS is BSD 'sed' and 'split'. # There is a difference in specification between BSD 'sed' and 'split' and GNU 'sed' and 'split', # so you need to install GNU 'sed' and 'split'. - - name: Install macOS dependencies + - name: Install dependencies (macOS) if: startsWith(matrix.os, 'macos-') run: | brew install gnu-sed coreutils # ONNX Runtime providersとCUDA周りをリンクするために使う - - name: Install Ubuntu+ONNXRuntime dependencies + - name: Install ONNX Runtime dependencies (Linux) if: startsWith(matrix.os, 'ubuntu-') && endsWith(matrix.target, 'nvidia') run: | sudo apt-get update @@ -273,7 +273,7 @@ jobs: key: ${{ steps.zlib-cache-restore.outputs.cache-primary-key }} path: download/zlib - - name: Set up Windows dependencies + - name: Set up MSVC if: startsWith(matrix.os, 'windows-') uses: ilammy/msvc-dev-cmd@v1 diff --git a/.github/workflows/test-engine-package.yml b/.github/workflows/test-engine-package.yml index 132783f4a..6e052f25c 100644 --- a/.github/workflows/test-engine-package.yml +++ b/.github/workflows/test-engine-package.yml @@ -68,7 +68,7 @@ jobs: python-version: "3.11.3" cache: pip - - name: Download ENGINE application package + - name: Download ENGINE package run: | mkdir -p download curl -L -o "download/list.txt" "${{ steps.vars.outputs.release_url }}/${{ steps.vars.outputs.package_name }}.7z.txt" @@ -84,5 +84,5 @@ jobs: run: | pip install -r requirements-test.txt - - name: Test ENGINE application package + - name: Test ENGINE package run: python build_util/check_release_build.py --dist_dir dist/
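For reference, a minimal sketch of the step-naming convention this patch series converges on. The workflow skeleton below is illustrative only (the workflow name, job name, and trigger are assumptions, not taken from the repository); the step names and commands mirror those in the final patches: "Check out ..." for checkouts, "Set up ..." for toolchains and caches, "Install ..." for dependency installation, "Declare variables" for variable exports, and plain verbs such as "Build ...", "Test ...", and "Upload ..." for the main work.

name: example-naming-convention
on: push
jobs:
  example:
    runs-on: ubuntu-20.04
    steps:
      # Checkout steps: "Check out ..."
      - name: Check out the repository
        uses: actions/checkout@v4

      # Toolchain / cache steps: "Set up ..."
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11.3"
          cache: pip

      # Dependency installation: "Install ..."
      - name: Install Python dependencies
        run: pip install -r requirements-test.txt

      # Main work: plain verbs ("Build ...", "Test ...", "Upload ...")
      - name: Test ENGINE package
        run: python build_util/check_release_build.py --dist_dir dist/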