diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index c8fdba40..64012baa 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,21 +1,15 @@ FROM mcr.microsoft.com/devcontainers/go:1 -ENV CHAT_DOWNLOADER_VER=0.2.8 - RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt-get -y install --no-install-recommends ffmpeg python3 python3-pip \ && apt-get clean -y && rm -rf /var/lib/apt/lists/* RUN pip3 install --no-cache --upgrade --break-system-packages pip streamlink chat-downloader - WORKDIR /tmp RUN wget https://github.com/rsms/inter/releases/download/v4.0-beta7/Inter-4.0-beta7.zip && unzip Inter-4.0-beta7.zip && mkdir -p /usr/share/fonts/opentype/inter/ && cp /tmp/Desktop/Inter-*.otf /usr/share/fonts/opentype/inter/ && fc-cache -f -v -RUN wget https://github.com/lay295/TwitchDownloader/releases/download/1.54.3/TwitchDownloaderCLI-1.54.3-Linux-x64.zip && unzip TwitchDownloaderCLI-1.54.3-Linux-x64.zip && mv TwitchDownloaderCLI /usr/local/bin/ && chmod +x /usr/local/bin/TwitchDownloaderCLI && rm TwitchDownloaderCLI-1.54.3-Linux-x64.zip - -#RUN wget https://github.com/xenova/chat-downloader/archive/refs/tags/v${CHAT_DOWNLOADER_VER}.tar.gz -#RUN tar -xvf v${CHAT_DOWNLOADER_VER}.tar.gz && cd chat-downloader-${CHAT_DOWNLOADER_VER} && python3 setup.py install && cd .. && rm -f v${CHAT_DOWNLOADER_VER}.tar.gz && rm -rf chat-downloader-${CHAT_DOWNLOADER_VER} +RUN wget https://github.com/lay295/TwitchDownloader/releases/download/1.55.0/TwitchDownloaderCLI-1.55.0-Linux-x64.zip && unzip TwitchDownloaderCLI-1.55.0-Linux-x64.zip && mv TwitchDownloaderCLI /usr/local/bin/ && chmod +x /usr/local/bin/TwitchDownloaderCLI && rm TwitchDownloaderCLI-1.55.0-Linux-x64.zip RUN curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.50.1 diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index ee987d70..a6d15895 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -2,7 +2,8 @@ "build": { "dockerfile": "Dockerfile" }, "features": { "ghcr.io/jungaretti/features/make:1": {}, - "ghcr.io/devcontainers/features/github-cli:1": {} + "ghcr.io/devcontainers/features/github-cli:1": {}, + "ghcr.io/devcontainers/features/docker-in-docker:2": {} }, "customizations": { "vscode": { @@ -10,7 +11,8 @@ "dbaeumer.vscode-eslint", "esbenp.prettier-vscode", "eamodio.gitlens", - "github.copilot" + "github.copilot", + "yzhang.markdown-all-in-one" ] } }, @@ -35,5 +37,5 @@ ], "workspaceMount": "source=${localWorkspaceFolder},target=/workspace,type=bind,consistency=cached", "workspaceFolder": "/workspace", - "postAttachCommand": "go install github.com/joho/godotenv/cmd/godotenv@latest && go install github.com/cosmtrek/air@latest" + "postAttachCommand": "make dev_setup" } diff --git a/.dockerignore b/.dockerignore index 87b757ef..65c8dae4 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,5 +1,4 @@ .github -.git dev tmp bin \ No newline at end of file diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index 29187ce7..38419ca7 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -15,180 +15,49 @@ env: IMAGE_NAME: ${{ github.repository }} jobs: - build-push-amd64: + docker-build: runs-on: ubuntu-latest - permissions: - contents: read - packages: write - # This is used to complete the identity challenge - # with sigstore/fulcio when running outside of PRs. 
- id-token: write - steps: + # Checkout the repo - name: Checkout repository uses: actions/checkout@v4 - - # Workaround: https://github.com/docker/build-push-action/issues/461 - - name: Setup Docker buildx - uses: docker/setup-buildx-action@v3.3.0 - - # Login against a Docker registry except on PR - # https://github.com/docker/login-action - - name: Log into registry ${{ env.REGISTRY }} - if: github.event_name != 'pull_request' - uses: docker/login-action@v3.1.0 with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - # Extract metadata (tags, labels) for Docker - # https://github.com/docker/metadata-action - - name: Extract Docker metadata - id: meta - uses: docker/metadata-action@v5.5.1 - with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - - # Build and push Docker image with Buildx (don't push on PR) - # https://github.com/docker/build-push-action - - name: Build and push Docker image (amd64) - id: build-and-push-amd64 - uses: docker/build-push-action@v5.3.0 - with: - context: . - push: ${{ github.event_name != 'pull_request' }} - tags: ${{ steps.meta.outputs.tags }} - provenance: false - # labels: ${{ steps.meta.outputs.labels }} - secrets: | - VERSION=${{ steps.meta.outputs.version }} - platforms: linux/amd64 - file: Dockerfile - cache-from: type=gha,scope=${{ env.IMAGE_NAME }} - cache-to: type=gha,scope=${{ env.IMAGE_NAME }},mode=max - - build-push-arm64: - # Do not run on PRs - if: github.event_name != 'pull_request' - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - # This is used to complete the identity challenge - # with sigstore/fulcio when running outside of PRs. - id-token: write - - steps: - - name: Checkout repository - uses: actions/checkout@v4 + fetch-depth: 0 + # Set up QEMU for Arm64 - name: Set up QEMU - uses: docker/setup-qemu-action@v3.0.0 - with: - platforms: arm64 + uses: docker/setup-qemu-action@v3 - # Workaround: https://github.com/docker/build-push-action/issues/461 - - name: Setup Docker buildx - uses: docker/setup-buildx-action@v3.3.0 + # Set up Docker Buildx + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 - # Login against a Docker registry except on PR - # https://github.com/docker/login-action + # Login into GitHub Container Registry except on PR - name: Log into registry ${{ env.REGISTRY }} if: github.event_name != 'pull_request' - uses: docker/login-action@v3.1.0 + uses: docker/login-action@v3 with: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} # Extract metadata (tags, labels) for Docker - # https://github.com/docker/metadata-action - - name: Extract Docker metadata + - name: Extract Docker metadata (release) id: meta - uses: docker/metadata-action@v5.5.1 + uses: docker/metadata-action@v5 with: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + flavor: | + latest=auto + tags: | + type=semver,pattern={{version}} + type=raw,value=dev - # Build and push Docker image with Buildx (don't push on PR) - # https://github.com/docker/build-push-action - - name: Build and push Docker image (arm64) - id: build-and-push-arm64 - uses: docker/build-push-action@v5.3.0 + - name: Build and push + uses: docker/build-push-action@v6 with: - context: . 
+ platforms: linux/amd64,linux/arm64 push: ${{ github.event_name != 'pull_request' }} - tags: ${{ steps.meta.outputs.tags }}-arm64 - provenance: false - # labels: ${{ steps.meta.outputs.labels }} - secrets: | - VERSION=${{ steps.meta.outputs.version }} - platforms: linux/arm64 - file: Dockerfile.aarch64 - cache-from: type=gha,scope=${{ env.IMAGE_NAME }} - cache-to: type=gha,scope=${{ env.IMAGE_NAME }},mode=max - - create-manifests: - # Do not run on PRs - if: github.event_name != 'pull_request' - runs-on: ubuntu-latest - needs: [build-push-amd64, build-push-arm64] - permissions: - contents: read - packages: write - id-token: write - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Extract Docker metadata - id: meta - uses: docker/metadata-action@v5.5.1 - with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - - - name: Log into registry ${{ env.REGISTRY }} - if: github.event_name != 'pull_request' - uses: docker/login-action@v3.1.0 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Set repo name - run: | - echo "IMAGE_NAME=$(echo ${{ github.repository }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV - - # Create v* tag manifests and push - - name: Create ref tag manifest and push - if: startsWith(github.ref, 'refs/tags/v') - run: | - echo "Creating manifest for: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }}" - docker manifest create \ - ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }} \ - ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest \ - ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-arm64 - docker manifest push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }} - - # Create latest tag manifests and push - - name: Create latest tag manifest and push - if: startsWith(github.ref, 'refs/tags/v') - run: | - echo "Creating manifest for: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }}" - docker manifest create \ - ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest \ - ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest \ - ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-arm64 - docker manifest push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest - - # Create manifest and push - - name: Create manifest and push - # Run only on main branch push - if: github.ref == 'refs/heads/main' - run: | - docker manifest create \ - ${{ steps.meta.outputs.tags }} \ - --amend ${{ steps.meta.outputs.tags }} \ - --amend ${{ steps.meta.outputs.tags }}-arm64 - docker manifest push ${{ steps.meta.outputs.tags }} + tags: ${{ steps.meta.outputs.tags }} + context: . + dockerfile: ./Dockerfile diff --git a/.github/workflows/go-test.yml b/.github/workflows/go-test.yml index 79222ab6..696317da 100644 --- a/.github/workflows/go-test.yml +++ b/.github/workflows/go-test.yml @@ -22,11 +22,8 @@ jobs: - name: Install dependencies run: go mod download - - name: Create directories - run: sudo mkdir -p /vods && sudo chmod 777 /vods && sudo mkdir -p /logs && sudo chmod 777 /logs - - name: Run Tests - run: go test -v ./... 
env: TWITCH_CLIENT_ID: ${{ secrets.TWITCH_CLIENT_ID }} TWITCH_CLIENT_SECRET: ${{ secrets.TWITCH_CLIENT_SECRET }} + run: make test diff --git a/.gitignore b/.gitignore index 1520da7e..89e69333 100644 --- a/.gitignore +++ b/.gitignore @@ -20,7 +20,7 @@ # Go workspace file go.work -.env.dev +.env /cmd/server/__debug_bin dev diff --git a/.server.air.toml b/.server.air.toml index 036cf911..b397b8fb 100644 --- a/.server.air.toml +++ b/.server.air.toml @@ -5,14 +5,14 @@ tmp_dir = "tmp" [build] args_bin = [] bin = "./tmp/server" - cmd = "go build -o ./tmp/server ./cmd/server/main.go" + cmd = "make build_dev_server" delay = 1000 exclude_dir = ["tmp", "dev"] exclude_file = [] exclude_regex = ["_test.go"] exclude_unchanged = false follow_symlink = false - full_bin = "godotenv -f .env.dev ./tmp/server" + full_bin = "godotenv -f .env ./tmp/server" include_dir = [] include_ext = ["go", "tpl", "tmpl", "html"] include_file = [] @@ -25,7 +25,7 @@ tmp_dir = "tmp" rerun = false rerun_delay = 500 send_interrupt = false - stop_on_error = false + stop_on_error = true [color] app = "" diff --git a/.vscode/launch.json b/.vscode/launch.json index 3e28194b..dfed666b 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -16,7 +16,7 @@ "request": "launch", "mode": "auto", "program": "${workspaceFolder}/cmd/server/main.go", - "envFile": "${workspaceFolder}/.env.dev" + "envFile": "${workspaceFolder}/.env" }, { "name": "dev-worker", @@ -24,7 +24,7 @@ "request": "launch", "mode": "auto", "program": "${workspaceFolder}/cmd/worker/main.go", - "envFile": "${workspaceFolder}/.env.dev" + "envFile": "${workspaceFolder}/.env" } ] } diff --git a/.worker.air.toml b/.worker.air.toml index 378d637c..3e8b8ef7 100644 --- a/.worker.air.toml +++ b/.worker.air.toml @@ -5,14 +5,14 @@ tmp_dir = "tmp" [build] args_bin = [] bin = "./tmp/worker" - cmd = "go build -o ./tmp/worker ./cmd/worker/main.go" + cmd = "make build_dev_worker" delay = 1000 exclude_dir = ["tmp", "dev"] exclude_file = [] exclude_regex = ["_test.go"] exclude_unchanged = false follow_symlink = false - full_bin = "godotenv -f .env.dev ./tmp/worker" + full_bin = "godotenv -f .env ./tmp/worker" include_dir = [] include_ext = ["go", "tpl", "tmpl", "html"] include_file = [] @@ -25,7 +25,7 @@ tmp_dir = "tmp" rerun = false rerun_delay = 500 send_interrupt = false - stop_on_error = false + stop_on_error = true [color] app = "" diff --git a/Dockerfile b/Dockerfile index 0cb9971e..2637cf94 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,70 +1,72 @@ -FROM golang:1.22-bookworm AS build-stage-01 +ARG TWITCHDOWNLOADER_VERSION="1.55.0" -RUN mkdir /app -ADD . /app +# Build stage +FROM --platform=$BUILDPLATFORM golang:1.22-bookworm AS build WORKDIR /app +COPY . . 
+RUN make build_server build_worker -RUN CGO_ENABLED=0 GOOS=linux go build -ldflags "-s -X main.Version=${VERSION} -X main.BuildTime=`TZ=UTC date -u '+%Y-%m-%dT%H:%M:%SZ'` -X main.GitHash=`git rev-parse HEAD`" -o ganymede-api cmd/server/main.go -RUN CGO_ENABLED=0 GOOS=linux go build -ldflags "-s -X main.Version=${VERSION} -X main.BuildTime=`TZ=UTC date -u '+%Y-%m-%dT%H:%M:%SZ'` -X main.GitHash=`git rev-parse HEAD`" -o ganymede-worker cmd/worker/main.go - -FROM debian:bookworm-slim AS build-stage-02 - -RUN apt update && apt install -y git wget unzip - +# Tools stage +FROM --platform=$BUILDPLATFORM debian:bookworm-slim AS tools WORKDIR /tmp -RUN wget https://github.com/lay295/TwitchDownloader/releases/download/1.54.3/TwitchDownloaderCLI-1.54.3-Linux-x64.zip && unzip TwitchDownloaderCLI-1.54.3-Linux-x64.zip - -RUN git clone https://github.com/xenova/chat-downloader.git - -FROM debian:bookworm-slim AS production +RUN apt-get update && apt-get install -y --no-install-recommends \ +unzip git ca-certificates curl \ +&& rm -rf /var/lib/apt/lists/* + +# Download TwitchDownloader for the correct platform +ARG TWITCHDOWNLOADER_VERSION +ENV TWITCHDOWNLOADER_URL=https://github.com/lay295/TwitchDownloader/releases/download/${TWITCHDOWNLOADER_VERSION}/TwitchDownloaderCLI-${TWITCHDOWNLOADER_VERSION}-Linux +RUN if [ "$BUILDPLATFORM" = "arm64" ]; then \ + export TWITCHDOWNLOADER_URL=${TWITCHDOWNLOADER_URL}Arm; \ + fi && \ + export TWITCHDOWNLOADER_URL=${TWITCHDOWNLOADER_URL}-x64.zip && \ + echo "Download URL: $TWITCHDOWNLOADER_URL" && \ + curl -L $TWITCHDOWNLOADER_URL -o twitchdownloader.zip && \ + unzip twitchdownloader.zip && \ + rm twitchdownloader.zip +RUN git clone --depth 1 https://github.com/xenova/chat-downloader.git + +# Production stage +FROM --platform=$BUILDPLATFORM debian:bookworm-slim +WORKDIR /opt/app -# install packages -RUN apt update && apt install -y python3 python3-pip fontconfig ffmpeg tzdata curl procps -RUN ln -sf python3 /usr/bin/python +# Install dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + python3 python3-pip fontconfig ffmpeg tzdata procps \ + fonts-noto-core fonts-noto-cjk fonts-noto-extra fonts-inter \ + curl \ + && rm -rf /var/lib/apt/lists/* \ + && ln -sf python3 /usr/bin/python -# RUN apk add --update --no-cache python3 fontconfig icu-libs python3-dev gcc g++ ffmpeg bash tzdata shadow su-exec py3-pip && ln -sf python3 /usr/bin/python -RUN pip3 install --no-cache --upgrade pip streamlink --break-system-packages +# Install pip packages +RUN pip3 install --no-cache-dir --upgrade pip streamlink --break-system-packages -## Installing su-exec in debain/ubuntu container. -RUN set -ex; \ - \ - curl -o /usr/local/bin/su-exec.c https://raw.githubusercontent.com/ncopa/su-exec/master/su-exec.c; \ - \ - gcc -Wall \ - /usr/local/bin/su-exec.c -o/usr/local/bin/su-exec; \ - chown root:root /usr/local/bin/su-exec; \ - chmod 0755 /usr/local/bin/su-exec; \ - rm /usr/local/bin/su-exec.c; \ - \ -## Remove the su-exec dependency. It is no longer needed after building. 
- apt-get purge -y --auto-remove curl libc-dev +# Install gosu +RUN curl -LO https://github.com/tianon/gosu/releases/latest/download/gosu-$(dpkg --print-architecture | awk -F- '{ print $NF }') \ + && chmod 0755 gosu-$(dpkg --print-architecture | awk -F- '{ print $NF }') \ + && mv gosu-$(dpkg --print-architecture | awk -F- '{ print $NF }') /usr/local/bin/gosu -# setup user -RUN useradd -u 911 -d /data abc && \ - usermod -a -G users abc +# Setup user +RUN useradd -u 911 -d /data abc && usermod -a -G users abc -# Install chat-downloader -COPY --from=build-stage-02 /tmp/chat-downloader /tmp/chat-downloader +# Copy and install chat-downloader +COPY --from=tools /tmp/chat-downloader /tmp/chat-downloader RUN cd /tmp/chat-downloader && python3 setup.py install && cd .. && rm -rf chat-downloader -# Install fallback fonts for chat rendering -RUN apt install -y fonts-noto-core fonts-noto-cjk fonts-noto-extra fonts-inter - +# Setup fonts RUN chmod 644 /usr/share/fonts/* && chmod -R a+rX /usr/share/fonts -# TwitchDownloaderCLI -COPY --from=build-stage-02 /tmp/TwitchDownloaderCLI /usr/local/bin/ +# Copy TwitchDownloaderCLI +COPY --from=tools /tmp/TwitchDownloaderCLI /usr/local/bin/ RUN chmod +x /usr/local/bin/TwitchDownloaderCLI -WORKDIR /opt/app - -COPY --from=build-stage-01 /app/ganymede-api . -COPY --from=build-stage-01 /app/ganymede-worker . +# Copy application files +COPY --from=build /app/ganymede-api . +COPY --from=build /app/ganymede-worker . -EXPOSE 4000 - -# copy entrypoint +# Setup entrypoint COPY entrypoint.sh /usr/local/bin/ RUN chmod +x /usr/local/bin/entrypoint.sh +EXPOSE 4000 ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/Dockerfile.aarch64 b/Dockerfile.aarch64 index 1a0141bb..182e5eab 100644 --- a/Dockerfile.aarch64 +++ b/Dockerfile.aarch64 @@ -2,10 +2,11 @@ FROM arm64v8/golang:1.22 AS build-stage-01 RUN mkdir /app ADD . 
/app +ADD .git /app/.git WORKDIR /app -RUN CGO_ENABLED=0 GOOS=linux go build -ldflags "-s -X main.Version=${VERSION} -X main.BuildTime=`TZ=UTC date -u '+%Y-%m-%dT%H:%M:%SZ'` -X main.GitHash=`git rev-parse HEAD`" -o ganymede-api cmd/server/main.go -RUN CGO_ENABLED=0 GOOS=linux go build -ldflags "-s -X main.Version=${VERSION} -X main.BuildTime=`TZ=UTC date -u '+%Y-%m-%dT%H:%M:%SZ'` -X main.GitHash=`git rev-parse HEAD`" -o ganymede-worker cmd/worker/main.go +RUN make build_server +RUN make build_worker FROM arm64v8/debian:bullseye AS build-stage-02 @@ -14,7 +15,7 @@ RUN apt-get install unzip wget git -y WORKDIR /tmp RUN wget https://github.com/rsms/inter/releases/download/v3.19/Inter-3.19.zip && unzip Inter-3.19.zip -RUN wget https://github.com/lay295/TwitchDownloader/releases/download/1.54.3/TwitchDownloaderCLI-1.54.3-LinuxArm.zip && unzip TwitchDownloaderCLI-1.54.3-LinuxArm.zip +RUN wget https://github.com/lay295/TwitchDownloader/releases/download/1.54.7/TwitchDownloaderCLI-1.54.7-LinuxArm.zip && unzip TwitchDownloaderCLI-1.54.7-LinuxArm.zip RUN git clone https://github.com/xenova/chat-downloader.git diff --git a/Makefile b/Makefile index 480edf8b..1ff811bf 100644 --- a/Makefile +++ b/Makefile @@ -1,9 +1,25 @@ +dev_setup: + go install github.com/joho/godotenv/cmd/godotenv@latest + go install github.com/air-verse/air@latest + +build_server: + go build -ldflags='-X github.com/zibbp/ganymede/internal/utils.Commit=$(shell git rev-parse HEAD) -X github.com/zibbp/ganymede/internal/utils.BuildTime=$(shell date -u "+%Y-%m-%d_%H:%M:%S")' -o ganymede-api cmd/server/main.go + +build_worker: + go build -ldflags='-X github.com/zibbp/ganymede/internal/utils.Commit=$(shell git rev-parse HEAD) -X github.com/zibbp/ganymede/internal/utils.BuildTime=$(shell date -u "+%Y-%m-%d_%H:%M:%S")' -o ganymede-worker cmd/worker/main.go + +build_dev_server: + go build -ldflags='-X github.com/zibbp/ganymede/internal/utils.Commit=$(shell git rev-parse HEAD) -X github.com/zibbp/ganymede/internal/utils.BuildTime=$(shell date -u "+%Y-%m-%d_%H:%M:%S")' -o ./tmp/server ./cmd/server/main.go + +build_dev_worker: + go build -ldflags='-X github.com/zibbp/ganymede/internal/utils.Commit=$(shell git rev-parse HEAD) -X github.com/zibbp/ganymede/internal/utils.BuildTime=$(shell date -u "+%Y-%m-%d_%H:%M:%S")' -o ./tmp/worker ./cmd/worker/main.go + dev_server: - rm ./tmp/server + rm -f ./tmp/server air -c ./.server.air.toml dev_worker: - rm ./tmp/worker + rm -f ./tmp/worker air -c ./.worker.air.toml ent_generate: @@ -11,3 +27,15 @@ ent_generate: go_update_packages: go get -u ./... && go mod tidy + +lint: + golangci-lint run + +test: + go test -v ./... + +river-webui: + curl -L https://github.com/riverqueue/riverui/releases/latest/download/riverui_linux_amd64.gz | gzip -d > /tmp/riverui + chmod +x /tmp/riverui + @export $(shell grep -v '^#' .env | xargs) && \ + VITE_RIVER_API_BASE_URL=http://localhost:8080/api DATABASE_URL=postgres://$$DB_USER:$$DB_PASS@dev.tycho:$$DB_PORT/$$DB_NAME /tmp/riverui \ No newline at end of file diff --git a/README.md b/README.md index 8a0366ea..057683e4 100644 --- a/README.md +++ b/README.md @@ -7,13 +7,13 @@

Ganymede

- Twitch VOD and Stream archiving platform with a rendered chat. Files are saved in a friendly format allowing for use without Ganymede. + Twitch VOD and Live Stream archiving platform with a real-time and rendered chat experience. Files are saved in a friendly format allowing for use without Ganymede.

--- -## Demo +## Screenshot ![ganymede-readme_landing](https://user-images.githubusercontent.com/21207065/203620886-f40b82f6-317c-4ded-afdc-733d1658f6ca.jpg) @@ -21,21 +21,23 @@ https://user-images.githubusercontent.com/21207065/203620893-41a6a3a0-339a-4c62- ## About -Ganymede allows archiving of past streams (VODs) and livestreams both with a rendered chat. All files are saved in a friendly way that doesn't require Ganymede to view them (see [file structure](https://github.com/Zibbp/ganymede/wiki/File-Structure)). Ganymede is the successor of [Ceres](https://github.com/Zibbp/Ceres). +Ganymede allows archiving of past streams (VODs) and live streams with real-time chat playback along with an archival-friendly rendered chat. All files are saved in a friendly way that doesn't require Ganymede to view them (see [file structure](https://github.com/Zibbp/ganymede/wiki/File-Structure)). Ganymede is the successor of [Ceres](https://github.com/Zibbp/Ceres). ## Features - Realtime Chat Playback - SSO / OAuth authentication ([wiki](https://github.com/Zibbp/ganymede/wiki/SSO---OpenID-Connect)) - Light/dark mode toggle. -- Watch channels for new videos and streams. +- 'Watched channels' - watch channels for videos and live streams. - Twitch VOD/Livestream support. -- Queue holds. -- Queue task restarts. - Full VOD, Channel, and User management. - Custom post-download video FFmpeg parameters. - Custom chat render parameters. - Webhook notifications. +- Simple file structure for long-term archival that will outlast Ganymede. +- Recoverable queue system. +- Playback / progress saving. +- Playlists. ## Documentation @@ -67,7 +69,7 @@ Ganymede consists of four docker containers: Feel free to use an existing Postgres database container and Nginx container if you don't want to spin new ones up. 1. Download a copy of the `docker-compose.yml` file and `nginx.conf`. -2. Edit the `docker-compose.yml` file modifying the environment variables, see [environment variables](https://github.com/Zibbp/ganymede#environment-variables). +2. Edit the `docker-compose.yml` file, modifying the environment variables; see [environment variables](https://github.com/Zibbp/ganymede#environment-variables) for more information. 3. Run `docker compose up -d`. 4. Visit the address and port you specified for the frontend and login with username: `admin` password: `ganymede`. 5. Change the admin password _or_ create a new user, grant admin permissions on that user, and delete the admin user. @@ -76,37 +78,43 @@ Feel free to use an existing Postgres database container and Nginx container if The API container can be run as a non root user. To do so add `PUID` and `PGID` environment variables, setting the value to your user. Read [linuxserver's docs](https://docs.linuxserver.io/general/understanding-puid-and-pgid) about this for more information. -Note: On startup the container will `chown` `/data`, `/logs`, and `/tmp`. It will not recursively `chown` the `/vods` directory. Ensure the mounted `vods` directory is readable by the set user. +Note: On startup the container will `chown` the config, temp, and logs directories. It will not recursively `chown` the `/data/videos` directory. Ensure the mounted `/data/videos` directory is readable by the set user. ### Environment Variables +The `docker-compose.yml` file has comments for each environment variable. The `*_URL` environment variables _must_ be the 'public' URLs (e.g. `https://ganymede.domain.com`); they cannot be URLs that point only to the Docker service.
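As a minimal illustration of that requirement (a sketch only, not part of this change set; the domain, ports, and PUID/PGID values below are placeholders), the relevant `docker-compose.yml` entries might look like:

```yaml
services:
  ganymede-api:
    environment:
      # Public URL of the frontend, used for CORS (not the internal service name).
      - FRONTEND_HOST=https://ganymede.domain.com
      # OAuth redirect points at the publicly reachable API endpoint.
      - OAUTH_REDIRECT_URL=https://ganymede.domain.com/api/v1/auth/oauth/callback
      # Optional: run the API container as a non-root user (see PUID/PGID note above).
      - PUID=1000
      - PGID=1000
  ganymede-frontend:
    environment:
      # Public URLs a browser can reach, not internal names like http://ganymede-api:4000.
      - API_URL=https://ganymede.domain.com
      - CDN_URL=https://cdn.ganymede.domain.com
```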
+ ##### API -| ENV Name | Description | -| ------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `TZ` | Timezone. | -| `DB_HOST` | Host of the database. | -| `DB_PORT` | Port of the database. | -| `DB_USER` | Username for the database. | -| `DB_PASS` | Password for the database. | -| `DB_NAME` | Name of the database. | -| `DB_SSL` | Whether to use SSL. Default: `disable`. See [DB SSL](https://github.com/Zibbp/ganymede/wiki/DB-SSL) for more information. | -| `DB_SSL_ROOT_CERT` | _Optional_ Path to DB SSL root certificate. See [DB SSL](https://github.com/Zibbp/ganymede/wiki/DB-SSL) for more information. | -| `JWT_SECRET` | Secret for JWT tokens. This should be a long random string. | -| `JWT_REFRESH_SECRET` | Secret for JWT refresh tokens. This should be a long random string. | -| `TWITCH_CLIENT_ID` | Twitch application client ID. | -| `TWITCH_CLIENT_SECRET` | Twitch application client secret. | -| `FRONTEND_HOST` | Host of the frontend, used for CORS. Example: `http://192.168.1.2:4801` | -| `COOKIE_DOMAIN` | _Optional_ Base domain for cookies. Used when reverse proxying. See [reverse proxy](https://github.com/Zibbp/ganymede/wiki/Reverse-Proxy) for more information. | -| `OAUTH_PROVIDER_URL` | _Optional_ OAuth provider URL. See https://github.com/Zibbp/ganymede/wiki/SSO---OpenID-Connect | -| `OAUTH_CLIENT_ID` | _Optional_ OAuth client ID. | -| `OAUTH_CLIENT_SECRET` | _Optional_ OAuth client secret. | -| `OAUTH_REDIRECT_URL` | _Optional_ OAuth redirect URL, points to the API. Example: `http://localhost:4000/api/v1/auth/oauth/callback`. | -| `TEMPORAL_URL` | URL to the Temporal server | -| `MAX_CHAT_DOWNLOAD_EXECUTIONS` | Maximum number of chat downloads that can be running at once. | -| `MAX_CHAT_RENDER_EXECUTIONS` | Maximum number of chat renders that can be running at once. | -| `MAX_VIDEO_DOWNLOAD_EXECUTIONS` | Maximum number of video downloads that can be running at once. | -| `MAX_VIDEO_CONVERT_EXECUTIONS` | Maximum number of video conversions that can be running at once. | +| ENV Name | Description | +| ------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | +| `DEBUG` | Enable debug logging `true` or `false`. | +| `VIDEOS_DIR` | Path inside the container to the videos directory. Default: `/data/videos`. | +| `TEMP_DIR` | Path inside the container where temporary files are stored during archiving. Default: `/data/temp`. | +| `LOGS_DIR` | Path inside the container where log files are stored. Default: `/data/logs`. | +| `CONFIG_DIR` | Path inside the container where the config is stored. Default: `/data/config`. | +| `TZ` | Timezone. | +| `DB_HOST` | Host of the database. | +| `DB_PORT` | Port of the database. | +| `DB_USER` | Username for the database. | +| `DB_PASS` | Password for the database. | +| `DB_NAME` | Name of the database. | +| `DB_SSL` | Whether to use SSL. Default: `disable`. See [DB SSL](https://github.com/Zibbp/ganymede/wiki/DB-SSL) for more information. | +| `DB_SSL_ROOT_CERT` | _Optional_ Path to DB SSL root certificate. See [DB SSL](https://github.com/Zibbp/ganymede/wiki/DB-SSL) for more information. | +| `JWT_SECRET` | Secret for JWT tokens. This should be a long random string. | +| `JWT_REFRESH_SECRET` | Secret for JWT refresh tokens. This should be a long random string. 
| +| `TWITCH_CLIENT_ID` | Twitch application client ID. | +| `TWITCH_CLIENT_SECRET` | Twitch application client secret. | +| `FRONTEND_HOST` | Host of the frontend, used for CORS. Example: `http://192.168.1.2:4801` | +| `OAUTH_ENABLED` | _Optional_ Whether OAuth is enabled `true` or `false`. Must have the other OAuth variables set if this is enabled. | +| `OAUTH_PROVIDER_URL` | _Optional_ OAuth provider URL. See https://github.com/Zibbp/ganymede/wiki/SSO---OpenID-Connect | +| `OAUTH_CLIENT_ID` | _Optional_ OAuth client ID. | +| `OAUTH_CLIENT_SECRET` | _Optional_ OAuth client secret. | +| `OAUTH_REDIRECT_URL` | _Optional_ OAuth redirect URL, points to the API. Example: `http://localhost:4000/api/v1/auth/oauth/callback`. | +| `MAX_CHAT_DOWNLOAD_EXECUTIONS` | Maximum number of chat downloads that can be running at once. Live streams bypass this limit. | +| `MAX_CHAT_RENDER_EXECUTIONS` | Maximum number of chat renders that can be running at once. | +| `MAX_VIDEO_DOWNLOAD_EXECUTIONS` | Maximum number of video downloads that can be running at once. Live streams bypass this limit. | +| `MAX_VIDEO_CONVERT_EXECUTIONS` | Maximum number of video conversions that can be running at once. | ##### Frontend @@ -132,22 +140,19 @@ Note: On startup the container will `chown` `/data`, `/logs`, and `/tmp`. It wil ##### API -| Volume | Description | Example | -| ------- | ------------------------------------------------------------------------------- | ----------------------- | -| `/vods` | Mount for VOD storage. This example I have my NAS mounted to `/mnt/vault/vods`. | `/mnt/vault/vods:/vods` | -| `/logs` | Queue log folder. | `./logs:/logs` | -| `/data` | Config folder. | `./data:/data` | - -**Optional** - -`./tmp:/tmp` Binding the `tmp` folder prevents lost data if the container crashes as temporary downloads are stored in `tmp` which gets flushed when the container stops. +| Volume | Description | Example | +| -------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ---------------------------- | +| `/data/videos` | Mount for video storage. This **must** match the `VIDEOS_DIR` environment variable. | `/mnt/nas/vods:/data/videos` | +| `/data/logs` | Mount to store task logs. This **must** match the `LOGS_DIR` environment variable. | `./logs:/data/logs` | +| `/data/temp` | Mount to store temporary files during the archive process. This is mounted to the host so files are recoverable in the event of a crash. This **must** match the `TEMP_DIR` environment variable. | `./temp:/data/temp` | +| `/data/config` | Mount to store the config. This **must** match the `CONFIG_DIR` environment variable. | `./config:/data/config` | ##### Nginx -| Volume | Description | Example | -| -------------------------- | ---------------------------------------------- | ---------------------------------------------- | -| `/mnt/vods` | VOD storage, same as the API container volume. | `/mnt/vault/vods:/mnt/vods` | -| `/etc/nginx/nginx.conf:ro` | Path to the Nginx conf file. | `/path/to/nginx.conf:/etc/nginx/nginx.conf:ro` | +| Volume | Description | Example | +| -------------------------- | ---------------------------------------------------------- | ---------------------------------------------- | +| `/data/videos` | Mount for video storage, same as the API container volume. | `/mnt/nas/vods:/data/videos` | +| `/etc/nginx/nginx.conf:ro` | Path to the Nginx conf file.
| `/path/to/nginx.conf:/etc/nginx/nginx.conf:ro` | ## Acknowledgements @@ -158,7 +163,3 @@ Note: On startup the container will `chown` `/data`, `/logs`, and `/tmp`. It wil ## License [GNU General Public License v3.0](https://github.com/Zibbp/ganymede/blob/master/LICENSE) - -## Authors - -- [@Zibbp](https://www.github.com/Zibbp) diff --git a/cmd/server/main.go b/cmd/server/main.go index 782118e6..8bdec376 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -1,122 +1,24 @@ package main import ( - "fmt" + "context" "os" - "strconv" - "time" "github.com/rs/zerolog" "github.com/rs/zerolog/log" - "github.com/rs/zerolog/pkgerrors" - "github.com/spf13/viper" - "github.com/zibbp/ganymede/internal/admin" - "github.com/zibbp/ganymede/internal/archive" - "github.com/zibbp/ganymede/internal/auth" - "github.com/zibbp/ganymede/internal/channel" - "github.com/zibbp/ganymede/internal/chapter" - "github.com/zibbp/ganymede/internal/config" - "github.com/zibbp/ganymede/internal/database" - "github.com/zibbp/ganymede/internal/kv" _ "github.com/zibbp/ganymede/internal/kv" - "github.com/zibbp/ganymede/internal/live" - "github.com/zibbp/ganymede/internal/metrics" - "github.com/zibbp/ganymede/internal/playback" - "github.com/zibbp/ganymede/internal/playlist" - "github.com/zibbp/ganymede/internal/queue" - "github.com/zibbp/ganymede/internal/scheduler" - "github.com/zibbp/ganymede/internal/task" - "github.com/zibbp/ganymede/internal/temporal" - transportHttp "github.com/zibbp/ganymede/internal/transport/http" - "github.com/zibbp/ganymede/internal/twitch" - "github.com/zibbp/ganymede/internal/user" - "github.com/zibbp/ganymede/internal/vod" + "github.com/zibbp/ganymede/internal/server" + "github.com/zibbp/ganymede/internal/utils" ) -var ( - Version = "undefined" - BuildTime = "undefined" - GitHash = "undefined" -) - -// @title Ganymede API -// @version 1.0 -// @description Authentication is handled using JWT tokens. The tokens are set as access-token and refresh-token cookies. -// @description For information regarding which role is authorized for which endpoint, see the http handler https://github.com/Zibbp/ganymede/blob/main/internal/transport/http/handler.go. 
- -// @contact.name Zibbp -// @contact.url https://github.com/zibbp/ganymede - -// @license.name GPL-3.0 - -// @host localhost:4000 -// @BasePath /api/v1 - -// @securityDefinitions.apikey ApiKeyCookieAuth -// @in cookie -// @name access-token - -// @securityDefinitions.refreshToken ApiKeyCookieRefresh -// @in cookie -// @name refresh-token - -func Run() error { - - config.NewConfig(true) - - configDebug := viper.GetBool("debug") - zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack - if configDebug { - log.Info().Msg("debug mode enabled") - zerolog.SetGlobalLevel(zerolog.DebugLevel) - } else { - zerolog.SetGlobalLevel(zerolog.InfoLevel) - } - - database.InitializeDatabase(false) - store := database.DB() - - // Initialize temporal client - temporal.InitializeTemporalClient() - - authService := auth.NewService(store) - channelService := channel.NewService(store) - vodService := vod.NewService(store) - queueService := queue.NewService(store, vodService, channelService) - twitchService := twitch.NewService() - archiveService := archive.NewService(store, twitchService, channelService, vodService, queueService) - adminService := admin.NewService(store) - userService := user.NewService(store) - configService := config.NewService(store) - liveService := live.NewService(store, twitchService, archiveService) - schedulerService := scheduler.NewService(liveService, archiveService) - playbackService := playback.NewService(store) - metricsService := metrics.NewService(store) - playlistService := playlist.NewService(store) - taskService := task.NewService(store, liveService, archiveService) - chapterService := chapter.NewService() - - httpHandler := transportHttp.NewHandler(authService, channelService, vodService, queueService, twitchService, archiveService, adminService, userService, configService, liveService, schedulerService, playbackService, metricsService, playlistService, taskService, chapterService) - - if err := httpHandler.Serve(); err != nil { - return err - } - - return nil -} - func main() { + ctx := context.Background() + if os.Getenv("ENV") == "dev" { log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}) } - kv.DB().Set("version", Version) - kv.DB().Set("build_time", BuildTime) - kv.DB().Set("git_hash", GitHash) - kv.DB().Set("start_time_unix", strconv.FormatInt(time.Now().Unix(), 10)) - fmt.Printf("Version : %s\n", Version) - fmt.Printf("Git Hash : %s\n", GitHash) - fmt.Printf("Build Time : %s\n", BuildTime) - if err := Run(); err != nil { + log.Info().Str("commit", utils.Commit).Str("build_time", utils.BuildTime).Msg("starting server") + if err := server.Run(ctx); err != nil { log.Fatal().Err(err).Msg("failed to run") } } diff --git a/cmd/worker/main.go b/cmd/worker/main.go index 0b4bee6f..a7d07e79 100644 --- a/cmd/worker/main.go +++ b/cmd/worker/main.go @@ -1,213 +1,122 @@ package main import ( + "context" + "fmt" "os" + "os/signal" + "syscall" - "github.com/kelseyhightower/envconfig" "github.com/rs/zerolog" "github.com/rs/zerolog/log" - "github.com/zibbp/ganymede/internal/activities" + "github.com/zibbp/ganymede/internal/archive" + "github.com/zibbp/ganymede/internal/blocked" + "github.com/zibbp/ganymede/internal/channel" + "github.com/zibbp/ganymede/internal/config" serverConfig "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/database" - "github.com/zibbp/ganymede/internal/temporal" - "github.com/zibbp/ganymede/internal/twitch" - "github.com/zibbp/ganymede/internal/workflows" - - "go.temporal.io/sdk/client" - "go.temporal.io/sdk/worker" + 
"github.com/zibbp/ganymede/internal/live" + "github.com/zibbp/ganymede/internal/platform" + "github.com/zibbp/ganymede/internal/queue" + tasks_client "github.com/zibbp/ganymede/internal/tasks/client" + tasks_worker "github.com/zibbp/ganymede/internal/tasks/worker" + "github.com/zibbp/ganymede/internal/utils" + "github.com/zibbp/ganymede/internal/vod" ) -type Config struct { - MAX_CHAT_DOWNLOAD_EXECUTIONS int `default:"5"` - MAX_CHAT_RENDER_EXECUTIONS int `default:"3"` - MAX_VIDEO_DOWNLOAD_EXECUTIONS int `default:"5"` - MAX_VIDEO_CONVERT_EXECUTIONS int `default:"3"` - TEMPORAL_URL string `default:"temporal:7233"` -} - -type Logger struct { - logger *zerolog.Logger -} +func main() { + ctx := context.Background() -func (l *Logger) Debug(msg string, keyvals ...interface{}) { - if len(keyvals)%2 != 0 { - l.logger.Debug().Msgf(msg) - return + envConfig := config.GetEnvConfig() + envAppConfig := config.GetEnvApplicationConfig() + _, err := serverConfig.Init() + if err != nil { + log.Panic().Err(err).Msg("Error initializing server config") } - fields := make(map[string]interface{}) - for i := 0; i < len(keyvals); i += 2 { - if key, ok := keyvals[i].(string); ok { - fields[key] = keyvals[i+1] - } + if os.Getenv("ENV") == "dev" { + log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}) } - l.logger.Debug().Fields(fields).Msg(msg) -} - -func (l *Logger) Info(msg string, keyvals ...interface{}) { - if len(keyvals)%2 != 0 { - l.logger.Info().Msgf(msg) - return - } + log.Info().Str("commit", utils.Commit).Str("build_time", utils.BuildTime).Msg("starting worker") - fields := make(map[string]interface{}) - for i := 0; i < len(keyvals); i += 2 { - if key, ok := keyvals[i].(string); ok { - fields[key] = keyvals[i+1] - } - } + dbString := fmt.Sprintf("user=%s password=%s host=%s port=%s dbname=%s sslmode=%s", envAppConfig.DB_USER, envAppConfig.DB_PASS, envAppConfig.DB_HOST, envAppConfig.DB_PORT, envAppConfig.DB_NAME, envAppConfig.DB_SSL) - l.logger.Info().Fields(fields).Msg(msg) -} + db := database.NewDatabase(ctx, database.DatabaseConnectionInput{ + DBString: dbString, + IsWorker: false, + }) -func (l *Logger) Warn(msg string, keyvals ...interface{}) { - if len(keyvals)%2 != 0 { - l.logger.Warn().Msgf(msg) - return + riverClient, err := tasks_client.NewRiverClient(tasks_client.RiverClientInput{ + DB_URL: dbString, + }) + if err != nil { + log.Panic().Err(err).Msg("Error creating river worker") } - fields := make(map[string]interface{}) - for i := 0; i < len(keyvals); i += 2 { - if key, ok := keyvals[i].(string); ok { - fields[key] = keyvals[i+1] + var platformTwitch platform.Platform + // setup twitch platform + if envConfig.TwitchClientId != "" && envConfig.TwitchClientSecret != "" { + platformTwitch = &platform.TwitchConnection{ + ClientId: envConfig.TwitchClientId, + ClientSecret: envConfig.TwitchClientSecret, } - } - - l.logger.Warn().Fields(fields).Msg(msg) -} - -func (l *Logger) Error(msg string, keyvals ...interface{}) { - if len(keyvals)%2 != 0 { - l.logger.Error().Msgf(msg) - return - } - - fields := make(map[string]interface{}) - for i := 0; i < len(keyvals); i += 2 { - if key, ok := keyvals[i].(string); ok { - fields[key] = keyvals[i+1] + _, err = platformTwitch.Authenticate(ctx) + if err != nil { + log.Panic().Err(err).Msg("Error authenticating to Twitch") } } - l.logger.Error().Fields(fields).Msg(msg) -} - -func main() { - if os.Getenv("ENV") == "dev" { - log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}) - } - var config Config - err := envconfig.Process("", &config) + 
channelService := channel.NewService(db, platformTwitch) + vodService := vod.NewService(db, riverClient, platformTwitch) + queueService := queue.NewService(db, vodService, channelService, riverClient) + blockedVodsService := blocked.NewService(db) + // twitchService := twitch.NewService() + archiveService := archive.NewService(db, channelService, vodService, queueService, blockedVodsService, riverClient, platformTwitch) + liveService := live.NewService(db, archiveService, platformTwitch) + + // initialize river + riverWorkerClient, err := tasks_worker.NewRiverWorker(tasks_worker.RiverWorkerInput{ + DB_URL: dbString, + DB: db, + PlatformTwitch: platformTwitch, + VideoDownloadWorkers: envConfig.MaxVideoDownloadExecutions, + VideoPostProcessWorkers: envConfig.MaxVideoConvertExecutions, + ChatDownloadWorkers: envConfig.MaxChatDownloadExecutions, + ChatRenderWorkers: envConfig.MaxChatRenderExecutions, + }) if err != nil { - log.Fatal().Msgf("Unable to process environment variables: %v", err) - } - - log.Info().Msgf("Starting worker with config: %+v", config) - - // initializte main program config - // this needs to be removed in the future to decouple the worker from the server - serverConfig.NewConfig(false) - - logger := zerolog.New(os.Stdout).With().Timestamp().Logger().With().Str("service", "worker").Logger() - - clientOptions := client.Options{ - HostPort: config.TEMPORAL_URL, - Logger: &Logger{logger: &logger}, + log.Panic().Err(err).Msg("Error creating river worker") } - c, err := client.Dial(clientOptions) + // get periodic tasks + periodicTasks, err := riverWorkerClient.GetPeriodicTasks(liveService) if err != nil { - log.Fatal().Msgf("Unable to create client: %v", err) + log.Panic().Err(err).Msg("Error getting periodic tasks") } - defer c.Close() - // authenticate to Twitch - err = twitch.Authenticate() - if err != nil { - log.Fatal().Msgf("Unable to authenticate to Twitch: %v", err) + for _, task := range periodicTasks { + riverWorkerClient.Client.PeriodicJobs().Add(task) } - database.InitializeDatabase(true) - - // Initialize the temporal client for the worker - temporal.InitializeTemporalClient() + // start worker in a goroutine + go func() { + if err := riverWorkerClient.Start(); err != nil { + log.Panic().Err(err).Msg("Error running river worker") + } + }() - taskQueues := map[string]int{ - "archive": 100, - "chat-download": config.MAX_CHAT_DOWNLOAD_EXECUTIONS, - "chat-render": config.MAX_CHAT_RENDER_EXECUTIONS, - "video-download": config.MAX_VIDEO_DOWNLOAD_EXECUTIONS, - "video-convert": config.MAX_VIDEO_CONVERT_EXECUTIONS, - } + // Set up channel to listen for OS signals + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) - // create worker interrupt channel - interrupt := make(chan os.Signal, 1) - - for queueName, maxActivites := range taskQueues { - hostname, err := os.Hostname() - if err != nil { - log.Fatal().Msgf("Unable to get hostname: %v", err) - } - // create workers - w := worker.New(c, queueName, worker.Options{ - MaxConcurrentActivityExecutionSize: maxActivites, - Identity: hostname, - OnFatalError: func(err error) { - log.Error().Msgf("Worker encountered fatal error: %v", err) - }, - }) - - w.RegisterWorkflow(workflows.ArchiveVideoWorkflow) - w.RegisterWorkflow(workflows.SaveTwitchVideoInfoWorkflow) - w.RegisterWorkflow(workflows.CreateDirectoryWorkflow) - w.RegisterWorkflow(workflows.DownloadTwitchThumbnailsWorkflow) - w.RegisterWorkflow(workflows.ArchiveTwitchVideoWorkflow) - 
w.RegisterWorkflow(workflows.DownloadTwitchVideoWorkflow) - w.RegisterWorkflow(workflows.PostprocessVideoWorkflow) - w.RegisterWorkflow(workflows.MoveVideoWorkflow) - w.RegisterWorkflow(workflows.ArchiveTwitchChatWorkflow) - w.RegisterWorkflow(workflows.DownloadTwitchChatWorkflow) - w.RegisterWorkflow(workflows.RenderTwitchChatWorkflow) - w.RegisterWorkflow(workflows.MoveTwitchChatWorkflow) - w.RegisterWorkflow(workflows.ArchiveLiveVideoWorkflow) - w.RegisterWorkflow(workflows.ArchiveTwitchLiveVideoWorkflow) - w.RegisterWorkflow(workflows.DownloadTwitchLiveChatWorkflow) - w.RegisterWorkflow(workflows.DownloadTwitchLiveThumbnailsWorkflow) - w.RegisterWorkflow(workflows.DownloadTwitchLiveThumbnailsWorkflowWait) - w.RegisterWorkflow(workflows.DownloadTwitchLiveVideoWorkflow) - w.RegisterWorkflow(workflows.SaveTwitchLiveVideoInfoWorkflow) - w.RegisterWorkflow(workflows.ArchiveTwitchLiveChatWorkflow) - w.RegisterWorkflow(workflows.ConvertTwitchLiveChatWorkflow) - w.RegisterWorkflow(workflows.SaveTwitchVideoChapters) - w.RegisterWorkflow(workflows.UpdateTwitchLiveStreamArchivesWithVodIds) - - w.RegisterActivity(activities.ArchiveVideoActivity) - w.RegisterActivity(activities.SaveTwitchVideoInfo) - w.RegisterActivity(activities.CreateDirectory) - w.RegisterActivity(activities.DownloadTwitchThumbnails) - w.RegisterActivity(activities.DownloadTwitchVideo) - w.RegisterActivity(activities.PostprocessVideo) - w.RegisterActivity(activities.MoveVideo) - w.RegisterActivity(activities.DownloadTwitchChat) - w.RegisterActivity(activities.RenderTwitchChat) - w.RegisterActivity(activities.MoveChat) - w.RegisterActivity(activities.DownloadTwitchLiveChat) - w.RegisterActivity(activities.DownloadTwitchLiveThumbnails) - w.RegisterActivity(activities.DownloadTwitchLiveVideo) - w.RegisterActivity(activities.SaveTwitchLiveVideoInfo) - w.RegisterActivity(activities.KillTwitchLiveChatDownload) - w.RegisterActivity(activities.ConvertTwitchLiveChat) - w.RegisterActivity(activities.TwitchSaveVideoChapters) - w.RegisterActivity(activities.UpdateTwitchLiveStreamArchivesWithVodIds) - - err = w.Start() - if err != nil { - log.Fatal().Msgf("Unable to start worker: %v", err) - } + // Block until a signal is received + <-sigs + // Gracefully stop the worker + if err := riverWorkerClient.Stop(); err != nil { + log.Panic().Err(err).Msg("Error stopping river worker") } - <-interrupt - + log.Info().Msg("worker stopped") } diff --git a/docker-compose.yml b/docker-compose.yml index c72e438e..bac78457 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,37 +5,43 @@ services: image: ghcr.io/zibbp/ganymede:latest restart: unless-stopped depends_on: - - ganymede-temporal + - ganymede-db environment: + - DEBUG=false - TZ=America/Chicago # Set to your timezone + # Data paths in container; update the mounted volume paths as well + - VIDEOS_DIR=/data/videos + - TEMP_DIR=/data/temp + - LOGS_DIR=/data/logs + - CONFIG_DIR=/data/config + # Database settings - DB_HOST=ganymede-db - DB_PORT=5432 - DB_USER=ganymede - DB_PASS=PASSWORD - DB_NAME=ganymede-prd - DB_SSL=disable - - JWT_SECRET=SECRET - - JWT_REFRESH_SECRET=SECRET - - TWITCH_CLIENT_ID= - - TWITCH_CLIENT_SECRET= - - FRONTEND_HOST=http://IP:PORT - # OPTIONAL + - JWT_SECRET=SECRET # set as a random string + - JWT_REFRESH_SECRET=SECRET # set as a random string + - TWITCH_CLIENT_ID= # from your twitch application + - TWITCH_CLIENT_SECRET= # from your twitch application + - FRONTEND_HOST=http://IP:PORT # URL to the frontend service. Needs to be the 'public' url that you visit. 
+ # Worker settings. Max number of tasks to run in parallel per type. + - MAX_CHAT_DOWNLOAD_EXECUTIONS=3 + - MAX_CHAT_RENDER_EXECUTIONS=2 + - MAX_VIDEO_DOWNLOAD_EXECUTIONS=2 + - MAX_VIDEO_CONVERT_EXECUTIONS=3 + # Optional OAuth settings + # - OAUTH_ENABLED=false # - OAUTH_PROVIDER_URL= # - OAUTH_CLIENT_ID= # - OAUTH_CLIENT_SECRET= # - OAUTH_REDIRECT_URL=http://IP:PORT/api/v1/auth/oauth/callback # Points to the API service - - TEMPORAL_URL=ganymede-temporal:7233 - # WORKER - - MAX_CHAT_DOWNLOAD_EXECUTIONS=5 - - MAX_CHAT_RENDER_EXECUTIONS=3 - - MAX_VIDEO_DOWNLOAD_EXECUTIONS=5 - - MAX_VIDEO_CONVERT_EXECUTIONS=3 volumes: - - /path/to/vod/storage:/vods - - ./logs:/logs - - ./data:/data - # Uncomment below to persist temp files - #- ./tmp:/tmp + - /path/to/vod/storage:/data/videos # update VIDEOS_DIR env var + - ./temp:/data/temp # update TEMP_DIR env var + - ./logs:/data/logs # queue logs + - ./config:/data/config # config and other miscellaneous files ports: - 4800:4000 ganymede-frontend: @@ -43,36 +49,13 @@ services: image: ghcr.io/zibbp/ganymede-frontend:latest restart: unless-stopped environment: - - API_URL=http://IP:PORT # Points to the API service - - CDN_URL=http://IP:PORT # Points to the CDN service + - API_URL=http://IP:PORT # Points to the API service; the container must be able to access this URL internally + - CDN_URL=http://IP:PORT # Points to the nginx service - SHOW_SSO_LOGIN_BUTTON=true # show/hide SSO login button on login page - FORCE_SSO_AUTH=false # force SSO auth for all users (bypasses login page and redirects to SSO) - REQUIRE_LOGIN=false # require login to view videos ports: - 4801:3000 - ganymede-temporal: - image: temporalio/auto-setup:1.23 - container_name: ganymede-temporal - depends_on: - - ganymede-db - environment: - - DB=postgres12 # this tells temporal to use postgres (not the db name) - - DB_PORT=5432 - - POSTGRES_USER=ganymede - - POSTGRES_PWD=PASSWORD - - POSTGRES_SEEDS=ganymede-db # name of the db service - ports: - - 7233:7233 - # -- Uncomment below to enable temporal web ui -- - # ganymede-temporal-ui: - # image: temporalio/ui:latest - # container_name: ganymede-temporal-ui - # depends_on: - # - ganymede-temporal - # environment: - # - TEMPORAL_ADDRESS=ganymede-temporal:7233 - # ports: - # - 8233:8080 ganymede-db: container_name: ganymede-db image: postgres:14 @@ -84,11 +67,19 @@ services: - POSTGRES_DB=ganymede-prd ports: - 4803:5432 + # Nginx is not strictly required; it provides nice-to-have caching. The API container will serve the VIDEOS_DIR env var path if you want to use that instead (e.g. VIDEOS_DIR=/data/videos would be served at IP:4800/data/videos/channel/channel.jpg). ganymede-nginx: container_name: ganymede-nginx image: nginx volumes: - /path/to/nginx.conf:/etc/nginx/nginx.conf:ro - - /pah/to/vod/stoage:/mnt/vods + - /path/to/vod/storage:/data/videos ports: - 4802:8080 + # River UI is a frontend for the task system that Ganymede uses. This provides a more in-depth look at the task queue. + ganymede-river-ui: + image: ghcr.io/riverqueue/riverui:0.3 + environment: + - DATABASE_URL=postgres://ganymede:DB_PASSWORD@ganymede-db:5432/ganymede-prd # update with env settings from the ganymede-db container. If you're using the default database settings then just update the DB_PASSWORD env var.
+ ports: + - 4804:8080 diff --git a/docs/docs.go b/docs/docs.go index f36200a9..1694ff4b 100644 --- a/docs/docs.go +++ b/docs/docs.go @@ -4597,7 +4597,7 @@ const docTemplate = `{ "description": "The platform the VOD is from, takes an enum.", "allOf": [ { - "$ref": "#/definitions/utils.VodPlatform" + "$ref": "#/definitions/utils.VideoPlatform" } ] }, @@ -4985,7 +4985,7 @@ const docTemplate = `{ ], "allOf": [ { - "$ref": "#/definitions/utils.VodPlatform" + "$ref": "#/definitions/utils.VideoPlatform" } ] }, @@ -5692,7 +5692,7 @@ const docTemplate = `{ "Failed" ] }, - "utils.VodPlatform": { + "utils.VideoPlatform": { "type": "string", "enum": [ "twitch", diff --git a/docs/swagger.json b/docs/swagger.json index afa60809..226b2fc1 100644 --- a/docs/swagger.json +++ b/docs/swagger.json @@ -4590,7 +4590,7 @@ "description": "The platform the VOD is from, takes an enum.", "allOf": [ { - "$ref": "#/definitions/utils.VodPlatform" + "$ref": "#/definitions/utils.VideoPlatform" } ] }, @@ -4978,7 +4978,7 @@ ], "allOf": [ { - "$ref": "#/definitions/utils.VodPlatform" + "$ref": "#/definitions/utils.VideoPlatform" } ] }, @@ -5685,7 +5685,7 @@ "Failed" ] }, - "utils.VodPlatform": { + "utils.VideoPlatform": { "type": "string", "enum": [ "twitch", diff --git a/docs/swagger.yaml b/docs/swagger.yaml index 203bfa83..b64afbe4 100644 --- a/docs/swagger.yaml +++ b/docs/swagger.yaml @@ -14,7 +14,7 @@ definitions: git_hash: type: string program_versions: - $ref: '#/definitions/admin.ProgramVersions' + $ref: "#/definitions/admin.ProgramVersions" uptime: type: string version: @@ -34,9 +34,9 @@ definitions: archive.TwitchVodResponse: properties: queue: - $ref: '#/definitions/ent.Queue' + $ref: "#/definitions/ent.Queue" vod: - $ref: '#/definitions/ent.Vod' + $ref: "#/definitions/ent.Vod" type: object chat.Comment: properties: @@ -45,7 +45,7 @@ definitions: channel_id: type: string commenter: - $ref: '#/definitions/chat.Commenter' + $ref: "#/definitions/chat.Commenter" content_id: type: string content_offset_seconds: @@ -55,7 +55,7 @@ definitions: created_at: type: string message: - $ref: '#/definitions/chat.Message' + $ref: "#/definitions/chat.Message" more_replies: type: boolean source: @@ -96,7 +96,7 @@ definitions: chat.Fragment: properties: emoticon: - $ref: '#/definitions/chat.FragmentEmoticon' + $ref: "#/definitions/chat.FragmentEmoticon" text: type: string type: object @@ -132,7 +132,7 @@ definitions: properties: badges: items: - $ref: '#/definitions/chat.GanymedeBadge' + $ref: "#/definitions/chat.GanymedeBadge" type: array type: object chat.GanymedeEmote: @@ -156,7 +156,7 @@ definitions: properties: emotes: items: - $ref: '#/definitions/chat.GanymedeEmote' + $ref: "#/definitions/chat.GanymedeEmote" type: array type: object chat.Message: @@ -167,22 +167,22 @@ definitions: type: string emoticons: items: - $ref: '#/definitions/chat.EmoticonElement' + $ref: "#/definitions/chat.EmoticonElement" type: array fragments: items: - $ref: '#/definitions/chat.Fragment' + $ref: "#/definitions/chat.Fragment" type: array is_action: type: boolean user_badges: items: - $ref: '#/definitions/chat.UserBadge' + $ref: "#/definitions/chat.UserBadge" type: array user_color: type: string user_notice_params: - $ref: '#/definitions/chat.UserNoticeParams' + $ref: "#/definitions/chat.UserNoticeParams" type: object chat.UserBadge: properties: @@ -210,7 +210,7 @@ definitions: live_check_interval_seconds: type: integer notifications: - $ref: '#/definitions/config.Notification' + $ref: "#/definitions/config.Notification" oauth_enabled: type: 
boolean parameters: @@ -227,7 +227,7 @@ definitions: registration_enabled: type: boolean storage_templates: - $ref: '#/definitions/config.StorageTemplate' + $ref: "#/definitions/config.StorageTemplate" type: object config.Notification: properties: @@ -273,7 +273,7 @@ definitions: type: string edges: allOf: - - $ref: '#/definitions/ent.ChannelEdges' + - $ref: "#/definitions/ent.ChannelEdges" description: |- Edges holds the relations/edges for other nodes in the graph. The values are being populated by the ChannelQuery when eager-loading is set. @@ -298,12 +298,12 @@ definitions: live: description: Live holds the value of the live edge. items: - $ref: '#/definitions/ent.Live' + $ref: "#/definitions/ent.Live" type: array vods: description: Vods holds the value of the vods edge. items: - $ref: '#/definitions/ent.Vod' + $ref: "#/definitions/ent.Vod" type: array type: object ent.Live: @@ -328,7 +328,7 @@ definitions: type: boolean edges: allOf: - - $ref: '#/definitions/ent.LiveEdges' + - $ref: "#/definitions/ent.LiveEdges" description: |- Edges holds the relations/edges for other nodes in the graph. The values are being populated by the LiveQuery when eager-loading is set. @@ -361,7 +361,7 @@ definitions: properties: edges: allOf: - - $ref: '#/definitions/ent.LiveCategoryEdges' + - $ref: "#/definitions/ent.LiveCategoryEdges" description: |- Edges holds the relations/edges for other nodes in the graph. The values are being populated by the LiveCategoryQuery when eager-loading is set. @@ -376,7 +376,7 @@ definitions: properties: live: allOf: - - $ref: '#/definitions/ent.Live' + - $ref: "#/definitions/ent.Live" description: Live holds the value of the live edge. type: object ent.LiveEdges: @@ -384,11 +384,11 @@ definitions: categories: description: Categories holds the value of the categories edge. items: - $ref: '#/definitions/ent.LiveCategory' + $ref: "#/definitions/ent.LiveCategory" type: array channel: allOf: - - $ref: '#/definitions/ent.Channel' + - $ref: "#/definitions/ent.Channel" description: Channel holds the value of the channel edge. type: object ent.Playback: @@ -401,7 +401,7 @@ definitions: type: string status: allOf: - - $ref: '#/definitions/utils.PlaybackStatus' + - $ref: "#/definitions/utils.PlaybackStatus" description: Status holds the value of the "status" field. time: description: Time holds the value of the "time" field. @@ -426,7 +426,7 @@ definitions: type: string edges: allOf: - - $ref: '#/definitions/ent.PlaylistEdges' + - $ref: "#/definitions/ent.PlaylistEdges" description: |- Edges holds the relations/edges for other nodes in the graph. The values are being populated by the PlaylistQuery when eager-loading is set. @@ -448,7 +448,7 @@ definitions: vods: description: Vods holds the value of the vods edge. items: - $ref: '#/definitions/ent.Vod' + $ref: "#/definitions/ent.Vod" type: array type: object ent.Queue: @@ -464,7 +464,7 @@ definitions: type: string edges: allOf: - - $ref: '#/definitions/ent.QueueEdges' + - $ref: "#/definitions/ent.QueueEdges" description: |- Edges holds the relations/edges for other nodes in the graph. The values are being populated by the QueueQuery when eager-loading is set. @@ -485,48 +485,53 @@ definitions: type: boolean task_chat_convert: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" description: TaskChatConvert holds the value of the "task_chat_convert" field. 
task_chat_download: allOf: - - $ref: '#/definitions/utils.TaskStatus' - description: TaskChatDownload holds the value of the "task_chat_download" + - $ref: "#/definitions/utils.TaskStatus" + description: + TaskChatDownload holds the value of the "task_chat_download" field. task_chat_move: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" description: TaskChatMove holds the value of the "task_chat_move" field. task_chat_render: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" description: TaskChatRender holds the value of the "task_chat_render" field. task_video_convert: allOf: - - $ref: '#/definitions/utils.TaskStatus' - description: TaskVideoConvert holds the value of the "task_video_convert" + - $ref: "#/definitions/utils.TaskStatus" + description: + TaskVideoConvert holds the value of the "task_video_convert" field. task_video_download: allOf: - - $ref: '#/definitions/utils.TaskStatus' - description: TaskVideoDownload holds the value of the "task_video_download" + - $ref: "#/definitions/utils.TaskStatus" + description: + TaskVideoDownload holds the value of the "task_video_download" field. task_video_move: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" description: TaskVideoMove holds the value of the "task_video_move" field. task_vod_create_folder: allOf: - - $ref: '#/definitions/utils.TaskStatus' - description: TaskVodCreateFolder holds the value of the "task_vod_create_folder" + - $ref: "#/definitions/utils.TaskStatus" + description: + TaskVodCreateFolder holds the value of the "task_vod_create_folder" field. task_vod_download_thumbnail: allOf: - - $ref: '#/definitions/utils.TaskStatus' - description: TaskVodDownloadThumbnail holds the value of the "task_vod_download_thumbnail" + - $ref: "#/definitions/utils.TaskStatus" + description: + TaskVodDownloadThumbnail holds the value of the "task_vod_download_thumbnail" field. task_vod_save_info: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" description: TaskVodSaveInfo holds the value of the "task_vod_save_info" field. updated_at: description: UpdatedAt holds the value of the "updated_at" field. @@ -539,7 +544,7 @@ definitions: properties: vod: allOf: - - $ref: '#/definitions/ent.Vod' + - $ref: "#/definitions/ent.Vod" description: Vod holds the value of the vod edge. type: object ent.User: @@ -555,7 +560,7 @@ definitions: type: boolean role: allOf: - - $ref: '#/definitions/utils.Role' + - $ref: "#/definitions/utils.Role" description: Role holds the value of the "role" field. sub: description: Sub holds the value of the "sub" field. @@ -589,7 +594,7 @@ definitions: type: integer edges: allOf: - - $ref: '#/definitions/ent.VodEdges' + - $ref: "#/definitions/ent.VodEdges" description: |- Edges holds the relations/edges for other nodes in the graph. The values are being populated by the VodQuery when eager-loading is set. @@ -610,7 +615,7 @@ definitions: type: string platform: allOf: - - $ref: '#/definitions/utils.VodPlatform' + - $ref: "#/definitions/utils.VideoPlatform" description: The platform the VOD is from, takes an enum. processing: description: Whether the VOD is currently processing. @@ -629,7 +634,7 @@ definitions: type: string type: allOf: - - $ref: '#/definitions/utils.VodType' + - $ref: "#/definitions/utils.VodType" description: The type of VOD, takes an enum. updated_at: description: UpdatedAt holds the value of the "updated_at" field. 
@@ -641,7 +646,8 @@ definitions: description: Views holds the value of the "views" field. type: integer web_thumbnail_path: - description: WebThumbnailPath holds the value of the "web_thumbnail_path" + description: + WebThumbnailPath holds the value of the "web_thumbnail_path" field. type: string type: object @@ -649,16 +655,16 @@ definitions: properties: channel: allOf: - - $ref: '#/definitions/ent.Channel' + - $ref: "#/definitions/ent.Channel" description: Channel holds the value of the channel edge. playlists: description: Playlists holds the value of the playlists edge. items: - $ref: '#/definitions/ent.Playlist' + $ref: "#/definitions/ent.Playlist" type: array queue: allOf: - - $ref: '#/definitions/ent.Queue' + - $ref: "#/definitions/ent.Queue" description: Queue holds the value of the queue edge. type: object http.AddMultipleWatchedChannelRequest: @@ -685,27 +691,27 @@ definitions: type: boolean resolution: enum: - - best - - source - - 720p60 - - 480p30 - - 360p30 - - 160p30 + - best + - source + - 720p60 + - 480p30 + - 360p30 + - 160p30 type: string watch_live: type: boolean watch_vod: type: boolean required: - - channel_id - - resolution + - channel_id + - resolution type: object http.AddVodToPlaylistRequest: properties: vod_id: type: string required: - - vod_id + - vod_id type: object http.AddWatchedChannelRequest: properties: @@ -729,27 +735,27 @@ definitions: type: boolean resolution: enum: - - best - - source - - 720p60 - - 480p30 - - 360p30 - - 160p30 + - best + - source + - 720p60 + - 480p30 + - 360p30 + - 160p30 type: string watch_live: type: boolean watch_vod: type: boolean required: - - channel_id - - resolution + - channel_id + - resolution type: object http.ArchiveChannelRequest: properties: channel_name: type: string required: - - channel_name + - channel_name type: object http.ArchiveVodRequest: properties: @@ -757,21 +763,21 @@ definitions: type: boolean quality: allOf: - - $ref: '#/definitions/utils.VodQuality' + - $ref: "#/definitions/utils.VodQuality" enum: - - best - - source - - 720p60 - - 480p30 - - 360p30 - - 160p30 + - best + - source + - 720p60 + - 480p30 + - 360p30 + - 160p30 render_chat: type: boolean vod_id: type: string required: - - quality - - vod_id + - quality + - vod_id type: object http.ChangePasswordRequest: properties: @@ -783,9 +789,9 @@ definitions: old_password: type: string required: - - confirm_new_password - - new_password - - old_password + - confirm_new_password + - new_password + - old_password type: object http.ConvertChatRequest: properties: @@ -802,12 +808,12 @@ definitions: vod_id: type: string required: - - channel_id - - channel_name - - chat_start - - file_name - - vod_external_id - - vod_id + - channel_id + - channel_name + - chat_start + - file_name + - vod_external_id + - vod_id type: object http.CreateChannelRequest: properties: @@ -823,9 +829,9 @@ definitions: minLength: 2 type: string required: - - display_name - - image_path - - name + - display_name + - image_path + - name type: object http.CreatePlaylistRequest: properties: @@ -834,14 +840,14 @@ definitions: name: type: string required: - - name + - name type: object http.CreateQueueRequest: properties: vod_id: type: string required: - - vod_id + - vod_id type: object http.CreateVodRequest: properties: @@ -864,10 +870,10 @@ definitions: type: string platform: allOf: - - $ref: '#/definitions/utils.VodPlatform' + - $ref: "#/definitions/utils.VideoPlatform" enum: - - twitch - - youtube + - twitch + - youtube processing: type: boolean resolution: @@ -881,13 +887,13 @@ 
definitions: type: string type: allOf: - - $ref: '#/definitions/utils.VodType' + - $ref: "#/definitions/utils.VodType" enum: - - archive - - live - - highlight - - upload - - clip + - archive + - live + - highlight + - upload + - clip video_path: minLength: 1 type: string @@ -897,22 +903,22 @@ definitions: minLength: 1 type: string required: - - channel_id - - duration - - platform - - streamed_at - - title - - type - - video_path - - views - - web_thumbnail_path + - channel_id + - duration + - platform + - streamed_at + - title + - type + - video_path + - views + - web_thumbnail_path type: object http.GetFfprobeDataRequest: properties: path: type: string required: - - path + - path type: object http.LoginRequest: properties: @@ -921,8 +927,8 @@ definitions: username: type: string required: - - password - - username + - password + - username type: object http.RegisterRequest: properties: @@ -934,8 +940,8 @@ definitions: minLength: 3 type: string required: - - password - - username + - password + - username type: object http.RestartTaskRequest: properties: @@ -945,51 +951,51 @@ definitions: type: string task: enum: - - vod_create_folder - - vod_download_thumbnail - - vod_save_info - - video_download - - video_convert - - video_move - - chat_download - - chat_convert - - chat_render - - chat_move + - vod_create_folder + - vod_download_thumbnail + - vod_save_info + - video_download + - video_convert + - video_move + - chat_download + - chat_convert + - chat_render + - chat_move type: string required: - - queue_id - - task + - queue_id + - task type: object http.StartTaskRequest: properties: task: enum: - - check_live - - check_vod - - get_jwks - - twitch_auth - - queue_hold_check - - storage_migration + - check_live + - check_vod + - get_jwks + - twitch_auth + - queue_hold_check + - storage_migration type: string required: - - task + - task type: object http.UpdateChannelRequest: properties: role: enum: - - admin - - editor - - archiver - - user + - admin + - editor + - archiver + - user type: string username: maxLength: 50 minLength: 2 type: string required: - - role - - username + - role + - username type: object http.UpdateConfigRequest: properties: @@ -1009,8 +1015,8 @@ definitions: video_convert: type: string required: - - chat_render - - video_convert + - chat_render + - video_convert type: object registration_enabled: type: boolean @@ -1049,8 +1055,8 @@ definitions: vod_id: type: string required: - - time - - vod_id + - time + - vod_id type: object http.UpdateQueueRequest: properties: @@ -1066,110 +1072,110 @@ definitions: type: boolean task_chat_convert: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" enum: - - pending - - running - - success - - failed + - pending + - running + - success + - failed task_chat_download: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" enum: - - pending - - running - - success - - failed + - pending + - running + - success + - failed task_chat_move: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" enum: - - pending - - running - - success - - failed + - pending + - running + - success + - failed task_chat_render: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" enum: - - pending - - running - - success - - failed + - pending + - running + - success + - failed task_video_convert: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" enum: - - 
pending - - running - - success - - failed + - pending + - running + - success + - failed task_video_download: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" enum: - - pending - - running - - success - - failed + - pending + - running + - success + - failed task_video_move: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" enum: - - pending - - running - - success - - failed + - pending + - running + - success + - failed task_vod_create_folder: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" enum: - - pending - - running - - success - - failed + - pending + - running + - success + - failed task_vod_download_thumbnail: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" enum: - - pending - - running - - success - - failed + - pending + - running + - success + - failed task_vod_save_info: allOf: - - $ref: '#/definitions/utils.TaskStatus' + - $ref: "#/definitions/utils.TaskStatus" enum: - - pending - - running - - success - - failed + - pending + - running + - success + - failed video_processing: type: boolean required: - - task_chat_convert - - task_chat_download - - task_chat_move - - task_chat_render - - task_video_convert - - task_video_download - - task_video_move - - task_vod_create_folder - - task_vod_download_thumbnail - - task_vod_save_info + - task_chat_convert + - task_chat_download + - task_chat_move + - task_chat_render + - task_video_convert + - task_video_download + - task_video_move + - task_vod_create_folder + - task_vod_download_thumbnail + - task_vod_save_info type: object http.UpdateStatusRequest: properties: status: enum: - - in_progress - - finished + - in_progress + - finished type: string vod_id: type: string required: - - status - - vod_id + - status + - vod_id type: object http.UpdateStorageTemplateRequest: properties: @@ -1178,8 +1184,8 @@ definitions: folder_template: type: string required: - - file_template - - folder_template + - file_template + - folder_template type: object http.UpdateWatchedChannelRequest: properties: @@ -1201,19 +1207,19 @@ definitions: type: boolean resolution: enum: - - best - - source - - 720p60 - - 480p30 - - 360p30 - - 160p30 + - best + - source + - 720p60 + - 480p30 + - 360p30 + - 160p30 type: string watch_live: type: boolean watch_vod: type: boolean required: - - resolution + - resolution type: object twitch.Category: properties: @@ -1324,79 +1330,79 @@ definitions: type: object utils.PlaybackStatus: enum: - - in_progress - - finished + - in_progress + - finished type: string x-enum-varnames: - - InProgress - - Finished + - InProgress + - Finished utils.Role: enum: - - admin - - editor - - archiver - - user + - admin + - editor + - archiver + - user type: string x-enum-varnames: - - AdminRole - - EditorRole - - ArchiverRole - - UserRole + - AdminRole + - EditorRole + - ArchiverRole + - UserRole utils.TaskStatus: enum: - - success - - running - - pending - - failed + - success + - running + - pending + - failed type: string x-enum-varnames: - - Success - - Running - - Pending - - Failed - utils.VodPlatform: + - Success + - Running + - Pending + - Failed + utils.VideoPlatform: enum: - - twitch - - youtube + - twitch + - youtube type: string x-enum-varnames: - - PlatformTwitch - - PlatformYoutube + - PlatformTwitch + - PlatformYoutube utils.VodQuality: enum: - - best - - source - - 720p60 - - 480p30 - - 360p30 - - 160p30 + - best + - source + - 720p60 + - 480p30 + - 
360p30 + - 160p30 type: string x-enum-varnames: - - Best - - Source - - R720P60 - - R480P30 - - R360P30 - - R160P30 + - Best + - Source + - R720P60 + - R480P30 + - R360P30 + - R160P30 utils.VodType: enum: - - archive - - live - - highlight - - upload - - clip + - archive + - live + - highlight + - upload + - clip type: string x-enum-varnames: - - Archive - - Live - - Highlight - - Upload - - Clip + - Archive + - Live + - Highlight + - Upload + - Clip vod.Pagination: properties: data: items: - $ref: '#/definitions/ent.Vod' + $ref: "#/definitions/ent.Vod" type: array limit: type: integer @@ -1423,159 +1429,160 @@ paths: /admin/info: get: consumes: - - application/json + - application/json description: Get ganymede info produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/admin.InfoResp' + $ref: "#/definitions/admin.InfoResp" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get ganymede info tags: - - admin + - admin /admin/stats: get: consumes: - - application/json + - application/json description: Get ganymede stats produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/admin.GetStatsResp' + $ref: "#/definitions/admin.GetStatsResp" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get ganymede stats tags: - - admin + - admin /archive/channel: post: consumes: - - application/json - description: Archive a twitch channel (creates channel in database and download + - application/json + description: + Archive a twitch channel (creates channel in database and download profile image) parameters: - - description: Channel - in: body - name: channel - required: true - schema: - $ref: '#/definitions/http.ArchiveChannelRequest' + - description: Channel + in: body + name: channel + required: true + schema: + $ref: "#/definitions/http.ArchiveChannelRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Channel' + $ref: "#/definitions/ent.Channel" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Archive a twitch channel tags: - - archive + - archive /archive/restart: post: consumes: - - application/json + - application/json description: Restart a task parameters: - - description: Queue ID - in: path - name: queue_id - required: true - type: string - - description: Task - in: body - name: task - required: true - schema: - $ref: '#/definitions/http.RestartTaskRequest' + - description: Queue ID + in: path + name: queue_id + required: true + type: string + - description: Task + in: body + name: task + required: true + schema: + $ref: "#/definitions/http.RestartTaskRequest" produces: - - application/json + - application/json responses: "200": description: OK "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server 
Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Restart a task tags: - - archive + - archive /archive/vod: post: consumes: - - application/json + - application/json description: Archive a twitch vod parameters: - - description: Vod - in: body - name: vod - required: true - schema: - $ref: '#/definitions/http.ArchiveVodRequest' + - description: Vod + in: body + name: vod + required: true + schema: + $ref: "#/definitions/http.ArchiveVodRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/archive.TwitchVodResponse' + $ref: "#/definitions/archive.TwitchVodResponse" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Archive a twitch vod tags: - - archive + - archive /auth/change-password: post: consumes: - - application/json + - application/json description: Change password parameters: - - description: Change password - in: body - name: change-password - required: true - schema: - $ref: '#/definitions/http.ChangePasswordRequest' + - description: Change password + in: body + name: change-password + required: true + schema: + $ref: "#/definitions/http.ChangePasswordRequest" produces: - - application/json + - application/json responses: "200": description: OK @@ -1584,91 +1591,92 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "401": description: Unauthorized schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Change password tags: - - auth + - auth /auth/login: post: consumes: - - application/json - description: Login a user (sets access-token and refresh-token cookies). Access + - application/json + description: + Login a user (sets access-token and refresh-token cookies). Access token lasts for 1 hour. Refresh token lasts for 1 month. 
parameters: - - description: Login - in: body - name: login - required: true - schema: - $ref: '#/definitions/http.LoginRequest' + - description: Login + in: body + name: login + required: true + schema: + $ref: "#/definitions/http.LoginRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.User' + $ref: "#/definitions/ent.User" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "401": description: Unauthorized schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Login a user tags: - - auth + - auth /auth/me: get: consumes: - - application/json + - application/json description: Get current user produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.User' + $ref: "#/definitions/ent.User" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "401": description: Unauthorized schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get current user tags: - - auth + - auth /auth/oauth/callback: get: consumes: - - application/json + - application/json description: OAuth callback for OAuth provider produces: - - application/json + - application/json responses: "200": description: OK @@ -1677,52 +1685,52 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "401": description: Unauthorized schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: OAuth callback tags: - - auth + - auth /auth/oauth/login: get: consumes: - - application/json + - application/json description: Login a user with OAuth (sets access-token and refresh-token cookies) produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.User' + $ref: "#/definitions/ent.User" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "401": description: Unauthorized schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Login a user with OAuth tags: - - auth + - auth /auth/oauth/logout: get: consumes: - - application/json + - application/json description: Logout produces: - - application/json + - application/json responses: "200": description: OK @@ -1731,26 +1739,27 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "401": description: Unauthorized schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": 
description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Logout tags: - - auth + - auth /auth/oauth/refresh: get: consumes: - - application/json - description: Refresh access-token and refresh-token (sets access-token and refresh-token + - application/json + description: + Refresh access-token and refresh-token (sets access-token and refresh-token cookies) produces: - - application/json + - application/json responses: "200": description: OK @@ -1759,26 +1768,27 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "401": description: Unauthorized schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Refresh access-token and refresh-token tags: - - auth + - auth /auth/refresh: post: consumes: - - application/json - description: Refresh access-token and refresh-token (sets access-token and refresh-token + - application/json + description: + Refresh access-token and refresh-token (sets access-token and refresh-token cookies) produces: - - application/json + - application/json responses: "200": description: OK @@ -1787,416 +1797,416 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "401": description: Unauthorized schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Refresh access-token and refresh-token tags: - - auth + - auth /auth/register: post: consumes: - - application/json + - application/json description: Register a user (does not log in) parameters: - - description: Register - in: body - name: register - required: true - schema: - $ref: '#/definitions/http.RegisterRequest' + - description: Register + in: body + name: register + required: true + schema: + $ref: "#/definitions/http.RegisterRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.User' + $ref: "#/definitions/ent.User" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "403": description: Forbidden schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Register a user tags: - - auth + - auth /channel: get: consumes: - - application/json + - application/json description: Returns all channels produces: - - application/json + - application/json responses: "200": description: OK schema: items: - $ref: '#/definitions/ent.Channel' + $ref: "#/definitions/ent.Channel" type: array "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get all channels tags: - - channel + - channel post: consumes: - - application/json + - application/json description: Create a 
channel parameters: - - description: Channel - in: body - name: channel - required: true - schema: - $ref: '#/definitions/http.CreateChannelRequest' + - description: Channel + in: body + name: channel + required: true + schema: + $ref: "#/definitions/http.CreateChannelRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Channel' + $ref: "#/definitions/ent.Channel" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Create a channel tags: - - channel + - channel /channel/{id}: delete: consumes: - - application/json + - application/json description: Delete a channel parameters: - - description: Channel ID - in: path - name: id - required: true - type: string + - description: Channel ID + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Delete a channel tags: - - channel + - channel get: consumes: - - application/json + - application/json description: Returns a channel parameters: - - description: Channel ID - in: path - name: id - required: true - type: string + - description: Channel ID + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Channel' + $ref: "#/definitions/ent.Channel" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get a channel tags: - - channel + - channel put: consumes: - - application/json + - application/json description: Update a channel parameters: - - description: Channel ID - in: path - name: id - required: true - type: string - - description: Channel - in: body - name: channel - required: true - schema: - $ref: '#/definitions/http.CreateChannelRequest' + - description: Channel ID + in: path + name: id + required: true + type: string + - description: Channel + in: body + name: channel + required: true + schema: + $ref: "#/definitions/http.CreateChannelRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Channel' + $ref: "#/definitions/ent.Channel" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server 
Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Update a channel tags: - - channel + - channel /channel/name/{name}: get: consumes: - - application/json + - application/json description: Returns a channel by name parameters: - - description: Channel name - in: path - name: name - required: true - type: string + - description: Channel name + in: path + name: name + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Channel' + $ref: "#/definitions/ent.Channel" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get a channel by name tags: - - channel + - channel /config: get: consumes: - - application/json + - application/json description: Get config produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/config.Conf' + $ref: "#/definitions/config.Conf" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get config tags: - - config + - config put: consumes: - - application/json + - application/json description: Update config parameters: - - description: Config - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.UpdateConfigRequest' + - description: Config + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.UpdateConfigRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/http.UpdateConfigRequest' + $ref: "#/definitions/http.UpdateConfigRequest" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Update config tags: - - config + - config /config/notification: get: consumes: - - application/json + - application/json description: Get notification config produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/config.Notification' + $ref: "#/definitions/config.Notification" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get notification config tags: - - config + - config put: consumes: - - application/json + - application/json description: Update notification config parameters: - - description: Config - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.UpdateNotificationRequest' + - description: Config + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.UpdateNotificationRequest" produces: - - application/json + - application/json responses: "200": 
description: OK schema: - $ref: '#/definitions/http.UpdateNotificationRequest' + $ref: "#/definitions/http.UpdateNotificationRequest" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Update notification config tags: - - config + - config /config/storage: get: consumes: - - application/json + - application/json description: Get storage template config produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/config.StorageTemplate' + $ref: "#/definitions/config.StorageTemplate" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get storage template config tags: - - config + - config put: consumes: - - application/json + - application/json description: Update storage template config parameters: - - description: Config - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.UpdateStorageTemplateRequest' + - description: Config + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.UpdateStorageTemplateRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/http.UpdateStorageTemplateRequest' + $ref: "#/definitions/http.UpdateStorageTemplateRequest" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Update storage template config tags: - - config + - config /exec/ffprobe: post: consumes: - - application/json + - application/json description: Get ffprobe data parameters: - - description: GetFfprobeDataRequest - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.GetFfprobeDataRequest' + - description: GetFfprobeDataRequest + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.GetFfprobeDataRequest" produces: - - application/json + - application/json responses: "200": description: OK @@ -2206,140 +2216,140 @@ paths: "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get ffprobe data tags: - - exec + - exec /live: get: consumes: - - application/json + - application/json description: Get all watched channels produces: - - application/json + - application/json responses: "200": description: OK schema: items: - $ref: '#/definitions/ent.Live' + $ref: "#/definitions/ent.Live" type: array "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get all watched channels tags: - - Live + - Live post: consumes: - - application/json + - application/json description: Add watched channel parameters: - - description: Add watched channel - in: body - name: body - required: true 
- schema: - $ref: '#/definitions/http.AddWatchedChannelRequest' + - description: Add watched channel + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.AddWatchedChannelRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Live' + $ref: "#/definitions/ent.Live" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Add watched channel tags: - - Live + - Live /live/{id}: delete: consumes: - - application/json + - application/json description: Delete watched channel parameters: - - description: Channel ID - in: path - name: id - required: true - type: string + - description: Channel ID + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Live' + $ref: "#/definitions/ent.Live" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Delete watched channel tags: - - Live + - Live put: consumes: - - application/json + - application/json description: Update watched channel parameters: - - description: Channel ID - in: path - name: id - required: true - type: string - - description: Update watched channel - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.UpdateWatchedChannelRequest' + - description: Channel ID + in: path + name: id + required: true + type: string + - description: Update watched channel + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.UpdateWatchedChannelRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Live' + $ref: "#/definitions/ent.Live" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Update watched channel tags: - - Live + - Live /live/archive: post: consumes: - - application/json + - application/json description: Adhoc archive a channel's live stream. produces: - - application/json + - application/json responses: "200": description: OK @@ -2348,31 +2358,32 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Archive a channel's live stream tags: - - Live + - Live /live/chat-convert: post: consumes: - - application/json - description: Adhoc convert chat endpoint. This is what happens when a live stream + - application/json + description: + Adhoc convert chat endpoint. 
This is what happens when a live stream chat is converted to a "vod" chat. parameters: - - description: Convert chat - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.ConvertChatRequest' + - description: Convert chat + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.ConvertChatRequest" produces: - - application/json + - application/json responses: "200": description: OK @@ -2381,24 +2392,25 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Convert chat tags: - - Live + - Live /live/check: get: consumes: - - application/json - description: Check watched channels if they are live. This is what runs every + - application/json + description: + Check watched channels if they are live. This is what runs every X seconds in the config. produces: - - application/json + - application/json responses: "200": description: OK @@ -2407,62 +2419,63 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Check watched channels tags: - - Live + - Live /live/multiple: post: consumes: - - application/json - description: This is useful to add multiple channels at once if they all have + - application/json + description: + This is useful to add multiple channels at once if they all have the same settings parameters: - - description: Add watched channel - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.AddMultipleWatchedChannelRequest' + - description: Add watched channel + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.AddMultipleWatchedChannelRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Live' + $ref: "#/definitions/ent.Live" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Add multiple watched channels at once tags: - - Live + - Live /notification/test: get: consumes: - - application/json + - application/json description: Test notification parameters: - - description: Type of notification to test - in: query - name: type - required: true - type: string + - description: Type of notification to test + in: query + name: type + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK @@ -2471,48 +2484,48 @@ paths: "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Test notification tags: - - notification + - notification /playback: get: consumes: - - application/json + - application/json description: Get all playback progress produces: - - 
application/json + - application/json responses: "200": description: OK schema: items: - $ref: '#/definitions/ent.Playback' + $ref: "#/definitions/ent.Playback" type: array "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get all progress tags: - - Playback + - Playback /playback/{id}: delete: consumes: - - application/json + - application/json description: Delete playback progress parameters: - - description: vod id - in: path - name: id - required: true - type: string + - description: vod id + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK @@ -2521,30 +2534,30 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Delete progress tags: - - Playback + - Playback /playback/progress: post: consumes: - - application/json + - application/json description: Update playback progress parameters: - - description: progress - in: body - name: progress - required: true - schema: - $ref: '#/definitions/http.UpdateProgressRequest' + - description: progress + in: body + name: progress + required: true + schema: + $ref: "#/definitions/http.UpdateProgressRequest" produces: - - application/json + - application/json responses: "200": description: OK @@ -2553,61 +2566,61 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Update progress tags: - - Playback + - Playback /playback/progress/{id}: get: consumes: - - application/json + - application/json description: Get playback progress parameters: - - description: vod id - in: path - name: id - required: true - type: string + - description: vod id + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Playback' + $ref: "#/definitions/ent.Playback" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get progress tags: - - Playback + - Playback /playback/status: post: consumes: - - application/json + - application/json description: Update playback status parameters: - - description: status - in: body - name: status - required: true - schema: - $ref: '#/definitions/http.UpdateStatusRequest' + - description: status + in: body + name: status + required: true + schema: + $ref: "#/definitions/http.UpdateStatusRequest" produces: - - application/json + - application/json responses: "200": description: OK @@ -2616,81 +2629,81 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: 
"#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Update status tags: - - Playback + - Playback /playlist: get: consumes: - - application/json + - application/json description: Get playlists produces: - - application/json + - application/json responses: "200": description: OK schema: items: - $ref: '#/definitions/ent.Playlist' + $ref: "#/definitions/ent.Playlist" type: array "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get playlists tags: - - Playlist + - Playlist post: consumes: - - application/json + - application/json description: Create playlist parameters: - - description: playlist - in: body - name: playlist - required: true - schema: - $ref: '#/definitions/http.CreatePlaylistRequest' + - description: playlist + in: body + name: playlist + required: true + schema: + $ref: "#/definitions/http.CreatePlaylistRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Playlist' + $ref: "#/definitions/ent.Playlist" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Create playlist tags: - - Playlist + - Playlist /playlist/{id}: delete: consumes: - - application/json + - application/json description: Delete playlist parameters: - - description: playlist id - in: path - name: id - required: true - type: string + - description: playlist id + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK @@ -2699,62 +2712,62 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Delete playlist tags: - - Playlist + - Playlist get: consumes: - - application/json + - application/json description: Get playlist parameters: - - description: playlist id - in: path - name: id - required: true - type: string + - description: playlist id + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Playlist' + $ref: "#/definitions/ent.Playlist" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get playlist tags: - - Playlist + - Playlist post: consumes: - - application/json + - application/json description: Add vod to playlist parameters: - - description: playlist id - in: path - name: id - required: true - type: string - - description: vod - in: body - name: vod - required: true - schema: - $ref: '#/definitions/http.AddVodToPlaylistRequest' + - description: playlist id + in: 
path + name: id + required: true + type: string + - description: vod + in: body + name: vod + required: true + schema: + $ref: "#/definitions/http.AddVodToPlaylistRequest" produces: - - application/json + - application/json responses: "200": description: OK @@ -2763,71 +2776,71 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Add vod to playlist tags: - - Playlist + - Playlist put: consumes: - - application/json + - application/json description: Update playlist parameters: - - description: playlist id - in: path - name: id - required: true - type: string - - description: playlist - in: body - name: playlist - required: true - schema: - $ref: '#/definitions/http.CreatePlaylistRequest' + - description: playlist id + in: path + name: id + required: true + type: string + - description: playlist + in: body + name: playlist + required: true + schema: + $ref: "#/definitions/http.CreatePlaylistRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Playlist' + $ref: "#/definitions/ent.Playlist" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Update playlist tags: - - Playlist + - Playlist /playlist/{id}/vod: delete: consumes: - - application/json + - application/json description: Delete vod from playlist parameters: - - description: playlist id - in: path - name: id - required: true - type: string - - description: vod - in: body - name: vod - required: true - schema: - $ref: '#/definitions/http.AddVodToPlaylistRequest' + - description: playlist id + in: path + name: id + required: true + type: string + - description: vod + in: body + name: vod + required: true + schema: + $ref: "#/definitions/http.AddVodToPlaylistRequest" produces: - - application/json + - application/json responses: "200": description: OK @@ -2836,192 +2849,192 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Delete vod from playlist tags: - - Playlist + - Playlist /queue: get: consumes: - - application/json + - application/json description: Get queue items parameters: - - description: Get processing queue items - in: query - name: processing - type: string + - description: Get processing queue items + in: query + name: processing + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: items: - $ref: '#/definitions/ent.Queue' + $ref: "#/definitions/ent.Queue" type: array "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - 
- ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get queue items tags: - - queue + - queue post: consumes: - - application/json + - application/json description: Create a queue item parameters: - - description: Create queue item - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.CreateQueueRequest' + - description: Create queue item + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.CreateQueueRequest" produces: - - application/json + - application/json responses: "201": description: Created schema: - $ref: '#/definitions/ent.Queue' + $ref: "#/definitions/ent.Queue" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Create a queue item tags: - - queue + - queue /queue/{id}: delete: consumes: - - application/json + - application/json description: Delete queue item parameters: - - description: Queue item id - in: path - name: id - required: true - type: string + - description: Queue item id + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "204": description: No Content "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Delete queue item tags: - - queue + - queue get: consumes: - - application/json + - application/json description: Get queue item parameters: - - description: Queue item id - in: path - name: id - required: true - type: string + - description: Queue item id + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Queue' + $ref: "#/definitions/ent.Queue" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get queue item tags: - - queue + - queue put: consumes: - - application/json + - application/json description: Update queue item parameters: - - description: Queue item id - in: path - name: id - required: true - type: string - - description: Update queue item - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.UpdateQueueRequest' + - description: Queue item id + in: path + name: id + required: true + type: string + - description: Update queue item + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.UpdateQueueRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Queue' + $ref: "#/definitions/ent.Queue" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" 
security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Update queue item tags: - - queue + - queue /queue/{id}/tail: get: consumes: - - application/json + - application/json description: Read queue log file parameters: - - description: Queue item id - in: path - name: id - required: true - type: string - - description: 'Log type: video, video-convert, chat, chat-render, or chat-convert' - in: query - name: type - required: true - type: string + - description: Queue item id + in: path + name: id + required: true + type: string + - description: "Log type: video, video-convert, chat, chat-render, or chat-convert" + in: query + name: type + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK @@ -3030,616 +3043,617 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Read queue log file tags: - - queue + - queue /task/start: post: consumes: - - application/json + - application/json description: Start a task parameters: - - description: StartTaskRequest - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.StartTaskRequest' + - description: StartTaskRequest + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.StartTaskRequest" produces: - - application/json + - application/json responses: "200": description: OK "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Start a task tags: - - task + - task /twitch/categories: get: consumes: - - application/json + - application/json description: Get a list of twitch categories produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/twitch.Category' + $ref: "#/definitions/twitch.Category" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get a list of twitch categories tags: - - twitch + - twitch /twitch/channel: get: consumes: - - application/json + - application/json description: Get a twitch user/channel by name (uses twitch api) parameters: - - description: Twitch user login name - in: query - name: name - required: true - type: string + - description: Twitch user login name + in: query + name: name + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/twitch.Channel' + $ref: "#/definitions/twitch.Channel" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get a twitch channel tags: - - twitch + - twitch /twitch/gql/video: get: consumes: - - application/json + - application/json description: Get a twitch video by id (uses twitch graphql api) parameters: - - description: Twitch video id - in: query - name: id - required: true - type: string + - description: Twitch video id + in: query + name: id + required: true + type: 
string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/twitch.Video' + $ref: "#/definitions/twitch.Video" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get a twitch video tags: - - twitch + - twitch /twitch/vod: get: consumes: - - application/json + - application/json description: Get a twitch vod by id (uses twitch api) parameters: - - description: Twitch vod id - in: query - name: id - required: true - type: string + - description: Twitch vod id + in: query + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/twitch.Vod' + $ref: "#/definitions/twitch.Vod" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get a twitch vod tags: - - twitch + - twitch /user: get: consumes: - - application/json + - application/json description: Get all users produces: - - application/json + - application/json responses: "200": description: OK schema: items: - $ref: '#/definitions/ent.User' + $ref: "#/definitions/ent.User" type: array "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get all users tags: - - user + - user /user/{id}: delete: consumes: - - application/json + - application/json description: Delete user parameters: - - description: User ID - in: path - name: id - required: true - type: string + - description: User ID + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Delete user tags: - - user + - user get: consumes: - - application/json + - application/json description: Get user by id parameters: - - description: User ID - in: path - name: id - required: true - type: string + - description: User ID + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.User' + $ref: "#/definitions/ent.User" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Get user by id tags: - - user + - user put: consumes: - - application/json + - application/json description: Update user parameters: - - 
description: User ID - in: path - name: id - required: true - type: string - - description: User data - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.UpdateChannelRequest' + - description: User ID + in: path + name: id + required: true + type: string + - description: User data + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.UpdateChannelRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.User' + $ref: "#/definitions/ent.User" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Update user tags: - - user + - user /vod: get: consumes: - - application/json + - application/json description: Get vods parameters: - - description: Channel ID - in: query - name: channel_id - type: string + - description: Channel ID + in: query + name: channel_id + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: items: - $ref: '#/definitions/ent.Vod' + $ref: "#/definitions/ent.Vod" type: array "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get vods tags: - - vods + - vods post: consumes: - - application/json + - application/json description: Create a vod parameters: - - description: Create vod request - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.CreateVodRequest' + - description: Create vod request + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.CreateVodRequest" produces: - - application/json + - application/json responses: "201": description: Created schema: - $ref: '#/definitions/ent.Vod' + $ref: "#/definitions/ent.Vod" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "409": description: Conflict schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Create a vod tags: - - vods + - vods /vod/{id}: delete: consumes: - - application/json + - application/json description: Delete a vod parameters: - - description: Vod ID - in: path - name: id - required: true - type: string - - description: Delete files - in: query - name: delete_files - type: string + - description: Vod ID + in: path + name: id + required: true + type: string + - description: Delete files + in: query + name: delete_files + type: string produces: - - application/json + - application/json responses: "200": description: OK "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Delete a vod tags: - - vods + - 
vods get: consumes: - - application/json + - application/json description: Get a vod parameters: - - description: Vod ID - in: path - name: id - required: true - type: string - - description: With channel - in: query - name: with_channel - type: string + - description: Vod ID + in: path + name: id + required: true + type: string + - description: With channel + in: query + name: with_channel + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Vod' + $ref: "#/definitions/ent.Vod" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get a vod tags: - - vods + - vods put: consumes: - - application/json + - application/json description: Update a vod parameters: - - description: Vod ID - in: path - name: id - required: true - type: string - - description: Vod - in: body - name: body - required: true - schema: - $ref: '#/definitions/http.CreateVodRequest' + - description: Vod ID + in: path + name: id + required: true + type: string + - description: Vod + in: body + name: body + required: true + schema: + $ref: "#/definitions/http.CreateVodRequest" produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/ent.Vod' + $ref: "#/definitions/ent.Vod" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" security: - - ApiKeyCookieAuth: [] + - ApiKeyCookieAuth: [] summary: Update a vod tags: - - vods + - vods /vod/{id}/chat: get: consumes: - - application/json + - application/json description: Get vod chat comments parameters: - - description: Vod ID - in: path - name: id - required: true - type: string - - description: Start time - in: query - name: start - type: string - - description: End time - in: query - name: end - type: string + - description: Vod ID + in: path + name: id + required: true + type: string + - description: Start time + in: query + name: start + type: string + - description: End time + in: query + name: end + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: items: items: - $ref: '#/definitions/chat.Comment' + $ref: "#/definitions/chat.Comment" type: array type: array "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get vod chat comments tags: - - vods + - vods /vod/{id}/chat/badges: get: consumes: - - application/json + - application/json description: Get vod chat badges parameters: - - description: Vod ID - in: path - name: id - required: true - type: string + - 
description: Vod ID + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: items: - $ref: '#/definitions/chat.GanymedeBadges' + $ref: "#/definitions/chat.GanymedeBadges" type: array "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get vod chat badges tags: - - vods + - vods /vod/{id}/chat/emotes: get: consumes: - - application/json + - application/json description: Get vod chat emotes parameters: - - description: Vod ID - in: path - name: id - required: true - type: string + - description: Vod ID + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: items: - $ref: '#/definitions/chat.GanymedeEmotes' + $ref: "#/definitions/chat.GanymedeEmotes" type: array "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get vod chat emotes tags: - - vods + - vods /vod/{id}/chat/seek: get: consumes: - - application/json - description: Get N number of vod chat comments before the start time (used for + - application/json + description: + Get N number of vod chat comments before the start time (used for seeking) parameters: - - description: Vod ID - in: path - name: id - required: true - type: string - - description: Start time - in: query - name: start - type: string - - description: Count - in: query - name: count - type: string + - description: Vod ID + in: path + name: id + required: true + type: string + - description: Start time + in: query + name: start + type: string + - description: Count + in: query + name: count + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: items: - $ref: '#/definitions/chat.Comment' + $ref: "#/definitions/chat.Comment" type: array "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get number of vod chat comments tags: - - vods + - vods /vod/{id}/chat/userid: get: consumes: - - application/json + - application/json description: Get user id from chat json file parameters: - - description: Vod ID - in: path - name: id - required: true - type: string + - description: Vod ID + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK @@ -3648,134 +3662,134 @@ paths: "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: 
- $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get user id from chat tags: - - vods + - vods /vod/{id}/playlist: get: consumes: - - application/json + - application/json description: Get vod playlists parameters: - - description: Vod ID - in: path - name: id - required: true - type: string + - description: Vod ID + in: path + name: id + required: true + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: items: items: - $ref: '#/definitions/ent.Playlist' + $ref: "#/definitions/ent.Playlist" type: array type: array "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "404": description: Not Found schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get vod playlists tags: - - vods + - vods /vod/pagination: get: consumes: - - application/json + - application/json description: Get vods pagination parameters: - - default: 10 - description: Limit - in: query - name: limit - type: integer - - default: 0 - description: Offset - in: query - name: offset - type: integer - - description: Channel ID - in: query - name: channel_id - type: string + - default: 10 + description: Limit + in: query + name: limit + type: integer + - default: 0 + description: Offset + in: query + name: offset + type: integer + - description: Channel ID + in: query + name: channel_id + type: string produces: - - application/json + - application/json responses: "200": description: OK schema: - $ref: '#/definitions/vod.Pagination' + $ref: "#/definitions/vod.Pagination" "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Get vods pagination tags: - - vods + - vods /vod/search: get: consumes: - - application/json + - application/json description: Search vods parameters: - - description: Search query - in: query - name: q - required: true - type: string - - default: 10 - description: Limit - in: query - name: limit - type: integer - - default: 0 - description: Offset - in: query - name: offset - type: integer + - description: Search query + in: query + name: q + required: true + type: string + - default: 10 + description: Limit + in: query + name: limit + type: integer + - default: 0 + description: Offset + in: query + name: offset + type: integer produces: - - application/json + - application/json responses: "200": description: OK schema: items: - $ref: '#/definitions/ent.Vod' + $ref: "#/definitions/ent.Vod" type: array "400": description: Bad Request schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" "500": description: Internal Server Error schema: - $ref: '#/definitions/utils.ErrorResponse' + $ref: "#/definitions/utils.ErrorResponse" summary: Search vods tags: - - vods + - vods securityDefinitions: ApiKeyCookieAuth: in: cookie diff --git a/ent/blockedvideos.go b/ent/blockedvideos.go new file mode 100644 index 00000000..d4f9b206 --- /dev/null +++ 
b/ent/blockedvideos.go @@ -0,0 +1,105 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "fmt" + "strings" + "time" + + "entgo.io/ent" + "entgo.io/ent/dialect/sql" + "github.com/zibbp/ganymede/ent/blockedvideos" +) + +// BlockedVideos is the model entity for the BlockedVideos schema. +type BlockedVideos struct { + config `json:"-"` + // ID of the ent. + // The ID of the blocked vod. + ID string `json:"id,omitempty"` + // CreatedAt holds the value of the "created_at" field. + CreatedAt time.Time `json:"created_at,omitempty"` + selectValues sql.SelectValues +} + +// scanValues returns the types for scanning values from sql.Rows. +func (*BlockedVideos) scanValues(columns []string) ([]any, error) { + values := make([]any, len(columns)) + for i := range columns { + switch columns[i] { + case blockedvideos.FieldID: + values[i] = new(sql.NullString) + case blockedvideos.FieldCreatedAt: + values[i] = new(sql.NullTime) + default: + values[i] = new(sql.UnknownType) + } + } + return values, nil +} + +// assignValues assigns the values that were returned from sql.Rows (after scanning) +// to the BlockedVideos fields. +func (bv *BlockedVideos) assignValues(columns []string, values []any) error { + if m, n := len(values), len(columns); m < n { + return fmt.Errorf("mismatch number of scan values: %d != %d", m, n) + } + for i := range columns { + switch columns[i] { + case blockedvideos.FieldID: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field id", values[i]) + } else if value.Valid { + bv.ID = value.String + } + case blockedvideos.FieldCreatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field created_at", values[i]) + } else if value.Valid { + bv.CreatedAt = value.Time + } + default: + bv.selectValues.Set(columns[i], values[i]) + } + } + return nil +} + +// Value returns the ent.Value that was dynamically selected and assigned to the BlockedVideos. +// This includes values selected through modifiers, order, etc. +func (bv *BlockedVideos) Value(name string) (ent.Value, error) { + return bv.selectValues.Get(name) +} + +// Update returns a builder for updating this BlockedVideos. +// Note that you need to call BlockedVideos.Unwrap() before calling this method if this BlockedVideos +// was returned from a transaction, and the transaction was committed or rolled back. +func (bv *BlockedVideos) Update() *BlockedVideosUpdateOne { + return NewBlockedVideosClient(bv.config).UpdateOne(bv) +} + +// Unwrap unwraps the BlockedVideos entity that was returned from a transaction after it was closed, +// so that all future queries will be executed through the driver which created the transaction. +func (bv *BlockedVideos) Unwrap() *BlockedVideos { + _tx, ok := bv.config.driver.(*txDriver) + if !ok { + panic("ent: BlockedVideos is not a transactional entity") + } + bv.config.driver = _tx.drv + return bv +} + +// String implements the fmt.Stringer. +func (bv *BlockedVideos) String() string { + var builder strings.Builder + builder.WriteString("BlockedVideos(") + builder.WriteString(fmt.Sprintf("id=%v, ", bv.ID)) + builder.WriteString("created_at=") + builder.WriteString(bv.CreatedAt.Format(time.ANSIC)) + builder.WriteByte(')') + return builder.String() +} + +// BlockedVideosSlice is a parsable slice of BlockedVideos. 
+type BlockedVideosSlice []*BlockedVideos diff --git a/ent/blockedvideos/blockedvideos.go b/ent/blockedvideos/blockedvideos.go new file mode 100644 index 00000000..dd40c0ff --- /dev/null +++ b/ent/blockedvideos/blockedvideos.go @@ -0,0 +1,54 @@ +// Code generated by ent, DO NOT EDIT. + +package blockedvideos + +import ( + "time" + + "entgo.io/ent/dialect/sql" +) + +const ( + // Label holds the string label denoting the blockedvideos type in the database. + Label = "blocked_videos" + // FieldID holds the string denoting the id field in the database. + FieldID = "id" + // FieldCreatedAt holds the string denoting the created_at field in the database. + FieldCreatedAt = "created_at" + // Table holds the table name of the blockedvideos in the database. + Table = "blocked_videos" +) + +// Columns holds all SQL columns for blockedvideos fields. +var Columns = []string{ + FieldID, + FieldCreatedAt, +} + +// ValidColumn reports if the column name is valid (part of the table columns). +func ValidColumn(column string) bool { + for i := range Columns { + if column == Columns[i] { + return true + } + } + return false +} + +var ( + // DefaultCreatedAt holds the default value on creation for the "created_at" field. + DefaultCreatedAt func() time.Time +) + +// OrderOption defines the ordering options for the BlockedVideos queries. +type OrderOption func(*sql.Selector) + +// ByID orders the results by the id field. +func ByID(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldID, opts...).ToFunc() +} + +// ByCreatedAt orders the results by the created_at field. +func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldCreatedAt, opts...).ToFunc() +} diff --git a/ent/blockedvideos/where.go b/ent/blockedvideos/where.go new file mode 100644 index 00000000..b4b40846 --- /dev/null +++ b/ent/blockedvideos/where.go @@ -0,0 +1,125 @@ +// Code generated by ent, DO NOT EDIT. + +package blockedvideos + +import ( + "time" + + "entgo.io/ent/dialect/sql" + "github.com/zibbp/ganymede/ent/predicate" +) + +// ID filters vertices based on their ID field. +func ID(id string) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldEQ(FieldID, id)) +} + +// IDEQ applies the EQ predicate on the ID field. +func IDEQ(id string) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldEQ(FieldID, id)) +} + +// IDNEQ applies the NEQ predicate on the ID field. +func IDNEQ(id string) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldNEQ(FieldID, id)) +} + +// IDIn applies the In predicate on the ID field. +func IDIn(ids ...string) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldIn(FieldID, ids...)) +} + +// IDNotIn applies the NotIn predicate on the ID field. +func IDNotIn(ids ...string) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldNotIn(FieldID, ids...)) +} + +// IDGT applies the GT predicate on the ID field. +func IDGT(id string) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldGT(FieldID, id)) +} + +// IDGTE applies the GTE predicate on the ID field. +func IDGTE(id string) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldGTE(FieldID, id)) +} + +// IDLT applies the LT predicate on the ID field. +func IDLT(id string) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldLT(FieldID, id)) +} + +// IDLTE applies the LTE predicate on the ID field. 
+func IDLTE(id string) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldLTE(FieldID, id)) +} + +// IDEqualFold applies the EqualFold predicate on the ID field. +func IDEqualFold(id string) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldEqualFold(FieldID, id)) +} + +// IDContainsFold applies the ContainsFold predicate on the ID field. +func IDContainsFold(id string) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldContainsFold(FieldID, id)) +} + +// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ. +func CreatedAt(v time.Time) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldEQ(FieldCreatedAt, v)) +} + +// CreatedAtEQ applies the EQ predicate on the "created_at" field. +func CreatedAtEQ(v time.Time) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldEQ(FieldCreatedAt, v)) +} + +// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. +func CreatedAtNEQ(v time.Time) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldNEQ(FieldCreatedAt, v)) +} + +// CreatedAtIn applies the In predicate on the "created_at" field. +func CreatedAtIn(vs ...time.Time) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldIn(FieldCreatedAt, vs...)) +} + +// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. +func CreatedAtNotIn(vs ...time.Time) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldNotIn(FieldCreatedAt, vs...)) +} + +// CreatedAtGT applies the GT predicate on the "created_at" field. +func CreatedAtGT(v time.Time) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldGT(FieldCreatedAt, v)) +} + +// CreatedAtGTE applies the GTE predicate on the "created_at" field. +func CreatedAtGTE(v time.Time) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldGTE(FieldCreatedAt, v)) +} + +// CreatedAtLT applies the LT predicate on the "created_at" field. +func CreatedAtLT(v time.Time) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldLT(FieldCreatedAt, v)) +} + +// CreatedAtLTE applies the LTE predicate on the "created_at" field. +func CreatedAtLTE(v time.Time) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.FieldLTE(FieldCreatedAt, v)) +} + +// And groups predicates with the AND operator between them. +func And(predicates ...predicate.BlockedVideos) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.AndPredicates(predicates...)) +} + +// Or groups predicates with the OR operator between them. +func Or(predicates ...predicate.BlockedVideos) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.OrPredicates(predicates...)) +} + +// Not applies the not operator on the given predicate. +func Not(p predicate.BlockedVideos) predicate.BlockedVideos { + return predicate.BlockedVideos(sql.NotPredicates(p)) +} diff --git a/ent/blockedvideos_create.go b/ent/blockedvideos_create.go new file mode 100644 index 00000000..1bde605f --- /dev/null +++ b/ent/blockedvideos_create.go @@ -0,0 +1,474 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect" + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/zibbp/ganymede/ent/blockedvideos" +) + +// BlockedVideosCreate is the builder for creating a BlockedVideos entity. 
+type BlockedVideosCreate struct { + config + mutation *BlockedVideosMutation + hooks []Hook + conflict []sql.ConflictOption +} + +// SetCreatedAt sets the "created_at" field. +func (bvc *BlockedVideosCreate) SetCreatedAt(t time.Time) *BlockedVideosCreate { + bvc.mutation.SetCreatedAt(t) + return bvc +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (bvc *BlockedVideosCreate) SetNillableCreatedAt(t *time.Time) *BlockedVideosCreate { + if t != nil { + bvc.SetCreatedAt(*t) + } + return bvc +} + +// SetID sets the "id" field. +func (bvc *BlockedVideosCreate) SetID(s string) *BlockedVideosCreate { + bvc.mutation.SetID(s) + return bvc +} + +// Mutation returns the BlockedVideosMutation object of the builder. +func (bvc *BlockedVideosCreate) Mutation() *BlockedVideosMutation { + return bvc.mutation +} + +// Save creates the BlockedVideos in the database. +func (bvc *BlockedVideosCreate) Save(ctx context.Context) (*BlockedVideos, error) { + bvc.defaults() + return withHooks(ctx, bvc.sqlSave, bvc.mutation, bvc.hooks) +} + +// SaveX calls Save and panics if Save returns an error. +func (bvc *BlockedVideosCreate) SaveX(ctx context.Context) *BlockedVideos { + v, err := bvc.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (bvc *BlockedVideosCreate) Exec(ctx context.Context) error { + _, err := bvc.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (bvc *BlockedVideosCreate) ExecX(ctx context.Context) { + if err := bvc.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (bvc *BlockedVideosCreate) defaults() { + if _, ok := bvc.mutation.CreatedAt(); !ok { + v := blockedvideos.DefaultCreatedAt() + bvc.mutation.SetCreatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (bvc *BlockedVideosCreate) check() error { + if _, ok := bvc.mutation.CreatedAt(); !ok { + return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "BlockedVideos.created_at"`)} + } + return nil +} + +func (bvc *BlockedVideosCreate) sqlSave(ctx context.Context) (*BlockedVideos, error) { + if err := bvc.check(); err != nil { + return nil, err + } + _node, _spec := bvc.createSpec() + if err := sqlgraph.CreateNode(ctx, bvc.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + if _spec.ID.Value != nil { + if id, ok := _spec.ID.Value.(string); ok { + _node.ID = id + } else { + return nil, fmt.Errorf("unexpected BlockedVideos.ID type: %T", _spec.ID.Value) + } + } + bvc.mutation.id = &_node.ID + bvc.mutation.done = true + return _node, nil +} + +func (bvc *BlockedVideosCreate) createSpec() (*BlockedVideos, *sqlgraph.CreateSpec) { + var ( + _node = &BlockedVideos{config: bvc.config} + _spec = sqlgraph.NewCreateSpec(blockedvideos.Table, sqlgraph.NewFieldSpec(blockedvideos.FieldID, field.TypeString)) + ) + _spec.OnConflict = bvc.conflict + if id, ok := bvc.mutation.ID(); ok { + _node.ID = id + _spec.ID.Value = id + } + if value, ok := bvc.mutation.CreatedAt(); ok { + _spec.SetField(blockedvideos.FieldCreatedAt, field.TypeTime, value) + _node.CreatedAt = value + } + return _node, _spec +} + +// OnConflict allows configuring the `ON CONFLICT` / `ON DUPLICATE KEY` clause +// of the `INSERT` statement. For example: +// +// client.BlockedVideos.Create(). +// SetCreatedAt(v). 
+// OnConflict( +// // Update the row with the new values +// // the was proposed for insertion. +// sql.ResolveWithNewValues(), +// ). +// // Override some of the fields with custom +// // update values. +// Update(func(u *ent.BlockedVideosUpsert) { +// SetCreatedAt(v+v). +// }). +// Exec(ctx) +func (bvc *BlockedVideosCreate) OnConflict(opts ...sql.ConflictOption) *BlockedVideosUpsertOne { + bvc.conflict = opts + return &BlockedVideosUpsertOne{ + create: bvc, + } +} + +// OnConflictColumns calls `OnConflict` and configures the columns +// as conflict target. Using this option is equivalent to using: +// +// client.BlockedVideos.Create(). +// OnConflict(sql.ConflictColumns(columns...)). +// Exec(ctx) +func (bvc *BlockedVideosCreate) OnConflictColumns(columns ...string) *BlockedVideosUpsertOne { + bvc.conflict = append(bvc.conflict, sql.ConflictColumns(columns...)) + return &BlockedVideosUpsertOne{ + create: bvc, + } +} + +type ( + // BlockedVideosUpsertOne is the builder for "upsert"-ing + // one BlockedVideos node. + BlockedVideosUpsertOne struct { + create *BlockedVideosCreate + } + + // BlockedVideosUpsert is the "OnConflict" setter. + BlockedVideosUpsert struct { + *sql.UpdateSet + } +) + +// UpdateNewValues updates the mutable fields using the new values that were set on create except the ID field. +// Using this option is equivalent to using: +// +// client.BlockedVideos.Create(). +// OnConflict( +// sql.ResolveWithNewValues(), +// sql.ResolveWith(func(u *sql.UpdateSet) { +// u.SetIgnore(blockedvideos.FieldID) +// }), +// ). +// Exec(ctx) +func (u *BlockedVideosUpsertOne) UpdateNewValues() *BlockedVideosUpsertOne { + u.create.conflict = append(u.create.conflict, sql.ResolveWithNewValues()) + u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(s *sql.UpdateSet) { + if _, exists := u.create.mutation.ID(); exists { + s.SetIgnore(blockedvideos.FieldID) + } + if _, exists := u.create.mutation.CreatedAt(); exists { + s.SetIgnore(blockedvideos.FieldCreatedAt) + } + })) + return u +} + +// Ignore sets each column to itself in case of conflict. +// Using this option is equivalent to using: +// +// client.BlockedVideos.Create(). +// OnConflict(sql.ResolveWithIgnore()). +// Exec(ctx) +func (u *BlockedVideosUpsertOne) Ignore() *BlockedVideosUpsertOne { + u.create.conflict = append(u.create.conflict, sql.ResolveWithIgnore()) + return u +} + +// DoNothing configures the conflict_action to `DO NOTHING`. +// Supported only by SQLite and PostgreSQL. +func (u *BlockedVideosUpsertOne) DoNothing() *BlockedVideosUpsertOne { + u.create.conflict = append(u.create.conflict, sql.DoNothing()) + return u +} + +// Update allows overriding fields `UPDATE` values. See the BlockedVideosCreate.OnConflict +// documentation for more info. +func (u *BlockedVideosUpsertOne) Update(set func(*BlockedVideosUpsert)) *BlockedVideosUpsertOne { + u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(update *sql.UpdateSet) { + set(&BlockedVideosUpsert{UpdateSet: update}) + })) + return u +} + +// Exec executes the query. +func (u *BlockedVideosUpsertOne) Exec(ctx context.Context) error { + if len(u.create.conflict) == 0 { + return errors.New("ent: missing options for BlockedVideosCreate.OnConflict") + } + return u.create.Exec(ctx) +} + +// ExecX is like Exec, but panics if an error occurs. +func (u *BlockedVideosUpsertOne) ExecX(ctx context.Context) { + if err := u.create.Exec(ctx); err != nil { + panic(err) + } +} + +// Exec executes the UPSERT query and returns the inserted/updated ID. 
+func (u *BlockedVideosUpsertOne) ID(ctx context.Context) (id string, err error) { + if u.create.driver.Dialect() == dialect.MySQL { + // In case of "ON CONFLICT", there is no way to get back non-numeric ID + // fields from the database since MySQL does not support the RETURNING clause. + return id, errors.New("ent: BlockedVideosUpsertOne.ID is not supported by MySQL driver. Use BlockedVideosUpsertOne.Exec instead") + } + node, err := u.create.Save(ctx) + if err != nil { + return id, err + } + return node.ID, nil +} + +// IDX is like ID, but panics if an error occurs. +func (u *BlockedVideosUpsertOne) IDX(ctx context.Context) string { + id, err := u.ID(ctx) + if err != nil { + panic(err) + } + return id +} + +// BlockedVideosCreateBulk is the builder for creating many BlockedVideos entities in bulk. +type BlockedVideosCreateBulk struct { + config + err error + builders []*BlockedVideosCreate + conflict []sql.ConflictOption +} + +// Save creates the BlockedVideos entities in the database. +func (bvcb *BlockedVideosCreateBulk) Save(ctx context.Context) ([]*BlockedVideos, error) { + if bvcb.err != nil { + return nil, bvcb.err + } + specs := make([]*sqlgraph.CreateSpec, len(bvcb.builders)) + nodes := make([]*BlockedVideos, len(bvcb.builders)) + mutators := make([]Mutator, len(bvcb.builders)) + for i := range bvcb.builders { + func(i int, root context.Context) { + builder := bvcb.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*BlockedVideosMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + var err error + nodes[i], specs[i] = builder.createSpec() + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, bvcb.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + spec.OnConflict = bvcb.conflict + // Invoke the actual operation on the latest mutation in the chain. + if err = sqlgraph.BatchCreate(ctx, bvcb.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + mutation.done = true + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, bvcb.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. +func (bvcb *BlockedVideosCreateBulk) SaveX(ctx context.Context) []*BlockedVideos { + v, err := bvcb.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (bvcb *BlockedVideosCreateBulk) Exec(ctx context.Context) error { + _, err := bvcb.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (bvcb *BlockedVideosCreateBulk) ExecX(ctx context.Context) { + if err := bvcb.Exec(ctx); err != nil { + panic(err) + } +} + +// OnConflict allows configuring the `ON CONFLICT` / `ON DUPLICATE KEY` clause +// of the `INSERT` statement. For example: +// +// client.BlockedVideos.CreateBulk(builders...). +// OnConflict( +// // Update the row with the new values +// // the was proposed for insertion. +// sql.ResolveWithNewValues(), +// ). +// // Override some of the fields with custom +// // update values. 
+// Update(func(u *ent.BlockedVideosUpsert) { +// SetCreatedAt(v+v). +// }). +// Exec(ctx) +func (bvcb *BlockedVideosCreateBulk) OnConflict(opts ...sql.ConflictOption) *BlockedVideosUpsertBulk { + bvcb.conflict = opts + return &BlockedVideosUpsertBulk{ + create: bvcb, + } +} + +// OnConflictColumns calls `OnConflict` and configures the columns +// as conflict target. Using this option is equivalent to using: +// +// client.BlockedVideos.Create(). +// OnConflict(sql.ConflictColumns(columns...)). +// Exec(ctx) +func (bvcb *BlockedVideosCreateBulk) OnConflictColumns(columns ...string) *BlockedVideosUpsertBulk { + bvcb.conflict = append(bvcb.conflict, sql.ConflictColumns(columns...)) + return &BlockedVideosUpsertBulk{ + create: bvcb, + } +} + +// BlockedVideosUpsertBulk is the builder for "upsert"-ing +// a bulk of BlockedVideos nodes. +type BlockedVideosUpsertBulk struct { + create *BlockedVideosCreateBulk +} + +// UpdateNewValues updates the mutable fields using the new values that +// were set on create. Using this option is equivalent to using: +// +// client.BlockedVideos.Create(). +// OnConflict( +// sql.ResolveWithNewValues(), +// sql.ResolveWith(func(u *sql.UpdateSet) { +// u.SetIgnore(blockedvideos.FieldID) +// }), +// ). +// Exec(ctx) +func (u *BlockedVideosUpsertBulk) UpdateNewValues() *BlockedVideosUpsertBulk { + u.create.conflict = append(u.create.conflict, sql.ResolveWithNewValues()) + u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(s *sql.UpdateSet) { + for _, b := range u.create.builders { + if _, exists := b.mutation.ID(); exists { + s.SetIgnore(blockedvideos.FieldID) + } + if _, exists := b.mutation.CreatedAt(); exists { + s.SetIgnore(blockedvideos.FieldCreatedAt) + } + } + })) + return u +} + +// Ignore sets each column to itself in case of conflict. +// Using this option is equivalent to using: +// +// client.BlockedVideos.Create(). +// OnConflict(sql.ResolveWithIgnore()). +// Exec(ctx) +func (u *BlockedVideosUpsertBulk) Ignore() *BlockedVideosUpsertBulk { + u.create.conflict = append(u.create.conflict, sql.ResolveWithIgnore()) + return u +} + +// DoNothing configures the conflict_action to `DO NOTHING`. +// Supported only by SQLite and PostgreSQL. +func (u *BlockedVideosUpsertBulk) DoNothing() *BlockedVideosUpsertBulk { + u.create.conflict = append(u.create.conflict, sql.DoNothing()) + return u +} + +// Update allows overriding fields `UPDATE` values. See the BlockedVideosCreateBulk.OnConflict +// documentation for more info. +func (u *BlockedVideosUpsertBulk) Update(set func(*BlockedVideosUpsert)) *BlockedVideosUpsertBulk { + u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(update *sql.UpdateSet) { + set(&BlockedVideosUpsert{UpdateSet: update}) + })) + return u +} + +// Exec executes the query. +func (u *BlockedVideosUpsertBulk) Exec(ctx context.Context) error { + if u.create.err != nil { + return u.create.err + } + for i, b := range u.create.builders { + if len(b.conflict) != 0 { + return fmt.Errorf("ent: OnConflict was set for builder %d. Set it on the BlockedVideosCreateBulk instead", i) + } + } + if len(u.create.conflict) == 0 { + return errors.New("ent: missing options for BlockedVideosCreateBulk.OnConflict") + } + return u.create.Exec(ctx) +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (u *BlockedVideosUpsertBulk) ExecX(ctx context.Context) { + if err := u.create.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/ent/blockedvideos_delete.go b/ent/blockedvideos_delete.go new file mode 100644 index 00000000..3fbf012a --- /dev/null +++ b/ent/blockedvideos_delete.go @@ -0,0 +1,88 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/zibbp/ganymede/ent/blockedvideos" + "github.com/zibbp/ganymede/ent/predicate" +) + +// BlockedVideosDelete is the builder for deleting a BlockedVideos entity. +type BlockedVideosDelete struct { + config + hooks []Hook + mutation *BlockedVideosMutation +} + +// Where appends a list predicates to the BlockedVideosDelete builder. +func (bvd *BlockedVideosDelete) Where(ps ...predicate.BlockedVideos) *BlockedVideosDelete { + bvd.mutation.Where(ps...) + return bvd +} + +// Exec executes the deletion query and returns how many vertices were deleted. +func (bvd *BlockedVideosDelete) Exec(ctx context.Context) (int, error) { + return withHooks(ctx, bvd.sqlExec, bvd.mutation, bvd.hooks) +} + +// ExecX is like Exec, but panics if an error occurs. +func (bvd *BlockedVideosDelete) ExecX(ctx context.Context) int { + n, err := bvd.Exec(ctx) + if err != nil { + panic(err) + } + return n +} + +func (bvd *BlockedVideosDelete) sqlExec(ctx context.Context) (int, error) { + _spec := sqlgraph.NewDeleteSpec(blockedvideos.Table, sqlgraph.NewFieldSpec(blockedvideos.FieldID, field.TypeString)) + if ps := bvd.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + affected, err := sqlgraph.DeleteNodes(ctx, bvd.driver, _spec) + if err != nil && sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + bvd.mutation.done = true + return affected, err +} + +// BlockedVideosDeleteOne is the builder for deleting a single BlockedVideos entity. +type BlockedVideosDeleteOne struct { + bvd *BlockedVideosDelete +} + +// Where appends a list predicates to the BlockedVideosDelete builder. +func (bvdo *BlockedVideosDeleteOne) Where(ps ...predicate.BlockedVideos) *BlockedVideosDeleteOne { + bvdo.bvd.mutation.Where(ps...) + return bvdo +} + +// Exec executes the deletion query. +func (bvdo *BlockedVideosDeleteOne) Exec(ctx context.Context) error { + n, err := bvdo.bvd.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{blockedvideos.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. +func (bvdo *BlockedVideosDeleteOne) ExecX(ctx context.Context) { + if err := bvdo.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/ent/blockedvideos_query.go b/ent/blockedvideos_query.go new file mode 100644 index 00000000..4519cc43 --- /dev/null +++ b/ent/blockedvideos_query.go @@ -0,0 +1,526 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + "math" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/zibbp/ganymede/ent/blockedvideos" + "github.com/zibbp/ganymede/ent/predicate" +) + +// BlockedVideosQuery is the builder for querying BlockedVideos entities. +type BlockedVideosQuery struct { + config + ctx *QueryContext + order []blockedvideos.OrderOption + inters []Interceptor + predicates []predicate.BlockedVideos + // intermediate query (i.e. 
traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the BlockedVideosQuery builder. +func (bvq *BlockedVideosQuery) Where(ps ...predicate.BlockedVideos) *BlockedVideosQuery { + bvq.predicates = append(bvq.predicates, ps...) + return bvq +} + +// Limit the number of records to be returned by this query. +func (bvq *BlockedVideosQuery) Limit(limit int) *BlockedVideosQuery { + bvq.ctx.Limit = &limit + return bvq +} + +// Offset to start from. +func (bvq *BlockedVideosQuery) Offset(offset int) *BlockedVideosQuery { + bvq.ctx.Offset = &offset + return bvq +} + +// Unique configures the query builder to filter duplicate records on query. +// By default, unique is set to true, and can be disabled using this method. +func (bvq *BlockedVideosQuery) Unique(unique bool) *BlockedVideosQuery { + bvq.ctx.Unique = &unique + return bvq +} + +// Order specifies how the records should be ordered. +func (bvq *BlockedVideosQuery) Order(o ...blockedvideos.OrderOption) *BlockedVideosQuery { + bvq.order = append(bvq.order, o...) + return bvq +} + +// First returns the first BlockedVideos entity from the query. +// Returns a *NotFoundError when no BlockedVideos was found. +func (bvq *BlockedVideosQuery) First(ctx context.Context) (*BlockedVideos, error) { + nodes, err := bvq.Limit(1).All(setContextOp(ctx, bvq.ctx, "First")) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{blockedvideos.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. +func (bvq *BlockedVideosQuery) FirstX(ctx context.Context) *BlockedVideos { + node, err := bvq.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first BlockedVideos ID from the query. +// Returns a *NotFoundError when no BlockedVideos ID was found. +func (bvq *BlockedVideosQuery) FirstID(ctx context.Context) (id string, err error) { + var ids []string + if ids, err = bvq.Limit(1).IDs(setContextOp(ctx, bvq.ctx, "FirstID")); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{blockedvideos.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. +func (bvq *BlockedVideosQuery) FirstIDX(ctx context.Context) string { + id, err := bvq.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single BlockedVideos entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when more than one BlockedVideos entity is found. +// Returns a *NotFoundError when no BlockedVideos entities are found. +func (bvq *BlockedVideosQuery) Only(ctx context.Context) (*BlockedVideos, error) { + nodes, err := bvq.Limit(2).All(setContextOp(ctx, bvq.ctx, "Only")) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{blockedvideos.Label} + default: + return nil, &NotSingularError{blockedvideos.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (bvq *BlockedVideosQuery) OnlyX(ctx context.Context) *BlockedVideos { + node, err := bvq.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only BlockedVideos ID in the query. +// Returns a *NotSingularError when more than one BlockedVideos ID is found. +// Returns a *NotFoundError when no entities are found. 
+func (bvq *BlockedVideosQuery) OnlyID(ctx context.Context) (id string, err error) { + var ids []string + if ids, err = bvq.Limit(2).IDs(setContextOp(ctx, bvq.ctx, "OnlyID")); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{blockedvideos.Label} + default: + err = &NotSingularError{blockedvideos.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. +func (bvq *BlockedVideosQuery) OnlyIDX(ctx context.Context) string { + id, err := bvq.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of BlockedVideosSlice. +func (bvq *BlockedVideosQuery) All(ctx context.Context) ([]*BlockedVideos, error) { + ctx = setContextOp(ctx, bvq.ctx, "All") + if err := bvq.prepareQuery(ctx); err != nil { + return nil, err + } + qr := querierAll[[]*BlockedVideos, *BlockedVideosQuery]() + return withInterceptors[[]*BlockedVideos](ctx, bvq, qr, bvq.inters) +} + +// AllX is like All, but panics if an error occurs. +func (bvq *BlockedVideosQuery) AllX(ctx context.Context) []*BlockedVideos { + nodes, err := bvq.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of BlockedVideos IDs. +func (bvq *BlockedVideosQuery) IDs(ctx context.Context) (ids []string, err error) { + if bvq.ctx.Unique == nil && bvq.path != nil { + bvq.Unique(true) + } + ctx = setContextOp(ctx, bvq.ctx, "IDs") + if err = bvq.Select(blockedvideos.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (bvq *BlockedVideosQuery) IDsX(ctx context.Context) []string { + ids, err := bvq.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (bvq *BlockedVideosQuery) Count(ctx context.Context) (int, error) { + ctx = setContextOp(ctx, bvq.ctx, "Count") + if err := bvq.prepareQuery(ctx); err != nil { + return 0, err + } + return withInterceptors[int](ctx, bvq, querierCount[*BlockedVideosQuery](), bvq.inters) +} + +// CountX is like Count, but panics if an error occurs. +func (bvq *BlockedVideosQuery) CountX(ctx context.Context) int { + count, err := bvq.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. +func (bvq *BlockedVideosQuery) Exist(ctx context.Context) (bool, error) { + ctx = setContextOp(ctx, bvq.ctx, "Exist") + switch _, err := bvq.FirstID(ctx); { + case IsNotFound(err): + return false, nil + case err != nil: + return false, fmt.Errorf("ent: check existence: %w", err) + default: + return true, nil + } +} + +// ExistX is like Exist, but panics if an error occurs. +func (bvq *BlockedVideosQuery) ExistX(ctx context.Context) bool { + exist, err := bvq.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the BlockedVideosQuery builder, including all associated steps. It can be +// used to prepare common query builders and use them differently after the clone is made. +func (bvq *BlockedVideosQuery) Clone() *BlockedVideosQuery { + if bvq == nil { + return nil + } + return &BlockedVideosQuery{ + config: bvq.config, + ctx: bvq.ctx.Clone(), + order: append([]blockedvideos.OrderOption{}, bvq.order...), + inters: append([]Interceptor{}, bvq.inters...), + predicates: append([]predicate.BlockedVideos{}, bvq.predicates...), + // clone intermediate query. 
+ sql: bvq.sql.Clone(), + path: bvq.path, + } +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.BlockedVideos.Query(). +// GroupBy(blockedvideos.FieldCreatedAt). +// Aggregate(ent.Count()). +// Scan(ctx, &v) +func (bvq *BlockedVideosQuery) GroupBy(field string, fields ...string) *BlockedVideosGroupBy { + bvq.ctx.Fields = append([]string{field}, fields...) + grbuild := &BlockedVideosGroupBy{build: bvq} + grbuild.flds = &bvq.ctx.Fields + grbuild.label = blockedvideos.Label + grbuild.scan = grbuild.Scan + return grbuild +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// } +// +// client.BlockedVideos.Query(). +// Select(blockedvideos.FieldCreatedAt). +// Scan(ctx, &v) +func (bvq *BlockedVideosQuery) Select(fields ...string) *BlockedVideosSelect { + bvq.ctx.Fields = append(bvq.ctx.Fields, fields...) + sbuild := &BlockedVideosSelect{BlockedVideosQuery: bvq} + sbuild.label = blockedvideos.Label + sbuild.flds, sbuild.scan = &bvq.ctx.Fields, sbuild.Scan + return sbuild +} + +// Aggregate returns a BlockedVideosSelect configured with the given aggregations. +func (bvq *BlockedVideosQuery) Aggregate(fns ...AggregateFunc) *BlockedVideosSelect { + return bvq.Select().Aggregate(fns...) +} + +func (bvq *BlockedVideosQuery) prepareQuery(ctx context.Context) error { + for _, inter := range bvq.inters { + if inter == nil { + return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)") + } + if trv, ok := inter.(Traverser); ok { + if err := trv.Traverse(ctx, bvq); err != nil { + return err + } + } + } + for _, f := range bvq.ctx.Fields { + if !blockedvideos.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if bvq.path != nil { + prev, err := bvq.path(ctx) + if err != nil { + return err + } + bvq.sql = prev + } + return nil +} + +func (bvq *BlockedVideosQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*BlockedVideos, error) { + var ( + nodes = []*BlockedVideos{} + _spec = bvq.querySpec() + ) + _spec.ScanValues = func(columns []string) ([]any, error) { + return (*BlockedVideos).scanValues(nil, columns) + } + _spec.Assign = func(columns []string, values []any) error { + node := &BlockedVideos{config: bvq.config} + nodes = append(nodes, node) + return node.assignValues(columns, values) + } + for i := range hooks { + hooks[i](ctx, _spec) + } + if err := sqlgraph.QueryNodes(ctx, bvq.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + return nodes, nil +} + +func (bvq *BlockedVideosQuery) sqlCount(ctx context.Context) (int, error) { + _spec := bvq.querySpec() + _spec.Node.Columns = bvq.ctx.Fields + if len(bvq.ctx.Fields) > 0 { + _spec.Unique = bvq.ctx.Unique != nil && *bvq.ctx.Unique + } + return sqlgraph.CountNodes(ctx, bvq.driver, _spec) +} + +func (bvq *BlockedVideosQuery) querySpec() *sqlgraph.QuerySpec { + _spec := sqlgraph.NewQuerySpec(blockedvideos.Table, blockedvideos.Columns, sqlgraph.NewFieldSpec(blockedvideos.FieldID, field.TypeString)) + _spec.From = bvq.sql + if unique := bvq.ctx.Unique; unique != nil { + _spec.Unique = 
*unique + } else if bvq.path != nil { + _spec.Unique = true + } + if fields := bvq.ctx.Fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, blockedvideos.FieldID) + for i := range fields { + if fields[i] != blockedvideos.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + } + if ps := bvq.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := bvq.ctx.Limit; limit != nil { + _spec.Limit = *limit + } + if offset := bvq.ctx.Offset; offset != nil { + _spec.Offset = *offset + } + if ps := bvq.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (bvq *BlockedVideosQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(bvq.driver.Dialect()) + t1 := builder.Table(blockedvideos.Table) + columns := bvq.ctx.Fields + if len(columns) == 0 { + columns = blockedvideos.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if bvq.sql != nil { + selector = bvq.sql + selector.Select(selector.Columns(columns...)...) + } + if bvq.ctx.Unique != nil && *bvq.ctx.Unique { + selector.Distinct() + } + for _, p := range bvq.predicates { + p(selector) + } + for _, p := range bvq.order { + p(selector) + } + if offset := bvq.ctx.Offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. + selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := bvq.ctx.Limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// BlockedVideosGroupBy is the group-by builder for BlockedVideos entities. +type BlockedVideosGroupBy struct { + selector + build *BlockedVideosQuery +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (bvgb *BlockedVideosGroupBy) Aggregate(fns ...AggregateFunc) *BlockedVideosGroupBy { + bvgb.fns = append(bvgb.fns, fns...) + return bvgb +} + +// Scan applies the selector query and scans the result into the given value. +func (bvgb *BlockedVideosGroupBy) Scan(ctx context.Context, v any) error { + ctx = setContextOp(ctx, bvgb.build.ctx, "GroupBy") + if err := bvgb.build.prepareQuery(ctx); err != nil { + return err + } + return scanWithInterceptors[*BlockedVideosQuery, *BlockedVideosGroupBy](ctx, bvgb.build, bvgb, bvgb.build.inters, v) +} + +func (bvgb *BlockedVideosGroupBy) sqlScan(ctx context.Context, root *BlockedVideosQuery, v any) error { + selector := root.sqlQuery(ctx).Select() + aggregation := make([]string, 0, len(bvgb.fns)) + for _, fn := range bvgb.fns { + aggregation = append(aggregation, fn(selector)) + } + if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(*bvgb.flds)+len(bvgb.fns)) + for _, f := range *bvgb.flds { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) + } + selector.GroupBy(selector.Columns(*bvgb.flds...)...) + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := bvgb.build.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +// BlockedVideosSelect is the builder for selecting fields of BlockedVideos entities. 
+type BlockedVideosSelect struct { + *BlockedVideosQuery + selector +} + +// Aggregate adds the given aggregation functions to the selector query. +func (bvs *BlockedVideosSelect) Aggregate(fns ...AggregateFunc) *BlockedVideosSelect { + bvs.fns = append(bvs.fns, fns...) + return bvs +} + +// Scan applies the selector query and scans the result into the given value. +func (bvs *BlockedVideosSelect) Scan(ctx context.Context, v any) error { + ctx = setContextOp(ctx, bvs.ctx, "Select") + if err := bvs.prepareQuery(ctx); err != nil { + return err + } + return scanWithInterceptors[*BlockedVideosQuery, *BlockedVideosSelect](ctx, bvs.BlockedVideosQuery, bvs, bvs.inters, v) +} + +func (bvs *BlockedVideosSelect) sqlScan(ctx context.Context, root *BlockedVideosQuery, v any) error { + selector := root.sqlQuery(ctx) + aggregation := make([]string, 0, len(bvs.fns)) + for _, fn := range bvs.fns { + aggregation = append(aggregation, fn(selector)) + } + switch n := len(*bvs.selector.flds); { + case n == 0 && len(aggregation) > 0: + selector.Select(aggregation...) + case n != 0 && len(aggregation) > 0: + selector.AppendSelect(aggregation...) + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := bvs.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/ent/blockedvideos_update.go b/ent/blockedvideos_update.go new file mode 100644 index 00000000..ce2d367a --- /dev/null +++ b/ent/blockedvideos_update.go @@ -0,0 +1,175 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/zibbp/ganymede/ent/blockedvideos" + "github.com/zibbp/ganymede/ent/predicate" +) + +// BlockedVideosUpdate is the builder for updating BlockedVideos entities. +type BlockedVideosUpdate struct { + config + hooks []Hook + mutation *BlockedVideosMutation +} + +// Where appends a list predicates to the BlockedVideosUpdate builder. +func (bvu *BlockedVideosUpdate) Where(ps ...predicate.BlockedVideos) *BlockedVideosUpdate { + bvu.mutation.Where(ps...) + return bvu +} + +// Mutation returns the BlockedVideosMutation object of the builder. +func (bvu *BlockedVideosUpdate) Mutation() *BlockedVideosMutation { + return bvu.mutation +} + +// Save executes the query and returns the number of nodes affected by the update operation. +func (bvu *BlockedVideosUpdate) Save(ctx context.Context) (int, error) { + return withHooks(ctx, bvu.sqlSave, bvu.mutation, bvu.hooks) +} + +// SaveX is like Save, but panics if an error occurs. +func (bvu *BlockedVideosUpdate) SaveX(ctx context.Context) int { + affected, err := bvu.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. +func (bvu *BlockedVideosUpdate) Exec(ctx context.Context) error { + _, err := bvu.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (bvu *BlockedVideosUpdate) ExecX(ctx context.Context) { + if err := bvu.Exec(ctx); err != nil { + panic(err) + } +} + +func (bvu *BlockedVideosUpdate) sqlSave(ctx context.Context) (n int, err error) { + _spec := sqlgraph.NewUpdateSpec(blockedvideos.Table, blockedvideos.Columns, sqlgraph.NewFieldSpec(blockedvideos.FieldID, field.TypeString)) + if ps := bvu.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if n, err = sqlgraph.UpdateNodes(ctx, bvu.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{blockedvideos.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return 0, err + } + bvu.mutation.done = true + return n, nil +} + +// BlockedVideosUpdateOne is the builder for updating a single BlockedVideos entity. +type BlockedVideosUpdateOne struct { + config + fields []string + hooks []Hook + mutation *BlockedVideosMutation +} + +// Mutation returns the BlockedVideosMutation object of the builder. +func (bvuo *BlockedVideosUpdateOne) Mutation() *BlockedVideosMutation { + return bvuo.mutation +} + +// Where appends a list predicates to the BlockedVideosUpdate builder. +func (bvuo *BlockedVideosUpdateOne) Where(ps ...predicate.BlockedVideos) *BlockedVideosUpdateOne { + bvuo.mutation.Where(ps...) + return bvuo +} + +// Select allows selecting one or more fields (columns) of the returned entity. +// The default is selecting all fields defined in the entity schema. +func (bvuo *BlockedVideosUpdateOne) Select(field string, fields ...string) *BlockedVideosUpdateOne { + bvuo.fields = append([]string{field}, fields...) + return bvuo +} + +// Save executes the query and returns the updated BlockedVideos entity. +func (bvuo *BlockedVideosUpdateOne) Save(ctx context.Context) (*BlockedVideos, error) { + return withHooks(ctx, bvuo.sqlSave, bvuo.mutation, bvuo.hooks) +} + +// SaveX is like Save, but panics if an error occurs. +func (bvuo *BlockedVideosUpdateOne) SaveX(ctx context.Context) *BlockedVideos { + node, err := bvuo.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (bvuo *BlockedVideosUpdateOne) Exec(ctx context.Context) error { + _, err := bvuo.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (bvuo *BlockedVideosUpdateOne) ExecX(ctx context.Context) { + if err := bvuo.Exec(ctx); err != nil { + panic(err) + } +} + +func (bvuo *BlockedVideosUpdateOne) sqlSave(ctx context.Context) (_node *BlockedVideos, err error) { + _spec := sqlgraph.NewUpdateSpec(blockedvideos.Table, blockedvideos.Columns, sqlgraph.NewFieldSpec(blockedvideos.FieldID, field.TypeString)) + id, ok := bvuo.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "BlockedVideos.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := bvuo.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, blockedvideos.FieldID) + for _, f := range fields { + if !blockedvideos.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != blockedvideos.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := bvuo.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + _node = &BlockedVideos{config: bvuo.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = _node.scanValues + if err = sqlgraph.UpdateNode(ctx, bvuo.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{blockedvideos.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + bvuo.mutation.done = true + return _node, nil +} diff --git a/ent/client.go b/ent/client.go index 10505643..a4d8897e 100644 --- a/ent/client.go +++ b/ent/client.go @@ -16,6 +16,7 @@ import ( "entgo.io/ent/dialect" "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/zibbp/ganymede/ent/blockedvideos" "github.com/zibbp/ganymede/ent/channel" "github.com/zibbp/ganymede/ent/chapter" "github.com/zibbp/ganymede/ent/live" @@ -35,6 +36,8 @@ type Client struct { config // Schema is the client for creating, migrating and dropping schema. Schema *migrate.Schema + // BlockedVideos is the client for interacting with the BlockedVideos builders. + BlockedVideos *BlockedVideosClient // Channel is the client for interacting with the Channel builders. Channel *ChannelClient // Chapter is the client for interacting with the Chapter builders. @@ -70,6 +73,7 @@ func NewClient(opts ...Option) *Client { func (c *Client) init() { c.Schema = migrate.NewSchema(c.driver) + c.BlockedVideos = NewBlockedVideosClient(c.config) c.Channel = NewChannelClient(c.config) c.Chapter = NewChapterClient(c.config) c.Live = NewLiveClient(c.config) @@ -174,6 +178,7 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) { return &Tx{ ctx: ctx, config: cfg, + BlockedVideos: NewBlockedVideosClient(cfg), Channel: NewChannelClient(cfg), Chapter: NewChapterClient(cfg), Live: NewLiveClient(cfg), @@ -205,6 +210,7 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error) return &Tx{ ctx: ctx, config: cfg, + BlockedVideos: NewBlockedVideosClient(cfg), Channel: NewChannelClient(cfg), Chapter: NewChapterClient(cfg), Live: NewLiveClient(cfg), @@ -223,7 +229,7 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error) // Debug returns a new debug-client. It's used to get verbose logging on specific operations. // // client.Debug(). -// Channel. +// BlockedVideos. // Query(). 
// Count(ctx) func (c *Client) Debug() *Client { @@ -246,8 +252,9 @@ func (c *Client) Close() error { // In order to add hooks to a specific client, call: `client.Node.Use(...)`. func (c *Client) Use(hooks ...Hook) { for _, n := range []interface{ Use(...Hook) }{ - c.Channel, c.Chapter, c.Live, c.LiveCategory, c.LiveTitleRegex, c.MutedSegment, - c.Playback, c.Playlist, c.Queue, c.TwitchCategory, c.User, c.Vod, + c.BlockedVideos, c.Channel, c.Chapter, c.Live, c.LiveCategory, c.LiveTitleRegex, + c.MutedSegment, c.Playback, c.Playlist, c.Queue, c.TwitchCategory, c.User, + c.Vod, } { n.Use(hooks...) } @@ -257,8 +264,9 @@ func (c *Client) Use(hooks ...Hook) { // In order to add interceptors to a specific client, call: `client.Node.Intercept(...)`. func (c *Client) Intercept(interceptors ...Interceptor) { for _, n := range []interface{ Intercept(...Interceptor) }{ - c.Channel, c.Chapter, c.Live, c.LiveCategory, c.LiveTitleRegex, c.MutedSegment, - c.Playback, c.Playlist, c.Queue, c.TwitchCategory, c.User, c.Vod, + c.BlockedVideos, c.Channel, c.Chapter, c.Live, c.LiveCategory, c.LiveTitleRegex, + c.MutedSegment, c.Playback, c.Playlist, c.Queue, c.TwitchCategory, c.User, + c.Vod, } { n.Intercept(interceptors...) } @@ -267,6 +275,8 @@ func (c *Client) Intercept(interceptors ...Interceptor) { // Mutate implements the ent.Mutator interface. func (c *Client) Mutate(ctx context.Context, m Mutation) (Value, error) { switch m := m.(type) { + case *BlockedVideosMutation: + return c.BlockedVideos.mutate(ctx, m) case *ChannelMutation: return c.Channel.mutate(ctx, m) case *ChapterMutation: @@ -296,6 +306,139 @@ func (c *Client) Mutate(ctx context.Context, m Mutation) (Value, error) { } } +// BlockedVideosClient is a client for the BlockedVideos schema. +type BlockedVideosClient struct { + config +} + +// NewBlockedVideosClient returns a client for the BlockedVideos from the given config. +func NewBlockedVideosClient(c config) *BlockedVideosClient { + return &BlockedVideosClient{config: c} +} + +// Use adds a list of mutation hooks to the hooks stack. +// A call to `Use(f, g, h)` equals to `blockedvideos.Hooks(f(g(h())))`. +func (c *BlockedVideosClient) Use(hooks ...Hook) { + c.hooks.BlockedVideos = append(c.hooks.BlockedVideos, hooks...) +} + +// Intercept adds a list of query interceptors to the interceptors stack. +// A call to `Intercept(f, g, h)` equals to `blockedvideos.Intercept(f(g(h())))`. +func (c *BlockedVideosClient) Intercept(interceptors ...Interceptor) { + c.inters.BlockedVideos = append(c.inters.BlockedVideos, interceptors...) +} + +// Create returns a builder for creating a BlockedVideos entity. +func (c *BlockedVideosClient) Create() *BlockedVideosCreate { + mutation := newBlockedVideosMutation(c.config, OpCreate) + return &BlockedVideosCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// CreateBulk returns a builder for creating a bulk of BlockedVideos entities. +func (c *BlockedVideosClient) CreateBulk(builders ...*BlockedVideosCreate) *BlockedVideosCreateBulk { + return &BlockedVideosCreateBulk{config: c.config, builders: builders} +} + +// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates +// a builder and applies setFunc on it. 
+func (c *BlockedVideosClient) MapCreateBulk(slice any, setFunc func(*BlockedVideosCreate, int)) *BlockedVideosCreateBulk { + rv := reflect.ValueOf(slice) + if rv.Kind() != reflect.Slice { + return &BlockedVideosCreateBulk{err: fmt.Errorf("calling to BlockedVideosClient.MapCreateBulk with wrong type %T, need slice", slice)} + } + builders := make([]*BlockedVideosCreate, rv.Len()) + for i := 0; i < rv.Len(); i++ { + builders[i] = c.Create() + setFunc(builders[i], i) + } + return &BlockedVideosCreateBulk{config: c.config, builders: builders} +} + +// Update returns an update builder for BlockedVideos. +func (c *BlockedVideosClient) Update() *BlockedVideosUpdate { + mutation := newBlockedVideosMutation(c.config, OpUpdate) + return &BlockedVideosUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOne returns an update builder for the given entity. +func (c *BlockedVideosClient) UpdateOne(bv *BlockedVideos) *BlockedVideosUpdateOne { + mutation := newBlockedVideosMutation(c.config, OpUpdateOne, withBlockedVideos(bv)) + return &BlockedVideosUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOneID returns an update builder for the given id. +func (c *BlockedVideosClient) UpdateOneID(id string) *BlockedVideosUpdateOne { + mutation := newBlockedVideosMutation(c.config, OpUpdateOne, withBlockedVideosID(id)) + return &BlockedVideosUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// Delete returns a delete builder for BlockedVideos. +func (c *BlockedVideosClient) Delete() *BlockedVideosDelete { + mutation := newBlockedVideosMutation(c.config, OpDelete) + return &BlockedVideosDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// DeleteOne returns a builder for deleting the given entity. +func (c *BlockedVideosClient) DeleteOne(bv *BlockedVideos) *BlockedVideosDeleteOne { + return c.DeleteOneID(bv.ID) +} + +// DeleteOneID returns a builder for deleting the given entity by its id. +func (c *BlockedVideosClient) DeleteOneID(id string) *BlockedVideosDeleteOne { + builder := c.Delete().Where(blockedvideos.ID(id)) + builder.mutation.id = &id + builder.mutation.op = OpDeleteOne + return &BlockedVideosDeleteOne{builder} +} + +// Query returns a query builder for BlockedVideos. +func (c *BlockedVideosClient) Query() *BlockedVideosQuery { + return &BlockedVideosQuery{ + config: c.config, + ctx: &QueryContext{Type: TypeBlockedVideos}, + inters: c.Interceptors(), + } +} + +// Get returns a BlockedVideos entity by its id. +func (c *BlockedVideosClient) Get(ctx context.Context, id string) (*BlockedVideos, error) { + return c.Query().Where(blockedvideos.ID(id)).Only(ctx) +} + +// GetX is like Get, but panics if an error occurs. +func (c *BlockedVideosClient) GetX(ctx context.Context, id string) *BlockedVideos { + obj, err := c.Get(ctx, id) + if err != nil { + panic(err) + } + return obj +} + +// Hooks returns the client hooks. +func (c *BlockedVideosClient) Hooks() []Hook { + return c.hooks.BlockedVideos +} + +// Interceptors returns the client interceptors. 
+func (c *BlockedVideosClient) Interceptors() []Interceptor { + return c.inters.BlockedVideos +} + +func (c *BlockedVideosClient) mutate(ctx context.Context, m *BlockedVideosMutation) (Value, error) { + switch m.Op() { + case OpCreate: + return (&BlockedVideosCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx) + case OpUpdate: + return (&BlockedVideosUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx) + case OpUpdateOne: + return (&BlockedVideosUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx) + case OpDelete, OpDeleteOne: + return (&BlockedVideosDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx) + default: + return nil, fmt.Errorf("ent: unknown BlockedVideos mutation op: %q", m.Op()) + } +} + // ChannelClient is a client for the Channel schema. type ChannelClient struct { config @@ -2151,11 +2294,12 @@ func (c *VodClient) mutate(ctx context.Context, m *VodMutation) (Value, error) { // hooks and interceptors per client, for fast access. type ( hooks struct { - Channel, Chapter, Live, LiveCategory, LiveTitleRegex, MutedSegment, Playback, - Playlist, Queue, TwitchCategory, User, Vod []ent.Hook + BlockedVideos, Channel, Chapter, Live, LiveCategory, LiveTitleRegex, + MutedSegment, Playback, Playlist, Queue, TwitchCategory, User, Vod []ent.Hook } inters struct { - Channel, Chapter, Live, LiveCategory, LiveTitleRegex, MutedSegment, Playback, - Playlist, Queue, TwitchCategory, User, Vod []ent.Interceptor + BlockedVideos, Channel, Chapter, Live, LiveCategory, LiveTitleRegex, + MutedSegment, Playback, Playlist, Queue, TwitchCategory, User, + Vod []ent.Interceptor } ) diff --git a/ent/ent.go b/ent/ent.go index 06ec4793..d1787f47 100644 --- a/ent/ent.go +++ b/ent/ent.go @@ -12,6 +12,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/zibbp/ganymede/ent/blockedvideos" "github.com/zibbp/ganymede/ent/channel" "github.com/zibbp/ganymede/ent/chapter" "github.com/zibbp/ganymede/ent/live" @@ -84,6 +85,7 @@ var ( func checkColumn(table, column string) error { initCheck.Do(func() { columnCheck = sql.NewColumnCheck(map[string]func(string) bool{ + blockedvideos.Table: blockedvideos.ValidColumn, channel.Table: channel.ValidColumn, chapter.Table: chapter.ValidColumn, live.Table: live.ValidColumn, diff --git a/ent/hook/hook.go b/ent/hook/hook.go index 6cfe5540..db107e62 100644 --- a/ent/hook/hook.go +++ b/ent/hook/hook.go @@ -9,6 +9,18 @@ import ( "github.com/zibbp/ganymede/ent" ) +// The BlockedVideosFunc type is an adapter to allow the use of ordinary +// function as BlockedVideos mutator. +type BlockedVideosFunc func(context.Context, *ent.BlockedVideosMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). +func (f BlockedVideosFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + if mv, ok := m.(*ent.BlockedVideosMutation); ok { + return f(ctx, mv) + } + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.BlockedVideosMutation", m) +} + // The ChannelFunc type is an adapter to allow the use of ordinary // function as Channel mutator. type ChannelFunc func(context.Context, *ent.ChannelMutation) (ent.Value, error) diff --git a/ent/live.go b/ent/live.go index 3659653f..495a0799 100644 --- a/ent/live.go +++ b/ent/live.go @@ -43,6 +43,8 @@ type Live struct { RenderChat bool `json:"render_chat,omitempty"` // Restrict fetching videos to a certain age. VideoAge int64 `json:"video_age,omitempty"` + // Whether the categories should be applied to livestreams. 
+ ApplyCategoriesToLive bool `json:"apply_categories_to_live,omitempty"` // UpdatedAt holds the value of the "updated_at" field. UpdatedAt time.Time `json:"updated_at,omitempty"` // CreatedAt holds the value of the "created_at" field. @@ -101,7 +103,7 @@ func (*Live) scanValues(columns []string) ([]any, error) { values := make([]any, len(columns)) for i := range columns { switch columns[i] { - case live.FieldWatchLive, live.FieldWatchVod, live.FieldDownloadArchives, live.FieldDownloadHighlights, live.FieldDownloadUploads, live.FieldDownloadSubOnly, live.FieldIsLive, live.FieldArchiveChat, live.FieldRenderChat: + case live.FieldWatchLive, live.FieldWatchVod, live.FieldDownloadArchives, live.FieldDownloadHighlights, live.FieldDownloadUploads, live.FieldDownloadSubOnly, live.FieldIsLive, live.FieldArchiveChat, live.FieldRenderChat, live.FieldApplyCategoriesToLive: values[i] = new(sql.NullBool) case live.FieldVideoAge: values[i] = new(sql.NullInt64) @@ -206,6 +208,12 @@ func (l *Live) assignValues(columns []string, values []any) error { } else if value.Valid { l.VideoAge = value.Int64 } + case live.FieldApplyCategoriesToLive: + if value, ok := values[i].(*sql.NullBool); !ok { + return fmt.Errorf("unexpected type %T for field apply_categories_to_live", values[i]) + } else if value.Valid { + l.ApplyCategoriesToLive = value.Bool + } case live.FieldUpdatedAt: if value, ok := values[i].(*sql.NullTime); !ok { return fmt.Errorf("unexpected type %T for field updated_at", values[i]) @@ -312,6 +320,9 @@ func (l *Live) String() string { builder.WriteString("video_age=") builder.WriteString(fmt.Sprintf("%v", l.VideoAge)) builder.WriteString(", ") + builder.WriteString("apply_categories_to_live=") + builder.WriteString(fmt.Sprintf("%v", l.ApplyCategoriesToLive)) + builder.WriteString(", ") builder.WriteString("updated_at=") builder.WriteString(l.UpdatedAt.Format(time.ANSIC)) builder.WriteString(", ") diff --git a/ent/live/live.go b/ent/live/live.go index a87257b9..a1de2ca0 100644 --- a/ent/live/live.go +++ b/ent/live/live.go @@ -39,6 +39,8 @@ const ( FieldRenderChat = "render_chat" // FieldVideoAge holds the string denoting the video_age field in the database. FieldVideoAge = "video_age" + // FieldApplyCategoriesToLive holds the string denoting the apply_categories_to_live field in the database. + FieldApplyCategoriesToLive = "apply_categories_to_live" // FieldUpdatedAt holds the string denoting the updated_at field in the database. FieldUpdatedAt = "updated_at" // FieldCreatedAt holds the string denoting the created_at field in the database. @@ -89,6 +91,7 @@ var Columns = []string{ FieldLastLive, FieldRenderChat, FieldVideoAge, + FieldApplyCategoriesToLive, FieldUpdatedAt, FieldCreatedAt, } @@ -139,6 +142,8 @@ var ( DefaultRenderChat bool // DefaultVideoAge holds the default value on creation for the "video_age" field. DefaultVideoAge int64 + // DefaultApplyCategoriesToLive holds the default value on creation for the "apply_categories_to_live" field. + DefaultApplyCategoriesToLive bool // DefaultUpdatedAt holds the default value on creation for the "updated_at" field. DefaultUpdatedAt func() time.Time // UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field. @@ -217,6 +222,11 @@ func ByVideoAge(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldVideoAge, opts...).ToFunc() } +// ByApplyCategoriesToLive orders the results by the apply_categories_to_live field. 
+func ByApplyCategoriesToLive(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldApplyCategoriesToLive, opts...).ToFunc() +} + // ByUpdatedAt orders the results by the updated_at field. func ByUpdatedAt(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldUpdatedAt, opts...).ToFunc() diff --git a/ent/live/where.go b/ent/live/where.go index 9621c449..887e620a 100644 --- a/ent/live/where.go +++ b/ent/live/where.go @@ -116,6 +116,11 @@ func VideoAge(v int64) predicate.Live { return predicate.Live(sql.FieldEQ(FieldVideoAge, v)) } +// ApplyCategoriesToLive applies equality check predicate on the "apply_categories_to_live" field. It's identical to ApplyCategoriesToLiveEQ. +func ApplyCategoriesToLive(v bool) predicate.Live { + return predicate.Live(sql.FieldEQ(FieldApplyCategoriesToLive, v)) +} + // UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ. func UpdatedAt(v time.Time) predicate.Live { return predicate.Live(sql.FieldEQ(FieldUpdatedAt, v)) @@ -371,6 +376,16 @@ func VideoAgeLTE(v int64) predicate.Live { return predicate.Live(sql.FieldLTE(FieldVideoAge, v)) } +// ApplyCategoriesToLiveEQ applies the EQ predicate on the "apply_categories_to_live" field. +func ApplyCategoriesToLiveEQ(v bool) predicate.Live { + return predicate.Live(sql.FieldEQ(FieldApplyCategoriesToLive, v)) +} + +// ApplyCategoriesToLiveNEQ applies the NEQ predicate on the "apply_categories_to_live" field. +func ApplyCategoriesToLiveNEQ(v bool) predicate.Live { + return predicate.Live(sql.FieldNEQ(FieldApplyCategoriesToLive, v)) +} + // UpdatedAtEQ applies the EQ predicate on the "updated_at" field. func UpdatedAtEQ(v time.Time) predicate.Live { return predicate.Live(sql.FieldEQ(FieldUpdatedAt, v)) diff --git a/ent/live_create.go b/ent/live_create.go index 80800726..d91cc48a 100644 --- a/ent/live_create.go +++ b/ent/live_create.go @@ -195,6 +195,20 @@ func (lc *LiveCreate) SetNillableVideoAge(i *int64) *LiveCreate { return lc } +// SetApplyCategoriesToLive sets the "apply_categories_to_live" field. +func (lc *LiveCreate) SetApplyCategoriesToLive(b bool) *LiveCreate { + lc.mutation.SetApplyCategoriesToLive(b) + return lc +} + +// SetNillableApplyCategoriesToLive sets the "apply_categories_to_live" field if the given value is not nil. +func (lc *LiveCreate) SetNillableApplyCategoriesToLive(b *bool) *LiveCreate { + if b != nil { + lc.SetApplyCategoriesToLive(*b) + } + return lc +} + // SetUpdatedAt sets the "updated_at" field. 
func (lc *LiveCreate) SetUpdatedAt(t time.Time) *LiveCreate { lc.mutation.SetUpdatedAt(t) @@ -361,6 +375,10 @@ func (lc *LiveCreate) defaults() { v := live.DefaultVideoAge lc.mutation.SetVideoAge(v) } + if _, ok := lc.mutation.ApplyCategoriesToLive(); !ok { + v := live.DefaultApplyCategoriesToLive + lc.mutation.SetApplyCategoriesToLive(v) + } if _, ok := lc.mutation.UpdatedAt(); !ok { v := live.DefaultUpdatedAt() lc.mutation.SetUpdatedAt(v) @@ -410,6 +428,9 @@ func (lc *LiveCreate) check() error { if _, ok := lc.mutation.VideoAge(); !ok { return &ValidationError{Name: "video_age", err: errors.New(`ent: missing required field "Live.video_age"`)} } + if _, ok := lc.mutation.ApplyCategoriesToLive(); !ok { + return &ValidationError{Name: "apply_categories_to_live", err: errors.New(`ent: missing required field "Live.apply_categories_to_live"`)} + } if _, ok := lc.mutation.UpdatedAt(); !ok { return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Live.updated_at"`)} } @@ -503,6 +524,10 @@ func (lc *LiveCreate) createSpec() (*Live, *sqlgraph.CreateSpec) { _spec.SetField(live.FieldVideoAge, field.TypeInt64, value) _node.VideoAge = value } + if value, ok := lc.mutation.ApplyCategoriesToLive(); ok { + _spec.SetField(live.FieldApplyCategoriesToLive, field.TypeBool, value) + _node.ApplyCategoriesToLive = value + } if value, ok := lc.mutation.UpdatedAt(); ok { _spec.SetField(live.FieldUpdatedAt, field.TypeTime, value) _node.UpdatedAt = value @@ -768,6 +793,18 @@ func (u *LiveUpsert) AddVideoAge(v int64) *LiveUpsert { return u } +// SetApplyCategoriesToLive sets the "apply_categories_to_live" field. +func (u *LiveUpsert) SetApplyCategoriesToLive(v bool) *LiveUpsert { + u.Set(live.FieldApplyCategoriesToLive, v) + return u +} + +// UpdateApplyCategoriesToLive sets the "apply_categories_to_live" field to the value that was provided on create. +func (u *LiveUpsert) UpdateApplyCategoriesToLive() *LiveUpsert { + u.SetExcluded(live.FieldApplyCategoriesToLive) + return u +} + // SetUpdatedAt sets the "updated_at" field. func (u *LiveUpsert) SetUpdatedAt(v time.Time) *LiveUpsert { u.Set(live.FieldUpdatedAt, v) @@ -1013,6 +1050,20 @@ func (u *LiveUpsertOne) UpdateVideoAge() *LiveUpsertOne { }) } +// SetApplyCategoriesToLive sets the "apply_categories_to_live" field. +func (u *LiveUpsertOne) SetApplyCategoriesToLive(v bool) *LiveUpsertOne { + return u.Update(func(s *LiveUpsert) { + s.SetApplyCategoriesToLive(v) + }) +} + +// UpdateApplyCategoriesToLive sets the "apply_categories_to_live" field to the value that was provided on create. +func (u *LiveUpsertOne) UpdateApplyCategoriesToLive() *LiveUpsertOne { + return u.Update(func(s *LiveUpsert) { + s.UpdateApplyCategoriesToLive() + }) +} + // SetUpdatedAt sets the "updated_at" field. func (u *LiveUpsertOne) SetUpdatedAt(v time.Time) *LiveUpsertOne { return u.Update(func(s *LiveUpsert) { @@ -1427,6 +1478,20 @@ func (u *LiveUpsertBulk) UpdateVideoAge() *LiveUpsertBulk { }) } +// SetApplyCategoriesToLive sets the "apply_categories_to_live" field. +func (u *LiveUpsertBulk) SetApplyCategoriesToLive(v bool) *LiveUpsertBulk { + return u.Update(func(s *LiveUpsert) { + s.SetApplyCategoriesToLive(v) + }) +} + +// UpdateApplyCategoriesToLive sets the "apply_categories_to_live" field to the value that was provided on create. +func (u *LiveUpsertBulk) UpdateApplyCategoriesToLive() *LiveUpsertBulk { + return u.Update(func(s *LiveUpsert) { + s.UpdateApplyCategoriesToLive() + }) +} + // SetUpdatedAt sets the "updated_at" field. 
func (u *LiveUpsertBulk) SetUpdatedAt(v time.Time) *LiveUpsertBulk { return u.Update(func(s *LiveUpsert) { diff --git a/ent/live_update.go b/ent/live_update.go index d94ab99e..d58af1cf 100644 --- a/ent/live_update.go +++ b/ent/live_update.go @@ -213,6 +213,20 @@ func (lu *LiveUpdate) AddVideoAge(i int64) *LiveUpdate { return lu } +// SetApplyCategoriesToLive sets the "apply_categories_to_live" field. +func (lu *LiveUpdate) SetApplyCategoriesToLive(b bool) *LiveUpdate { + lu.mutation.SetApplyCategoriesToLive(b) + return lu +} + +// SetNillableApplyCategoriesToLive sets the "apply_categories_to_live" field if the given value is not nil. +func (lu *LiveUpdate) SetNillableApplyCategoriesToLive(b *bool) *LiveUpdate { + if b != nil { + lu.SetApplyCategoriesToLive(*b) + } + return lu +} + // SetUpdatedAt sets the "updated_at" field. func (lu *LiveUpdate) SetUpdatedAt(t time.Time) *LiveUpdate { lu.mutation.SetUpdatedAt(t) @@ -411,6 +425,9 @@ func (lu *LiveUpdate) sqlSave(ctx context.Context) (n int, err error) { if value, ok := lu.mutation.AddedVideoAge(); ok { _spec.AddField(live.FieldVideoAge, field.TypeInt64, value) } + if value, ok := lu.mutation.ApplyCategoriesToLive(); ok { + _spec.SetField(live.FieldApplyCategoriesToLive, field.TypeBool, value) + } if value, ok := lu.mutation.UpdatedAt(); ok { _spec.SetField(live.FieldUpdatedAt, field.TypeTime, value) } @@ -734,6 +751,20 @@ func (luo *LiveUpdateOne) AddVideoAge(i int64) *LiveUpdateOne { return luo } +// SetApplyCategoriesToLive sets the "apply_categories_to_live" field. +func (luo *LiveUpdateOne) SetApplyCategoriesToLive(b bool) *LiveUpdateOne { + luo.mutation.SetApplyCategoriesToLive(b) + return luo +} + +// SetNillableApplyCategoriesToLive sets the "apply_categories_to_live" field if the given value is not nil. +func (luo *LiveUpdateOne) SetNillableApplyCategoriesToLive(b *bool) *LiveUpdateOne { + if b != nil { + luo.SetApplyCategoriesToLive(*b) + } + return luo +} + // SetUpdatedAt sets the "updated_at" field. func (luo *LiveUpdateOne) SetUpdatedAt(t time.Time) *LiveUpdateOne { luo.mutation.SetUpdatedAt(t) @@ -962,6 +993,9 @@ func (luo *LiveUpdateOne) sqlSave(ctx context.Context) (_node *Live, err error) if value, ok := luo.mutation.AddedVideoAge(); ok { _spec.AddField(live.FieldVideoAge, field.TypeInt64, value) } + if value, ok := luo.mutation.ApplyCategoriesToLive(); ok { + _spec.SetField(live.FieldApplyCategoriesToLive, field.TypeBool, value) + } if value, ok := luo.mutation.UpdatedAt(); ok { _spec.SetField(live.FieldUpdatedAt, field.TypeTime, value) } diff --git a/ent/migrate/schema.go b/ent/migrate/schema.go index 39d38fa1..c5a72cc8 100644 --- a/ent/migrate/schema.go +++ b/ent/migrate/schema.go @@ -8,6 +8,17 @@ import ( ) var ( + // BlockedVideosColumns holds the columns for the "blocked_videos" table. + BlockedVideosColumns = []*schema.Column{ + {Name: "id", Type: field.TypeString}, + {Name: "created_at", Type: field.TypeTime}, + } + // BlockedVideosTable holds the schema information for the "blocked_videos" table. + BlockedVideosTable = &schema.Table{ + Name: "blocked_videos", + Columns: BlockedVideosColumns, + PrimaryKey: []*schema.Column{BlockedVideosColumns[0]}, + } // ChannelsColumns holds the columns for the "channels" table. 
ChannelsColumns = []*schema.Column{ {Name: "id", Type: field.TypeUUID}, @@ -64,6 +75,7 @@ var ( {Name: "last_live", Type: field.TypeTime}, {Name: "render_chat", Type: field.TypeBool, Default: true}, {Name: "video_age", Type: field.TypeInt64, Default: 0}, + {Name: "apply_categories_to_live", Type: field.TypeBool, Default: false}, {Name: "updated_at", Type: field.TypeTime}, {Name: "created_at", Type: field.TypeTime}, {Name: "channel_live", Type: field.TypeUUID}, @@ -76,7 +88,7 @@ var ( ForeignKeys: []*schema.ForeignKey{ { Symbol: "lives_channels_live", - Columns: []*schema.Column{LivesColumns[15]}, + Columns: []*schema.Column{LivesColumns[16]}, RefColumns: []*schema.Column{ChannelsColumns[0]}, OnDelete: schema.NoAction, }, @@ -195,6 +207,7 @@ var ( {Name: "task_chat_render", Type: field.TypeEnum, Nullable: true, Enums: []string{"success", "running", "pending", "failed"}, Default: "pending"}, {Name: "task_chat_move", Type: field.TypeEnum, Nullable: true, Enums: []string{"success", "running", "pending", "failed"}, Default: "pending"}, {Name: "chat_start", Type: field.TypeTime, Nullable: true}, + {Name: "archive_chat", Type: field.TypeBool, Nullable: true, Default: true}, {Name: "render_chat", Type: field.TypeBool, Nullable: true, Default: true}, {Name: "workflow_id", Type: field.TypeString, Nullable: true}, {Name: "workflow_run_id", Type: field.TypeString, Nullable: true}, @@ -210,7 +223,7 @@ var ( ForeignKeys: []*schema.ForeignKey{ { Symbol: "queues_vods_queue", - Columns: []*schema.Column{QueuesColumns[22]}, + Columns: []*schema.Column{QueuesColumns[23]}, RefColumns: []*schema.Column{VodsColumns[0]}, OnDelete: schema.NoAction, }, @@ -253,6 +266,7 @@ var ( VodsColumns = []*schema.Column{ {Name: "id", Type: field.TypeUUID}, {Name: "ext_id", Type: field.TypeString}, + {Name: "ext_stream_id", Type: field.TypeString, Nullable: true}, {Name: "platform", Type: field.TypeEnum, Enums: []string{"twitch", "youtube"}, Default: "twitch"}, {Name: "type", Type: field.TypeEnum, Enums: []string{"archive", "live", "highlight", "upload", "clip"}, Default: "archive"}, {Name: "title", Type: field.TypeString}, @@ -294,7 +308,7 @@ var ( ForeignKeys: []*schema.ForeignKey{ { Symbol: "vods_channels_vods", - Columns: []*schema.Column{VodsColumns[33]}, + Columns: []*schema.Column{VodsColumns[34]}, RefColumns: []*schema.Column{ChannelsColumns[0]}, OnDelete: schema.NoAction, }, @@ -327,6 +341,7 @@ var ( } // Tables holds all the tables in the schema. Tables = []*schema.Table{ + BlockedVideosTable, ChannelsTable, ChaptersTable, LivesTable, diff --git a/ent/mutation.go b/ent/mutation.go index e4ee63d2..e64d25c3 100644 --- a/ent/mutation.go +++ b/ent/mutation.go @@ -12,6 +12,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/google/uuid" + "github.com/zibbp/ganymede/ent/blockedvideos" "github.com/zibbp/ganymede/ent/channel" "github.com/zibbp/ganymede/ent/chapter" "github.com/zibbp/ganymede/ent/live" @@ -37,6 +38,7 @@ const ( OpUpdateOne = ent.OpUpdateOne // Node types. + TypeBlockedVideos = "BlockedVideos" TypeChannel = "Channel" TypeChapter = "Chapter" TypeLive = "Live" @@ -51,6 +53,338 @@ const ( TypeVod = "Vod" ) +// BlockedVideosMutation represents an operation that mutates the BlockedVideos nodes in the graph. 
+type BlockedVideosMutation struct { + config + op Op + typ string + id *string + created_at *time.Time + clearedFields map[string]struct{} + done bool + oldValue func(context.Context) (*BlockedVideos, error) + predicates []predicate.BlockedVideos +} + +var _ ent.Mutation = (*BlockedVideosMutation)(nil) + +// blockedvideosOption allows management of the mutation configuration using functional options. +type blockedvideosOption func(*BlockedVideosMutation) + +// newBlockedVideosMutation creates new mutation for the BlockedVideos entity. +func newBlockedVideosMutation(c config, op Op, opts ...blockedvideosOption) *BlockedVideosMutation { + m := &BlockedVideosMutation{ + config: c, + op: op, + typ: TypeBlockedVideos, + clearedFields: make(map[string]struct{}), + } + for _, opt := range opts { + opt(m) + } + return m +} + +// withBlockedVideosID sets the ID field of the mutation. +func withBlockedVideosID(id string) blockedvideosOption { + return func(m *BlockedVideosMutation) { + var ( + err error + once sync.Once + value *BlockedVideos + ) + m.oldValue = func(ctx context.Context) (*BlockedVideos, error) { + once.Do(func() { + if m.done { + err = errors.New("querying old values post mutation is not allowed") + } else { + value, err = m.Client().BlockedVideos.Get(ctx, id) + } + }) + return value, err + } + m.id = &id + } +} + +// withBlockedVideos sets the old BlockedVideos of the mutation. +func withBlockedVideos(node *BlockedVideos) blockedvideosOption { + return func(m *BlockedVideosMutation) { + m.oldValue = func(context.Context) (*BlockedVideos, error) { + return node, nil + } + m.id = &node.ID + } +} + +// Client returns a new `ent.Client` from the mutation. If the mutation was +// executed in a transaction (ent.Tx), a transactional client is returned. +func (m BlockedVideosMutation) Client() *Client { + client := &Client{config: m.config} + client.init() + return client +} + +// Tx returns an `ent.Tx` for mutations that were executed in transactions; +// it returns an error otherwise. +func (m BlockedVideosMutation) Tx() (*Tx, error) { + if _, ok := m.driver.(*txDriver); !ok { + return nil, errors.New("ent: mutation is not running in a transaction") + } + tx := &Tx{config: m.config} + tx.init() + return tx, nil +} + +// SetID sets the value of the id field. Note that this +// operation is only accepted on creation of BlockedVideos entities. +func (m *BlockedVideosMutation) SetID(id string) { + m.id = &id +} + +// ID returns the ID value in the mutation. Note that the ID is only available +// if it was provided to the builder or after it was returned from the database. +func (m *BlockedVideosMutation) ID() (id string, exists bool) { + if m.id == nil { + return + } + return *m.id, true +} + +// IDs queries the database and returns the entity ids that match the mutation's predicate. +// That means, if the mutation is applied within a transaction with an isolation level such +// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated +// or updated by the mutation. +func (m *BlockedVideosMutation) IDs(ctx context.Context) ([]string, error) { + switch { + case m.op.Is(OpUpdateOne | OpDeleteOne): + id, exists := m.ID() + if exists { + return []string{id}, nil + } + fallthrough + case m.op.Is(OpUpdate | OpDelete): + return m.Client().BlockedVideos.Query().Where(m.predicates...).IDs(ctx) + default: + return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op) + } +} + +// SetCreatedAt sets the "created_at" field. 
+func (m *BlockedVideosMutation) SetCreatedAt(t time.Time) { + m.created_at = &t +} + +// CreatedAt returns the value of the "created_at" field in the mutation. +func (m *BlockedVideosMutation) CreatedAt() (r time.Time, exists bool) { + v := m.created_at + if v == nil { + return + } + return *v, true +} + +// OldCreatedAt returns the old "created_at" field's value of the BlockedVideos entity. +// If the BlockedVideos object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *BlockedVideosMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldCreatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err) + } + return oldValue.CreatedAt, nil +} + +// ResetCreatedAt resets all changes to the "created_at" field. +func (m *BlockedVideosMutation) ResetCreatedAt() { + m.created_at = nil +} + +// Where appends a list predicates to the BlockedVideosMutation builder. +func (m *BlockedVideosMutation) Where(ps ...predicate.BlockedVideos) { + m.predicates = append(m.predicates, ps...) +} + +// WhereP appends storage-level predicates to the BlockedVideosMutation builder. Using this method, +// users can use type-assertion to append predicates that do not depend on any generated package. +func (m *BlockedVideosMutation) WhereP(ps ...func(*sql.Selector)) { + p := make([]predicate.BlockedVideos, len(ps)) + for i := range ps { + p[i] = ps[i] + } + m.Where(p...) +} + +// Op returns the operation name. +func (m *BlockedVideosMutation) Op() Op { + return m.op +} + +// SetOp allows setting the mutation operation. +func (m *BlockedVideosMutation) SetOp(op Op) { + m.op = op +} + +// Type returns the node type of this mutation (BlockedVideos). +func (m *BlockedVideosMutation) Type() string { + return m.typ +} + +// Fields returns all fields that were changed during this mutation. Note that in +// order to get all numeric fields that were incremented/decremented, call +// AddedFields(). +func (m *BlockedVideosMutation) Fields() []string { + fields := make([]string, 0, 1) + if m.created_at != nil { + fields = append(fields, blockedvideos.FieldCreatedAt) + } + return fields +} + +// Field returns the value of a field with the given name. The second boolean +// return value indicates that this field was not set, or was not defined in the +// schema. +func (m *BlockedVideosMutation) Field(name string) (ent.Value, bool) { + switch name { + case blockedvideos.FieldCreatedAt: + return m.CreatedAt() + } + return nil, false +} + +// OldField returns the old value of the field from the database. An error is +// returned if the mutation operation is not UpdateOne, or the query to the +// database failed. +func (m *BlockedVideosMutation) OldField(ctx context.Context, name string) (ent.Value, error) { + switch name { + case blockedvideos.FieldCreatedAt: + return m.OldCreatedAt(ctx) + } + return nil, fmt.Errorf("unknown BlockedVideos field %s", name) +} + +// SetField sets the value of a field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. 
+func (m *BlockedVideosMutation) SetField(name string, value ent.Value) error { + switch name { + case blockedvideos.FieldCreatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetCreatedAt(v) + return nil + } + return fmt.Errorf("unknown BlockedVideos field %s", name) +} + +// AddedFields returns all numeric fields that were incremented/decremented during +// this mutation. +func (m *BlockedVideosMutation) AddedFields() []string { + return nil +} + +// AddedField returns the numeric value that was incremented/decremented on a field +// with the given name. The second boolean return value indicates that this field +// was not set, or was not defined in the schema. +func (m *BlockedVideosMutation) AddedField(name string) (ent.Value, bool) { + return nil, false +} + +// AddField adds the value to the field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. +func (m *BlockedVideosMutation) AddField(name string, value ent.Value) error { + switch name { + } + return fmt.Errorf("unknown BlockedVideos numeric field %s", name) +} + +// ClearedFields returns all nullable fields that were cleared during this +// mutation. +func (m *BlockedVideosMutation) ClearedFields() []string { + return nil +} + +// FieldCleared returns a boolean indicating if a field with the given name was +// cleared in this mutation. +func (m *BlockedVideosMutation) FieldCleared(name string) bool { + _, ok := m.clearedFields[name] + return ok +} + +// ClearField clears the value of the field with the given name. It returns an +// error if the field is not defined in the schema. +func (m *BlockedVideosMutation) ClearField(name string) error { + return fmt.Errorf("unknown BlockedVideos nullable field %s", name) +} + +// ResetField resets all changes in the mutation for the field with the given name. +// It returns an error if the field is not defined in the schema. +func (m *BlockedVideosMutation) ResetField(name string) error { + switch name { + case blockedvideos.FieldCreatedAt: + m.ResetCreatedAt() + return nil + } + return fmt.Errorf("unknown BlockedVideos field %s", name) +} + +// AddedEdges returns all edge names that were set/added in this mutation. +func (m *BlockedVideosMutation) AddedEdges() []string { + edges := make([]string, 0, 0) + return edges +} + +// AddedIDs returns all IDs (to other nodes) that were added for the given edge +// name in this mutation. +func (m *BlockedVideosMutation) AddedIDs(name string) []ent.Value { + return nil +} + +// RemovedEdges returns all edge names that were removed in this mutation. +func (m *BlockedVideosMutation) RemovedEdges() []string { + edges := make([]string, 0, 0) + return edges +} + +// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with +// the given name in this mutation. +func (m *BlockedVideosMutation) RemovedIDs(name string) []ent.Value { + return nil +} + +// ClearedEdges returns all edge names that were cleared in this mutation. +func (m *BlockedVideosMutation) ClearedEdges() []string { + edges := make([]string, 0, 0) + return edges +} + +// EdgeCleared returns a boolean which indicates if the edge with the given name +// was cleared in this mutation. +func (m *BlockedVideosMutation) EdgeCleared(name string) bool { + return false +} + +// ClearEdge clears the value of the edge with the given name. It returns an error +// if that edge is not defined in the schema. 
+func (m *BlockedVideosMutation) ClearEdge(name string) error { + return fmt.Errorf("unknown BlockedVideos unique edge %s", name) +} + +// ResetEdge resets all changes to the edge with the given name in this mutation. +// It returns an error if the edge is not defined in the schema. +func (m *BlockedVideosMutation) ResetEdge(name string) error { + return fmt.Errorf("unknown BlockedVideos edge %s", name) +} + // ChannelMutation represents an operation that mutates the Channel nodes in the graph. type ChannelMutation struct { config @@ -1729,36 +2063,37 @@ func (m *ChapterMutation) ResetEdge(name string) error { // LiveMutation represents an operation that mutates the Live nodes in the graph. type LiveMutation struct { config - op Op - typ string - id *uuid.UUID - watch_live *bool - watch_vod *bool - download_archives *bool - download_highlights *bool - download_uploads *bool - download_sub_only *bool - is_live *bool - archive_chat *bool - resolution *string - last_live *time.Time - render_chat *bool - video_age *int64 - addvideo_age *int64 - updated_at *time.Time - created_at *time.Time - clearedFields map[string]struct{} - channel *uuid.UUID - clearedchannel bool - categories map[uuid.UUID]struct{} - removedcategories map[uuid.UUID]struct{} - clearedcategories bool - title_regex map[uuid.UUID]struct{} - removedtitle_regex map[uuid.UUID]struct{} - clearedtitle_regex bool - done bool - oldValue func(context.Context) (*Live, error) - predicates []predicate.Live + op Op + typ string + id *uuid.UUID + watch_live *bool + watch_vod *bool + download_archives *bool + download_highlights *bool + download_uploads *bool + download_sub_only *bool + is_live *bool + archive_chat *bool + resolution *string + last_live *time.Time + render_chat *bool + video_age *int64 + addvideo_age *int64 + apply_categories_to_live *bool + updated_at *time.Time + created_at *time.Time + clearedFields map[string]struct{} + channel *uuid.UUID + clearedchannel bool + categories map[uuid.UUID]struct{} + removedcategories map[uuid.UUID]struct{} + clearedcategories bool + title_regex map[uuid.UUID]struct{} + removedtitle_regex map[uuid.UUID]struct{} + clearedtitle_regex bool + done bool + oldValue func(context.Context) (*Live, error) + predicates []predicate.Live } var _ ent.Mutation = (*LiveMutation)(nil) @@ -2330,6 +2665,42 @@ func (m *LiveMutation) ResetVideoAge() { m.addvideo_age = nil } +// SetApplyCategoriesToLive sets the "apply_categories_to_live" field. +func (m *LiveMutation) SetApplyCategoriesToLive(b bool) { + m.apply_categories_to_live = &b +} + +// ApplyCategoriesToLive returns the value of the "apply_categories_to_live" field in the mutation. +func (m *LiveMutation) ApplyCategoriesToLive() (r bool, exists bool) { + v := m.apply_categories_to_live + if v == nil { + return + } + return *v, true +} + +// OldApplyCategoriesToLive returns the old "apply_categories_to_live" field's value of the Live entity. +// If the Live object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *LiveMutation) OldApplyCategoriesToLive(ctx context.Context) (v bool, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldApplyCategoriesToLive is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldApplyCategoriesToLive requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldApplyCategoriesToLive: %w", err) + } + return oldValue.ApplyCategoriesToLive, nil +} + +// ResetApplyCategoriesToLive resets all changes to the "apply_categories_to_live" field. +func (m *LiveMutation) ResetApplyCategoriesToLive() { + m.apply_categories_to_live = nil +} + // SetUpdatedAt sets the "updated_at" field. func (m *LiveMutation) SetUpdatedAt(t time.Time) { m.updated_at = &t @@ -2583,7 +2954,7 @@ func (m *LiveMutation) Type() string { // order to get all numeric fields that were incremented/decremented, call // AddedFields(). func (m *LiveMutation) Fields() []string { - fields := make([]string, 0, 14) + fields := make([]string, 0, 15) if m.watch_live != nil { fields = append(fields, live.FieldWatchLive) } @@ -2620,6 +2991,9 @@ func (m *LiveMutation) Fields() []string { if m.video_age != nil { fields = append(fields, live.FieldVideoAge) } + if m.apply_categories_to_live != nil { + fields = append(fields, live.FieldApplyCategoriesToLive) + } if m.updated_at != nil { fields = append(fields, live.FieldUpdatedAt) } @@ -2658,6 +3032,8 @@ func (m *LiveMutation) Field(name string) (ent.Value, bool) { return m.RenderChat() case live.FieldVideoAge: return m.VideoAge() + case live.FieldApplyCategoriesToLive: + return m.ApplyCategoriesToLive() case live.FieldUpdatedAt: return m.UpdatedAt() case live.FieldCreatedAt: @@ -2695,6 +3071,8 @@ func (m *LiveMutation) OldField(ctx context.Context, name string) (ent.Value, er return m.OldRenderChat(ctx) case live.FieldVideoAge: return m.OldVideoAge(ctx) + case live.FieldApplyCategoriesToLive: + return m.OldApplyCategoriesToLive(ctx) case live.FieldUpdatedAt: return m.OldUpdatedAt(ctx) case live.FieldCreatedAt: @@ -2792,6 +3170,13 @@ func (m *LiveMutation) SetField(name string, value ent.Value) error { } m.SetVideoAge(v) return nil + case live.FieldApplyCategoriesToLive: + v, ok := value.(bool) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetApplyCategoriesToLive(v) + return nil case live.FieldUpdatedAt: v, ok := value.(time.Time) if !ok { @@ -2915,6 +3300,9 @@ func (m *LiveMutation) ResetField(name string) error { case live.FieldVideoAge: m.ResetVideoAge() return nil + case live.FieldApplyCategoriesToLive: + m.ResetApplyCategoriesToLive() + return nil case live.FieldUpdatedAt: m.ResetUpdatedAt() return nil @@ -5845,6 +6233,7 @@ type QueueMutation struct { task_chat_render *utils.TaskStatus task_chat_move *utils.TaskStatus chat_start *time.Time + archive_chat *bool render_chat *bool workflow_id *string workflow_run_id *string @@ -6681,6 +7070,55 @@ func (m *QueueMutation) ResetChatStart() { delete(m.clearedFields, queue.FieldChatStart) } +// SetArchiveChat sets the "archive_chat" field. +func (m *QueueMutation) SetArchiveChat(b bool) { + m.archive_chat = &b +} + +// ArchiveChat returns the value of the "archive_chat" field in the mutation. +func (m *QueueMutation) ArchiveChat() (r bool, exists bool) { + v := m.archive_chat + if v == nil { + return + } + return *v, true +} + +// OldArchiveChat returns the old "archive_chat" field's value of the Queue entity. 
+// If the Queue object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *QueueMutation) OldArchiveChat(ctx context.Context) (v bool, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldArchiveChat is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldArchiveChat requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldArchiveChat: %w", err) + } + return oldValue.ArchiveChat, nil +} + +// ClearArchiveChat clears the value of the "archive_chat" field. +func (m *QueueMutation) ClearArchiveChat() { + m.archive_chat = nil + m.clearedFields[queue.FieldArchiveChat] = struct{}{} +} + +// ArchiveChatCleared returns if the "archive_chat" field was cleared in this mutation. +func (m *QueueMutation) ArchiveChatCleared() bool { + _, ok := m.clearedFields[queue.FieldArchiveChat] + return ok +} + +// ResetArchiveChat resets all changes to the "archive_chat" field. +func (m *QueueMutation) ResetArchiveChat() { + m.archive_chat = nil + delete(m.clearedFields, queue.FieldArchiveChat) +} + // SetRenderChat sets the "render_chat" field. func (m *QueueMutation) SetRenderChat(b bool) { m.render_chat = &b @@ -6973,7 +7411,7 @@ func (m *QueueMutation) Type() string { // order to get all numeric fields that were incremented/decremented, call // AddedFields(). func (m *QueueMutation) Fields() []string { - fields := make([]string, 0, 21) + fields := make([]string, 0, 22) if m.live_archive != nil { fields = append(fields, queue.FieldLiveArchive) } @@ -7022,6 +7460,9 @@ func (m *QueueMutation) Fields() []string { if m.chat_start != nil { fields = append(fields, queue.FieldChatStart) } + if m.archive_chat != nil { + fields = append(fields, queue.FieldArchiveChat) + } if m.render_chat != nil { fields = append(fields, queue.FieldRenderChat) } @@ -7077,6 +7518,8 @@ func (m *QueueMutation) Field(name string) (ent.Value, bool) { return m.TaskChatMove() case queue.FieldChatStart: return m.ChatStart() + case queue.FieldArchiveChat: + return m.ArchiveChat() case queue.FieldRenderChat: return m.RenderChat() case queue.FieldWorkflowID: @@ -7128,6 +7571,8 @@ func (m *QueueMutation) OldField(ctx context.Context, name string) (ent.Value, e return m.OldTaskChatMove(ctx) case queue.FieldChatStart: return m.OldChatStart(ctx) + case queue.FieldArchiveChat: + return m.OldArchiveChat(ctx) case queue.FieldRenderChat: return m.OldRenderChat(ctx) case queue.FieldWorkflowID: @@ -7259,6 +7704,13 @@ func (m *QueueMutation) SetField(name string, value ent.Value) error { } m.SetChatStart(v) return nil + case queue.FieldArchiveChat: + v, ok := value.(bool) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetArchiveChat(v) + return nil case queue.FieldRenderChat: v, ok := value.(bool) if !ok { @@ -7357,6 +7809,9 @@ func (m *QueueMutation) ClearedFields() []string { if m.FieldCleared(queue.FieldChatStart) { fields = append(fields, queue.FieldChatStart) } + if m.FieldCleared(queue.FieldArchiveChat) { + fields = append(fields, queue.FieldArchiveChat) + } if m.FieldCleared(queue.FieldRenderChat) { fields = append(fields, queue.FieldRenderChat) } @@ -7413,6 +7868,9 @@ func (m *QueueMutation) ClearField(name string) error { case queue.FieldChatStart: m.ClearChatStart() return nil + case queue.FieldArchiveChat: + 
m.ClearArchiveChat() + return nil case queue.FieldRenderChat: m.ClearRenderChat() return nil @@ -7478,6 +7936,9 @@ func (m *QueueMutation) ResetField(name string) error { case queue.FieldChatStart: m.ResetChatStart() return nil + case queue.FieldArchiveChat: + m.ResetArchiveChat() + return nil case queue.FieldRenderChat: m.ResetRenderChat() return nil @@ -8937,7 +9398,8 @@ type VodMutation struct { typ string id *uuid.UUID ext_id *string - platform *utils.VodPlatform + ext_stream_id *string + platform *utils.VideoPlatform _type *utils.VodType title *string duration *int @@ -9130,13 +9592,62 @@ func (m *VodMutation) ResetExtID() { m.ext_id = nil } +// SetExtStreamID sets the "ext_stream_id" field. +func (m *VodMutation) SetExtStreamID(s string) { + m.ext_stream_id = &s +} + +// ExtStreamID returns the value of the "ext_stream_id" field in the mutation. +func (m *VodMutation) ExtStreamID() (r string, exists bool) { + v := m.ext_stream_id + if v == nil { + return + } + return *v, true +} + +// OldExtStreamID returns the old "ext_stream_id" field's value of the Vod entity. +// If the Vod object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *VodMutation) OldExtStreamID(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldExtStreamID is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldExtStreamID requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldExtStreamID: %w", err) + } + return oldValue.ExtStreamID, nil +} + +// ClearExtStreamID clears the value of the "ext_stream_id" field. +func (m *VodMutation) ClearExtStreamID() { + m.ext_stream_id = nil + m.clearedFields[vod.FieldExtStreamID] = struct{}{} +} + +// ExtStreamIDCleared returns if the "ext_stream_id" field was cleared in this mutation. +func (m *VodMutation) ExtStreamIDCleared() bool { + _, ok := m.clearedFields[vod.FieldExtStreamID] + return ok +} + +// ResetExtStreamID resets all changes to the "ext_stream_id" field. +func (m *VodMutation) ResetExtStreamID() { + m.ext_stream_id = nil + delete(m.clearedFields, vod.FieldExtStreamID) +} + // SetPlatform sets the "platform" field. -func (m *VodMutation) SetPlatform(up utils.VodPlatform) { +func (m *VodMutation) SetPlatform(up utils.VideoPlatform) { m.platform = &up } // Platform returns the value of the "platform" field in the mutation. -func (m *VodMutation) Platform() (r utils.VodPlatform, exists bool) { +func (m *VodMutation) Platform() (r utils.VideoPlatform, exists bool) { v := m.platform if v == nil { return @@ -9147,7 +9658,7 @@ func (m *VodMutation) Platform() (r utils.VodPlatform, exists bool) { // OldPlatform returns the old "platform" field's value of the Vod entity. // If the Vod object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
-func (m *VodMutation) OldPlatform(ctx context.Context) (v utils.VodPlatform, err error) { +func (m *VodMutation) OldPlatform(ctx context.Context) (v utils.VideoPlatform, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldPlatform is only allowed on UpdateOne operations") } @@ -10814,10 +11325,13 @@ func (m *VodMutation) Type() string { // order to get all numeric fields that were incremented/decremented, call // AddedFields(). func (m *VodMutation) Fields() []string { - fields := make([]string, 0, 32) + fields := make([]string, 0, 33) if m.ext_id != nil { fields = append(fields, vod.FieldExtID) } + if m.ext_stream_id != nil { + fields = append(fields, vod.FieldExtStreamID) + } if m.platform != nil { fields = append(fields, vod.FieldPlatform) } @@ -10921,6 +11435,8 @@ func (m *VodMutation) Field(name string) (ent.Value, bool) { switch name { case vod.FieldExtID: return m.ExtID() + case vod.FieldExtStreamID: + return m.ExtStreamID() case vod.FieldPlatform: return m.Platform() case vod.FieldType: @@ -10994,6 +11510,8 @@ func (m *VodMutation) OldField(ctx context.Context, name string) (ent.Value, err switch name { case vod.FieldExtID: return m.OldExtID(ctx) + case vod.FieldExtStreamID: + return m.OldExtStreamID(ctx) case vod.FieldPlatform: return m.OldPlatform(ctx) case vod.FieldType: @@ -11072,8 +11590,15 @@ func (m *VodMutation) SetField(name string, value ent.Value) error { } m.SetExtID(v) return nil + case vod.FieldExtStreamID: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetExtStreamID(v) + return nil case vod.FieldPlatform: - v, ok := value.(utils.VodPlatform) + v, ok := value.(utils.VideoPlatform) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } @@ -11358,6 +11883,9 @@ func (m *VodMutation) AddField(name string, value ent.Value) error { // mutation. func (m *VodMutation) ClearedFields() []string { var fields []string + if m.FieldCleared(vod.FieldExtStreamID) { + fields = append(fields, vod.FieldExtStreamID) + } if m.FieldCleared(vod.FieldResolution) { fields = append(fields, vod.FieldResolution) } @@ -11426,6 +11954,9 @@ func (m *VodMutation) FieldCleared(name string) bool { // error if the field is not defined in the schema. func (m *VodMutation) ClearField(name string) error { switch name { + case vod.FieldExtStreamID: + m.ClearExtStreamID() + return nil case vod.FieldResolution: m.ClearResolution() return nil @@ -11491,6 +12022,9 @@ func (m *VodMutation) ResetField(name string) error { case vod.FieldExtID: m.ResetExtID() return nil + case vod.FieldExtStreamID: + m.ResetExtStreamID() + return nil case vod.FieldPlatform: m.ResetPlatform() return nil diff --git a/ent/predicate/predicate.go b/ent/predicate/predicate.go index f19131ce..88af71e5 100644 --- a/ent/predicate/predicate.go +++ b/ent/predicate/predicate.go @@ -6,6 +6,9 @@ import ( "entgo.io/ent/dialect/sql" ) +// BlockedVideos is the predicate function for blockedvideos builders. +type BlockedVideos func(*sql.Selector) + // Channel is the predicate function for channel builders. type Channel func(*sql.Selector) diff --git a/ent/queue.go b/ent/queue.go index baa8dfd7..276d0753 100644 --- a/ent/queue.go +++ b/ent/queue.go @@ -52,6 +52,8 @@ type Queue struct { TaskChatMove utils.TaskStatus `json:"task_chat_move,omitempty"` // ChatStart holds the value of the "chat_start" field. ChatStart time.Time `json:"chat_start,omitempty"` + // ArchiveChat holds the value of the "archive_chat" field. 
+ ArchiveChat bool `json:"archive_chat,omitempty"` // RenderChat holds the value of the "render_chat" field. RenderChat bool `json:"render_chat,omitempty"` // WorkflowID holds the value of the "workflow_id" field. @@ -94,7 +96,7 @@ func (*Queue) scanValues(columns []string) ([]any, error) { values := make([]any, len(columns)) for i := range columns { switch columns[i] { - case queue.FieldLiveArchive, queue.FieldOnHold, queue.FieldVideoProcessing, queue.FieldChatProcessing, queue.FieldProcessing, queue.FieldRenderChat: + case queue.FieldLiveArchive, queue.FieldOnHold, queue.FieldVideoProcessing, queue.FieldChatProcessing, queue.FieldProcessing, queue.FieldArchiveChat, queue.FieldRenderChat: values[i] = new(sql.NullBool) case queue.FieldTaskVodCreateFolder, queue.FieldTaskVodDownloadThumbnail, queue.FieldTaskVodSaveInfo, queue.FieldTaskVideoDownload, queue.FieldTaskVideoConvert, queue.FieldTaskVideoMove, queue.FieldTaskChatDownload, queue.FieldTaskChatConvert, queue.FieldTaskChatRender, queue.FieldTaskChatMove, queue.FieldWorkflowID, queue.FieldWorkflowRunID: values[i] = new(sql.NullString) @@ -221,6 +223,12 @@ func (q *Queue) assignValues(columns []string, values []any) error { } else if value.Valid { q.ChatStart = value.Time } + case queue.FieldArchiveChat: + if value, ok := values[i].(*sql.NullBool); !ok { + return fmt.Errorf("unexpected type %T for field archive_chat", values[i]) + } else if value.Valid { + q.ArchiveChat = value.Bool + } case queue.FieldRenderChat: if value, ok := values[i].(*sql.NullBool); !ok { return fmt.Errorf("unexpected type %T for field render_chat", values[i]) @@ -347,6 +355,9 @@ func (q *Queue) String() string { builder.WriteString("chat_start=") builder.WriteString(q.ChatStart.Format(time.ANSIC)) builder.WriteString(", ") + builder.WriteString("archive_chat=") + builder.WriteString(fmt.Sprintf("%v", q.ArchiveChat)) + builder.WriteString(", ") builder.WriteString("render_chat=") builder.WriteString(fmt.Sprintf("%v", q.RenderChat)) builder.WriteString(", ") diff --git a/ent/queue/queue.go b/ent/queue/queue.go index 81f8615d..8ecb59fd 100644 --- a/ent/queue/queue.go +++ b/ent/queue/queue.go @@ -49,6 +49,8 @@ const ( FieldTaskChatMove = "task_chat_move" // FieldChatStart holds the string denoting the chat_start field in the database. FieldChatStart = "chat_start" + // FieldArchiveChat holds the string denoting the archive_chat field in the database. + FieldArchiveChat = "archive_chat" // FieldRenderChat holds the string denoting the render_chat field in the database. FieldRenderChat = "render_chat" // FieldWorkflowID holds the string denoting the workflow_id field in the database. @@ -91,6 +93,7 @@ var Columns = []string{ FieldTaskChatRender, FieldTaskChatMove, FieldChatStart, + FieldArchiveChat, FieldRenderChat, FieldWorkflowID, FieldWorkflowRunID, @@ -130,6 +133,8 @@ var ( DefaultChatProcessing bool // DefaultProcessing holds the default value on creation for the "processing" field. DefaultProcessing bool + // DefaultArchiveChat holds the default value on creation for the "archive_chat" field. + DefaultArchiveChat bool // DefaultRenderChat holds the default value on creation for the "render_chat" field. DefaultRenderChat bool // DefaultUpdatedAt holds the default value on creation for the "updated_at" field. @@ -350,6 +355,11 @@ func ByChatStart(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldChatStart, opts...).ToFunc() } +// ByArchiveChat orders the results by the archive_chat field. 
+func ByArchiveChat(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldArchiveChat, opts...).ToFunc() +} + // ByRenderChat orders the results by the render_chat field. func ByRenderChat(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldRenderChat, opts...).ToFunc() diff --git a/ent/queue/where.go b/ent/queue/where.go index b1f9e67d..28c5bf41 100644 --- a/ent/queue/where.go +++ b/ent/queue/where.go @@ -87,6 +87,11 @@ func ChatStart(v time.Time) predicate.Queue { return predicate.Queue(sql.FieldEQ(FieldChatStart, v)) } +// ArchiveChat applies equality check predicate on the "archive_chat" field. It's identical to ArchiveChatEQ. +func ArchiveChat(v bool) predicate.Queue { + return predicate.Queue(sql.FieldEQ(FieldArchiveChat, v)) +} + // RenderChat applies equality check predicate on the "render_chat" field. It's identical to RenderChatEQ. func RenderChat(v bool) predicate.Queue { return predicate.Queue(sql.FieldEQ(FieldRenderChat, v)) @@ -612,6 +617,26 @@ func ChatStartNotNil() predicate.Queue { return predicate.Queue(sql.FieldNotNull(FieldChatStart)) } +// ArchiveChatEQ applies the EQ predicate on the "archive_chat" field. +func ArchiveChatEQ(v bool) predicate.Queue { + return predicate.Queue(sql.FieldEQ(FieldArchiveChat, v)) +} + +// ArchiveChatNEQ applies the NEQ predicate on the "archive_chat" field. +func ArchiveChatNEQ(v bool) predicate.Queue { + return predicate.Queue(sql.FieldNEQ(FieldArchiveChat, v)) +} + +// ArchiveChatIsNil applies the IsNil predicate on the "archive_chat" field. +func ArchiveChatIsNil() predicate.Queue { + return predicate.Queue(sql.FieldIsNull(FieldArchiveChat)) +} + +// ArchiveChatNotNil applies the NotNil predicate on the "archive_chat" field. +func ArchiveChatNotNil() predicate.Queue { + return predicate.Queue(sql.FieldNotNull(FieldArchiveChat)) +} + // RenderChatEQ applies the EQ predicate on the "render_chat" field. func RenderChatEQ(v bool) predicate.Queue { return predicate.Queue(sql.FieldEQ(FieldRenderChat, v)) diff --git a/ent/queue_create.go b/ent/queue_create.go index bb638ca6..7e24e27e 100644 --- a/ent/queue_create.go +++ b/ent/queue_create.go @@ -250,6 +250,20 @@ func (qc *QueueCreate) SetNillableChatStart(t *time.Time) *QueueCreate { return qc } +// SetArchiveChat sets the "archive_chat" field. +func (qc *QueueCreate) SetArchiveChat(b bool) *QueueCreate { + qc.mutation.SetArchiveChat(b) + return qc +} + +// SetNillableArchiveChat sets the "archive_chat" field if the given value is not nil. +func (qc *QueueCreate) SetNillableArchiveChat(b *bool) *QueueCreate { + if b != nil { + qc.SetArchiveChat(*b) + } + return qc +} + // SetRenderChat sets the "render_chat" field. 
func (qc *QueueCreate) SetRenderChat(b bool) *QueueCreate { qc.mutation.SetRenderChat(b) @@ -440,6 +454,10 @@ func (qc *QueueCreate) defaults() { v := queue.DefaultTaskChatMove qc.mutation.SetTaskChatMove(v) } + if _, ok := qc.mutation.ArchiveChat(); !ok { + v := queue.DefaultArchiveChat + qc.mutation.SetArchiveChat(v) + } if _, ok := qc.mutation.RenderChat(); !ok { v := queue.DefaultRenderChat qc.mutation.SetRenderChat(v) @@ -634,6 +652,10 @@ func (qc *QueueCreate) createSpec() (*Queue, *sqlgraph.CreateSpec) { _spec.SetField(queue.FieldChatStart, field.TypeTime, value) _node.ChatStart = value } + if value, ok := qc.mutation.ArchiveChat(); ok { + _spec.SetField(queue.FieldArchiveChat, field.TypeBool, value) + _node.ArchiveChat = value + } if value, ok := qc.mutation.RenderChat(); ok { _spec.SetField(queue.FieldRenderChat, field.TypeBool, value) _node.RenderChat = value @@ -981,6 +1003,24 @@ func (u *QueueUpsert) ClearChatStart() *QueueUpsert { return u } +// SetArchiveChat sets the "archive_chat" field. +func (u *QueueUpsert) SetArchiveChat(v bool) *QueueUpsert { + u.Set(queue.FieldArchiveChat, v) + return u +} + +// UpdateArchiveChat sets the "archive_chat" field to the value that was provided on create. +func (u *QueueUpsert) UpdateArchiveChat() *QueueUpsert { + u.SetExcluded(queue.FieldArchiveChat) + return u +} + +// ClearArchiveChat clears the value of the "archive_chat" field. +func (u *QueueUpsert) ClearArchiveChat() *QueueUpsert { + u.SetNull(queue.FieldArchiveChat) + return u +} + // SetRenderChat sets the "render_chat" field. func (u *QueueUpsert) SetRenderChat(v bool) *QueueUpsert { u.Set(queue.FieldRenderChat, v) @@ -1399,6 +1439,27 @@ func (u *QueueUpsertOne) ClearChatStart() *QueueUpsertOne { }) } +// SetArchiveChat sets the "archive_chat" field. +func (u *QueueUpsertOne) SetArchiveChat(v bool) *QueueUpsertOne { + return u.Update(func(s *QueueUpsert) { + s.SetArchiveChat(v) + }) +} + +// UpdateArchiveChat sets the "archive_chat" field to the value that was provided on create. +func (u *QueueUpsertOne) UpdateArchiveChat() *QueueUpsertOne { + return u.Update(func(s *QueueUpsert) { + s.UpdateArchiveChat() + }) +} + +// ClearArchiveChat clears the value of the "archive_chat" field. +func (u *QueueUpsertOne) ClearArchiveChat() *QueueUpsertOne { + return u.Update(func(s *QueueUpsert) { + s.ClearArchiveChat() + }) +} + // SetRenderChat sets the "render_chat" field. func (u *QueueUpsertOne) SetRenderChat(v bool) *QueueUpsertOne { return u.Update(func(s *QueueUpsert) { @@ -1995,6 +2056,27 @@ func (u *QueueUpsertBulk) ClearChatStart() *QueueUpsertBulk { }) } +// SetArchiveChat sets the "archive_chat" field. +func (u *QueueUpsertBulk) SetArchiveChat(v bool) *QueueUpsertBulk { + return u.Update(func(s *QueueUpsert) { + s.SetArchiveChat(v) + }) +} + +// UpdateArchiveChat sets the "archive_chat" field to the value that was provided on create. +func (u *QueueUpsertBulk) UpdateArchiveChat() *QueueUpsertBulk { + return u.Update(func(s *QueueUpsert) { + s.UpdateArchiveChat() + }) +} + +// ClearArchiveChat clears the value of the "archive_chat" field. +func (u *QueueUpsertBulk) ClearArchiveChat() *QueueUpsertBulk { + return u.Update(func(s *QueueUpsert) { + s.ClearArchiveChat() + }) +} + // SetRenderChat sets the "render_chat" field. 
func (u *QueueUpsertBulk) SetRenderChat(v bool) *QueueUpsertBulk { return u.Update(func(s *QueueUpsert) { diff --git a/ent/queue_update.go b/ent/queue_update.go index b3b2e86d..b5f43118 100644 --- a/ent/queue_update.go +++ b/ent/queue_update.go @@ -321,6 +321,26 @@ func (qu *QueueUpdate) ClearChatStart() *QueueUpdate { return qu } +// SetArchiveChat sets the "archive_chat" field. +func (qu *QueueUpdate) SetArchiveChat(b bool) *QueueUpdate { + qu.mutation.SetArchiveChat(b) + return qu +} + +// SetNillableArchiveChat sets the "archive_chat" field if the given value is not nil. +func (qu *QueueUpdate) SetNillableArchiveChat(b *bool) *QueueUpdate { + if b != nil { + qu.SetArchiveChat(*b) + } + return qu +} + +// ClearArchiveChat clears the value of the "archive_chat" field. +func (qu *QueueUpdate) ClearArchiveChat() *QueueUpdate { + qu.mutation.ClearArchiveChat() + return qu +} + // SetRenderChat sets the "render_chat" field. func (qu *QueueUpdate) SetRenderChat(b bool) *QueueUpdate { qu.mutation.SetRenderChat(b) @@ -596,6 +616,12 @@ func (qu *QueueUpdate) sqlSave(ctx context.Context) (n int, err error) { if qu.mutation.ChatStartCleared() { _spec.ClearField(queue.FieldChatStart, field.TypeTime) } + if value, ok := qu.mutation.ArchiveChat(); ok { + _spec.SetField(queue.FieldArchiveChat, field.TypeBool, value) + } + if qu.mutation.ArchiveChatCleared() { + _spec.ClearField(queue.FieldArchiveChat, field.TypeBool) + } if value, ok := qu.mutation.RenderChat(); ok { _spec.SetField(queue.FieldRenderChat, field.TypeBool, value) } @@ -956,6 +982,26 @@ func (quo *QueueUpdateOne) ClearChatStart() *QueueUpdateOne { return quo } +// SetArchiveChat sets the "archive_chat" field. +func (quo *QueueUpdateOne) SetArchiveChat(b bool) *QueueUpdateOne { + quo.mutation.SetArchiveChat(b) + return quo +} + +// SetNillableArchiveChat sets the "archive_chat" field if the given value is not nil. +func (quo *QueueUpdateOne) SetNillableArchiveChat(b *bool) *QueueUpdateOne { + if b != nil { + quo.SetArchiveChat(*b) + } + return quo +} + +// ClearArchiveChat clears the value of the "archive_chat" field. +func (quo *QueueUpdateOne) ClearArchiveChat() *QueueUpdateOne { + quo.mutation.ClearArchiveChat() + return quo +} + // SetRenderChat sets the "render_chat" field. func (quo *QueueUpdateOne) SetRenderChat(b bool) *QueueUpdateOne { quo.mutation.SetRenderChat(b) @@ -1261,6 +1307,12 @@ func (quo *QueueUpdateOne) sqlSave(ctx context.Context) (_node *Queue, err error if quo.mutation.ChatStartCleared() { _spec.ClearField(queue.FieldChatStart, field.TypeTime) } + if value, ok := quo.mutation.ArchiveChat(); ok { + _spec.SetField(queue.FieldArchiveChat, field.TypeBool, value) + } + if quo.mutation.ArchiveChatCleared() { + _spec.ClearField(queue.FieldArchiveChat, field.TypeBool) + } if value, ok := quo.mutation.RenderChat(); ok { _spec.SetField(queue.FieldRenderChat, field.TypeBool, value) } diff --git a/ent/runtime.go b/ent/runtime.go index 983d6ef5..58e2371a 100644 --- a/ent/runtime.go +++ b/ent/runtime.go @@ -6,6 +6,7 @@ import ( "time" "github.com/google/uuid" + "github.com/zibbp/ganymede/ent/blockedvideos" "github.com/zibbp/ganymede/ent/channel" "github.com/zibbp/ganymede/ent/chapter" "github.com/zibbp/ganymede/ent/live" @@ -25,6 +26,12 @@ import ( // (default values, validators, hooks and policies) and stitches it // to their package variables. func init() { + blockedvideosFields := schema.BlockedVideos{}.Fields() + _ = blockedvideosFields + // blockedvideosDescCreatedAt is the schema descriptor for created_at field. 
+ blockedvideosDescCreatedAt := blockedvideosFields[1].Descriptor() + // blockedvideos.DefaultCreatedAt holds the default value on creation for the created_at field. + blockedvideos.DefaultCreatedAt = blockedvideosDescCreatedAt.Default.(func() time.Time) channelFields := schema.Channel{}.Fields() _ = channelFields // channelDescRetention is the schema descriptor for retention field. @@ -101,14 +108,18 @@ func init() { liveDescVideoAge := liveFields[12].Descriptor() // live.DefaultVideoAge holds the default value on creation for the video_age field. live.DefaultVideoAge = liveDescVideoAge.Default.(int64) + // liveDescApplyCategoriesToLive is the schema descriptor for apply_categories_to_live field. + liveDescApplyCategoriesToLive := liveFields[13].Descriptor() + // live.DefaultApplyCategoriesToLive holds the default value on creation for the apply_categories_to_live field. + live.DefaultApplyCategoriesToLive = liveDescApplyCategoriesToLive.Default.(bool) // liveDescUpdatedAt is the schema descriptor for updated_at field. - liveDescUpdatedAt := liveFields[13].Descriptor() + liveDescUpdatedAt := liveFields[14].Descriptor() // live.DefaultUpdatedAt holds the default value on creation for the updated_at field. live.DefaultUpdatedAt = liveDescUpdatedAt.Default.(func() time.Time) // live.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. live.UpdateDefaultUpdatedAt = liveDescUpdatedAt.UpdateDefault.(func() time.Time) // liveDescCreatedAt is the schema descriptor for created_at field. - liveDescCreatedAt := liveFields[14].Descriptor() + liveDescCreatedAt := liveFields[15].Descriptor() // live.DefaultCreatedAt holds the default value on creation for the created_at field. live.DefaultCreatedAt = liveDescCreatedAt.Default.(func() time.Time) // liveDescID is the schema descriptor for id field. @@ -199,18 +210,22 @@ func init() { queueDescProcessing := queueFields[5].Descriptor() // queue.DefaultProcessing holds the default value on creation for the processing field. queue.DefaultProcessing = queueDescProcessing.Default.(bool) + // queueDescArchiveChat is the schema descriptor for archive_chat field. + queueDescArchiveChat := queueFields[17].Descriptor() + // queue.DefaultArchiveChat holds the default value on creation for the archive_chat field. + queue.DefaultArchiveChat = queueDescArchiveChat.Default.(bool) // queueDescRenderChat is the schema descriptor for render_chat field. - queueDescRenderChat := queueFields[17].Descriptor() + queueDescRenderChat := queueFields[18].Descriptor() // queue.DefaultRenderChat holds the default value on creation for the render_chat field. queue.DefaultRenderChat = queueDescRenderChat.Default.(bool) // queueDescUpdatedAt is the schema descriptor for updated_at field. - queueDescUpdatedAt := queueFields[20].Descriptor() + queueDescUpdatedAt := queueFields[21].Descriptor() // queue.DefaultUpdatedAt holds the default value on creation for the updated_at field. queue.DefaultUpdatedAt = queueDescUpdatedAt.Default.(func() time.Time) // queue.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. queue.UpdateDefaultUpdatedAt = queueDescUpdatedAt.UpdateDefault.(func() time.Time) // queueDescCreatedAt is the schema descriptor for created_at field. - queueDescCreatedAt := queueFields[21].Descriptor() + queueDescCreatedAt := queueFields[22].Descriptor() // queue.DefaultCreatedAt holds the default value on creation for the created_at field. 
queue.DefaultCreatedAt = queueDescCreatedAt.Default.(func() time.Time) // queueDescID is the schema descriptor for id field. @@ -252,37 +267,37 @@ func init() { vodFields := schema.Vod{}.Fields() _ = vodFields // vodDescDuration is the schema descriptor for duration field. - vodDescDuration := vodFields[5].Descriptor() + vodDescDuration := vodFields[6].Descriptor() // vod.DefaultDuration holds the default value on creation for the duration field. vod.DefaultDuration = vodDescDuration.Default.(int) // vodDescViews is the schema descriptor for views field. - vodDescViews := vodFields[6].Descriptor() + vodDescViews := vodFields[7].Descriptor() // vod.DefaultViews holds the default value on creation for the views field. vod.DefaultViews = vodDescViews.Default.(int) // vodDescProcessing is the schema descriptor for processing field. - vodDescProcessing := vodFields[8].Descriptor() + vodDescProcessing := vodFields[9].Descriptor() // vod.DefaultProcessing holds the default value on creation for the processing field. vod.DefaultProcessing = vodDescProcessing.Default.(bool) // vodDescLocked is the schema descriptor for locked field. - vodDescLocked := vodFields[28].Descriptor() + vodDescLocked := vodFields[29].Descriptor() // vod.DefaultLocked holds the default value on creation for the locked field. vod.DefaultLocked = vodDescLocked.Default.(bool) // vodDescLocalViews is the schema descriptor for local_views field. - vodDescLocalViews := vodFields[29].Descriptor() + vodDescLocalViews := vodFields[30].Descriptor() // vod.DefaultLocalViews holds the default value on creation for the local_views field. vod.DefaultLocalViews = vodDescLocalViews.Default.(int) // vodDescStreamedAt is the schema descriptor for streamed_at field. - vodDescStreamedAt := vodFields[30].Descriptor() + vodDescStreamedAt := vodFields[31].Descriptor() // vod.DefaultStreamedAt holds the default value on creation for the streamed_at field. vod.DefaultStreamedAt = vodDescStreamedAt.Default.(func() time.Time) // vodDescUpdatedAt is the schema descriptor for updated_at field. - vodDescUpdatedAt := vodFields[31].Descriptor() + vodDescUpdatedAt := vodFields[32].Descriptor() // vod.DefaultUpdatedAt holds the default value on creation for the updated_at field. vod.DefaultUpdatedAt = vodDescUpdatedAt.Default.(func() time.Time) // vod.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. vod.UpdateDefaultUpdatedAt = vodDescUpdatedAt.UpdateDefault.(func() time.Time) // vodDescCreatedAt is the schema descriptor for created_at field. - vodDescCreatedAt := vodFields[32].Descriptor() + vodDescCreatedAt := vodFields[33].Descriptor() // vod.DefaultCreatedAt holds the default value on creation for the created_at field. vod.DefaultCreatedAt = vodDescCreatedAt.Default.(func() time.Time) // vodDescID is the schema descriptor for id field. diff --git a/ent/schema/blockedvideos.go b/ent/schema/blockedvideos.go new file mode 100644 index 00000000..799fa19b --- /dev/null +++ b/ent/schema/blockedvideos.go @@ -0,0 +1,26 @@ +package schema + +import ( + "time" + + "entgo.io/ent" + "entgo.io/ent/schema/field" +) + +// BlockedVideos holds the schema definition for the BlockedVideos entity. +type BlockedVideos struct { + ent.Schema +} + +// Fields of the BlockedVideos. +func (BlockedVideos) Fields() []ent.Field { + return []ent.Field{ + field.String("id").Comment("The ID of the blocked vod."), + field.Time("created_at").Default(time.Now).Immutable(), + } +} + +// Edges of the BlockedVideos. 
+func (BlockedVideos) Edges() []ent.Edge { + return nil +} diff --git a/ent/schema/live.go b/ent/schema/live.go index 183f8792..6c17412c 100644 --- a/ent/schema/live.go +++ b/ent/schema/live.go @@ -34,6 +34,7 @@ func (Live) Fields() []ent.Field { field.Time("last_live").Default(time.Now).Comment("The time the channel last went live."), field.Bool("render_chat").Default(true).Comment("Whether the chat should be rendered."), field.Int64("video_age").Default(0).Comment("Restrict fetching videos to a certain age."), + field.Bool("apply_categories_to_live").Default(false).Comment("Whether the categories should be applied to livestreams."), field.Time("updated_at").Default(time.Now).UpdateDefault(time.Now), field.Time("created_at").Default(time.Now).Immutable(), } diff --git a/ent/schema/queue.go b/ent/schema/queue.go index fc91c4a0..e748a774 100644 --- a/ent/schema/queue.go +++ b/ent/schema/queue.go @@ -35,6 +35,7 @@ func (Queue) Fields() []ent.Field { field.Enum("task_chat_render").GoType(utils.TaskStatus("")).Default(string(utils.Pending)).Optional(), field.Enum("task_chat_move").GoType(utils.TaskStatus("")).Default(string(utils.Pending)).Optional(), field.Time("chat_start").Optional(), + field.Bool("archive_chat").Optional().Default(true), field.Bool("render_chat").Optional().Default(true), field.String("workflow_id").Optional(), field.String("workflow_run_id").Optional(), diff --git a/ent/schema/vod.go b/ent/schema/vod.go index ea61704b..1bda4172 100644 --- a/ent/schema/vod.go +++ b/ent/schema/vod.go @@ -19,8 +19,9 @@ type Vod struct { func (Vod) Fields() []ent.Field { return []ent.Field{ field.UUID("id", uuid.UUID{}).Default(uuid.New), - field.String("ext_id"), - field.Enum("platform").GoType(utils.VodPlatform("")).Default(string(utils.PlatformTwitch)).Comment("The platform the VOD is from, takes an enum."), + field.String("ext_id").Comment("The ID of the video on the external platform."), + field.String("ext_stream_id").Optional().Comment("The ID of the stream on the external platform, if applicable."), + field.Enum("platform").GoType(utils.VideoPlatform("")).Default(string(utils.PlatformTwitch)).Comment("The platform the VOD is from, takes an enum."), field.Enum("type").GoType(utils.VodType("")).Default(string(utils.Archive)).Comment("The type of VOD, takes an enum."), field.String("title"), field.Int("duration").Default(1), diff --git a/ent/tx.go b/ent/tx.go index 16664bd8..92e4abb1 100644 --- a/ent/tx.go +++ b/ent/tx.go @@ -12,6 +12,8 @@ import ( // Tx is a transactional client that is created by calling Client.Tx(). type Tx struct { config + // BlockedVideos is the client for interacting with the BlockedVideos builders. + BlockedVideos *BlockedVideosClient // Channel is the client for interacting with the Channel builders. Channel *ChannelClient // Chapter is the client for interacting with the Chapter builders. @@ -167,6 +169,7 @@ func (tx *Tx) Client() *Client { } func (tx *Tx) init() { + tx.BlockedVideos = NewBlockedVideosClient(tx.config) tx.Channel = NewChannelClient(tx.config) tx.Chapter = NewChapterClient(tx.config) tx.Live = NewLiveClient(tx.config) @@ -188,7 +191,7 @@ func (tx *Tx) init() { // of them in order to commit or rollback the transaction. // // If a closed transaction is embedded in one of the generated entities, and the entity -// applies a query, for example: Channel.QueryXXX(), the query will be executed +// applies a query, for example: BlockedVideos.QueryXXX(), the query will be executed // through the driver which created this transaction. 
// // Note that txDriver is not goroutine safe. diff --git a/ent/vod.go b/ent/vod.go index 50df91df..374c3315 100644 --- a/ent/vod.go +++ b/ent/vod.go @@ -21,10 +21,12 @@ type Vod struct { config `json:"-"` // ID of the ent. ID uuid.UUID `json:"id,omitempty"` - // ExtID holds the value of the "ext_id" field. + // The ID of the video on the external platform. ExtID string `json:"ext_id,omitempty"` + // The ID of the stream on the external platform, if applicable. + ExtStreamID string `json:"ext_stream_id,omitempty"` // The platform the VOD is from, takes an enum. - Platform utils.VodPlatform `json:"platform,omitempty"` + Platform utils.VideoPlatform `json:"platform,omitempty"` // The type of VOD, takes an enum. Type utils.VodType `json:"type,omitempty"` // Title holds the value of the "title" field. @@ -167,7 +169,7 @@ func (*Vod) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullBool) case vod.FieldDuration, vod.FieldViews, vod.FieldLocalViews: values[i] = new(sql.NullInt64) - case vod.FieldExtID, vod.FieldPlatform, vod.FieldType, vod.FieldTitle, vod.FieldResolution, vod.FieldThumbnailPath, vod.FieldWebThumbnailPath, vod.FieldVideoPath, vod.FieldVideoHlsPath, vod.FieldChatPath, vod.FieldLiveChatPath, vod.FieldLiveChatConvertPath, vod.FieldChatVideoPath, vod.FieldInfoPath, vod.FieldCaptionPath, vod.FieldFolderName, vod.FieldFileName, vod.FieldTmpVideoDownloadPath, vod.FieldTmpVideoConvertPath, vod.FieldTmpChatDownloadPath, vod.FieldTmpLiveChatDownloadPath, vod.FieldTmpLiveChatConvertPath, vod.FieldTmpChatRenderPath, vod.FieldTmpVideoHlsPath: + case vod.FieldExtID, vod.FieldExtStreamID, vod.FieldPlatform, vod.FieldType, vod.FieldTitle, vod.FieldResolution, vod.FieldThumbnailPath, vod.FieldWebThumbnailPath, vod.FieldVideoPath, vod.FieldVideoHlsPath, vod.FieldChatPath, vod.FieldLiveChatPath, vod.FieldLiveChatConvertPath, vod.FieldChatVideoPath, vod.FieldInfoPath, vod.FieldCaptionPath, vod.FieldFolderName, vod.FieldFileName, vod.FieldTmpVideoDownloadPath, vod.FieldTmpVideoConvertPath, vod.FieldTmpChatDownloadPath, vod.FieldTmpLiveChatDownloadPath, vod.FieldTmpLiveChatConvertPath, vod.FieldTmpChatRenderPath, vod.FieldTmpVideoHlsPath: values[i] = new(sql.NullString) case vod.FieldStreamedAt, vod.FieldUpdatedAt, vod.FieldCreatedAt: values[i] = new(sql.NullTime) @@ -202,11 +204,17 @@ func (v *Vod) assignValues(columns []string, values []any) error { } else if value.Valid { v.ExtID = value.String } + case vod.FieldExtStreamID: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field ext_stream_id", values[i]) + } else if value.Valid { + v.ExtStreamID = value.String + } case vod.FieldPlatform: if value, ok := values[i].(*sql.NullString); !ok { return fmt.Errorf("unexpected type %T for field platform", values[i]) } else if value.Valid { - v.Platform = utils.VodPlatform(value.String) + v.Platform = utils.VideoPlatform(value.String) } case vod.FieldType: if value, ok := values[i].(*sql.NullString); !ok { @@ -459,6 +467,9 @@ func (v *Vod) String() string { builder.WriteString("ext_id=") builder.WriteString(v.ExtID) builder.WriteString(", ") + builder.WriteString("ext_stream_id=") + builder.WriteString(v.ExtStreamID) + builder.WriteString(", ") builder.WriteString("platform=") builder.WriteString(fmt.Sprintf("%v", v.Platform)) builder.WriteString(", ") diff --git a/ent/vod/vod.go b/ent/vod/vod.go index a1bea5cf..b5cf18c4 100644 --- a/ent/vod/vod.go +++ b/ent/vod/vod.go @@ -19,6 +19,8 @@ const ( FieldID = "id" // FieldExtID holds the string 
denoting the ext_id field in the database. FieldExtID = "ext_id" + // FieldExtStreamID holds the string denoting the ext_stream_id field in the database. + FieldExtStreamID = "ext_stream_id" // FieldPlatform holds the string denoting the platform field in the database. FieldPlatform = "platform" // FieldType holds the string denoting the type field in the database. @@ -132,6 +134,7 @@ const ( var Columns = []string{ FieldID, FieldExtID, + FieldExtStreamID, FieldPlatform, FieldType, FieldTitle, @@ -215,10 +218,10 @@ var ( DefaultID func() uuid.UUID ) -const DefaultPlatform utils.VodPlatform = "twitch" +const DefaultPlatform utils.VideoPlatform = "twitch" // PlatformValidator is a validator for the "platform" field enum values. It is called by the builders before save. -func PlatformValidator(pl utils.VodPlatform) error { +func PlatformValidator(pl utils.VideoPlatform) error { switch pl { case "twitch", "youtube": return nil @@ -252,6 +255,11 @@ func ByExtID(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldExtID, opts...).ToFunc() } +// ByExtStreamID orders the results by the ext_stream_id field. +func ByExtStreamID(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldExtStreamID, opts...).ToFunc() +} + // ByPlatform orders the results by the platform field. func ByPlatform(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldPlatform, opts...).ToFunc() diff --git a/ent/vod/where.go b/ent/vod/where.go index 6e0dadbf..c30dd718 100644 --- a/ent/vod/where.go +++ b/ent/vod/where.go @@ -62,6 +62,11 @@ func ExtID(v string) predicate.Vod { return predicate.Vod(sql.FieldEQ(FieldExtID, v)) } +// ExtStreamID applies equality check predicate on the "ext_stream_id" field. It's identical to ExtStreamIDEQ. +func ExtStreamID(v string) predicate.Vod { + return predicate.Vod(sql.FieldEQ(FieldExtStreamID, v)) +} + // Title applies equality check predicate on the "title" field. It's identical to TitleEQ. func Title(v string) predicate.Vod { return predicate.Vod(sql.FieldEQ(FieldTitle, v)) @@ -272,20 +277,95 @@ func ExtIDContainsFold(v string) predicate.Vod { return predicate.Vod(sql.FieldContainsFold(FieldExtID, v)) } +// ExtStreamIDEQ applies the EQ predicate on the "ext_stream_id" field. +func ExtStreamIDEQ(v string) predicate.Vod { + return predicate.Vod(sql.FieldEQ(FieldExtStreamID, v)) +} + +// ExtStreamIDNEQ applies the NEQ predicate on the "ext_stream_id" field. +func ExtStreamIDNEQ(v string) predicate.Vod { + return predicate.Vod(sql.FieldNEQ(FieldExtStreamID, v)) +} + +// ExtStreamIDIn applies the In predicate on the "ext_stream_id" field. +func ExtStreamIDIn(vs ...string) predicate.Vod { + return predicate.Vod(sql.FieldIn(FieldExtStreamID, vs...)) +} + +// ExtStreamIDNotIn applies the NotIn predicate on the "ext_stream_id" field. +func ExtStreamIDNotIn(vs ...string) predicate.Vod { + return predicate.Vod(sql.FieldNotIn(FieldExtStreamID, vs...)) +} + +// ExtStreamIDGT applies the GT predicate on the "ext_stream_id" field. +func ExtStreamIDGT(v string) predicate.Vod { + return predicate.Vod(sql.FieldGT(FieldExtStreamID, v)) +} + +// ExtStreamIDGTE applies the GTE predicate on the "ext_stream_id" field. +func ExtStreamIDGTE(v string) predicate.Vod { + return predicate.Vod(sql.FieldGTE(FieldExtStreamID, v)) +} + +// ExtStreamIDLT applies the LT predicate on the "ext_stream_id" field. 
+func ExtStreamIDLT(v string) predicate.Vod { + return predicate.Vod(sql.FieldLT(FieldExtStreamID, v)) +} + +// ExtStreamIDLTE applies the LTE predicate on the "ext_stream_id" field. +func ExtStreamIDLTE(v string) predicate.Vod { + return predicate.Vod(sql.FieldLTE(FieldExtStreamID, v)) +} + +// ExtStreamIDContains applies the Contains predicate on the "ext_stream_id" field. +func ExtStreamIDContains(v string) predicate.Vod { + return predicate.Vod(sql.FieldContains(FieldExtStreamID, v)) +} + +// ExtStreamIDHasPrefix applies the HasPrefix predicate on the "ext_stream_id" field. +func ExtStreamIDHasPrefix(v string) predicate.Vod { + return predicate.Vod(sql.FieldHasPrefix(FieldExtStreamID, v)) +} + +// ExtStreamIDHasSuffix applies the HasSuffix predicate on the "ext_stream_id" field. +func ExtStreamIDHasSuffix(v string) predicate.Vod { + return predicate.Vod(sql.FieldHasSuffix(FieldExtStreamID, v)) +} + +// ExtStreamIDIsNil applies the IsNil predicate on the "ext_stream_id" field. +func ExtStreamIDIsNil() predicate.Vod { + return predicate.Vod(sql.FieldIsNull(FieldExtStreamID)) +} + +// ExtStreamIDNotNil applies the NotNil predicate on the "ext_stream_id" field. +func ExtStreamIDNotNil() predicate.Vod { + return predicate.Vod(sql.FieldNotNull(FieldExtStreamID)) +} + +// ExtStreamIDEqualFold applies the EqualFold predicate on the "ext_stream_id" field. +func ExtStreamIDEqualFold(v string) predicate.Vod { + return predicate.Vod(sql.FieldEqualFold(FieldExtStreamID, v)) +} + +// ExtStreamIDContainsFold applies the ContainsFold predicate on the "ext_stream_id" field. +func ExtStreamIDContainsFold(v string) predicate.Vod { + return predicate.Vod(sql.FieldContainsFold(FieldExtStreamID, v)) +} + // PlatformEQ applies the EQ predicate on the "platform" field. -func PlatformEQ(v utils.VodPlatform) predicate.Vod { +func PlatformEQ(v utils.VideoPlatform) predicate.Vod { vc := v return predicate.Vod(sql.FieldEQ(FieldPlatform, vc)) } // PlatformNEQ applies the NEQ predicate on the "platform" field. -func PlatformNEQ(v utils.VodPlatform) predicate.Vod { +func PlatformNEQ(v utils.VideoPlatform) predicate.Vod { vc := v return predicate.Vod(sql.FieldNEQ(FieldPlatform, vc)) } // PlatformIn applies the In predicate on the "platform" field. -func PlatformIn(vs ...utils.VodPlatform) predicate.Vod { +func PlatformIn(vs ...utils.VideoPlatform) predicate.Vod { v := make([]any, len(vs)) for i := range v { v[i] = vs[i] @@ -294,7 +374,7 @@ func PlatformIn(vs ...utils.VodPlatform) predicate.Vod { } // PlatformNotIn applies the NotIn predicate on the "platform" field. -func PlatformNotIn(vs ...utils.VodPlatform) predicate.Vod { +func PlatformNotIn(vs ...utils.VideoPlatform) predicate.Vod { v := make([]any, len(vs)) for i := range v { v[i] = vs[i] diff --git a/ent/vod_create.go b/ent/vod_create.go index 476bc71e..69337b29 100644 --- a/ent/vod_create.go +++ b/ent/vod_create.go @@ -36,14 +36,28 @@ func (vc *VodCreate) SetExtID(s string) *VodCreate { return vc } +// SetExtStreamID sets the "ext_stream_id" field. +func (vc *VodCreate) SetExtStreamID(s string) *VodCreate { + vc.mutation.SetExtStreamID(s) + return vc +} + +// SetNillableExtStreamID sets the "ext_stream_id" field if the given value is not nil. +func (vc *VodCreate) SetNillableExtStreamID(s *string) *VodCreate { + if s != nil { + vc.SetExtStreamID(*s) + } + return vc +} + // SetPlatform sets the "platform" field. 
-func (vc *VodCreate) SetPlatform(up utils.VodPlatform) *VodCreate { +func (vc *VodCreate) SetPlatform(up utils.VideoPlatform) *VodCreate { vc.mutation.SetPlatform(up) return vc } // SetNillablePlatform sets the "platform" field if the given value is not nil. -func (vc *VodCreate) SetNillablePlatform(up *utils.VodPlatform) *VodCreate { +func (vc *VodCreate) SetNillablePlatform(up *utils.VideoPlatform) *VodCreate { if up != nil { vc.SetPlatform(*up) } @@ -713,6 +727,10 @@ func (vc *VodCreate) createSpec() (*Vod, *sqlgraph.CreateSpec) { _spec.SetField(vod.FieldExtID, field.TypeString, value) _node.ExtID = value } + if value, ok := vc.mutation.ExtStreamID(); ok { + _spec.SetField(vod.FieldExtStreamID, field.TypeString, value) + _node.ExtStreamID = value + } if value, ok := vc.mutation.Platform(); ok { _spec.SetField(vod.FieldPlatform, field.TypeEnum, value) _node.Platform = value @@ -982,8 +1000,26 @@ func (u *VodUpsert) UpdateExtID() *VodUpsert { return u } +// SetExtStreamID sets the "ext_stream_id" field. +func (u *VodUpsert) SetExtStreamID(v string) *VodUpsert { + u.Set(vod.FieldExtStreamID, v) + return u +} + +// UpdateExtStreamID sets the "ext_stream_id" field to the value that was provided on create. +func (u *VodUpsert) UpdateExtStreamID() *VodUpsert { + u.SetExcluded(vod.FieldExtStreamID) + return u +} + +// ClearExtStreamID clears the value of the "ext_stream_id" field. +func (u *VodUpsert) ClearExtStreamID() *VodUpsert { + u.SetNull(vod.FieldExtStreamID) + return u +} + // SetPlatform sets the "platform" field. -func (u *VodUpsert) SetPlatform(v utils.VodPlatform) *VodUpsert { +func (u *VodUpsert) SetPlatform(v utils.VideoPlatform) *VodUpsert { u.Set(vod.FieldPlatform, v) return u } @@ -1533,8 +1569,29 @@ func (u *VodUpsertOne) UpdateExtID() *VodUpsertOne { }) } +// SetExtStreamID sets the "ext_stream_id" field. +func (u *VodUpsertOne) SetExtStreamID(v string) *VodUpsertOne { + return u.Update(func(s *VodUpsert) { + s.SetExtStreamID(v) + }) +} + +// UpdateExtStreamID sets the "ext_stream_id" field to the value that was provided on create. +func (u *VodUpsertOne) UpdateExtStreamID() *VodUpsertOne { + return u.Update(func(s *VodUpsert) { + s.UpdateExtStreamID() + }) +} + +// ClearExtStreamID clears the value of the "ext_stream_id" field. +func (u *VodUpsertOne) ClearExtStreamID() *VodUpsertOne { + return u.Update(func(s *VodUpsert) { + s.ClearExtStreamID() + }) +} + // SetPlatform sets the "platform" field. -func (u *VodUpsertOne) SetPlatform(v utils.VodPlatform) *VodUpsertOne { +func (u *VodUpsertOne) SetPlatform(v utils.VideoPlatform) *VodUpsertOne { return u.Update(func(s *VodUpsert) { s.SetPlatform(v) }) @@ -2332,8 +2389,29 @@ func (u *VodUpsertBulk) UpdateExtID() *VodUpsertBulk { }) } +// SetExtStreamID sets the "ext_stream_id" field. +func (u *VodUpsertBulk) SetExtStreamID(v string) *VodUpsertBulk { + return u.Update(func(s *VodUpsert) { + s.SetExtStreamID(v) + }) +} + +// UpdateExtStreamID sets the "ext_stream_id" field to the value that was provided on create. +func (u *VodUpsertBulk) UpdateExtStreamID() *VodUpsertBulk { + return u.Update(func(s *VodUpsert) { + s.UpdateExtStreamID() + }) +} + +// ClearExtStreamID clears the value of the "ext_stream_id" field. +func (u *VodUpsertBulk) ClearExtStreamID() *VodUpsertBulk { + return u.Update(func(s *VodUpsert) { + s.ClearExtStreamID() + }) +} + // SetPlatform sets the "platform" field. 
-func (u *VodUpsertBulk) SetPlatform(v utils.VodPlatform) *VodUpsertBulk { +func (u *VodUpsertBulk) SetPlatform(v utils.VideoPlatform) *VodUpsertBulk { return u.Update(func(s *VodUpsert) { s.SetPlatform(v) }) diff --git a/ent/vod_update.go b/ent/vod_update.go index 51f8565d..7bf5c4c2 100644 --- a/ent/vod_update.go +++ b/ent/vod_update.go @@ -49,14 +49,34 @@ func (vu *VodUpdate) SetNillableExtID(s *string) *VodUpdate { return vu } +// SetExtStreamID sets the "ext_stream_id" field. +func (vu *VodUpdate) SetExtStreamID(s string) *VodUpdate { + vu.mutation.SetExtStreamID(s) + return vu +} + +// SetNillableExtStreamID sets the "ext_stream_id" field if the given value is not nil. +func (vu *VodUpdate) SetNillableExtStreamID(s *string) *VodUpdate { + if s != nil { + vu.SetExtStreamID(*s) + } + return vu +} + +// ClearExtStreamID clears the value of the "ext_stream_id" field. +func (vu *VodUpdate) ClearExtStreamID() *VodUpdate { + vu.mutation.ClearExtStreamID() + return vu +} + // SetPlatform sets the "platform" field. -func (vu *VodUpdate) SetPlatform(up utils.VodPlatform) *VodUpdate { +func (vu *VodUpdate) SetPlatform(up utils.VideoPlatform) *VodUpdate { vu.mutation.SetPlatform(up) return vu } // SetNillablePlatform sets the "platform" field if the given value is not nil. -func (vu *VodUpdate) SetNillablePlatform(up *utils.VodPlatform) *VodUpdate { +func (vu *VodUpdate) SetNillablePlatform(up *utils.VideoPlatform) *VodUpdate { if up != nil { vu.SetPlatform(*up) } @@ -814,6 +834,12 @@ func (vu *VodUpdate) sqlSave(ctx context.Context) (n int, err error) { if value, ok := vu.mutation.ExtID(); ok { _spec.SetField(vod.FieldExtID, field.TypeString, value) } + if value, ok := vu.mutation.ExtStreamID(); ok { + _spec.SetField(vod.FieldExtStreamID, field.TypeString, value) + } + if vu.mutation.ExtStreamIDCleared() { + _spec.ClearField(vod.FieldExtStreamID, field.TypeString) + } if value, ok := vu.mutation.Platform(); ok { _spec.SetField(vod.FieldPlatform, field.TypeEnum, value) } @@ -1194,14 +1220,34 @@ func (vuo *VodUpdateOne) SetNillableExtID(s *string) *VodUpdateOne { return vuo } +// SetExtStreamID sets the "ext_stream_id" field. +func (vuo *VodUpdateOne) SetExtStreamID(s string) *VodUpdateOne { + vuo.mutation.SetExtStreamID(s) + return vuo +} + +// SetNillableExtStreamID sets the "ext_stream_id" field if the given value is not nil. +func (vuo *VodUpdateOne) SetNillableExtStreamID(s *string) *VodUpdateOne { + if s != nil { + vuo.SetExtStreamID(*s) + } + return vuo +} + +// ClearExtStreamID clears the value of the "ext_stream_id" field. +func (vuo *VodUpdateOne) ClearExtStreamID() *VodUpdateOne { + vuo.mutation.ClearExtStreamID() + return vuo +} + // SetPlatform sets the "platform" field. -func (vuo *VodUpdateOne) SetPlatform(up utils.VodPlatform) *VodUpdateOne { +func (vuo *VodUpdateOne) SetPlatform(up utils.VideoPlatform) *VodUpdateOne { vuo.mutation.SetPlatform(up) return vuo } // SetNillablePlatform sets the "platform" field if the given value is not nil. 
-func (vuo *VodUpdateOne) SetNillablePlatform(up *utils.VodPlatform) *VodUpdateOne { +func (vuo *VodUpdateOne) SetNillablePlatform(up *utils.VideoPlatform) *VodUpdateOne { if up != nil { vuo.SetPlatform(*up) } @@ -1989,6 +2035,12 @@ func (vuo *VodUpdateOne) sqlSave(ctx context.Context) (_node *Vod, err error) { if value, ok := vuo.mutation.ExtID(); ok { _spec.SetField(vod.FieldExtID, field.TypeString, value) } + if value, ok := vuo.mutation.ExtStreamID(); ok { + _spec.SetField(vod.FieldExtStreamID, field.TypeString, value) + } + if vuo.mutation.ExtStreamIDCleared() { + _spec.ClearField(vod.FieldExtStreamID, field.TypeString) + } if value, ok := vuo.mutation.Platform(); ok { _spec.SetField(vod.FieldPlatform, field.TypeEnum, value) } diff --git a/entrypoint.sh b/entrypoint.sh index 0aff3f64..10bf40e2 100644 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -12,29 +12,36 @@ User gid: $(id -g abc) ------------------------------------- " +# define default directories +LOGS_DIR=${LOGS_DIR:-"/data/logs"} +CONFIG_DIR=${CONFIG_DIR:-"/data/config"} +VIDEOS_DIR=${VIDEOS_DIR:-"/data/videos"} +TEMP_DIR=${TEMP_DIR:-"/data/temp"} + # set permissions -chown -R abc:abc /logs -chown -R abc:abc /data -chown -R abc:abc /tmp -chown abc:abc /vods +chown -R abc:abc ${LOGS_DIR} +chown -R abc:abc ${CONFIG_DIR} +chown -R abc:abc ${TEMP_DIR} +chown abc:abc ${VIDEOS_DIR} # fonts mkdir -p /var/cache/fontconfig chown abc:abc /var/cache/fontconfig -su-exec abc fc-cache -f +gosu abc fc-cache -f # dotnet envs export DOTNET_BUNDLE_EXTRACT_BASE_DIR=/tmp export FONTCONFIG_CACHE=/var/cache/fontconfig -su-exec abc /opt/app/ganymede-api & +# start api and worker as user abc +gosu abc /opt/app/ganymede-api & api_pid=$! # delay 5 seconds to wait for api to start sleep 5 -su-exec abc /opt/app/ganymede-worker & +gosu abc /opt/app/ganymede-worker & worker_pid=$! 
# wait -wait $api_pid $worker_pid +wait $api_pid $worker_pid \ No newline at end of file diff --git a/go.mod b/go.mod index f21bf682..fb1709ab 100644 --- a/go.mod +++ b/go.mod @@ -1,109 +1,137 @@ module github.com/zibbp/ganymede -go 1.22.1 +go 1.22.5 require ( entgo.io/ent v0.13.1 github.com/MicahParks/keyfunc v1.9.0 - github.com/coreos/go-oidc/v3 v3.10.0 + github.com/coreos/go-oidc/v3 v3.11.0 github.com/go-co-op/gocron v1.37.0 - github.com/go-playground/validator/v10 v10.20.0 + github.com/go-jose/go-jose/v4 v4.0.3 + github.com/go-playground/validator/v10 v10.22.0 github.com/golang-jwt/jwt/v4 v4.5.0 github.com/google/uuid v1.6.0 - github.com/kelseyhightower/envconfig v1.4.0 github.com/labstack/echo/v4 v4.12.0 github.com/lib/pq v1.10.9 github.com/patrickmn/go-cache v2.1.0+incompatible - github.com/prometheus/client_golang v1.19.0 - github.com/rs/zerolog v1.32.0 - github.com/spf13/viper v1.18.2 + github.com/prometheus/client_golang v1.19.1 + github.com/riverqueue/river v0.11.2 + github.com/riverqueue/river/rivertype v0.11.2 + github.com/rs/zerolog v1.33.0 + github.com/sethvargo/go-envconfig v1.1.0 github.com/swaggo/swag v1.16.3 - go.temporal.io/api v1.34.0 - go.temporal.io/sdk v1.26.1 - golang.org/x/crypto v0.23.0 - golang.org/x/oauth2 v0.20.0 - gopkg.in/square/go-jose.v2 v2.6.0 + golang.org/x/crypto v0.25.0 + golang.org/x/oauth2 v0.21.0 ) require ( + dario.cat/mergo v1.0.0 // indirect + github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect github.com/KyleBanks/depth v1.2.1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/Microsoft/hcsshim v0.11.5 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect - github.com/facebookgo/clock v0.0.0-20150410010913-600d898af40a // indirect - github.com/gabriel-vasile/mimetype v1.4.3 // indirect + github.com/cenkalti/backoff/v4 v4.2.1 // indirect + github.com/containerd/containerd v1.7.18 // indirect + github.com/containerd/errdefs v0.1.0 // indirect + github.com/containerd/log v0.1.0 // indirect + github.com/cpuguy83/dockercfg v0.3.1 // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/docker/docker v27.0.3+incompatible // indirect + github.com/docker/go-connections v0.5.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/gabriel-vasile/mimetype v1.4.4 // indirect github.com/ghodss/yaml v1.0.0 // indirect - github.com/go-jose/go-jose/v4 v4.0.1 // indirect + github.com/go-logr/logr v1.4.1 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-openapi/jsonpointer v0.21.0 // indirect github.com/go-openapi/jsonreference v0.21.0 // indirect github.com/go-openapi/spec v0.21.0 // indirect github.com/go-openapi/swag v0.23.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/mock v1.6.0 // indirect - github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect - github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/puddle/v2 v2.2.1 // indirect github.com/josharian/intern v1.0.0 // indirect + github.com/klauspost/compress v1.17.4 // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect + github.com/magiconair/properties v1.8.7 // indirect github.com/mailru/easyjson v0.7.7 // 
indirect - github.com/pborman/uuid v1.2.1 // indirect - github.com/robfig/cron v1.2.0 // indirect - github.com/sagikazarmark/locafero v0.4.0 // indirect - github.com/sagikazarmark/slog-shim v0.1.0 // indirect - github.com/sourcegraph/conc v0.3.0 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/patternmatcher v0.6.0 // indirect + github.com/moby/sys/sequential v0.5.0 // indirect + github.com/moby/sys/user v0.1.0 // indirect + github.com/moby/term v0.5.0 // indirect + github.com/morikuni/aec v1.0.0 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.0 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect + github.com/riverqueue/river/riverdriver v0.11.2 // indirect + github.com/riverqueue/river/rivershared v0.11.2 // indirect + github.com/shirou/gopsutil/v3 v3.23.12 // indirect + github.com/shoenig/go-m1cpu v0.1.6 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect github.com/stretchr/objx v0.5.2 // indirect - github.com/swaggo/files/v2 v2.0.0 // indirect + github.com/swaggo/files/v2 v2.0.1 // indirect + github.com/testcontainers/testcontainers-go v0.32.0 // indirect + github.com/testcontainers/testcontainers-go/modules/postgres v0.32.0 // indirect + github.com/tklauser/go-sysconf v0.3.12 // indirect + github.com/tklauser/numcpus v0.6.1 // indirect + github.com/yusufpapurcu/wmi v1.2.3 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect + go.opentelemetry.io/otel v1.24.0 // indirect + go.opentelemetry.io/otel/metric v1.24.0 // indirect + go.opentelemetry.io/otel/trace v1.24.0 // indirect go.uber.org/atomic v1.11.0 // indirect - go.uber.org/multierr v1.11.0 // indirect - golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f // indirect - golang.org/x/sync v0.7.0 // indirect - golang.org/x/tools v0.20.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240521202816-d264139d666e // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240521202816-d264139d666e // indirect - google.golang.org/grpc v1.64.0 // indirect + go.uber.org/goleak v1.3.0 // indirect + golang.org/x/sync v0.8.0 // indirect + golang.org/x/tools v0.23.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20231016165738-49dd2c1f3d0b // indirect + google.golang.org/grpc v1.59.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect ) require ( - ariga.io/atlas v0.21.1 // indirect + ariga.io/atlas v0.25.0 // indirect github.com/agext/levenshtein v1.2.3 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect - github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/go-openapi/inflect v0.21.0 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/golang-jwt/jwt v3.2.2+incompatible // indirect + github.com/golang-jwt/jwt/v5 v5.2.1 github.com/google/go-cmp v0.6.0 // indirect - github.com/hashicorp/hcl v1.0.0 // indirect - github.com/hashicorp/hcl/v2 v2.20.1 // indirect + github.com/hashicorp/hcl/v2 v2.21.0 // indirect + github.com/jackc/pgx/v5 v5.6.0 github.com/labstack/gommon v0.4.2 // indirect github.com/leodido/go-urn v1.4.0 // indirect - github.com/magiconair/properties v1.8.7 // indirect github.com/mattn/go-colorable v0.1.13 // 
indirect github.com/mattn/go-isatty v0.0.20 // indirect - github.com/mattn/go-sqlite3 v1.14.22 + github.com/mattn/go-sqlite3 v1.14.22 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect - github.com/mitchellh/mapstructure v1.5.0 // indirect - github.com/pelletier/go-toml/v2 v2.2.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_model v0.6.1 // indirect - github.com/prometheus/common v0.53.0 // indirect - github.com/prometheus/procfs v0.14.0 // indirect - github.com/robfig/cron/v3 v3.0.1 // indirect - github.com/spf13/afero v1.11.0 // indirect - github.com/spf13/cast v1.6.0 // indirect - github.com/spf13/pflag v1.0.5 // indirect + github.com/prometheus/common v0.55.0 // indirect + github.com/prometheus/procfs v0.15.1 // indirect + github.com/riverqueue/river/riverdriver/riverpgxv5 v0.11.2 + github.com/robfig/cron/v3 v3.0.1 github.com/stretchr/testify v1.9.0 - github.com/subosito/gotenv v1.6.0 // indirect github.com/swaggo/echo-swagger v1.4.1 github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate v1.2.2 // indirect - github.com/zclconf/go-cty v1.14.4 // indirect - golang.org/x/mod v0.17.0 // indirect - golang.org/x/net v0.25.0 // indirect - golang.org/x/sys v0.20.0 // indirect - golang.org/x/text v0.15.0 // indirect + github.com/zclconf/go-cty v1.15.0 // indirect + golang.org/x/mod v0.20.0 // indirect + golang.org/x/net v0.27.0 // indirect + golang.org/x/sys v0.22.0 // indirect + golang.org/x/text v0.16.0 // indirect golang.org/x/time v0.5.0 // indirect - google.golang.org/protobuf v1.34.1 - gopkg.in/ini.v1 v1.67.0 // indirect + google.golang.org/protobuf v1.34.2 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 24da0251..9f9af4d8 100644 --- a/go.sum +++ b/go.sum @@ -1,55 +1,72 @@ -ariga.io/atlas v0.21.1 h1:Eg9XYhKTH3UHoqP7tKMWFV+Z5JnpVOJCgO3MHrUtKmk= -ariga.io/atlas v0.21.1/go.mod h1:VPlcXdd4w2KqKnH54yEZcry79UAhpaWaxEsmn5JRNoE= -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +ariga.io/atlas v0.25.0 h1:5bGawA2jx4krrhehfUBGSoqb1olC7qEIndzDj3NFSJw= +ariga.io/atlas v0.25.0/go.mod h1:KPLc7Zj+nzoXfWshrcY1RwlOh94dsATQEy4UPrF2RkM= +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= entgo.io/ent v0.13.1 h1:uD8QwN1h6SNphdCCzmkMN3feSUzNnVvV/WIkHKMbzOE= entgo.io/ent v0.13.1/go.mod h1:qCEmo+biw3ccBn9OyL4ZK5dfpwg++l1Gxwac5B1206A= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= github.com/MicahParks/keyfunc v1.9.0 h1:lhKd5xrFHLNOWrDc4Tyb/Q1AJ4LCzQ48GVJyVIID3+o= github.com/MicahParks/keyfunc v1.9.0/go.mod h1:IdnCilugA0O/99dW+/MkvlyrsX8+L8+x95xuVNtM5jw= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio 
v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/Microsoft/hcsshim v0.11.5 h1:haEcLNpj9Ka1gd3B3tAEs9CpE0c+1IhoL59w/exYU38= +github.com/Microsoft/hcsshim v0.11.5/go.mod h1:MV8xMfmECjl5HdO7U/3/hFVnkmSBjAjmA09d4bExKcU= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= -github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/coreos/go-oidc/v3 v3.10.0 h1:tDnXHnLyiTVyT/2zLDGj09pFPkhND8Gl8lnTRhoEaJU= -github.com/coreos/go-oidc/v3 v3.10.0/go.mod h1:5j11xcw0D3+SGxn6Z/WFADsgcWVMyNAlSQupk0KK3ac= +github.com/containerd/containerd v1.7.18 h1:jqjZTQNfXGoEaZdW1WwPU0RqSn1Bm2Ay/KJPUuO8nao= +github.com/containerd/containerd v1.7.18/go.mod h1:IYEk9/IO6wAPUz2bCMVUbsfXjzw5UNP5fLz4PsUygQ4= +github.com/containerd/errdefs v0.1.0 h1:m0wCRBiu1WJT/Fr+iOoQHMQS/eP5myQ8lCv4Dz5ZURM= +github.com/containerd/errdefs v0.1.0/go.mod h1:YgWiiHtLmSeBrvpw+UfPijzbLaB77mEG1WwJTDETIV0= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/coreos/go-oidc/v3 v3.11.0 h1:Ia3MxdwpSw702YW0xgfmP1GVCMA9aEFWu12XUZ3/OtI= +github.com/coreos/go-oidc/v3 v3.11.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDhf0r5lltWI0= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E= +github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= 
-github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/facebookgo/clock v0.0.0-20150410010913-600d898af40a h1:yDWHCSQ40h88yih2JAcL6Ls/kVkSE8GFACTGVnMPruw= -github.com/facebookgo/clock v0.0.0-20150410010913-600d898af40a/go.mod h1:7Ga40egUymuWXxAe151lTNnCv97MddSOVsjpPPkityA= -github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= -github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= -github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= -github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= -github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= -github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/docker v27.0.3+incompatible h1:aBGI9TeQ4MPlhquTQKq9XbK79rKFVwXNUAYz9aXyEBE= +github.com/docker/docker v27.0.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/gabriel-vasile/mimetype v1.4.4 h1:QjV6pZ7/XZ7ryI2KuyeEDE8wnh7fHP9YnQy+R0LnH8I= +github.com/gabriel-vasile/mimetype v1.4.4/go.mod h1:JwLei5XPtWdGiMFB5Pjle1oEeoSeEuJfJE+TtfvdB/s= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-co-op/gocron v1.37.0 h1:ZYDJGtQ4OMhTLKOKMIch+/CY70Brbb1dGdooLEhh7b0= github.com/go-co-op/gocron v1.37.0/go.mod h1:3L/n6BkO7ABj+TrfSVXLRzsP26zmikL4ISkLQ0O8iNY= -github.com/go-jose/go-jose/v4 v4.0.1 h1:QVEPDE3OluqXBQZDcnNvQrInro2h0e4eqNbnZSWqS6U= -github.com/go-jose/go-jose/v4 v4.0.1/go.mod h1:WVf9LFMHh/QVrmqrOfqun0C45tMe3RoiKJMPvgWwLfY= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-jose/go-jose/v4 v4.0.3 h1:o8aphO8Hv6RPmH+GfzVuyf7YXSBibp+8YyHdOoDESGo= +github.com/go-jose/go-jose/v4 v4.0.3/go.mod h1:NKb5HO1EZccyMpiZNbdUw/14tiXNyUJh188dfnMCAfc= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= +github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-openapi/inflect v0.21.0 h1:FoBjBTQEcbg2cJUWX6uwL9OyIW8eqc9k4KhN4lfbeYk= 
github.com/go-openapi/inflect v0.21.0/go.mod h1:INezMuUu7SJQc2AyR3WO0DqqYUJSj8Kb4hBd7WtjlAw= github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= @@ -66,9 +83,8 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8= -github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-playground/validator/v10 v10.22.0 h1:k6HsTZ0sTnROkhS//R0O+55JgM8C4Bx7ia+JlgcnOao= +github.com/go-playground/validator/v10 v10.22.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= @@ -79,37 +95,35 @@ github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzq github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= -github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= +github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI= 
-github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 h1:bkypFPDjIYGfCYD5mRBvpqxfYX1YCS1PXdKYWi8FsN0= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0/go.mod h1:P+Lt/0by1T8bfcF3z737NnSbmxQAppXMRziHUxPOC8k= -github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/hcl/v2 v2.20.1 h1:M6hgdyz7HYt1UN9e61j+qKJBqR3orTWbI1HKBJEdxtc= -github.com/hashicorp/hcl/v2 v2.20.1/go.mod h1:TZDqQ4kNKCbh1iJp99FdPiUaVDDUPivbqxZulxDYqL4= +github.com/hashicorp/hcl/v2 v2.21.0 h1:lve4q/o/2rqwYOgUg3y3V2YPyD1/zkCLGjIV74Jit14= +github.com/hashicorp/hcl/v2 v2.21.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA= +github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa h1:s+4MhCQ6YrzisK6hFJUX53drDT4UsSW3DEhKn0ifuHw= +github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.6.0 h1:SWJzexBzPL5jb0GEsrPMLIsi/3jOo7RHlzTjcAeDrPY= +github.com/jackc/pgx/v5 v5.6.0/go.mod h1:DNZ/vlrUnhWCoFGxHAG8U2ljioxukquj7utPDgtQdTw= +github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= +github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= -github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= +github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= @@ -127,6 +141,8 @@ github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/magiconair/properties v1.8.7 
h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= @@ -137,76 +153,93 @@ github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/ github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= -github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= -github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= -github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= +github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= +github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg= +github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc= github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ= -github.com/pborman/uuid v1.2.1 h1:+ZZIw58t/ozdjRaXh/3awHfmWRbzYxJoAdNJxe/3pvw= 
-github.com/pborman/uuid v1.2.1/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= -github.com/pelletier/go-toml/v2 v2.2.1 h1:9TA9+T8+8CUCO2+WYnDLCgrYi9+omqKXyjDtosvtEhg= -github.com/pelletier/go-toml/v2 v2.2.1/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v1.19.0 h1:ygXvpU1AoN1MhdzckN+PyD9QJOSD4x7kmXYlnfbA6JU= -github.com/prometheus/client_golang v1.19.0/go.mod h1:ZRM9uEAypZakd+q/x7+gmsvXdURP+DABIEIjnmDdp+k= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE= +github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho= github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= -github.com/prometheus/common v0.53.0 h1:U2pL9w9nmJwJDa4qqLQ3ZaePJ6ZTwt7cMD3AG3+aLCE= -github.com/prometheus/common v0.53.0/go.mod h1:BrxBKv3FWBIGXw89Mg1AeBq7FSyRzXWI3l3e7W3RN5U= -github.com/prometheus/procfs v0.14.0 h1:Lw4VdGGoKEZilJsayHf0B+9YgLGREba2C6xr+Fdfq6s= -github.com/prometheus/procfs v0.14.0/go.mod h1:XL+Iwz8k8ZabyZfMFHPiilCniixqQarAy5Mu67pHlNQ= -github.com/robfig/cron v1.2.0 h1:ZjScXvvxeQ63Dbyxy76Fj3AT3Ut0aKsyd2/tl3DTMuQ= -github.com/robfig/cron v1.2.0/go.mod h1:JGuDeoQd7Z6yL4zQhZ3OPEVHB7fL6Ka6skscFHfmt2k= +github.com/prometheus/common v0.55.0 h1:KEi6DK7lXW/m7Ig5i47x0vRzuBsHuvJdi5ee6Y3G1dc= +github.com/prometheus/common v0.55.0/go.mod h1:2SECS4xJG1kd8XF9IcM1gMX6510RAEL65zxzNImwdc8= +github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= +github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= +github.com/riverqueue/river v0.10.0 h1:RufBjhbtKxtnQB2tvNWYLMe9B/JzjR21i8wxSKrYHVc= +github.com/riverqueue/river v0.10.0/go.mod h1:FF7VV0tLfu2Mnxq1ybqtJOkVMHxhGGoVgSKokBdBCWY= +github.com/riverqueue/river v0.11.2 h1:U1f0xZ+B3qdOJSHJ8A2c93CEsFQGGkbG4ZN8blUas5g= +github.com/riverqueue/river v0.11.2/go.mod h1:0MCkMUIjwAjkKAmcWEbHP1IKWiXq+Z3iNVK5dsYVQYY= +github.com/riverqueue/river/riverdriver v0.10.0 h1:k2PTm3LDix/QXUNkZCKHHYGF3lzBqHDQq0LL57roiV4= +github.com/riverqueue/river/riverdriver v0.10.0/go.mod h1:4d5qvskeYRhT68JUssoo14lqBv/iUsoRTFfUaAOC0/E= +github.com/riverqueue/river/riverdriver v0.11.2 h1:2xC+R0Y+CFEOSDWKyeFef0wqQLuvhk3PsLkos7MLa1w= +github.com/riverqueue/river/riverdriver v0.11.2/go.mod h1:RhMuAjEtNGexwOFnz445G1iFNZVOnYQ90HDYxHMI+jM= 
+github.com/riverqueue/river/riverdriver/riverdatabasesql v0.10.0 h1:081xQZc0iZTxBiBQM4Q/au52N4HuE8nGzU/psrYoB54= +github.com/riverqueue/river/riverdriver/riverdatabasesql v0.10.0/go.mod h1:FxbPe1QjNykIApvA0PZmZdOioM6N0pEdSwaWeTzCy5Q= +github.com/riverqueue/river/riverdriver/riverdatabasesql v0.11.2 h1:I4ye1YEa35kqB6Jd3xVPNxbGDL6S1gpSTkZu25qffhc= +github.com/riverqueue/river/riverdriver/riverpgxv5 v0.10.0 h1:zEHcdyUnFQdqh1HlX4Au6e2pjZRop11RYEpylTDo8l4= +github.com/riverqueue/river/riverdriver/riverpgxv5 v0.10.0/go.mod h1:/VdY18n4cH7APULZkRZmk6K2xp254d5/0z+yaHx/hlg= +github.com/riverqueue/river/riverdriver/riverpgxv5 v0.11.2 h1:yxFi09ECN02iAr2uO0n7QhFKAyyGZ+Rn9fzKTt2TGhk= +github.com/riverqueue/river/riverdriver/riverpgxv5 v0.11.2/go.mod h1:ajPqIw7OgYBfR24MqH3VGI/SiYVgq0DkvdM7wrs+uDA= +github.com/riverqueue/river/rivershared v0.10.0 h1:ZoPJ7qtoNJb5CXFehNZqZzn5wZS9i+ot3Je7n6PFl3k= +github.com/riverqueue/river/rivershared v0.10.0/go.mod h1:2egnQ7czNcW8IXKXMRjko0aEMrQzF4V3k3jddmYiihE= +github.com/riverqueue/river/rivershared v0.11.2 h1:VbuLE6zm68R24xBi1elfnerhLBBn6X7DUxR9j4mcTR4= +github.com/riverqueue/river/rivershared v0.11.2/go.mod h1:J4U3qm8MbjHY1o5OlRNiWaminYagec1o8sHYX4ZQ4S4= +github.com/riverqueue/river/rivertype v0.10.0 h1:0yXURCpEripwjLfV3jxY6lbs9aG420wMnycc+fK1Ot0= +github.com/riverqueue/river/rivertype v0.10.0/go.mod h1:nDd50b/mIdxR/ezQzGS/JiAhBPERA7tUIne21GdfspQ= +github.com/riverqueue/river/rivertype v0.11.2 h1:YREWOGxDMDe1DTdvttwr2DVq/ql65u6e4jkw3VxuNyU= +github.com/riverqueue/river/rivertype v0.11.2/go.mod h1:bm5EMOGAEWhtXKqo27POWnViqSD5nHMZDP/jsrJc530= github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o= -github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= -github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= +github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= -github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0= -github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= -github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= -github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= -github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= -github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= -github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= -github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= -github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= -github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= -github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= 
-github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= -github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ= -github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk= +github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= +github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= +github.com/sethvargo/go-envconfig v1.1.0 h1:cWZiJxeTm7AlCvzGXrEXaSTCNgip5oJepekh/BOQuog= +github.com/sethvargo/go-envconfig v1.1.0/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/6nRidxI8YvGiHw= +github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4= +github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= @@ -214,148 +247,115 @@ github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= -github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/swaggo/echo-swagger v1.4.1 h1:Yf0uPaJWp1uRtDloZALyLnvdBeoEL5Kc7DtnjzO/TUk= github.com/swaggo/echo-swagger v1.4.1/go.mod h1:C8bSi+9yH2FLZsnhqMZLIZddpUxZdBYuNHbtaS1Hljc= -github.com/swaggo/files/v2 v2.0.0 h1:hmAt8Dkynw7Ssz46F6pn8ok6YmGZqHSVLZ+HQM7i0kw= -github.com/swaggo/files/v2 v2.0.0/go.mod h1:24kk2Y9NYEJ5lHuCra6iVwkMjIekMCaFq/0JQj66kyM= +github.com/swaggo/files/v2 v2.0.1 h1:XCVJO/i/VosCDsJu1YLpdejGsGnBE9deRMpjN4pJLHk= +github.com/swaggo/files/v2 v2.0.1/go.mod 
h1:24kk2Y9NYEJ5lHuCra6iVwkMjIekMCaFq/0JQj66kyM= github.com/swaggo/swag v1.16.3 h1:PnCYjPCah8FK4I26l2F/KQ4yz3sILcVUN3cTlBFA9Pg= github.com/swaggo/swag v1.16.3/go.mod h1:DImHIuOFXKpMFAQjcC7FG4m3Dg4+QuUgUzJmKjI/gRk= +github.com/testcontainers/testcontainers-go v0.32.0 h1:ug1aK08L3gCHdhknlTTwWjPHPS+/alvLJU/DRxTD/ME= +github.com/testcontainers/testcontainers-go v0.32.0/go.mod h1:CRHrzHLQhlXUsa5gXjTOfqIEJcrK5+xMDmBr/WMI88E= +github.com/testcontainers/testcontainers-go/modules/postgres v0.32.0 h1:ZE4dTdswj3P0j71nL+pL0m2e5HTXJwPoIFr+DDgdPaU= +github.com/testcontainers/testcontainers-go/modules/postgres v0.32.0/go.mod h1:njrNuyuoF2fjhVk6TG/R3Oeu82YwfYkbf5WVTyBXhV4= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/zclconf/go-cty v1.14.4 h1:uXXczd9QDGsgu0i/QFR/hzI5NYCHLf6NQw/atrbnhq8= -github.com/zclconf/go-cty v1.14.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= -github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b h1:FosyBZYxY34Wul7O/MSKey3txpPYyCqVO5ZyceuQJEI= -github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= -go.temporal.io/api v1.34.0 h1:RBQtYF+jJa252uruscL0TULgdFNqUkhk5R7Bj8PT2ko= -go.temporal.io/api v1.34.0/go.mod h1:YN5Ty/DSp7uAdJxLxup+Y3aQLM00q+7cZuOEGFJ2Ob8= -go.temporal.io/sdk v1.26.1 h1:ggmFBythnuuW3yQRp0VzOTrmbOf+Ddbe00TZl+CQ+6U= -go.temporal.io/sdk v1.26.1/go.mod h1:ph3K/74cry+JuSV9nJH+Q+Zeir2ddzoX2LjWL/e5yCo= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +github.com/zclconf/go-cty v1.15.0 h1:tTCRWxsexYUmtt/wVxgDClUe+uQusuI443uL6e+5sXQ= +github.com/zclconf/go-cty v1.15.0/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= +github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo= +github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= +go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= +go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= +go.opentelemetry.io/otel/metric v1.24.0 
h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI= +go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= +go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= +go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= -go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= -go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= -go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= -go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI= -golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f h1:99ci1mjWVBWwJiEKYY6jWa4d2nTQVIEhZIptnrVb1XY= -golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30= +golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= -golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/mod v0.19.0 h1:fEdghXQSo20giMthA7cd28ZC+jts4amQ3YMXiP5oMQ8= +golang.org/x/mod v0.19.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.20.0 
h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0= +golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= -golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.20.0 h1:4mQdhULixXKP1rwYBW0vAijoXnkTG0BLCDRzfe1idMo= -golang.org/x/oauth2 v0.20.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys= +golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE= +golang.org/x/oauth2 v0.21.0 h1:tsimM75w1tF/uws5rbeHzIWxEqElMehnc+iW793zsZs= +golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= -golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= +golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= -golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= +golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.20.0 h1:hz/CVckiOxybQvFw6h7b/q80NTr9IUQb4s1IIzW7KNY= -golang.org/x/tools v0.20.0/go.mod h1:WvitBU7JJf6A4jOdg4S1tviW9bhUxkgeCui/0JHctQg= +golang.org/x/tools v0.23.0 h1:SGsXPZ+2l4JsgaCKkx+FQ9YZ5XEtA1GZYuoDjenLjvg= +golang.org/x/tools v0.23.0/go.mod h1:pnu6ufv6vQkll6szChhK3C3L/ruaIv5eBeztNG8wtsI= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto/googleapis/api v0.0.0-20240521202816-d264139d666e h1:SkdGTrROJl2jRGT/Fxv5QUf9jtdKCQh4KQJXbXVLAi0= -google.golang.org/genproto/googleapis/api v0.0.0-20240521202816-d264139d666e/go.mod h1:LweJcLbyVij6rCex8YunD8DYR5VDonap/jYl3ZRxcIU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240521202816-d264139d666e h1:Elxv5MwEkCI9f5SkoL6afed6NTdxaGoAo39eANBwHL8= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240521202816-d264139d666e/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.64.0 h1:KH3VH9y/MgNQg1dE7b3XfVK0GsPSIzJwdF617gUSbvY= -google.golang.org/grpc v1.64.0/go.mod h1:oxjF8E3FBnjp+/gVFYdWacaLDx9na1aqy9oovLpxQYg= -google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg= -google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231016165738-49dd2c1f3d0b h1:ZlWIi1wSK56/8hn4QcBp/j9M7Gt3U/3hZw3mC7vDICo= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231016165738-49dd2c1f3d0b/go.mod h1:swOH3j0KzcDDgGUWr+SNpyTen5YrXjS3eyPzFYKc6lc= +google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk= +google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= -gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/square/go-jose.v2 v2.6.0 h1:NGk74WTnPKBNUhNzQX7PYcTLUjoq7mzKk2OKbvwk2iI= -gopkg.in/square/go-jose.v2 v2.6.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= 
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/internal/activities/general.go b/internal/activities/general.go
deleted file mode 100644
index 0a5e429d..00000000
--- a/internal/activities/general.go
+++ /dev/null
@@ -1,35 +0,0 @@
-package activities
-
-import (
-	"context"
-	"fmt"
-
-	"github.com/zibbp/ganymede/internal/database"
-	"github.com/zibbp/ganymede/internal/dto"
-	"github.com/zibbp/ganymede/internal/utils"
-)
-
-func CreateDirectory(ctx context.Context, input dto.ArchiveVideoInput) error {
-
-	_, err := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodCreateFolder(utils.Running).Save(ctx)
-	if err != nil {
-		return err
-	}
-
-	err = utils.CreateFolder(fmt.Sprintf("%s/%s", input.Channel.Name, input.Vod.FolderName))
-	if err != nil {
-
-		_, err := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodCreateFolder(utils.Failed).Save(ctx)
-		if err != nil {
-			return err
-		}
-		return err
-	}
-
-	_, err = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodCreateFolder(utils.Success).Save(ctx)
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
diff --git a/internal/activities/video.go b/internal/activities/video.go
deleted file mode 100644
index 86bfb377..00000000
--- a/internal/activities/video.go
+++ /dev/null
@@ -1,988 +0,0 @@
-package activities
-
-import (
-	"context"
-	"fmt"
-	"strconv"
-	"strings"
-	"time"
-
-	osExec "os/exec"
-
-	"github.com/rs/zerolog/log"
-	"github.com/spf13/viper"
-	entChannel "github.com/zibbp/ganymede/ent/channel"
-	entVod "github.com/zibbp/ganymede/ent/vod"
-	"github.com/zibbp/ganymede/internal/chapter"
-	"github.com/zibbp/ganymede/internal/database"
-	"github.com/zibbp/ganymede/internal/dto"
-	"github.com/zibbp/ganymede/internal/exec"
-	"github.com/zibbp/ganymede/internal/twitch"
-	"github.com/zibbp/ganymede/internal/utils"
-	"github.com/zibbp/ganymede/internal/vod"
-	"go.temporal.io/sdk/activity"
-	"go.temporal.io/sdk/temporal"
-)
-
-func sendHeartbeat(ctx context.Context, msg string, stop chan bool) {
-	ticker := time.NewTicker(20 * time.Second)
-	log.Debug().Msgf("starting heartbeat %s", msg)
-	for {
-		select {
-		case <-ticker.C:
-			activity.RecordHeartbeat(ctx, msg)
-		case <-stop:
-			log.Debug().Msgf("stopping heartbeat %s", msg)
-			ticker.Stop()
-			return
-		}
-	}
-}
-
-func convertTwitchChaptersToChapters(chapters []twitch.Node, duration int) ([]chapter.Chapter, error) {
-	if len(chapters) == 0 {
-		return nil, fmt.Errorf("no chapters found")
-	}
-
-	convertedChapters := make([]chapter.Chapter, len(chapters))
-	for i := 0; i < len(chapters); i++ {
-		convertedChapters[i].ID = chapters[i].ID
-		convertedChapters[i].Title = chapters[i].Description
-		convertedChapters[i].Type = string(chapters[i].Type)
-		convertedChapters[i].Start = int(chapters[i].PositionMilliseconds / 1000)
-
-		if i+1 < len(chapters) {
-			convertedChapters[i].End = int(chapters[i+1].PositionMilliseconds / 1000)
-		} else {
-
convertedChapters[i].End = duration - } - } - - return convertedChapters, nil -} - -func ArchiveVideoActivity(ctx context.Context, input dto.ArchiveVideoInput) error { - return nil -} - -func SaveTwitchVideoInfo(ctx context.Context, input dto.ArchiveVideoInput) error { - - _, err := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Running).Save(ctx) - if err != nil { - return err - } - - twitchService := twitch.NewService() - twitchVideo, err := twitchService.GetVodByID(input.VideoID) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - - // get chapters - twitchChapters, err := twitch.GQLGetChapters(input.VideoID) - if err != nil { - _, dbEr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Failed).Save(ctx) - if dbEr != nil { - return dbEr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - - // convert twitch chapters to chapters - // get nodes from gql response - var nodes []twitch.Node - for _, v := range twitchChapters.Data.Video.Moments.Edges { - nodes = append(nodes, v.Node) - } - if len(nodes) > 0 { - chapters, err := convertTwitchChaptersToChapters(nodes, input.Vod.Duration) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - // add chapters to database - chapterService := chapter.NewService() - for _, c := range chapters { - _, err := chapterService.CreateChapter(c, input.Vod.ID) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - } - - twitchVideo.Chapters = chapters - } - - // get muted segments - mutedSegments, err := twitch.GQLGetMutedSegments(input.VideoID) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - cleanMutedSegments := []vod.MutedSegment{} - - // insert muted segments into database - for _, mutedSegment := range mutedSegments.Data.Video.MuteInfo.MutedSegmentConnection.Nodes { - segmentEnd := mutedSegment.Offset + mutedSegment.Duration - if segmentEnd > input.Vod.Duration { - segmentEnd = input.Vod.Duration - } - // insert muted segment into database - _, err := database.DB().Client.MutedSegment.Create().SetStart(mutedSegment.Offset).SetEnd(segmentEnd).SetVod(input.Vod).Save(ctx) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - cleanMutedSegments = append(cleanMutedSegments, vod.MutedSegment{ - Start: mutedSegment.Offset, - End: segmentEnd, - }) - } - twitchVideo.MutedSegments = cleanMutedSegments - - err = utils.WriteJson(twitchVideo, fmt.Sprintf("%s/%s", input.Channel.Name, input.Vod.FolderName), fmt.Sprintf("%s-info.json", input.Vod.FileName)) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Failed).Save(ctx) - if dbErr 
!= nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - - _, err = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Success).Save(ctx) - if err != nil { - return err - } - - return nil -} - -func SaveTwitchLiveVideoInfo(ctx context.Context, input dto.ArchiveVideoInput) error { - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - twitchService := twitch.NewService() - stream, err := twitchService.GetStreams(fmt.Sprintf("?user_login=%s", input.Channel.Name)) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - - if len(stream.Data) == 0 { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return fmt.Errorf("no stream found for channel %s", input.Channel.Name) - } - - twitchVideo := stream.Data[0] - - err = utils.WriteJson(twitchVideo, fmt.Sprintf("%s/%s", input.Channel.Name, input.Vod.FolderName), fmt.Sprintf("%s-info.json", input.Vod.FileName)) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - - _, err = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodSaveInfo(utils.Success).Save(ctx) - if err != nil { - return err - } - - return nil -} - -func DownloadTwitchThumbnails(ctx context.Context, input dto.ArchiveVideoInput) error { - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodDownloadThumbnail(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - twitchService := twitch.NewService() - twitchVideo, err := twitchService.GetVodByID(input.VideoID) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodDownloadThumbnail(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - - fullResThumbnailUrl := replacePlaceholders(twitchVideo.ThumbnailURL, "1920", "1080") - webResThumbnailUrl := replacePlaceholders(twitchVideo.ThumbnailURL, "640", "360") - - err = utils.DownloadFile(fullResThumbnailUrl, fmt.Sprintf("%s/%s", input.Channel.Name, input.Vod.FolderName), fmt.Sprintf("%s-thumbnail.jpg", input.Vod.FileName)) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodDownloadThumbnail(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - - err = utils.DownloadFile(webResThumbnailUrl, fmt.Sprintf("%s/%s", input.Channel.Name, input.Vod.FolderName), fmt.Sprintf("%s-web_thumbnail.jpg", input.Vod.FileName)) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodDownloadThumbnail(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - - _, err = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodDownloadThumbnail(utils.Success).Save(ctx) - if err != nil { - return err - } - - return nil -} - -func DownloadTwitchLiveThumbnails(ctx context.Context, input 
dto.ArchiveVideoInput) error { - - twitchService := twitch.NewService() - stream, err := twitchService.GetStreams(fmt.Sprintf("?user_login=%s", input.Channel.Name)) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodDownloadThumbnail(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodDownloadThumbnail(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - if len(stream.Data) == 0 { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodDownloadThumbnail(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - // stream isn't live so archive shouldn't continue and should be cleaned up - return temporal.NewApplicationError(fmt.Sprintf("no stream found for channel %s", input.Channel.Name), "", nil) - } - - twitchVideo := stream.Data[0] - - fullResThumbnailUrl := replaceLivePlaceholders(twitchVideo.ThumbnailURL, "1920", "1080") - webResThumbnailUrl := replaceLivePlaceholders(twitchVideo.ThumbnailURL, "640", "360") - - err = utils.DownloadFile(fullResThumbnailUrl, fmt.Sprintf("%s/%s", input.Channel.Name, input.Vod.FolderName), fmt.Sprintf("%s-thumbnail.jpg", input.Vod.FileName)) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodDownloadThumbnail(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - - err = utils.DownloadFile(webResThumbnailUrl, fmt.Sprintf("%s/%s", input.Channel.Name, input.Vod.FolderName), fmt.Sprintf("%s-web_thumbnail.jpg", input.Vod.FileName)) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodDownloadThumbnail(utils.Failed).Save(ctx) - if dbErr != nil { - return dbErr - } - return temporal.NewApplicationError(err.Error(), "", nil) - } - - _, dbErr = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVodDownloadThumbnail(utils.Success).Save(ctx) - if dbErr != nil { - return dbErr - } - - return nil -} - -func replacePlaceholders(url, width, height string) string { - url = strings.ReplaceAll(url, "%{width}", width) - url = strings.ReplaceAll(url, "%{height}", height) - return url -} -func replaceLivePlaceholders(url, width, height string) string { - url = strings.ReplaceAll(url, "{width}", width) - url = strings.ReplaceAll(url, "{height}", height) - return url -} - -func DownloadTwitchVideo(ctx context.Context, input dto.ArchiveVideoInput) error { - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoDownload(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - stopHeartbeat := make(chan bool) - go sendHeartbeat(ctx, fmt.Sprintf("download-video-%s", input.VideoID), stopHeartbeat) - - // Start the download - err := exec.DownloadTwitchVodVideo(input.Vod) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoDownload(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - _, dbErr = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoDownload(utils.Success).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - - stopHeartbeat <- true - return nil -} - -func DownloadTwitchLiveVideo(ctx context.Context, input 
dto.ArchiveVideoInput, ch chan bool) error { - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoDownload(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - stopHeartbeat := make(chan bool) - go sendHeartbeat(ctx, fmt.Sprintf("download-livevideo-%s", input.VideoID), stopHeartbeat) - - // Start the download - err := exec.DownloadTwitchLiveVideo(ctx, input.Vod, input.Channel, input.LiveChatWorkflowId) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoDownload(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return temporal.NewApplicationError(dbErr.Error(), "", nil) - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - _, dbErr = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoDownload(utils.Success).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return temporal.NewApplicationError(dbErr.Error(), "", nil) - } - - // Update video duration with duration from downloaded video - duration, err := exec.GetVideoDuration(input.Vod.TmpVideoDownloadPath) - if err != nil { - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - _, dbErr = database.DB().Client.Vod.UpdateOneID(input.Vod.ID).SetDuration(duration).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - - // attempt to find vod id of the livesstream so the external id is correct - videos, err := twitch.GetVideosByUser(input.Channel.ExtID, "archive") - if err != nil { - stopHeartbeat <- true - log.Err(err).Msg("error getting videos from twitch api") - } - - // attempt to find vod of current livestream - var livestreamVodId string - for _, video := range videos { - if video.StreamID == input.Vod.ExtID { - livestreamVodId = video.ID - log.Info().Msgf("found vod id %s for livestream %s, updating database", livestreamVodId, input.Vod.ExtID) - // update vod with external id - _, dbErr = database.DB().Client.Vod.UpdateOneID(input.Vod.ID).SetExtID(livestreamVodId).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - log.Err(dbErr).Msg("error updating vod with external id") - } - } - } - - if livestreamVodId == "" { - log.Info().Msgf("no vod found for livestream %s, keeping live stream ID as external id", input.Vod.ExtID) - } - - stopHeartbeat <- true - return nil -} - -func PostprocessVideo(ctx context.Context, input dto.ArchiveVideoInput) error { - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoConvert(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - stopHeartbeat := make(chan bool) - go sendHeartbeat(ctx, fmt.Sprintf("postprocess-video-%s", input.VideoID), stopHeartbeat) - - // Start post process - err := exec.ConvertTwitchVodVideo(input.Vod) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoConvert(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - // Convert to HLS if needed - if viper.GetBool("archive.save_as_hls") { - err = exec.ConvertToHLS(input.Vod) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoConvert(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - // delete -convert video as it is not being moved 
- err := utils.DeleteFile(input.Vod.TmpVideoConvertPath) - if err != nil { - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - } - - _, dbErr = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoConvert(utils.Success).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - - stopHeartbeat <- true - return nil -} - -func MoveVideo(ctx context.Context, input dto.ArchiveVideoInput) error { - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoMove(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - stopHeartbeat := make(chan bool) - go sendHeartbeat(ctx, fmt.Sprintf("move-video-%s", input.VideoID), stopHeartbeat) - - if viper.GetBool("archive.save_as_hls") { - err := utils.MoveFolder(input.Vod.TmpVideoHlsPath, input.Vod.VideoHlsPath) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoMove(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - } else { - err := utils.MoveFile(input.Vod.TmpVideoConvertPath, input.Vod.VideoPath) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoMove(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - } - - // Clean up files - // Delete source file - err := utils.DeleteFile(input.Vod.TmpVideoDownloadPath) - if err != nil { - log.Info().Err(err).Msgf("error deleting source file for vod %s", input.Vod.ID) - } - - _, dbErr = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskVideoMove(utils.Success).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - - stopHeartbeat <- true - return nil -} - -func DownloadTwitchChat(ctx context.Context, input dto.ArchiveVideoInput) error { - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatDownload(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - stopHeartbeat := make(chan bool) - go sendHeartbeat(ctx, fmt.Sprintf("download-chat-%s", input.VideoID), stopHeartbeat) - - // Start the download - err := exec.DownloadTwitchVodChat(input.Vod) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatDownload(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - // copy json to vod folder - err = utils.CopyFile(input.Vod.TmpChatDownloadPath, input.Vod.ChatPath) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatDownload(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - _, dbErr = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatDownload(utils.Success).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - - stopHeartbeat <- true - return nil -} - -func DownloadTwitchLiveChat(ctx context.Context, input dto.ArchiveVideoInput) error { - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatDownload(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - stopHeartbeat := make(chan bool) - go 
sendHeartbeat(ctx, fmt.Sprintf("download-livechat-%s", input.VideoID), stopHeartbeat) - - // Start the download - err := exec.DownloadTwitchLiveChat(ctx, input.Vod, input.Channel, input.Queue) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatDownload(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - // copy json to vod folder - err = utils.CopyFile(input.Vod.TmpLiveChatDownloadPath, input.Vod.ChatPath) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatDownload(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - _, dbErr = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatDownload(utils.Success).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - - return nil -} - -func RenderTwitchChat(ctx context.Context, input dto.ArchiveVideoInput) error { - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatRender(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - stopHeartbeat := make(chan bool) - go sendHeartbeat(ctx, fmt.Sprintf("render-chat-%s", input.VideoID), stopHeartbeat) - - // Start the download - err, _ := exec.RenderTwitchVodChat(input.Vod) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatRender(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - _, dbErr = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatRender(utils.Success).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - - stopHeartbeat <- true - - return nil -} - -func MoveChat(ctx context.Context, input dto.ArchiveVideoInput) error { - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatMove(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - stopHeartbeat := make(chan bool) - go sendHeartbeat(ctx, fmt.Sprintf("move-chat-%s", input.VideoID), stopHeartbeat) - - err := utils.MoveFile(input.Vod.TmpChatDownloadPath, input.Vod.ChatPath) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatMove(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - if input.Queue.RenderChat { - err = utils.MoveFile(input.Vod.TmpChatRenderPath, input.Vod.ChatVideoPath) - if err != nil { - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatMove(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - } - - _, dbErr = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatMove(utils.Success).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - - stopHeartbeat <- true - return nil -} - -func KillTwitchLiveChatDownload(ctx context.Context, input dto.ArchiveVideoInput) error { - - log.Info().Str("channel", input.Channel.Name).Str("stream_id", input.Vod.ExtID).Msg("Killing chat download") - - // find pid of 
chat_downloader to kill - // search for channel and unique temporary download path to ensure we do not kill a new instance - cmd := osExec.Command("pgrep", "-f", input.Vod.TmpLiveChatDownloadPath) - out, err := cmd.Output() - if err != nil { - return temporal.NewApplicationError(err.Error(), "", nil) - } - // parse output into array of process ids - pids := strings.Split(strings.TrimSpace(string(out)), "\n") - if len(pids) > 0 { - log.Debug().Str("channel", input.Channel.Name).Str("stream_id", input.Vod.ExtID).Msgf("Found chat download processes to kill: %s", pids) - - // kill pid - for _, pid := range pids { - cmd = osExec.Command("kill", "-15", pid) - _, err = cmd.Output() - if err != nil { - return temporal.NewApplicationError(err.Error(), "", nil) - } - } - - log.Info().Str("channel", input.Channel.Name).Str("stream_id", input.Vod.ExtID).Msgf("Killed chat downloader for channel %s", input.Channel.Name) - } else { - // not a big enough issue to raise an error if chat downloader is not running - log.Warn().Str("channel", input.Channel.Name).Str("stream_id", input.Vod.ExtID).Msg("No chat download processes found") - } - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatDownload(utils.Success).Save(ctx) - if dbErr != nil { - return dbErr - } - - return nil -} - -func ConvertTwitchLiveChat(ctx context.Context, input dto.ArchiveVideoInput) error { - - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatConvert(utils.Running).Save(ctx) - if dbErr != nil { - return dbErr - } - - stopHeartbeat := make(chan bool) - go sendHeartbeat(ctx, fmt.Sprintf("convert-livechat-%s", input.VideoID), stopHeartbeat) - - // Check if chat file exists - if !utils.FileExists(input.Vod.TmpLiveChatDownloadPath) { - log.Debug().Msgf("chat file does not exist %s - this means there were no chat messages - setting chat to complete", input.Vod.TmpLiveChatDownloadPath) - // Set queue chat task to complete - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatConvert(utils.Success).SetTaskChatRender(utils.Success).SetTaskChatMove((utils.Success)).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - // Set VOD chat to empty - _, dbErr = database.DB().Client.Vod.UpdateOneID(input.Vod.ID).SetChatVideoPath("").SetChatPath("").Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return nil - } - - // Fetch streamer from Twitch API for their user ID - streamer, err := twitch.API.GetUserByLogin(input.Channel.Name) - if err != nil { - log.Error().Err(err).Msg("error getting streamer from Twitch API") - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatConvert(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - cID, err := strconv.Atoi(streamer.ID) - if err != nil { - log.Error().Err(err).Msg("error converting streamer ID to int") - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatConvert(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - // update queue item - updatedQueue, dbErr := database.DB().Client.Queue.Get(ctx, input.Queue.ID) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - input.Queue = updatedQueue - log.Info().Msgf("streamer ID: %s", streamer.ID) - // 
TwitchDownloader requires the ID of the video, or at least a previous video ID - videos, err := twitch.GetVideosByUser(streamer.ID, "archive") - if err != nil { - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - // attempt to find vod of current livestream - var previousVideoID string - for _, video := range videos { - if video.StreamID == input.Vod.ExtID { - previousVideoID = video.ID - } - } - // If no previous video ID was found, use a random id - if previousVideoID == "" { - log.Warn().Msgf("Stream %s on channel %s has no previous video ID, using %s", input.VideoID, input.Channel.Name, previousVideoID) - previousVideoID = "132195945" - } - - err = utils.ConvertTwitchLiveChatToTDLChat(input.Vod.TmpLiveChatDownloadPath, input.Channel.Name, input.Vod.ID.String(), input.Vod.ExtID, cID, input.Queue.ChatStart, string(previousVideoID)) - if err != nil { - log.Error().Err(err).Msg("error converting chat") - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatConvert(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - // TwitchDownloader "chatupdate" - // Embeds emotes and badges into the chat file - err = exec.TwitchChatUpdate(input.Vod) - if err != nil { - log.Error().Err(err).Msg("error updating chat") - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatConvert(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - // copy converted chat - err = utils.CopyFile(input.Vod.TmpLiveChatConvertPath, input.Vod.LiveChatConvertPath) - if err != nil { - log.Error().Err(err).Msg("error copying chat convert") - _, dbErr := database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatConvert(utils.Failed).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - _, dbErr = database.DB().Client.Queue.UpdateOneID(input.Queue.ID).SetTaskChatConvert(utils.Success).Save(ctx) - if dbErr != nil { - stopHeartbeat <- true - return dbErr - } - - stopHeartbeat <- true - return nil -} - -func TwitchSaveVideoChapters(ctx context.Context) error { - stopHeartbeat := make(chan bool) - go sendHeartbeat(ctx, "save-video-chapters", stopHeartbeat) - - // get all videos - videos, err := database.DB().Client.Vod.Query().All(ctx) - if err != nil { - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - for _, video := range videos { - if video.Type == "live" { - continue - } - if video.ExtID == "" { - continue - } - log.Debug().Msgf("getting chapters for video %s", video.ID) - // get chapters - twitchChapters, err := twitch.GQLGetChapters(video.ExtID) - if err != nil { - log.Error().Err(err).Msgf("error getting chapters for video %s", video.ID) - continue - } - - // convert twitch chapters to chapters - // get nodes from gql response - var nodes []twitch.Node - for _, v := range twitchChapters.Data.Video.Moments.Edges { - nodes = append(nodes, v.Node) - } - if len(nodes) > 0 { - chapters, err := convertTwitchChaptersToChapters(nodes, video.Duration) - if err != nil { - return temporal.NewApplicationError(err.Error(), "", nil) - } - // add chapters to database - chapterService := chapter.NewService() - // check if chapters already exist - 
existingChapters, err := chapterService.GetVideoChapters(video.ID) - if err != nil { - log.Error().Err(err).Msgf("error getting chapters for video %s", video.ID) - } - if len(existingChapters) > 0 { - log.Debug().Msgf("chapters already exist for video %s", video.ID) - continue - } - - for _, c := range chapters { - _, err := chapterService.CreateChapter(c, video.ID) - if err != nil { - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - } - log.Info().Msgf("added %d chapters to video %s", len(chapters), video.ID) - } - // sleep for 0.25 seconds to not hit rate limit - time.Sleep(250 * time.Millisecond) - } - stopHeartbeat <- true - return nil -} - -func UpdateTwitchLiveStreamArchivesWithVodIds(ctx context.Context) error { - stopHeartbeat := make(chan bool) - go sendHeartbeat(ctx, "update-video-ids", stopHeartbeat) - - // get all channels - channels, err := database.DB().Client.Channel.Query().All(ctx) - if err != nil { - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - for _, channel := range channels { - log.Info().Msgf("processing channel %s", channel.Name) - // get all videos for channel - videos, err := database.DB().Client.Vod.Query().Where(entVod.HasChannelWith(entChannel.ID(channel.ID))).All(ctx) - if err != nil { - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - // get all videos from twitch for channel - twitchChannelVideoss, err := twitch.GetVideosByUser(channel.ExtID, "archive") - if err != nil { - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - - for _, video := range videos { - if video.Type != "live" { - continue - } - if video.ExtID == "" { - continue - } - // find video in twitch videos - for _, twitchVideo := range twitchChannelVideoss { - if video.ExtID == twitchVideo.StreamID { - log.Debug().Msgf("found video %s in twitch videos", video.ExtID) - // update video with vod id - _, err := database.DB().Client.Vod.UpdateOneID(video.ID).SetExtID(twitchVideo.ID).Save(ctx) - if err != nil { - stopHeartbeat <- true - return temporal.NewApplicationError(err.Error(), "", nil) - } - } - } - - } - } - stopHeartbeat <- true - return nil -} diff --git a/internal/admin/admin.go b/internal/admin/admin.go index 41f105a7..427da84c 100644 --- a/internal/admin/admin.go +++ b/internal/admin/admin.go @@ -1,8 +1,9 @@ package admin import ( + "context" "fmt" - "github.com/labstack/echo/v4" + "github.com/zibbp/ganymede/internal/database" ) @@ -19,13 +20,13 @@ type GetStatsResp struct { ChannelCount int `json:"channel_count"` } -func (s *Service) GetStats(c echo.Context) (GetStatsResp, error) { +func (s *Service) GetStats(ctx context.Context) (GetStatsResp, error) { - vC, err := s.Store.Client.Vod.Query().Count(c.Request().Context()) + vC, err := s.Store.Client.Vod.Query().Count(ctx) if err != nil { return GetStatsResp{}, fmt.Errorf("error getting vod count: %v", err) } - cC, err := s.Store.Client.Channel.Query().Count(c.Request().Context()) + cC, err := s.Store.Client.Channel.Query().Count(ctx) if err != nil { return GetStatsResp{}, fmt.Errorf("error getting channel count: %v", err) } diff --git a/internal/admin/admin_test.go b/internal/admin/admin_test.go new file mode 100644 index 00000000..ff0b3ab8 --- /dev/null +++ b/internal/admin/admin_test.go @@ -0,0 +1,31 @@ +package admin_test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/zibbp/ganymede/internal/channel" + 
"github.com/zibbp/ganymede/tests" +) + +func TestGetStats(t *testing.T) { + ctx := context.Background() + app, err := tests.Setup(t) + assert.NoError(t, err) + + // create a channel for to test\ + _, err = app.ChannelService.CreateChannel(channel.Channel{ + ExtID: "123456789", + Name: "test_channel", + DisplayName: "Test Channel", + ImagePath: "/vods/test_channel/test_channel.jpg", + }) + assert.NoError(t, err) + + // test GetStats + stats, err := app.AdminService.GetStats(ctx) + assert.NoError(t, err) + assert.Equal(t, 0, stats.VodCount) + assert.Equal(t, 1, stats.ChannelCount) +} diff --git a/internal/admin/info.go b/internal/admin/info.go index 40932c60..7dde5a1f 100644 --- a/internal/admin/info.go +++ b/internal/admin/info.go @@ -1,19 +1,17 @@ package admin import ( + "context" "fmt" "os/exec" - "strconv" "time" - "github.com/labstack/echo/v4" - "github.com/zibbp/ganymede/internal/kv" + "github.com/zibbp/ganymede/internal/utils" ) type InfoResp struct { - Version string `json:"version"` + CommitHash string `json:"commit_hash"` BuildTime string `json:"build_time"` - GitHash string `json:"git_hash"` Uptime string `json:"uptime"` ProgramVersions `json:"program_versions"` } @@ -25,17 +23,11 @@ type ProgramVersions struct { Streamlink string `json:"streamlink"` } -func (s *Service) GetInfo(c echo.Context) (InfoResp, error) { +func (s *Service) GetInfo(ctx context.Context) (InfoResp, error) { var resp InfoResp - resp.Version = kv.DB().Get("version") - resp.BuildTime = kv.DB().Get("build_time") - resp.GitHash = kv.DB().Get("git_hash") - startTimeUnix := kv.DB().Get("start_time_unix") - parsedStart, err := strconv.ParseInt(startTimeUnix, 10, 64) - if err != nil { - return resp, fmt.Errorf("error parsing start time: %v", err) - } - resp.Uptime = time.Since(time.Unix(parsedStart, 0)).String() + resp.CommitHash = utils.Commit + resp.BuildTime = utils.BuildTime + resp.Uptime = time.Since(utils.StartTime).String() // Program versions var programVersion ProgramVersions diff --git a/internal/archive/archive.go b/internal/archive/archive.go index 29d9329f..e73e0d6b 100644 --- a/internal/archive/archive.go +++ b/internal/archive/archive.go @@ -4,30 +4,30 @@ import ( "context" "fmt" "strings" - "time" "github.com/google/uuid" "github.com/rs/zerolog/log" - "github.com/spf13/viper" "github.com/zibbp/ganymede/ent" + "github.com/zibbp/ganymede/internal/blocked" "github.com/zibbp/ganymede/internal/channel" + "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/database" - "github.com/zibbp/ganymede/internal/dto" + "github.com/zibbp/ganymede/internal/platform" "github.com/zibbp/ganymede/internal/queue" - "github.com/zibbp/ganymede/internal/temporal" - "github.com/zibbp/ganymede/internal/twitch" + "github.com/zibbp/ganymede/internal/tasks" + tasks_client "github.com/zibbp/ganymede/internal/tasks/client" "github.com/zibbp/ganymede/internal/utils" "github.com/zibbp/ganymede/internal/vod" - "github.com/zibbp/ganymede/internal/workflows" - "go.temporal.io/sdk/client" ) type Service struct { - Store *database.Database - TwitchService *twitch.Service - ChannelService *channel.Service - VodService *vod.Service - QueueService *queue.Service + Store *database.Database + ChannelService *channel.Service + VodService *vod.Service + QueueService *queue.Service + BlockedVodsService *blocked.Service + RiverClient *tasks_client.RiverClient + PlatformTwitch platform.Platform } type TwitchVodResponse struct { @@ -35,42 +35,43 @@ type TwitchVodResponse struct { Queue *ent.Queue `json:"queue"` } -func 
NewService(store *database.Database, twitchService *twitch.Service, channelService *channel.Service, vodService *vod.Service, queueService *queue.Service) *Service { - return &Service{Store: store, TwitchService: twitchService, ChannelService: channelService, VodService: vodService, QueueService: queueService} +func NewService(store *database.Database, channelService *channel.Service, vodService *vod.Service, queueService *queue.Service, blockedVodService *blocked.Service, riverClient *tasks_client.RiverClient, platformTwitch platform.Platform) *Service { + return &Service{Store: store, ChannelService: channelService, VodService: vodService, QueueService: queueService, BlockedVodsService: blockedVodService, RiverClient: riverClient, PlatformTwitch: platformTwitch} } -// ArchiveTwitchChannel - Create Twitch channel folder, profile image, and database entry. -func (s *Service) ArchiveTwitchChannel(cName string) (*ent.Channel, error) { - // Fetch channel from Twitch API - tChannel, err := twitch.API.GetUserByLogin(cName) +// ArchiveChannel - Create channel entry in database along with folder, profile image, etc. +func (s *Service) ArchiveChannel(ctx context.Context, channelName string) (*ent.Channel, error) { + env := config.GetEnvConfig() + // get channel from platform + platformChannel, err := s.PlatformTwitch.GetChannel(ctx, channelName) if err != nil { return nil, fmt.Errorf("error fetching twitch channel: %v", err) } // Check if channel exists in DB - cCheck := s.ChannelService.CheckChannelExists(tChannel.Login) + cCheck := s.ChannelService.CheckChannelExists(platformChannel.Login) if cCheck { return nil, fmt.Errorf("channel already exists") } // Create channel folder - err = utils.CreateFolder(tChannel.Login) + err = utils.CreateDirectory(fmt.Sprintf("%s/%s", env.VideosDir, platformChannel.Login)) if err != nil { return nil, fmt.Errorf("error creating channel folder: %v", err) } // Download channel profile image - err = utils.DownloadFile(tChannel.ProfileImageURL, tChannel.Login, "profile.png") + err = utils.DownloadFile(platformChannel.ProfileImageURL, fmt.Sprintf("%s/%s/%s", env.VideosDir, platformChannel.Login, "profile.png")) if err != nil { return nil, fmt.Errorf("error downloading channel profile image: %v", err) } // Create channel in DB channelDTO := channel.Channel{ - ExtID: tChannel.ID, - Name: tChannel.Login, - DisplayName: tChannel.DisplayName, - ImagePath: fmt.Sprintf("/vods/%s/profile.png", tChannel.Login), + ExtID: platformChannel.ID, + Name: platformChannel.Login, + DisplayName: platformChannel.DisplayName, + ImagePath: fmt.Sprintf("%s/%s/profile.png", env.VideosDir, platformChannel.Login), } dbC, err := s.ChannelService.CreateChannel(channelDTO) @@ -82,82 +83,102 @@ func (s *Service) ArchiveTwitchChannel(cName string) (*ent.Channel, error) { } -func (s *Service) ArchiveTwitchVod(vID string, quality string, chat bool, renderChat bool) (*TwitchVodResponse, error) { - log.Debug().Msgf("Archiving video %s quality: %s chat: %t render chat: %t", vID, quality, chat, renderChat) - // Fetch VOD from Twitch API - tVod, err := s.TwitchService.GetVodByID(vID) +type ArchiveVideoInput struct { + VideoId string + ChannelId uuid.UUID + Quality utils.VodQuality + ArchiveChat bool + RenderChat bool +} + +func (s *Service) ArchiveVideo(ctx context.Context, input ArchiveVideoInput) error { + // log.Debug().Msgf("Archiving video %s quality: %s chat: %t render chat: %t", videoId, quality, chat, renderChat) + + envConfig := config.GetEnvConfig() + + // check if video is blocked + blocked, err 
:= s.BlockedVodsService.IsVideoBlocked(ctx, input.VideoId) + if err != nil { + return fmt.Errorf("error checking if vod is blocked: %v", err) + } + if blocked { + return fmt.Errorf("video id is blocked") + } + + // get video + video, err := s.PlatformTwitch.GetVideo(context.Background(), input.VideoId, false, false) if err != nil { - return nil, fmt.Errorf("error fetching twitch vod: %v", err) + return err } - // check if vod is processing - // the best way I know to check if a vod is processing / still being streamed - if strings.Contains(tVod.ThumbnailURL, "processing") { - return nil, fmt.Errorf("vod is still processing") + + // check if video is processing + if strings.Contains(video.ThumbnailURL, "processing") { + return fmt.Errorf("vod is still processing") } - // Check if vod is already archived - vCheck, err := s.VodService.CheckVodExists(tVod.ID) + + // Check if video is already archived + vCheck, err := s.VodService.CheckVodExists(video.ID) if err != nil { - return nil, fmt.Errorf("error checking if vod exists: %v", err) + return fmt.Errorf("error checking if vod exists: %v", err) } if vCheck { - return nil, fmt.Errorf("vod already exists") + return fmt.Errorf("vod already exists") } + // Check if channel exists - cCheck := s.ChannelService.CheckChannelExists(tVod.UserLogin) + cCheck := s.ChannelService.CheckChannelExists(video.UserLogin) if !cCheck { - log.Debug().Msgf("channel does not exist: %s while archiving vod. creating now.", tVod.UserLogin) - _, err := s.ArchiveTwitchChannel(tVod.UserLogin) + log.Debug().Msgf("channel does not exist: %s while archiving vod. creating now.", video.UserLogin) + _, err := s.ArchiveChannel(ctx, video.UserLogin) if err != nil { - return nil, fmt.Errorf("error creating channel: %v", err) + return fmt.Errorf("error creating channel: %v", err) } } + // Fetch channel - dbC, err := s.ChannelService.GetChannelByName(tVod.UserLogin) + channel, err := s.ChannelService.GetChannelByName(video.UserLogin) if err != nil { - return nil, fmt.Errorf("error fetching channel: %v", err) + return fmt.Errorf("error fetching channel: %v", err) } - // Generate VOD ID for folder name + // Generate Ganymede video ID for directory and file naming vUUID, err := uuid.NewUUID() if err != nil { - return nil, fmt.Errorf("error creating vod uuid: %v", err) + return fmt.Errorf("error creating vod uuid: %v", err) } - // Storage templates - folderName, err := GetFolderName(vUUID, tVod) + storageTemplateInput := StorageTemplateInput{ + UUID: vUUID, + ID: input.VideoId, + Channel: channel.Name, + Title: video.Title, + Type: video.Type, + Date: video.CreatedAt.Format("2006-01-02"), + } + // Create directory paths + folderName, err := GetFolderName(vUUID, storageTemplateInput) if err != nil { log.Error().Err(err).Msg("error using template to create folder name, falling back to default") - folderName = fmt.Sprintf("%s-%s", tVod.ID, vUUID.String()) + folderName = fmt.Sprintf("%s-%s", video.ID, vUUID.String()) } - fileName, err := GetFileName(vUUID, tVod) + fileName, err := GetFileName(vUUID, storageTemplateInput) if err != nil { log.Error().Err(err).Msg("error using template to create file name, falling back to default") - fileName = tVod.ID + fileName = video.ID } - // Sets - rootVodPath := fmt.Sprintf("/vods/%s/%s", tVod.UserLogin, folderName) + // set facts + rootVideoPath := fmt.Sprintf("%s/%s/%s", envConfig.VideosDir, video.UserLogin, folderName) chatPath := "" chatVideoPath := "" liveChatPath := "" liveChatConvertPath := "" - if chat { - chatPath = 
fmt.Sprintf("%s/%s-chat.json", rootVodPath, fileName) - chatVideoPath = fmt.Sprintf("%s/%s-chat.mp4", rootVodPath, fileName) - liveChatPath = fmt.Sprintf("%s/%s-live-chat.json", rootVodPath, fileName) - liveChatConvertPath = fmt.Sprintf("%s/%s-chat-convert.json", rootVodPath, fileName) - } - // Parse new Twitch API duration - parsedDuration, err := time.ParseDuration(tVod.Duration) - if err != nil { - return nil, fmt.Errorf("error parsing duration: %v", err) - } - - // Parse Twitch date to time.Time - parsedDate, err := time.Parse(time.RFC3339, tVod.CreatedAt) - if err != nil { - return nil, fmt.Errorf("error parsing date: %v", err) + if input.ArchiveChat { + chatPath = fmt.Sprintf("%s/%s-chat.json", rootVideoPath, fileName) + chatVideoPath = fmt.Sprintf("%s/%s-chat.mp4", rootVideoPath, fileName) + liveChatPath = fmt.Sprintf("%s/%s-live-chat.json", rootVideoPath, fileName) + liveChatConvertPath = fmt.Sprintf("%s/%s-chat-convert.json", rootVideoPath, fileName) } videoExtension := "mp4" @@ -165,164 +186,150 @@ func (s *Service) ArchiveTwitchVod(vID string, quality string, chat bool, render // Create VOD in DB vodDTO := vod.Vod{ ID: vUUID, - ExtID: tVod.ID, + ExtID: video.ID, Platform: "twitch", - Type: utils.VodType(tVod.Type), - Title: tVod.Title, - Duration: int(parsedDuration.Seconds()), - Views: int(tVod.ViewCount), - Resolution: quality, + Type: utils.VodType(video.Type), + Title: video.Title, + Duration: int(video.Duration.Seconds()), + Views: int(video.ViewCount), + Resolution: input.Quality.String(), Processing: true, - ThumbnailPath: fmt.Sprintf("%s/%s-thumbnail.jpg", rootVodPath, fileName), - WebThumbnailPath: fmt.Sprintf("%s/%s-web_thumbnail.jpg", rootVodPath, fileName), - VideoPath: fmt.Sprintf("%s/%s-video.%s", rootVodPath, fileName, videoExtension), + ThumbnailPath: fmt.Sprintf("%s/%s-thumbnail.jpg", rootVideoPath, fileName), + WebThumbnailPath: fmt.Sprintf("%s/%s-web_thumbnail.jpg", rootVideoPath, fileName), + VideoPath: fmt.Sprintf("%s/%s-video.%s", rootVideoPath, fileName, videoExtension), ChatPath: chatPath, LiveChatPath: liveChatPath, ChatVideoPath: chatVideoPath, LiveChatConvertPath: liveChatConvertPath, - InfoPath: fmt.Sprintf("%s/%s-info.json", rootVodPath, fileName), - StreamedAt: parsedDate, + InfoPath: fmt.Sprintf("%s/%s-info.json", rootVideoPath, fileName), + StreamedAt: video.CreatedAt, FolderName: folderName, FileName: fileName, // create temporary paths - TmpVideoDownloadPath: fmt.Sprintf("/tmp/%s_%s-video.%s", tVod.ID, vUUID, videoExtension), - TmpVideoConvertPath: fmt.Sprintf("/tmp/%s_%s-video-convert.%s", tVod.ID, vUUID, videoExtension), - TmpChatDownloadPath: fmt.Sprintf("/tmp/%s_%s-chat.json", tVod.ID, vUUID), - TmpLiveChatDownloadPath: fmt.Sprintf("/tmp/%s_%s-live-chat.json", tVod.ID, vUUID), - TmpLiveChatConvertPath: fmt.Sprintf("/tmp/%s_%s-chat-convert.json", tVod.ID, vUUID), - TmpChatRenderPath: fmt.Sprintf("/tmp/%s_%s-chat.mp4", tVod.ID, vUUID), + TmpVideoDownloadPath: fmt.Sprintf("%s/%s_%s-video.%s", envConfig.TempDir, video.ID, vUUID, videoExtension), + TmpVideoConvertPath: fmt.Sprintf("%s/%s_%s-video-convert.%s", envConfig.TempDir, video.ID, vUUID, videoExtension), + TmpChatDownloadPath: fmt.Sprintf("%s/%s_%s-chat.json", envConfig.TempDir, video.ID, vUUID), + TmpLiveChatDownloadPath: fmt.Sprintf("%s/%s_%s-live-chat.json", envConfig.TempDir, video.ID, vUUID), + TmpLiveChatConvertPath: fmt.Sprintf("%s/%s_%s-chat-convert.json", envConfig.TempDir, video.ID, vUUID), + TmpChatRenderPath: fmt.Sprintf("%s/%s_%s-chat.mp4", envConfig.TempDir, video.ID, vUUID), 
} - if viper.GetBool("archive.save_as_hls") { - vodDTO.TmpVideoHLSPath = fmt.Sprintf("/tmp/%s_%s-video_hls0", tVod.ID, vUUID) - vodDTO.VideoHLSPath = fmt.Sprintf("%s/%s-video_hls", rootVodPath, fileName) - vodDTO.VideoPath = fmt.Sprintf("%s/%s-video_hls/%s-video.m3u8", rootVodPath, fileName, tVod.ID) + if config.Get().Archive.SaveAsHls { + vodDTO.TmpVideoHLSPath = fmt.Sprintf("%s/%s_%s-video_hls0", envConfig.TempDir, video.ID, vUUID) + vodDTO.VideoHLSPath = fmt.Sprintf("%s/%s-video_hls", rootVideoPath, fileName) + vodDTO.VideoPath = fmt.Sprintf("%s/%s-video_hls/%s-video.m3u8", rootVideoPath, fileName, video.ID) } - v, err := s.VodService.CreateVod(vodDTO, dbC.ID) + v, err := s.VodService.CreateVod(vodDTO, channel.ID) if err != nil { - return nil, fmt.Errorf("error creating vod: %v", err) + return fmt.Errorf("error creating vod: %v", err) } // Create queue item - q, err := s.QueueService.CreateQueueItem(queue.Queue{LiveArchive: false}, v.ID) + q, err := s.QueueService.CreateQueueItem(queue.Queue{LiveArchive: false, ArchiveChat: input.ArchiveChat, RenderChat: input.RenderChat}, v.ID) if err != nil { - return nil, fmt.Errorf("error creating queue item: %v", err) + return fmt.Errorf("error creating queue item: %v", err) } // If chat is disabled update queue - if !chat { + if !input.ArchiveChat { _, err := q.Update().SetChatProcessing(false).SetTaskChatDownload(utils.Success).SetTaskChatRender(utils.Success).SetTaskChatMove(utils.Success).Save(context.Background()) if err != nil { - return nil, fmt.Errorf("error updating queue item: %v", err) + return fmt.Errorf("error updating queue item: %v", err) } _, err = v.Update().SetChatPath("").SetChatVideoPath("").Save(context.Background()) if err != nil { - return nil, fmt.Errorf("error updating vod: %v", err) + return fmt.Errorf("error updating vod: %v", err) } } // If render chat is disabled update queue - if !renderChat { + if !input.RenderChat { _, err := q.Update().SetTaskChatRender(utils.Success).SetRenderChat(false).Save(context.Background()) if err != nil { - return nil, fmt.Errorf("error updating queue item: %v", err) + return fmt.Errorf("error updating queue item: %v", err) } _, err = v.Update().SetChatVideoPath("").Save(context.Background()) if err != nil { - return nil, fmt.Errorf("error updating vod: %v", err) + return fmt.Errorf("error updating vod: %v", err) } } // Re-query queue from DB for updated values q, err = s.QueueService.GetQueueItem(q.ID) if err != nil { - return nil, fmt.Errorf("error fetching queue item: %v", err) + return fmt.Errorf("error fetching queue item: %v", err) } - wfOptions := client.StartWorkflowOptions{ - ID: vUUID.String(), - TaskQueue: "archive", + taskInput := tasks.ArchiveVideoInput{ + QueueId: q.ID, } - input := dto.ArchiveVideoInput{ - VideoID: vID, - Type: "vod", - Platform: "twitch", - Resolution: "source", - DownloadChat: true, - RenderChat: true, - Vod: v, - Channel: dbC, - Queue: q, - } - we, err := temporal.GetTemporalClient().Client.ExecuteWorkflow(context.Background(), wfOptions, workflows.ArchiveVideoWorkflow, input) + // enqueue first task + _, err = s.RiverClient.Client.Insert(ctx, tasks.CreateDirectoryArgs{ + Continue: true, + Input: taskInput, + }, nil) + if err != nil { - log.Error().Err(err).Msg("error starting workflow") - return nil, fmt.Errorf("error starting workflow: %v", err) + return fmt.Errorf("error enqueueing task: %v", err) } - log.Debug().Msgf("workflow id %s started for vod %s", we.GetID(), vID) - - return &TwitchVodResponse{ - VOD: v, - Queue: q, - }, nil + return nil } -func 
(s *Service) ArchiveTwitchLive(lwc *ent.Live, live twitch.Live) (*TwitchVodResponse, error) { - // Check if channel exists - cCheck := s.ChannelService.CheckChannelExists(live.UserLogin) - if !cCheck { - log.Debug().Msgf("channel does not exist: %s while archiving live stream. creating now.", live.UserLogin) - _, err := s.ArchiveTwitchChannel(live.UserLogin) - if err != nil { - return nil, fmt.Errorf("error creating channel: %v", err) - } +func (s *Service) ArchiveLivestream(ctx context.Context, input ArchiveVideoInput) error { + envConfig := config.GetEnvConfig() + + channel, err := s.ChannelService.GetChannel(input.ChannelId) + if err != nil { + return fmt.Errorf("error fetching channel: %v", err) } - // Fetch channel - dbC, err := s.ChannelService.GetChannelByName(live.UserLogin) + + // get video + video, err := s.PlatformTwitch.GetLiveStream(context.Background(), channel.Name) if err != nil { - return nil, fmt.Errorf("error fetching channel: %v", err) + return err } - // Generate VOD ID for folder name + // Generate Ganymede video ID for directory and file naming vUUID, err := uuid.NewUUID() if err != nil { - return nil, fmt.Errorf("error creating vod uuid: %v", err) + return fmt.Errorf("error creating vod uuid: %v", err) } - // Create vodDto for storage templates - tVodDto := twitch.Vod{ - ID: live.ID, - UserLogin: live.UserLogin, - Title: live.Title, - Type: "live", - CreatedAt: live.StartedAt, + storageTemplateInput := StorageTemplateInput{ + UUID: vUUID, + ID: video.ID, + Channel: channel.Name, + Title: video.Title, + Type: video.Type, + Date: video.StartedAt.Format("2006-01-02"), } - folderName, err := GetFolderName(vUUID, tVodDto) + // Create directory paths + folderName, err := GetFolderName(vUUID, storageTemplateInput) if err != nil { log.Error().Err(err).Msg("error using template to create folder name, falling back to default") - folderName = fmt.Sprintf("%s-%s", tVodDto.ID, vUUID.String()) + folderName = fmt.Sprintf("%s-%s", video.ID, vUUID.String()) } - fileName, err := GetFileName(vUUID, tVodDto) + fileName, err := GetFileName(vUUID, storageTemplateInput) if err != nil { log.Error().Err(err).Msg("error using template to create file name, falling back to default") - fileName = tVodDto.ID + fileName = video.ID } - // Sets - rootVodPath := fmt.Sprintf("/vods/%s/%s", live.UserLogin, folderName) + // set facts + rootVideoPath := fmt.Sprintf("%s/%s/%s", envConfig.VideosDir, video.UserLogin, folderName) chatPath := "" chatVideoPath := "" liveChatPath := "" liveChatConvertPath := "" - if lwc.ArchiveChat { - chatPath = fmt.Sprintf("%s/%s-chat.json", rootVodPath, fileName) - chatVideoPath = fmt.Sprintf("%s/%s-chat.mp4", rootVodPath, fileName) - liveChatPath = fmt.Sprintf("%s/%s-live-chat.json", rootVodPath, fileName) - liveChatConvertPath = fmt.Sprintf("%s/%s-chat-convert.json", rootVodPath, fileName) + if input.ArchiveChat { + chatPath = fmt.Sprintf("%s/%s-chat.json", rootVideoPath, fileName) + chatVideoPath = fmt.Sprintf("%s/%s-chat.mp4", rootVideoPath, fileName) + liveChatPath = fmt.Sprintf("%s/%s-live-chat.json", rootVideoPath, fileName) + liveChatConvertPath = fmt.Sprintf("%s/%s-chat-convert.json", rootVideoPath, fileName) } videoExtension := "mp4" @@ -330,119 +337,95 @@ func (s *Service) ArchiveTwitchLive(lwc *ent.Live, live twitch.Live) (*TwitchVod // Create VOD in DB vodDTO := vod.Vod{ ID: vUUID, - ExtID: live.ID, + ExtID: video.ID, + ExtStreamID: video.ID, Platform: "twitch", - Type: utils.VodType("live"), - Title: live.Title, + Type: utils.VodType(video.Type), + Title: 
video.Title, Duration: 1, Views: 1, - Resolution: lwc.Resolution, + Resolution: input.Quality.String(), Processing: true, - ThumbnailPath: fmt.Sprintf("%s/%s-thumbnail.jpg", rootVodPath, fileName), - WebThumbnailPath: fmt.Sprintf("%s/%s-web_thumbnail.jpg", rootVodPath, fileName), - VideoPath: fmt.Sprintf("%s/%s-video.%s", rootVodPath, fileName, videoExtension), + ThumbnailPath: fmt.Sprintf("%s/%s-thumbnail.jpg", rootVideoPath, fileName), + WebThumbnailPath: fmt.Sprintf("%s/%s-web_thumbnail.jpg", rootVideoPath, fileName), + VideoPath: fmt.Sprintf("%s/%s-video.%s", rootVideoPath, fileName, videoExtension), ChatPath: chatPath, LiveChatPath: liveChatPath, ChatVideoPath: chatVideoPath, LiveChatConvertPath: liveChatConvertPath, - InfoPath: fmt.Sprintf("%s/%s-info.json", rootVodPath, fileName), - StreamedAt: time.Now(), + InfoPath: fmt.Sprintf("%s/%s-info.json", rootVideoPath, fileName), + StreamedAt: video.StartedAt, FolderName: folderName, FileName: fileName, // create temporary paths - TmpVideoDownloadPath: fmt.Sprintf("/tmp/%s_%s-video.%s", live.ID, vUUID, videoExtension), - TmpVideoConvertPath: fmt.Sprintf("/tmp/%s_%s-video-convert.%s", live.ID, vUUID, videoExtension), - TmpChatDownloadPath: fmt.Sprintf("/tmp/%s_%s-chat.json", live.ID, vUUID), - TmpLiveChatDownloadPath: fmt.Sprintf("/tmp/%s_%s-live-chat.json", live.ID, vUUID), - TmpLiveChatConvertPath: fmt.Sprintf("/tmp/%s_%s-chat-convert.json", live.ID, vUUID), - TmpChatRenderPath: fmt.Sprintf("/tmp/%s_%s-chat.mp4", live.ID, vUUID), + TmpVideoDownloadPath: fmt.Sprintf("%s/%s_%s-video.%s", envConfig.TempDir, video.ID, vUUID, videoExtension), + TmpVideoConvertPath: fmt.Sprintf("%s/%s_%s-video-convert.%s", envConfig.TempDir, video.ID, vUUID, videoExtension), + TmpChatDownloadPath: fmt.Sprintf("%s/%s_%s-chat.json", envConfig.TempDir, video.ID, vUUID), + TmpLiveChatDownloadPath: fmt.Sprintf("%s/%s_%s-live-chat.json", envConfig.TempDir, video.ID, vUUID), + TmpLiveChatConvertPath: fmt.Sprintf("%s/%s_%s-chat-convert.json", envConfig.TempDir, video.ID, vUUID), + TmpChatRenderPath: fmt.Sprintf("%s/%s_%s-chat.mp4", envConfig.TempDir, video.ID, vUUID), } - if viper.GetBool("archive.save_as_hls") { - vodDTO.TmpVideoHLSPath = fmt.Sprintf("/tmp/%s_%s-video_hls0", live.ID, vUUID) - vodDTO.VideoHLSPath = fmt.Sprintf("%s/%s-video_hls", rootVodPath, fileName) - vodDTO.VideoPath = fmt.Sprintf("%s/%s-video_hls/%s-video.m3u8", rootVodPath, fileName, live.ID) + if config.Get().Archive.SaveAsHls { + vodDTO.TmpVideoHLSPath = fmt.Sprintf("%s/%s_%s-video_hls0", envConfig.TempDir, video.ID, vUUID) + vodDTO.VideoHLSPath = fmt.Sprintf("%s/%s-video_hls", rootVideoPath, fileName) + vodDTO.VideoPath = fmt.Sprintf("%s/%s-video_hls/%s-video.m3u8", rootVideoPath, fileName, video.ID) } - v, err := s.VodService.CreateVod(vodDTO, dbC.ID) + v, err := s.VodService.CreateVod(vodDTO, channel.ID) if err != nil { - return nil, fmt.Errorf("error creating vod: %v", err) + return fmt.Errorf("error creating vod: %v", err) } // Create queue item - q, err := s.QueueService.CreateQueueItem(queue.Queue{LiveArchive: true}, v.ID) + q, err := s.QueueService.CreateQueueItem(queue.Queue{LiveArchive: true, ArchiveChat: input.ArchiveChat, RenderChat: input.RenderChat}, v.ID) if err != nil { - return nil, fmt.Errorf("error creating queue item: %v", err) + return fmt.Errorf("error creating queue item: %v", err) } // If chat is disabled update queue - if !lwc.ArchiveChat { + if !input.ArchiveChat { _, err := 
q.Update().SetChatProcessing(false).SetTaskChatDownload(utils.Success).SetTaskChatConvert(utils.Success).SetTaskChatRender(utils.Success).SetTaskChatMove(utils.Success).Save(context.Background()) if err != nil { - return nil, fmt.Errorf("error updating queue item: %v", err) + return fmt.Errorf("error updating queue item: %v", err) } - _, err = v.Update().SetChatPath("").SetChatVideoPath("").Save(context.Background()) if err != nil { - return nil, fmt.Errorf("error updating vod: %v", err) + return fmt.Errorf("error updating vod: %v", err) } - } - if !lwc.RenderChat { + // If render chat is disabled update queue + if !input.RenderChat { _, err := q.Update().SetTaskChatRender(utils.Success).SetRenderChat(false).Save(context.Background()) if err != nil { - return nil, fmt.Errorf("error updating queue item: %v", err) + return fmt.Errorf("error updating queue item: %v", err) } _, err = v.Update().SetChatVideoPath("").Save(context.Background()) if err != nil { - return nil, fmt.Errorf("error updating vod: %v", err) + return fmt.Errorf("error updating vod: %v", err) } } // Re-query queue from DB for updated values q, err = s.QueueService.GetQueueItem(q.ID) if err != nil { - return nil, fmt.Errorf("error fetching queue item: %v", err) + return fmt.Errorf("error fetching queue item: %v", err) } - wfOptions := client.StartWorkflowOptions{ - ID: vUUID.String(), - TaskQueue: "archive", + taskInput := tasks.ArchiveVideoInput{ + QueueId: q.ID, } - input := dto.ArchiveVideoInput{ - VideoID: live.ID, - Type: "live", - Platform: "twitch", - Resolution: lwc.Resolution, - DownloadChat: lwc.ArchiveChat, - RenderChat: lwc.RenderChat, - Vod: v, - Channel: dbC, - Queue: q, - LiveWatchChannel: lwc, - } + // enqueue first task + _, err = s.RiverClient.Client.Insert(ctx, tasks.CreateDirectoryArgs{ + Continue: true, + Input: taskInput, + }, nil) - we, err := temporal.GetTemporalClient().Client.ExecuteWorkflow(context.Background(), wfOptions, workflows.ArchiveLiveVideoWorkflow, input) if err != nil { - log.Error().Err(err).Msg("error starting workflow") - return nil, fmt.Errorf("error starting workflow: %v", err) + return fmt.Errorf("error enqueueing task: %v", err) } - log.Debug().Msgf("workflow id %s started for live stream %s", we.GetID(), live.ID) - - // set IDs in queue - _, err = q.Update().SetWorkflowID(we.GetID()).SetWorkflowRunID(we.GetRunID()).Save(context.Background()) - if err != nil { - log.Error().Err(err).Msg("error updating queue item") - return nil, fmt.Errorf("error updating queue item: %v", err) - } - - // go s.TaskVodCreateFolder(dbC, v, q, true) - - return &TwitchVodResponse{ - VOD: v, - Queue: q, - }, nil + return nil } diff --git a/internal/archive/utils.go b/internal/archive/utils.go index 6a535187..3e97388e 100644 --- a/internal/archive/utils.go +++ b/internal/archive/utils.go @@ -4,12 +4,10 @@ import ( "fmt" "regexp" "strings" - "time" "github.com/google/uuid" "github.com/rs/zerolog/log" - "github.com/spf13/viper" - "github.com/zibbp/ganymede/internal/twitch" + "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/utils" ) @@ -17,14 +15,23 @@ var ( storageTemplateVariableRegex = regexp.MustCompile(`\{{([^}]+)\}}`) ) -func GetFolderName(uuid uuid.UUID, tVideoItem twitch.Vod) (string, error) { +type StorageTemplateInput struct { + UUID uuid.UUID + ID string + Channel string + Title string + Type string + Date string // parsed date +} + +func GetFolderName(uuid uuid.UUID, input StorageTemplateInput) (string, error) { - variableMap, err := getVariableMap(uuid, 
&tVideoItem) + variableMap, err := getVariableMap(uuid, input) if err != nil { return "", fmt.Errorf("error getting variable map: %w", err) } - folderTemplate := viper.GetString("storage_templates.folder_template") + folderTemplate := config.Get().StorageTemplates.FolderTemplate if folderTemplate == "" { log.Error().Msg("Folder template is empty") // Fallback template @@ -49,14 +56,14 @@ func GetFolderName(uuid uuid.UUID, tVideoItem twitch.Vod) (string, error) { return folderTemplate, nil } -func GetFileName(uuid uuid.UUID, tVideoItem twitch.Vod) (string, error) { +func GetFileName(uuid uuid.UUID, input StorageTemplateInput) (string, error) { - variableMap, err := getVariableMap(uuid, &tVideoItem) + variableMap, err := getVariableMap(uuid, input) if err != nil { return "", fmt.Errorf("error getting variable map: %w", err) } - fileTemplate := viper.GetString("storage_templates.file_template") + fileTemplate := config.Get().StorageTemplates.FileTemplate if fileTemplate == "" { log.Error().Msg("File template is empty") // Fallback template @@ -81,28 +88,16 @@ func GetFileName(uuid uuid.UUID, tVideoItem twitch.Vod) (string, error) { return fileTemplate, nil } -func getVariableMap(uuid uuid.UUID, tVideoItem *twitch.Vod) (map[string]interface{}, error) { - safeTitle := utils.SanitizeFileName(tVideoItem.Title) - parsedDate, err := parseDate(tVideoItem.CreatedAt) - if err != nil { - return nil, err - } +func getVariableMap(uuid uuid.UUID, input StorageTemplateInput) (map[string]interface{}, error) { + safeTitle := utils.SanitizeFileName(input.Title) variables := map[string]interface{}{ "uuid": uuid.String(), - "id": tVideoItem.ID, - "channel": tVideoItem.UserLogin, + "id": input.ID, + "channel": input.Channel, "title": safeTitle, - "date": parsedDate, - "type": tVideoItem.Type, + "date": input.Date, + "type": input.Type, } return variables, nil } - -func parseDate(dateString string) (string, error) { - t, err := time.Parse(time.RFC3339, dateString) - if err != nil { - return "", fmt.Errorf("error parsing date %v", err) - } - return t.Format("2006-01-02"), nil -} diff --git a/internal/auth/auth.go b/internal/auth/auth.go index f7a7cf2c..c57d7b3f 100644 --- a/internal/auth/auth.go +++ b/internal/auth/auth.go @@ -3,17 +3,17 @@ package auth import ( "context" "fmt" - "os" "strings" + "time" "github.com/coreos/go-oidc/v3/oidc" "github.com/golang-jwt/jwt/v4" "github.com/google/uuid" "github.com/labstack/echo/v4" "github.com/rs/zerolog/log" - "github.com/spf13/viper" "github.com/zibbp/ganymede/ent" entUser "github.com/zibbp/ganymede/ent/user" + "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/database" "github.com/zibbp/ganymede/internal/user" "github.com/zibbp/ganymede/internal/utils" @@ -30,13 +30,15 @@ type Service struct { } func NewService(store *database.Database) *Service { - oAuthEnabled := viper.GetBool("oauth_enabled") - if oAuthEnabled { + ctx := context.Background() + env := config.GetEnvConfig() + + if env.OAuthEnabled { // Fetch environment variables - providerURL := os.Getenv("OAUTH_PROVIDER_URL") - oauthClientID := os.Getenv("OAUTH_CLIENT_ID") - oauthClientSecret := os.Getenv("OAUTH_CLIENT_SECRET") - oauthRedirectURL := os.Getenv("OAUTH_REDIRECT_URL") + providerURL := env.OAuthProviderURL + oauthClientID := env.OAuthClientID + oauthClientSecret := env.OAuthClientSecret + oauthRedirectURL := env.OAuthRedirectURL if providerURL == "" || oauthClientID == "" || oauthClientSecret == "" || oauthRedirectURL == "" { log.Fatal().Msg("missing environment variables for 
oauth authentication") } @@ -53,7 +55,7 @@ func NewService(store *database.Database) *Service { Scopes: []string{oidc.ScopeOpenID, "profile", oidc.ScopeOfflineAccess}, } - err = FetchJWKS() + err = FetchJWKS(ctx) if err != nil { log.Fatal().Err(err).Msg("error fetching jwks") } @@ -78,14 +80,17 @@ type ChangePassword struct { NewPassword string `json:"new_password"` } -func (s *Service) Register(c echo.Context, user user.User) (*ent.User, error) { +func (s *Service) Register(ctx context.Context, user user.User) (*ent.User, error) { + if !config.Get().RegistrationEnabled { + return nil, fmt.Errorf("registration is disabled") + } // hash password hashedPassword, err := bcrypt.GenerateFromPassword([]byte(user.Password), 14) if err != nil { return nil, fmt.Errorf("error hashing password: %v", err) } - u, err := s.Store.Client.User.Create().SetUsername(user.Username).SetPassword(string(hashedPassword)).Save(c.Request().Context()) + u, err := s.Store.Client.User.Create().SetUsername(user.Username).SetPassword(string(hashedPassword)).Save(ctx) if err != nil { if _, ok := err.(*ent.ConstraintError); ok { return nil, fmt.Errorf("user already exists") @@ -111,12 +116,24 @@ func (s *Service) Login(c echo.Context, uDto user.User) (*ent.User, error) { Role: u.Role, } - // Generate JWT and set cookie - err = GenerateTokensAndSetCookies(&uDto, c) + // generate access token + accessToken, exp, err := generateJWTToken(&uDto, time.Now().Add(1*time.Hour), []byte(GetJWTSecret())) if err != nil { - return nil, fmt.Errorf("error generating tokens: %v", err) + return nil, fmt.Errorf("error generating access token: %v", err) } + // set access token cookie + setTokenCookie(c, accessTokenCookieName, accessToken, exp) + + // generate refresh token + refreshToken, exp, err := generateJWTToken(&uDto, time.Now().Add(30*24*time.Hour), []byte(GetJWTRefreshSecret())) + if err != nil { + return nil, fmt.Errorf("error generating refresh token: %v", err) + } + + // set refresh token cookie + setTokenCookie(c, refreshTokenCookieName, refreshToken, exp) + return u, nil } @@ -146,11 +163,15 @@ func (s *Service) Refresh(c echo.Context, refreshToken string) error { return fmt.Errorf("error getting user: %v", err) } - // Generate JWT and set cookie - err = GenerateTokensAndSetCookies(&user.User{ID: u.ID, Username: u.Username, Role: u.Role}, c) + // generate access token + accessToken, exp, err := generateJWTToken(&user.User{ID: u.ID, Username: u.Username, Role: u.Role}, time.Now().Add(1*time.Hour), []byte(GetJWTSecret())) if err != nil { - return fmt.Errorf("error generating tokens: %v", err) + return fmt.Errorf("error generating access token: %v", err) } + + // set access token cookie + setTokenCookie(c, accessTokenCookieName, accessToken, exp) + return nil } diff --git a/internal/auth/auth_test.go b/internal/auth/auth_test.go new file mode 100644 index 00000000..2bbf68f0 --- /dev/null +++ b/internal/auth/auth_test.go @@ -0,0 +1,73 @@ +package auth_test + +import ( + "context" + "encoding/base64" + "encoding/json" + "net/http/httptest" + "strings" + "testing" + + "github.com/golang-jwt/jwt/v4" + "github.com/labstack/echo/v4" + "github.com/stretchr/testify/assert" + "github.com/zibbp/ganymede/internal/user" + "github.com/zibbp/ganymede/tests" +) + +func TestRegister(t *testing.T) { + ctx := context.Background() + app, err := tests.Setup(t) + assert.NoError(t, err) + + // test Register + usr, err := app.AuthService.Register(ctx, user.User{Username: "test_user", Password: "password"}) + assert.NoError(t, err) + assert.Equal(t, 
"test_user", usr.Username) +} + +func TestLogin(t *testing.T) { + ctx := context.Background() + app, err := tests.Setup(t) + assert.NoError(t, err) + + e := echo.New() + req := httptest.NewRequest("POST", "/api/v1/auth/login", nil) + rec := httptest.NewRecorder() + + echoCtx := e.NewContext(req, rec) + + _, err = app.AuthService.Register(ctx, user.User{Username: "test_user", Password: "password"}) + assert.NoError(t, err) + + // test Login + usr, err := app.AuthService.Login(echoCtx, user.User{Username: "admin", Password: "ganymede"}) + assert.NoError(t, err) + assert.Equal(t, "admin", usr.Username) + + setCookies := rec.Header().Values("Set-Cookie") + + // test cookies are valid jwt tokens + for _, cookie := range setCookies { + // example cookie: + // refresh-token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoiNmRmNWFiNDctMzNiOC00ZWFjLWE2M2QtYjlhZjhlMmRiNWRjIiwidXNlcm5hbWUiOiJhZG1pbiIsInJvbGUiOiJhZG1pbiIsImV4cCI6MTcyNTg1NDM0NX0.wltMCYWMwbV6BqU2PM7PLIWIy9uqJmGN5N50oNLpWSY; Path=/; Expires=Mon, 09 Sep 2024 03:59:05 GMT; SameSite=Lax + split := strings.Split(cookie, ";") + token := strings.Split(split[0], "=")[1] + + assert.NotEmpty(t, token) + + parts := strings.Split(token, ".") + + assert.Equal(t, 3, len(parts)) + + payload, err := base64.RawURLEncoding.DecodeString(parts[1]) + assert.NoError(t, err) + + var claims jwt.MapClaims + err = json.Unmarshal(payload, &claims) + assert.NoError(t, err) + assert.Equal(t, usr.ID.String(), claims["user_id"]) + assert.Equal(t, usr.Username, claims["username"]) + assert.Equal(t, string(usr.Role), claims["role"]) + } +} diff --git a/internal/auth/jwt.go b/internal/auth/jwt.go index c8baccd0..4662de12 100644 --- a/internal/auth/jwt.go +++ b/internal/auth/jwt.go @@ -1,15 +1,15 @@ package auth import ( - "github.com/golang-jwt/jwt/v4" + "net/http" + "time" + + "github.com/golang-jwt/jwt/v5" "github.com/google/uuid" "github.com/labstack/echo/v4" - "github.com/rs/zerolog/log" + "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/user" "github.com/zibbp/ganymede/internal/utils" - "net/http" - "os" - "time" ) const ( @@ -25,56 +25,18 @@ type Claims struct { } func GetJWTSecret() string { - jwtSecret := os.Getenv("JWT_SECRET") - // Exit if JWT_SECRET is not set - if jwtSecret == "" { - log.Fatal().Msg("JWT_SECRET is not set") - } + env := config.GetEnvApplicationConfig() + jwtSecret := env.JWTSecret return jwtSecret } func GetJWTRefreshSecret() string { - jwtRefreshSecret := os.Getenv("JWT_REFRESH_SECRET") - // Exit if JWT_REFRESH_SECRET is not set - if jwtRefreshSecret == "" { - log.Fatal().Msg("JWT_REFRESH_SECRET is not set") - } + env := config.GetEnvApplicationConfig() + jwtRefreshSecret := env.JWTRefreshSecret return jwtRefreshSecret } -// GenerateTokensAndSetCookies generates jwt token and saves it to the http-only cookie. -func GenerateTokensAndSetCookies(user *user.User, c echo.Context) error { - accessToken, exp, err := generateAccessToken(user) - if err != nil { - return err - } - - setTokenCookie(accessTokenCookieName, accessToken, exp, c) - - // Refresh - refreshToken, exp, err := generateRefreshToken(user) - if err != nil { - return err - } - setTokenCookie(refreshTokenCookieName, refreshToken, exp, c) - - return nil -} - -func generateAccessToken(user *user.User) (string, time.Time, error) { - // Declare the expiration time of the token (1h). 
- expirationTime := time.Now().Add(1 * time.Hour) - - return generateToken(user, expirationTime, []byte(GetJWTSecret())) -} - -func generateRefreshToken(user *user.User) (string, time.Time, error) { - // Declare the expiration time of the token - 24 hours. - expirationTime := time.Now().Add(30 * 24 * time.Hour) - - return generateToken(user, expirationTime, []byte(GetJWTRefreshSecret())) -} - -func generateToken(user *user.User, expirationTime time.Time, secret []byte) (string, time.Time, error) { +// generateJWTToken generates a new JWT token for the user. +func generateJWTToken(user *user.User, expirationTime time.Time, secret []byte) (string, time.Time, error) { // Create the JWT claims, which includes the username and expiry time. claims := &Claims{ UserID: user.ID, @@ -98,16 +60,17 @@ func generateToken(user *user.User, expirationTime time.Time, secret []byte) (st return tokenString, expirationTime, nil } -// Here we are creating a new cookie, which will store the valid JWT token. -func setTokenCookie(name, token string, expiration time.Time, c echo.Context) { +// setTokenCookie sets the cookie with the token. +func setTokenCookie(c echo.Context, name string, token string, expiration time.Time) { // Get optional cookie domain name - cookieDomain := os.Getenv("COOKIE_DOMAIN") + env := config.GetEnvConfig() + cookieDomain := env.CookieDomain cookie := new(http.Cookie) cookie.Name = name cookie.Value = token cookie.Expires = expiration cookie.Path = "/" - // Http-only helps mitigate the risk of client side script accessing the protected cookie. + // Frontend uses the contents of the cookie - not the best but it works. cookie.HttpOnly = false cookie.SameSite = http.SameSiteLaxMode if cookieDomain != "" { @@ -117,6 +80,7 @@ func setTokenCookie(name, token string, expiration time.Time, c echo.Context) { c.SetCookie(cookie) } +// checkAccessToken checks if the JWT access token is valid. func checkAccessToken(accessToken string) (*Claims, error) { // Parse the token. 
token, err := jwt.ParseWithClaims(accessToken, &Claims{}, func(token *jwt.Token) (interface{}, error) { diff --git a/internal/auth/oauth.go b/internal/auth/oauth.go index a27887b9..39ac0a82 100644 --- a/internal/auth/oauth.go +++ b/internal/auth/oauth.go @@ -8,18 +8,18 @@ import ( "fmt" "io" "net/http" - "os" "strings" "time" "github.com/MicahParks/keyfunc" "github.com/coreos/go-oidc/v3/oidc" + "github.com/go-jose/go-jose/v4" "github.com/golang-jwt/jwt/v4" "github.com/labstack/echo/v4" "github.com/rs/zerolog/log" + "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/kv" "golang.org/x/oauth2" - "gopkg.in/square/go-jose.v2" ) type OAuthClaims struct { @@ -189,7 +189,7 @@ func clearCookie(c echo.Context, name string) { } func CheckOAuthAccessToken(c echo.Context, accessToken string) (*UserInfo, error) { - clientID := os.Getenv("OAUTH_CLIENT_ID") + env := config.GetEnvConfig() // Get JWKS from KV store jwksString := kv.DB().Get("jwks") if jwksString == "" { @@ -215,7 +215,7 @@ func CheckOAuthAccessToken(c echo.Context, accessToken string) (*UserInfo, error // Check aud aud := token.Claims.(jwt.MapClaims)["aud"] - if aud != clientID { + if aud != env.OAuthClientID { return nil, fmt.Errorf("invalid aud claim") } @@ -241,7 +241,8 @@ func randString(nByte int) (string, error) { } func setCallbackCookie(c echo.Context, name, value string) { - cookieDomain := os.Getenv("COOKIE_DOMAIN") + env := config.GetEnvConfig() + cookieDomain := env.CookieDomain cookie := new(http.Cookie) cookie.Name = name cookie.Value = value @@ -258,7 +259,8 @@ func setCallbackCookie(c echo.Context, name, value string) { } func setOauthCookie(c echo.Context, name, value string, time time.Time) { - cookieDomain := os.Getenv("COOKIE_DOMAIN") + env := config.GetEnvConfig() + cookieDomain := env.CookieDomain cookie := new(http.Cookie) cookie.Name = name cookie.Value = value @@ -274,11 +276,12 @@ func setOauthCookie(c echo.Context, name, value string, time time.Time) { c.SetCookie(cookie) } -func FetchJWKS() error { - providerURL := os.Getenv("OAUTH_PROVIDER_URL") +func FetchJWKS(ctx context.Context) error { + env := config.GetEnvConfig() + providerURL := env.OAuthProviderURL provider, err := oidc.NewProvider(context.Background(), providerURL) if err != nil { - log.Fatal().Err(err).Msg("error creating oauth provider") + return err } // Get JWKS uri @@ -290,34 +293,34 @@ func FetchJWKS() error { } client := &http.Client{} - req, err := http.NewRequest("GET", claims.JWKSURI, nil) + req, err := http.NewRequestWithContext(ctx, "GET", claims.JWKSURI, nil) if err != nil { - log.Error().Err(err).Msg("failed to create JWKS request") + return fmt.Errorf("failed to create request: %w", err) } jwksResp, err := client.Do(req) if err != nil { - log.Error().Err(err).Msg("failed to fetch JWKS") + return fmt.Errorf("failed to fetch JWKS: %w", err) } defer jwksResp.Body.Close() body, err := io.ReadAll(jwksResp.Body) if err != nil { - log.Error().Err(err).Msg("failed to read JWKS response") + return fmt.Errorf("failed to read body: %w", err) } var jwks jose.JSONWebKeySet err = json.Unmarshal(body, &jwks) if err != nil { - log.Error().Err(err).Msg("failed to decode JWKS response") + return fmt.Errorf("failed to unmarshal JWKS: %w", err) } // jwks to string jwksString, err := json.Marshal(jwks) if err != nil { - log.Error().Err(err).Msg("failed to encode JWKS") + return fmt.Errorf("failed to marshal JWKS: %w", err) } kv.DB().Set("jwks", string(jwksString)) - log.Debug().Msg("JWKS fetched and set") + 
log.Debug().Msg("fetched jwks") return nil } diff --git a/internal/blocked/blocked.go b/internal/blocked/blocked.go new file mode 100644 index 00000000..21bcfa9b --- /dev/null +++ b/internal/blocked/blocked.go @@ -0,0 +1,38 @@ +package blocked + +import ( + "context" + + "github.com/zibbp/ganymede/ent" + "github.com/zibbp/ganymede/ent/blockedvideos" + "github.com/zibbp/ganymede/internal/database" +) + +type Service struct { + Store *database.Database +} + +func NewService(store *database.Database) *Service { + return &Service{Store: store} +} + +func (s *Service) IsVideoBlocked(ctx context.Context, id string) (bool, error) { + return s.Store.Client.BlockedVideos.Query().Where(blockedvideos.ID(id)).Exist(ctx) +} + +func (s *Service) CreateBlockedVideo(ctx context.Context, id string) error { + _, err := s.Store.Client.BlockedVideos.Create().SetID(id).Save(ctx) + return err +} + +func (s *Service) DeleteBlockedVideo(ctx context.Context, id string) error { + return s.Store.Client.BlockedVideos.DeleteOneID(id).Exec(ctx) +} + +func (s *Service) GetBlockedVideos(ctx context.Context) ([]*ent.BlockedVideos, error) { + videos, err := s.Store.Client.BlockedVideos.Query().Order(ent.Asc(blockedvideos.FieldID)).All(ctx) + if err != nil { + return nil, err + } + return videos, nil +} diff --git a/internal/category/category.go b/internal/category/category.go new file mode 100644 index 00000000..f18c63d8 --- /dev/null +++ b/internal/category/category.go @@ -0,0 +1,26 @@ +package category + +import ( + "context" + "fmt" + + "github.com/zibbp/ganymede/ent" + "github.com/zibbp/ganymede/internal/database" +) + +type Service struct { + Store *database.Database +} + +func NewService(store *database.Database) *Service { + return &Service{Store: store} +} + +func (s *Service) GetCategories(ctx context.Context) ([]*ent.TwitchCategory, error) { + categories, err := database.DB().Client.TwitchCategory.Query().All(context.Background()) + if err != nil { + return nil, fmt.Errorf("failed to get categories: %v", err) + } + + return categories, nil +} diff --git a/internal/channel/channel.go b/internal/channel/channel.go index 9aae82c2..3c96365c 100644 --- a/internal/channel/channel.go +++ b/internal/channel/channel.go @@ -6,21 +6,22 @@ import ( "time" "github.com/google/uuid" - "github.com/labstack/echo/v4" "github.com/rs/zerolog/log" "github.com/zibbp/ganymede/ent" "github.com/zibbp/ganymede/ent/channel" + "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/database" - "github.com/zibbp/ganymede/internal/twitch" + "github.com/zibbp/ganymede/internal/platform" "github.com/zibbp/ganymede/internal/utils" ) type Service struct { - Store *database.Database + Store *database.Database + PlatformTwitch platform.Platform } -func NewService(store *database.Database) *Service { - return &Service{Store: store} +func NewService(store *database.Database, platformTwitch platform.Platform) *Service { + return &Service{Store: store, PlatformTwitch: platformTwitch} } type Channel struct { @@ -118,6 +119,7 @@ func (s *Service) UpdateChannel(cId uuid.UUID, channelDto Channel) (*ent.Channel func (s *Service) CheckChannelExists(cName string) bool { _, err := s.Store.Client.Channel.Query().Where(channel.Name(cName)).Only(context.Background()) if err != nil { + fmt.Println(err) // if channel not found if _, ok := err.(*ent.NotFoundError); ok { return false @@ -129,21 +131,7 @@ func (s *Service) CheckChannelExists(cName string) bool { return true } -func (s *Service) CheckChannelExistsNoContext(cName string) bool { - 
_, err := s.Store.Client.Channel.Query().Where(channel.Name(cName)).Only(context.Background()) - if err != nil { - // if channel not found - if _, ok := err.(*ent.NotFoundError); ok { - return false - } - log.Error().Err(err).Msg("error checking channel exists") - return false - } - - return true -} - -func PopulateExternalChannelID() { +func (s *Service) PopulateExternalChannelID(ctx context.Context) { channels, err := database.DB().Client.Channel.Query().All(context.Background()) if err != nil { log.Debug().Err(err).Msg("error getting channels") @@ -153,12 +141,12 @@ func PopulateExternalChannelID() { if c.ExtID != "" { continue } - twitchC, err := twitch.API.GetUserByLogin(c.Name) + twitcChannel, err := s.PlatformTwitch.GetChannel(ctx, c.Name) if err != nil { log.Error().Msg("error getting twitch channel") continue } - _, err = database.DB().Client.Channel.UpdateOneID(c.ID).SetExtID(twitchC.ID).Save(context.Background()) + _, err = database.DB().Client.Channel.UpdateOneID(c.ID).SetExtID(twitcChannel.ID).Save(context.Background()) if err != nil { log.Error().Err(err).Msg("error updating channel") continue @@ -167,20 +155,22 @@ func PopulateExternalChannelID() { } } -func (s *Service) UpdateChannelImage(c echo.Context, channelID uuid.UUID) error { +func (s *Service) UpdateChannelImage(ctx context.Context, channelID uuid.UUID) error { channel, err := s.GetChannel(channelID) if err != nil { return fmt.Errorf("error getting channel: %v", err) } // Fetch channel from Twitch API - tChannel, err := twitch.API.GetUserByLogin(channel.Name) + twitchChannel, err := s.PlatformTwitch.GetChannel(ctx, channel.Name) if err != nil { return fmt.Errorf("error fetching twitch channel: %v", err) } + env := config.GetEnvConfig() + // Download channel profile image - err = utils.DownloadFile(tChannel.ProfileImageURL, tChannel.Login, "profile.png") + err = utils.DownloadFile(twitchChannel.ProfileImageURL, fmt.Sprintf("%s/%s/%s", env.VideosDir, twitchChannel.Login, "profile.png")) if err != nil { return fmt.Errorf("error downloading channel profile image: %v", err) } diff --git a/internal/channel/channel_test.go b/internal/channel/channel_test.go new file mode 100644 index 00000000..4a2d1ccb --- /dev/null +++ b/internal/channel/channel_test.go @@ -0,0 +1,96 @@ +package channel_test + +import ( + "context" + "errors" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/zibbp/ganymede/internal/channel" + "github.com/zibbp/ganymede/tests" +) + +func TestChannelCRUD(t *testing.T) { + app, err := tests.Setup(t) + assert.NoError(t, err) + + // test CreateChannel + chann, err := app.ChannelService.CreateChannel(channel.Channel{ + ExtID: "123456789", + Name: "test_channel", + DisplayName: "Test Channel", + ImagePath: "/vods/test_channel/test_channel.jpg", + }) + + assert.NoError(t, err) + assert.Equal(t, "123456789", chann.ExtID) + assert.Equal(t, "test_channel", chann.Name) + assert.Equal(t, "Test Channel", chann.DisplayName) + assert.Equal(t, "/vods/test_channel/test_channel.jpg", chann.ImagePath) + + // test GetChannel + getChannel, err := app.ChannelService.GetChannel(chann.ID) + assert.NoError(t, err) + assert.Equal(t, chann.ID, getChannel.ID) + + // test GetChannelByName + getChannelByName, err := app.ChannelService.GetChannelByName(chann.Name) + assert.NoError(t, err) + assert.Equal(t, chann.ID, getChannelByName.ID) + + // test GetChannels + channels, err := app.ChannelService.GetChannels() + assert.NoError(t, err) + assert.Equal(t, 1, len(channels)) + + // test UpdateChannel + 
updatedChannel, err := app.ChannelService.UpdateChannel(chann.ID, channel.Channel{ + Name: "updated_channel", + DisplayName: "Updated Channel", + ImagePath: "/vods/updated_channel/updated_channel.jpg", + Retention: true, + RetentionDays: 30, + }) + assert.NoError(t, err) + assert.Equal(t, "updated_channel", updatedChannel.Name) + assert.Equal(t, "Updated Channel", updatedChannel.DisplayName) + assert.Equal(t, "/vods/updated_channel/updated_channel.jpg", updatedChannel.ImagePath) + assert.Equal(t, true, updatedChannel.Retention) + assert.Equal(t, int64(30), updatedChannel.RetentionDays) + + // test CheckChannelExists + assert.True(t, app.ChannelService.CheckChannelExists(updatedChannel.Name)) + + // test DeleteChannel + err = app.ChannelService.DeleteChannel(updatedChannel.ID) + assert.NoError(t, err) + assert.False(t, app.ChannelService.CheckChannelExists(updatedChannel.Name)) +} + +func TestPlatformTwitchChannel(t *testing.T) { + ctx := context.Background() + app, err := tests.Setup(t) + assert.NoError(t, err) + + // test ArchiveChannel + chann, err := app.ArchiveService.ArchiveChannel(ctx, "sodapoppin") + assert.NoError(t, err) + assert.Equal(t, "sodapoppin", chann.Name) + + if _, err := os.Stat(chann.ImagePath); errors.Is(err, os.ErrNotExist) { + t.Errorf("image not found: %s", chann.ImagePath) + } + + // remove image + err = os.Remove(chann.ImagePath) + assert.NoError(t, err) + + // test UpdateChannelImage + assert.NoError(t, app.ChannelService.UpdateChannelImage(ctx, chann.ID)) + + // ensure image exists + if _, err := os.Stat(chann.ImagePath); errors.Is(err, os.ErrNotExist) { + t.Errorf("image not found: %s", chann.ImagePath) + } +} diff --git a/internal/chapter/chapter.go b/internal/chapter/chapter.go index 48c74500..139096cd 100644 --- a/internal/chapter/chapter.go +++ b/internal/chapter/chapter.go @@ -13,10 +13,13 @@ import ( ) type Service struct { + Store *database.Database } -func NewService() *Service { - return &Service{} +func NewService(store *database.Database) *Service { + return &Service{ + Store: store, + } } type Chapter struct { @@ -28,7 +31,7 @@ type Chapter struct { } func (s *Service) CreateChapter(c Chapter, videoId uuid.UUID) (*ent.Chapter, error) { - dbVideo, err := database.DB().Client.Vod.Query().Where(vod.ID(videoId)).First(context.Background()) + dbVideo, err := s.Store.Client.Vod.Query().Where(vod.ID(videoId)).First(context.Background()) if err != nil { if _, ok := err.(*ent.NotFoundError); ok { return nil, fmt.Errorf("video not found") @@ -36,7 +39,7 @@ func (s *Service) CreateChapter(c Chapter, videoId uuid.UUID) (*ent.Chapter, err return nil, fmt.Errorf("error getting video: %v", err) } - dbChapter, err := database.DB().Client.Chapter.Create().SetType(c.Type).SetTitle(c.Title).SetStart(c.Start).SetEnd(c.End).SetVod(dbVideo).Save(context.Background()) + dbChapter, err := s.Store.Client.Chapter.Create().SetType(c.Type).SetTitle(c.Title).SetStart(c.Start).SetEnd(c.End).SetVod(dbVideo).Save(context.Background()) if err != nil { return nil, fmt.Errorf("error creating chapter: %v", err) } @@ -45,7 +48,7 @@ func (s *Service) CreateChapter(c Chapter, videoId uuid.UUID) (*ent.Chapter, err } func (s *Service) GetVideoChapters(videoId uuid.UUID) ([]*ent.Chapter, error) { - chapters, err := database.DB().Client.Chapter.Query().Where(entChapter.HasVodWith(vod.ID(videoId))).All(context.Background()) + chapters, err := s.Store.Client.Chapter.Query().Where(entChapter.HasVodWith(vod.ID(videoId))).All(context.Background()) if err != nil { return nil, fmt.Errorf("error 
getting chapters: %v", err) } diff --git a/internal/chat/badge.go b/internal/chat/badge.go deleted file mode 100644 index b622a9fa..00000000 --- a/internal/chat/badge.go +++ /dev/null @@ -1,146 +0,0 @@ -package chat - -import ( - "encoding/json" - "fmt" - "io" - "net/http" -) - -type TwitchVersion map[string]TwitchItem - -type TwitchBadeResp struct { - BadgeSets map[string]TwitchBadge `json:"badge_sets"` -} - -type TwitchBadge map[string]TwitchVersion - -type TwitchItem struct { - ImageUrl1X string `json:"image_url_1x"` - ImageUrl2X string `json:"image_url_2x"` - ImageUrl4X string `json:"image_url_4x"` - Description string `json:"description"` - Title string `json:"title"` - ClickAction string `json:"click_action"` - ClickUrl string `json:"click_url"` -} - -type GanymedeBadges struct { - Badges []GanymedeBadge `json:"badges"` -} - -type GanymedeBadge struct { - Version string `json:"version"` - Name string `json:"name"` - ImageUrl1X string `json:"image_url_1x"` - ImageUrl2X string `json:"image_url_2x"` - ImageUrl4X string `json:"image_url_4x"` - Description string `json:"description"` - Title string `json:"title"` - ClickAction string `json:"click_action"` - ClickUrl string `json:"click_url"` -} - -func GetTwitchGlobalBadges() (*GanymedeBadges, error) { - client := &http.Client{} - req, err := http.NewRequest("GET", "https://badges.twitch.tv/v1/badges/global/display", nil) - if err != nil { - return nil, fmt.Errorf("failed to create request: %w", err) - } - - resp, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to get response: %w", err) - } - - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("failed to get response: %w", err) - } - - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read response body: %w", err) - } - - var twitchBadgeResp TwitchBadeResp - if err := json.Unmarshal(body, &twitchBadgeResp); err != nil { - return nil, fmt.Errorf("failed to unmarshal response body: %w", err) - } - - var badgeResp GanymedeBadges - - for k, v := range twitchBadgeResp.BadgeSets { - for _, v := range v { - for version, v := range v { - badge := GanymedeBadge{ - Version: version, - Name: k, - ImageUrl1X: v.ImageUrl1X, - ImageUrl2X: v.ImageUrl2X, - ImageUrl4X: v.ImageUrl4X, - Description: v.Description, - Title: v.Title, - ClickAction: v.ClickAction, - ClickUrl: v.ClickUrl, - } - badgeResp.Badges = append(badgeResp.Badges, badge) - } - } - } - - return &badgeResp, nil -} - -func GetTwitchChannelBadges(channelId int64) (*GanymedeBadges, error) { - client := &http.Client{} - req, err := http.NewRequest("GET", fmt.Sprintf("https://badges.twitch.tv/v1/badges/channels/%d/display", channelId), nil) - if err != nil { - return nil, fmt.Errorf("failed to create request: %w", err) - } - - resp, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to get response: %w", err) - } - - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("failed to get response: %w", err) - } - - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read response body: %w", err) - } - - var twitchBadgeResp TwitchBadeResp - if err := json.Unmarshal(body, &twitchBadgeResp); err != nil { - return nil, fmt.Errorf("failed to unmarshal response body: %w", err) - } - - var badgeResp GanymedeBadges - - for k, v := range twitchBadgeResp.BadgeSets { - for _, v := range v { - for version, v := range v { - badge := GanymedeBadge{ - Version: 
version, - Name: k, - ImageUrl1X: v.ImageUrl1X, - ImageUrl2X: v.ImageUrl2X, - ImageUrl4X: v.ImageUrl4X, - Description: v.Description, - Title: v.Title, - ClickAction: v.ClickAction, - ClickUrl: v.ClickUrl, - } - badgeResp.Badges = append(badgeResp.Badges, badge) - } - } - } - - return &badgeResp, nil -} diff --git a/internal/chat/bttv.go b/internal/chat/bttv.go index d5c09182..6a71f816 100644 --- a/internal/chat/bttv.go +++ b/internal/chat/bttv.go @@ -1,10 +1,13 @@ package chat import ( + "context" "encoding/json" "fmt" "io" "net/http" + + "github.com/zibbp/ganymede/internal/platform" ) type BTTVEmote struct { @@ -12,6 +15,7 @@ type BTTVEmote struct { Code string `json:"code"` ImageType ImageType `json:"imageType"` UserID UserID `json:"userId"` + Animated bool `json:"animated"` } type ImageType string @@ -26,9 +30,9 @@ type BTTVChannelEmotes struct { SharedEmotes []BTTVEmote `json:"sharedEmotes"` } -func GetBTTVGlobalEmotes() ([]*GanymedeEmote, error) { +func GetBTTVGlobalEmotes(ctx context.Context) ([]platform.Emote, error) { client := &http.Client{} - req, err := http.NewRequest("GET", "https://api.betterttv.net/3/cached/emotes/global", nil) + req, err := http.NewRequestWithContext(ctx, "GET", "https://api.betterttv.net/3/cached/emotes/global", nil) if err != nil { return nil, fmt.Errorf("failed to create request: %v", err) } @@ -51,25 +55,30 @@ func GetBTTVGlobalEmotes() ([]*GanymedeEmote, error) { return nil, fmt.Errorf("failed to unmarshal response: %v", err) } - var emotes []*GanymedeEmote + var emotes []platform.Emote for _, emote := range bttvGlobalEmotes { - emotes = append(emotes, &GanymedeEmote{ + e := platform.Emote{ ID: emote.ID, Name: emote.Code, URL: fmt.Sprintf("https://cdn.betterttv.net/emote/%s/1x", emote.ID), - Type: "third_party", + Format: platform.EmoteFormatStatic, + Type: platform.EmoteTypeGlobal, Source: "bttv", - }) + } + if emote.Animated { + e.Format = platform.EmoteFormatAnimated + } + + emotes = append(emotes, e) } return emotes, nil } -func GetBTTVChannelEmotes(channelId int64) ([]*GanymedeEmote, error) { - stringChannelId := fmt.Sprintf("%d", channelId) +func GetBTTVChannelEmotes(ctx context.Context, channelId string) ([]platform.Emote, error) { client := &http.Client{} - req, err := http.NewRequest("GET", fmt.Sprintf("https://api.betterttv.net/3/cached/users/twitch/%s", stringChannelId), nil) + req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("https://api.betterttv.net/3/cached/users/twitch/%s", channelId), nil) if err != nil { return nil, fmt.Errorf("failed to create request: %v", err) } @@ -92,24 +101,36 @@ func GetBTTVChannelEmotes(channelId int64) ([]*GanymedeEmote, error) { return nil, fmt.Errorf("failed to unmarshal response: %v", err) } - var emotes []*GanymedeEmote + var emotes []platform.Emote for _, emote := range bttvChannelEmotes.ChannelEmotes { - emotes = append(emotes, &GanymedeEmote{ + e := platform.Emote{ ID: emote.ID, Name: emote.Code, URL: fmt.Sprintf("https://cdn.betterttv.net/emote/%s/1x", emote.ID), - Type: "third_party", + Format: platform.EmoteFormatStatic, + Type: platform.EmoteTypeGlobal, Source: "bttv", - }) + } + if emote.Animated { + e.Format = platform.EmoteFormatAnimated + } + + emotes = append(emotes, e) } for _, emote := range bttvChannelEmotes.SharedEmotes { - emotes = append(emotes, &GanymedeEmote{ + e := platform.Emote{ ID: emote.ID, Name: emote.Code, URL: fmt.Sprintf("https://cdn.betterttv.net/emote/%s/1x", emote.ID), - Type: "third_party", + Format: platform.EmoteFormatStatic, + Type: 
platform.EmoteTypeGlobal, Source: "bttv", - }) + } + if emote.Animated { + e.Format = platform.EmoteFormatAnimated + } + + emotes = append(emotes, e) } return emotes, nil diff --git a/internal/chat/emote.go b/internal/chat/emote.go deleted file mode 100644 index 5c2cd9a8..00000000 --- a/internal/chat/emote.go +++ /dev/null @@ -1 +0,0 @@ -package chat diff --git a/internal/chat/ffz.go b/internal/chat/ffz.go index 78a24656..451cfa9d 100644 --- a/internal/chat/ffz.go +++ b/internal/chat/ffz.go @@ -1,11 +1,14 @@ package chat import ( + "context" "encoding/json" "fmt" "io" "net/http" "strconv" + + "github.com/zibbp/ganymede/internal/platform" ) type FFZEmote struct { @@ -14,6 +17,7 @@ type FFZEmote struct { Code string `json:"code"` Images FFZImages `json:"images"` ImageType ImageType `json:"imageType"` + Animated bool `json:"animated"` } type FFZImages struct { @@ -32,9 +36,9 @@ type FFZImageType string type DisplayName string -func GetFFZGlobalEmotes() ([]*GanymedeEmote, error) { +func GetFFZGlobalEmotes(ctx context.Context) ([]platform.Emote, error) { client := &http.Client{} - req, err := http.NewRequest("GET", "https://api.betterttv.net/3/cached/frankerfacez/emotes/global", nil) + req, err := http.NewRequestWithContext(ctx, "GET", "https://api.betterttv.net/3/cached/frankerfacez/emotes/global", nil) if err != nil { return nil, fmt.Errorf("failed to create request: %v", err) } @@ -57,24 +61,29 @@ func GetFFZGlobalEmotes() ([]*GanymedeEmote, error) { return nil, fmt.Errorf("failed to unmarshal response: %v", err) } - var emotes []*GanymedeEmote + var emotes []platform.Emote for _, emote := range ffzGlobalEmotes { - emotes = append(emotes, &GanymedeEmote{ + e := platform.Emote{ ID: strconv.FormatInt(emote.ID, 10), Name: emote.Code, URL: emote.Images.The1X, - Type: "third_party", + Format: platform.EmoteFormatStatic, + Type: platform.EmoteTypeGlobal, Source: "ffz", - }) + } + if emote.Animated { + e.Format = platform.EmoteFormatAnimated + } + + emotes = append(emotes, e) } return emotes, nil } -func GetFFZChannelEmotes(channelId int64) ([]*GanymedeEmote, error) { - stringChannelId := fmt.Sprintf("%d", channelId) +func GetFFZChannelEmotes(ctx context.Context, channelId string) ([]platform.Emote, error) { client := &http.Client{} - req, err := http.NewRequest("GET", fmt.Sprintf("https://api.betterttv.net/3/cached/frankerfacez/users/twitch/%s", stringChannelId), nil) + req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("https://api.betterttv.net/3/cached/frankerfacez/users/twitch/%s", channelId), nil) if err != nil { return nil, fmt.Errorf("failed to create request: %v", err) } @@ -97,15 +106,21 @@ func GetFFZChannelEmotes(channelId int64) ([]*GanymedeEmote, error) { return nil, fmt.Errorf("failed to unmarshal response: %v", err) } - var emotes []*GanymedeEmote + var emotes []platform.Emote for _, emote := range ffzChannelEmotes { - emotes = append(emotes, &GanymedeEmote{ + e := platform.Emote{ ID: strconv.FormatInt(emote.ID, 10), Name: emote.Code, URL: emote.Images.The1X, - Type: "third_party", + Format: platform.EmoteFormatStatic, + Type: platform.EmoteTypeGlobal, Source: "ffz", - }) + } + if emote.Animated { + e.Format = platform.EmoteFormatAnimated + } + + emotes = append(emotes, e) } return emotes, nil diff --git a/internal/chat/seventv.go b/internal/chat/seventv.go index f8fa56f9..166bbbce 100644 --- a/internal/chat/seventv.go +++ b/internal/chat/seventv.go @@ -1,10 +1,13 @@ package chat import ( + "context" "encoding/json" "fmt" "io" "net/http" + + 
"github.com/zibbp/ganymede/internal/platform" ) type SevenTVGlobalEmotes struct { @@ -141,9 +144,9 @@ type EmoteSet struct { Owner *User `json:"owner"` } -func Get7TVGlobalEmotes() ([]*GanymedeEmote, error) { +func Get7TVGlobalEmotes(ctx context.Context) ([]platform.Emote, error) { client := &http.Client{} - req, err := http.NewRequest("GET", "https://7tv.io/v3/emote-sets/global", nil) + req, err := http.NewRequestWithContext(ctx, "GET", "https://7tv.io/v3/emote-sets/global", nil) if err != nil { return nil, fmt.Errorf("failed to create request: %v", err) } @@ -160,33 +163,38 @@ func Get7TVGlobalEmotes() ([]*GanymedeEmote, error) { return nil, fmt.Errorf("failed to read body: %v", err) } - var emotes SevenTVGlobalEmotes - err = json.Unmarshal(body, &emotes) + var globalEmotes SevenTVGlobalEmotes + err = json.Unmarshal(body, &globalEmotes) if err != nil { return nil, fmt.Errorf("failed to unmarshal emotes: %v", err) } - var ganymedeEmotes []*GanymedeEmote - for _, emote := range emotes.Emotes { - ganymedeEmotes = append(ganymedeEmotes, &GanymedeEmote{ + var emotes []platform.Emote + for _, emote := range globalEmotes.Emotes { + e := platform.Emote{ ID: emote.ID, Name: emote.Name, - URL: fmt.Sprintf("https:%s/1x.webp", emote.Data.Host.URL), - Type: "third_party", + URL: fmt.Sprintf("https:%s/1x.avif", emote.Data.Host.URL), + Format: platform.EmoteFormatStatic, + Type: platform.EmoteTypeGlobal, Source: "7tv", Width: emote.Data.Host.Files[0].Width, Height: emote.Data.Host.Files[0].Height, - }) + } + if emote.Data.Animated { + e.Format = platform.EmoteFormatAnimated + } + + emotes = append(emotes, e) } - return ganymedeEmotes, nil + return emotes, nil } -func Get7TVChannelEmotes(channelId int64) ([]*GanymedeEmote, error) { - stringChannelId := fmt.Sprintf("%d", channelId) - +func Get7TVChannelEmotes(ctx context.Context, channelId string) ([]platform.Emote, error) { + fmt.Println("foooo") client := &http.Client{} - req, err := http.NewRequest("GET", fmt.Sprintf("https://7tv.io/v3/users/twitch/%s", stringChannelId), nil) + req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("https://7tv.io/v3/users/twitch/%s", channelId), nil) if err != nil { return nil, fmt.Errorf("failed to create request: %v", err) } @@ -203,30 +211,36 @@ func Get7TVChannelEmotes(channelId int64) ([]*GanymedeEmote, error) { return nil, fmt.Errorf("failed to read body: %v", err) } - var emotes SevenTVChannelEmotes - err = json.Unmarshal(body, &emotes) + var channelEmotes SevenTVChannelEmotes + err = json.Unmarshal(body, &channelEmotes) if err != nil { return nil, fmt.Errorf("failed to unmarshal emotes: %v", err) } - var ganymedeEmotes []*GanymedeEmote - for _, emote := range emotes.EmoteSet.Emotes { + var emotes []platform.Emote + for _, emote := range channelEmotes.EmoteSet.Emotes { var width int64 var height int64 if len(emote.Data.Host.Files) > 0 { width = emote.Data.Host.Files[0].Width height = emote.Data.Host.Files[0].Height } - ganymedeEmotes = append(ganymedeEmotes, &GanymedeEmote{ + e := platform.Emote{ ID: emote.ID, Name: emote.Name, - URL: fmt.Sprintf("https:%s/1x.webp", emote.Data.Host.URL), - Type: "third_party", + URL: fmt.Sprintf("https:%s/1x.avif", emote.Data.Host.URL), + Format: platform.EmoteFormatStatic, + Type: platform.EmoteTypeGlobal, Source: "7tv", Width: width, Height: height, - }) + } + if emote.Data.Animated { + e.Format = platform.EmoteFormatAnimated + } + + emotes = append(emotes, e) } - return ganymedeEmotes, nil + return emotes, nil } diff --git a/internal/chat/twitch.go 
b/internal/chat/twitch.go deleted file mode 100644 index 6d239842..00000000 --- a/internal/chat/twitch.go +++ /dev/null @@ -1,136 +0,0 @@ -package chat - -import ( - "encoding/json" - "fmt" - "io" - "net/http" - "os" -) - -type TwitchGlobalEmotes struct { - Data []TwitchGlobalEmote `json:"data"` - Template string `json:"template"` -} - -type TwitchGlobalEmote struct { - ID string `json:"id"` - Name string `json:"name"` - Images Images `json:"images"` - Format []Format `json:"format"` - Scale []string `json:"scale"` - ThemeMode []ThemeMode `json:"theme_mode"` -} - -type Images struct { - URL1X string `json:"url_1x"` - URL2X string `json:"url_2x"` - URL4X string `json:"url_4x"` -} - -type Format string - -const ( - Static Format = "static" -) - -type ThemeMode string - -const ( - Dark ThemeMode = "dark" - Light ThemeMode = "light" -) - -func GetTwitchGlobalEmotes() ([]*GanymedeEmote, error) { - accessToken := os.Getenv("TWITCH_ACCESS_TOKEN") - clientId := os.Getenv("TWITCH_CLIENT_ID") - client := &http.Client{} - req, err := http.NewRequest("GET", "https://api.twitch.tv/helix/chat/emotes/global", nil) - if err != nil { - return nil, err - } - req.Header.Add("Client-ID", clientId) - req.Header.Add("Authorization", "Bearer "+accessToken) - - resp, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to get global emotes: %v", err) - } - - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("failed to get global emotes: %v", resp) - } - - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read response body: %v", err) - } - - var twitchGlobalEmotes TwitchGlobalEmotes - err = json.Unmarshal(body, &twitchGlobalEmotes) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal response: %v", err) - } - - var emotes []*GanymedeEmote - for _, emote := range twitchGlobalEmotes.Data { - // convert string to *string - emotes = append(emotes, &GanymedeEmote{ - ID: emote.ID, - Name: emote.Name, - URL: emote.Images.URL1X, - Type: "twitch", - }) - } - - return emotes, nil -} - -func GetTwitchChannelEmotes(channelId int64) ([]*GanymedeEmote, error) { - accessToken := os.Getenv("TWITCH_ACCESS_TOKEN") - clientId := os.Getenv("TWITCH_CLIENT_ID") - stringChannelId := fmt.Sprintf("%d", channelId) - client := &http.Client{} - req, err := http.NewRequest("GET", "https://api.twitch.tv/helix/chat/emotes?broadcaster_id="+stringChannelId, nil) - if err != nil { - return nil, err - } - req.Header.Add("Client-ID", clientId) - req.Header.Add("Authorization", "Bearer "+accessToken) - - resp, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to get channel emotes: %v", err) - } - - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("failed to get channel emotes: %v", resp) - } - - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read response body: %v", err) - } - - var twitchChannelEmotes TwitchGlobalEmotes - err = json.Unmarshal(body, &twitchChannelEmotes) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal response: %v", err) - } - - var emotes []*GanymedeEmote - for _, emote := range twitchChannelEmotes.Data { - emotes = append(emotes, &GanymedeEmote{ - ID: emote.ID, - Name: emote.Name, - URL: emote.Images.URL1X, - Type: "twitch", - }) - } - - return emotes, nil -} diff --git a/internal/config/config.go b/internal/config/config.go index b291fe4f..de6fd2fd 100644 --- a/internal/config/config.go +++ 
b/internal/config/config.go @@ -1,34 +1,14 @@ package config import ( - "bytes" "encoding/json" - "fmt" "os" - "strings" - "time" - - "github.com/labstack/echo/v4" - "github.com/rs/zerolog/log" - "github.com/spf13/viper" - "github.com/zibbp/ganymede/internal/database" + "sync" ) -type Service struct { - Store *database.Database -} - -func NewService(store *database.Database) *Service { - return &Service{ - Store: store, - } -} - -type Conf struct { - Debug bool `json:"debug"` +type Config struct { LiveCheckInterval int `json:"live_check_interval_seconds"` VideoCheckInterval int `json:"video_check_interval_minutes"` - OAuthEnabled bool `json:"oauth_enabled"` RegistrationEnabled bool `json:"registration_enabled"` Parameters struct { TwitchToken string `json:"twitch_token"` @@ -39,7 +19,7 @@ type Conf struct { Archive struct { SaveAsHls bool `json:"save_as_hls"` } `json:"archive"` - Notifications Notification `json:"notifications"` + Notification Notification `json:"notifications"` StorageTemplates StorageTemplate `json:"storage_templates"` Livestream struct { Proxies []ProxyListItem `json:"proxies"` @@ -74,377 +54,119 @@ type ProxyListItem struct { Header string `json:"header"` } -func NewConfig(refresh bool) { - configLocation := "/data" - configName := "config" - configType := "json" - configPath := fmt.Sprintf("%s/%s.%s", configLocation, configName, configType) - - viper.AddConfigPath(configLocation) - viper.SetConfigName(configName) - viper.SetConfigType(configType) - - viper.SetDefault("debug", false) - viper.SetDefault("live_check_interval_seconds", 300) - viper.SetDefault("video_check_interval_minutes", 180) - viper.SetDefault("oauth_enabled", false) - viper.SetDefault("registration_enabled", true) - viper.SetDefault("parameters.video_convert", "-c:v copy -c:a copy") - viper.SetDefault("parameters.chat_render", "-h 1440 -w 340 --framerate 30 --font Inter --font-size 13") - viper.SetDefault("parameters.streamlink_live", "--twitch-low-latency,--twitch-disable-hosting") - viper.SetDefault("archive.save_as_hls", false) - viper.SetDefault("parameters.twitch_token", "") - // Notifications - viper.SetDefault("notifications.video_success_webhook_url", "") - viper.SetDefault("notifications.video_success_template", "✅ Video Archived: {{vod_title}} by {{channel_display_name}}.") - viper.SetDefault("notifications.video_success_enabled", true) - viper.SetDefault("notifications.live_success_webhook_url", "") - viper.SetDefault("notifications.live_success_template", "✅ Live Stream Archived: {{vod_title}} by {{channel_display_name}}.") - viper.SetDefault("notifications.live_success_enabled", true) - viper.SetDefault("notifications.error_webhook_url", "") - viper.SetDefault("notifications.error_template", "⚠️ Error: Queue ID {{queue_id}} for {{channel_display_name}} failed at task {{failed_task}}.") - viper.SetDefault("notifications.error_enabled", true) - viper.SetDefault("notifications.is_live_webhook_url", "") - viper.SetDefault("notifications.is_live_template", "🔴 {{channel_display_name}} is live!") - viper.SetDefault("notifications.is_live_enabled", true) +var ( + instance *Config + once sync.Once + mutex sync.RWMutex + initErr error +) - // Storage Templates - viper.SetDefault("storage_templates.folder_template", "{{date}}-{{id}}-{{type}}-{{uuid}}") - viper.SetDefault("storage_templates.file_template", "{{id}}") +var configFile string - // Livestream - viper.SetDefault("livestream.proxies", []ProxyListItem{ - { - URL: "https://eu.luminous.dev", - Header: "", - }, - { - URL: 
"https://api.ttv.lol", - Header: "x-donate-to:https://ttv.lol/donate", - }, - }) - viper.SetDefault("livestream.proxy_enabled", false) - viper.SetDefault("livestream.proxy_parameters", "%3Fplayer%3Dtwitchweb%26type%3Dany%26allow_source%3Dtrue%26allow_audio_only%3Dtrue%26allow_spectre%3Dfalse%26fast_bread%3Dtrue") - viper.SetDefault("livestream.proxy_whitelist", []string{ - "", +// Init initializes and returns the configuration +func Init() (*Config, error) { + env := GetEnvConfig() + configFile = env.ConfigDir + "/config.json" + once.Do(func() { + instance = &Config{} + initErr = instance.loadConfig() }) - - if _, err := os.Stat(configPath); os.IsNotExist(err) { - log.Info().Msgf("config file not found at %s, creating new one", configPath) - retries := 10 - for i := 0; i < retries; i++ { - err := viper.SafeWriteConfigAs(configPath) - if err == nil { - log.Info().Msgf("config file created") - break - } - log.Error().Err(err).Msgf("error creating config file (attempt %d/%d)", i+1, retries) - if i < retries-1 { - log.Info().Msgf("retrying in 1 second") - time.Sleep(1 * time.Second) - } else { - log.Panic().Err(err).Msg("error creating config file") - } - } - } else { - log.Info().Msgf("config file found at %s, loading", configPath) - retries := 10 - for i := 0; i < retries; i++ { - err := viper.ReadInConfig() - if err == nil { - log.Info().Msgf("config file loaded: %s", viper.ConfigFileUsed()) - break - } - log.Error().Err(err).Msgf("error loading config (attempt %d/%d)", i+1, retries) - if i < retries-1 { - log.Info().Msgf("retrying in 1 second") - time.Sleep(1 * time.Second) - } else { - log.Panic().Err(err).Msg("error loading config") - } - } - // Rewrite config file to apply new variables and remove old values - if refresh { - refreshConfig(configPath) - } - log.Debug().Msgf("config file loaded: %s", viper.ConfigFileUsed()) - } + return instance, initErr } -func (s *Service) GetConfig(c echo.Context) (*Conf, error) { - proxies := viper.Get("livestream.proxies") - var proxyListItems []ProxyListItem - for _, proxy := range proxies.([]interface{}) { - proxyListItem := ProxyListItem{ - URL: proxy.(map[string]interface{})["url"].(string), - Header: proxy.(map[string]interface{})["header"].(string), - } - proxyListItems = append(proxyListItems, proxyListItem) +// LoadConfig loads the configuration from the JSON file or creates a default one +func (c *Config) loadConfig() error { + if _, err := os.Stat(configFile); os.IsNotExist(err) { + c.setDefaults() + return SaveConfig() } - return &Conf{ - RegistrationEnabled: viper.GetBool("registration_enabled"), - Archive: struct { - SaveAsHls bool `json:"save_as_hls"` - }(struct { - SaveAsHls bool - }{ - SaveAsHls: viper.GetBool("archive.save_as_hls"), - }), - Parameters: struct { - TwitchToken string `json:"twitch_token"` - VideoConvert string `json:"video_convert"` - ChatRender string `json:"chat_render"` - StreamlinkLive string `json:"streamlink_live"` - }(struct { - TwitchToken string - VideoConvert string - ChatRender string - StreamlinkLive string - }{ - TwitchToken: viper.GetString("parameters.twitch_token"), - VideoConvert: viper.GetString("parameters.video_convert"), - ChatRender: viper.GetString("parameters.chat_render"), - StreamlinkLive: viper.GetString("parameters.streamlink_live"), - }), - StorageTemplates: struct { - FolderTemplate string `json:"folder_template"` - FileTemplate string `json:"file_template"` - }(struct { - FolderTemplate string - FileTemplate string - }{ - FolderTemplate: 
viper.GetString("storage_templates.folder_template"), - FileTemplate: viper.GetString("storage_templates.file_template"), - }), - Livestream: struct { - Proxies []ProxyListItem `json:"proxies"` - ProxyEnabled bool `json:"proxy_enabled"` - ProxyParameters string `json:"proxy_parameters"` - ProxyWhitelist []string `json:"proxy_whitelist"` - }(struct { - Proxies []ProxyListItem - ProxyEnabled bool - ProxyParameters string - ProxyWhitelist []string - }{ - Proxies: proxyListItems, - ProxyEnabled: viper.GetBool("livestream.proxy_enabled"), - ProxyParameters: viper.GetString("livestream.proxy_parameters"), - ProxyWhitelist: viper.GetStringSlice("livestream.proxy_whitelist"), - }), - }, nil -} - -func (s *Service) UpdateConfig(c echo.Context, cDto *Conf) error { - viper.Set("registration_enabled", cDto.RegistrationEnabled) - viper.Set("parameters.video_convert", cDto.Parameters.VideoConvert) - viper.Set("parameters.chat_render", cDto.Parameters.ChatRender) - viper.Set("parameters.streamlink_live", cDto.Parameters.StreamlinkLive) - viper.Set("parameters.twitch_token", cDto.Parameters.TwitchToken) - viper.Set("archive.save_as_hls", cDto.Archive.SaveAsHls) - // proxies - var proxyListItems []interface{} - for _, proxy := range cDto.Livestream.Proxies { - proxyListItem := map[string]interface{}{ - "url": proxy.URL, - "header": proxy.Header, - } - proxyListItems = append(proxyListItems, proxyListItem) - } - viper.Set("livestream.proxies", proxyListItems) - viper.Set("livestream.proxy_enabled", cDto.Livestream.ProxyEnabled) - viper.Set("livestream.proxy_whitelist", cDto.Livestream.ProxyWhitelist) - - err := viper.WriteConfig() + file, err := os.ReadFile(configFile) if err != nil { - return fmt.Errorf("error writing config file: %w", err) + return err } - return nil -} -func (s *Service) GetNotificationConfig(c echo.Context) (*Notification, error) { - return &Notification{ - VideoSuccessWebhookUrl: viper.GetString("notifications.video_success_webhook_url"), - VideoSuccessTemplate: viper.GetString("notifications.video_success_template"), - VideoSuccessEnabled: viper.GetBool("notifications.video_success_enabled"), - LiveSuccessWebhookUrl: viper.GetString("notifications.live_success_webhook_url"), - LiveSuccessTemplate: viper.GetString("notifications.live_success_template"), - LiveSuccessEnabled: viper.GetBool("notifications.live_success_enabled"), - ErrorWebhookUrl: viper.GetString("notifications.error_webhook_url"), - ErrorTemplate: viper.GetString("notifications.error_template"), - ErrorEnabled: viper.GetBool("notifications.error_enabled"), - IsLiveWebhookUrl: viper.GetString("notifications.is_live_webhook_url"), - IsLiveTemplate: viper.GetString("notifications.is_live_template"), - IsLiveEnabled: viper.GetBool("notifications.is_live_enabled"), - }, nil -} - -func (s *Service) GetStorageTemplateConfig(c echo.Context) (*StorageTemplate, error) { - return &StorageTemplate{ - FolderTemplate: viper.GetString("storage_templates.folder_template"), - FileTemplate: viper.GetString("storage_templates.file_template"), - }, nil -} - -func (s *Service) UpdateNotificationConfig(c echo.Context, nDto *Notification) error { - viper.Set("notifications.video_success_webhook_url", nDto.VideoSuccessWebhookUrl) - viper.Set("notifications.video_success_template", nDto.VideoSuccessTemplate) - viper.Set("notifications.video_success_enabled", nDto.VideoSuccessEnabled) - viper.Set("notifications.live_success_webhook_url", nDto.LiveSuccessWebhookUrl) - viper.Set("notifications.live_success_template", nDto.LiveSuccessTemplate) - 
viper.Set("notifications.live_success_enabled", nDto.LiveSuccessEnabled) - viper.Set("notifications.error_webhook_url", nDto.ErrorWebhookUrl) - viper.Set("notifications.error_template", nDto.ErrorTemplate) - viper.Set("notifications.error_enabled", nDto.ErrorEnabled) - viper.Set("notifications.is_live_webhook_url", nDto.IsLiveWebhookUrl) - viper.Set("notifications.is_live_template", nDto.IsLiveTemplate) - viper.Set("notifications.is_live_enabled", nDto.IsLiveEnabled) - err := viper.WriteConfig() + err = json.Unmarshal(file, c) if err != nil { - return fmt.Errorf("error writing config file: %w", err) + return err } - return nil -} -func (s *Service) UpdateStorageTemplateConfig(c echo.Context, stDto *StorageTemplate) error { - viper.Set("storage_templates.folder_template", stDto.FolderTemplate) - viper.Set("storage_templates.file_template", stDto.FileTemplate) - err := viper.WriteConfig() - if err != nil { - return fmt.Errorf("error writing config file: %w", err) - } return nil } -// refreshConfig: rewrites config file applying variable changes and removing old ones -func refreshConfig(configPath string) { - err := unset("live_check_interval") - if err != nil { - log.Error().Err(err).Msg("error unsetting config value") - } - // Add authentication method - if !viper.IsSet("oauth_enabled") { - viper.Set("oauth_enabled", false) - } - // streamlink params - if !viper.IsSet("parameters.streamlink_live") { - viper.Set("parameters.streamlink_live", "--twitch-low-latency,--twitch-disable-hosting") - } - err = viper.WriteConfigAs(configPath) - if err != nil { - log.Panic().Err(err).Msg("error writing config file") - } - if viper.IsSet("webhook_url") && viper.GetString("webhook_url") != "" { - oldWebhookUrl := viper.GetString("webhook_url") - viper.Set("notifications.video_success_webhook_url", oldWebhookUrl) - viper.Set("notifications.live_success_webhook_url", oldWebhookUrl) - viper.Set("notifications.error_webhook_url", oldWebhookUrl) - viper.Set("notifications.is_live_webhook_url", oldWebhookUrl) - err = viper.WriteConfigAs(configPath) - if err != nil { - log.Panic().Err(err).Msg("error writing config file") - } - err = unset("webhook_url") - if err != nil { - log.Error().Err(err).Msg("error unsetting config value") - } - } else { - err = unset("webhook_url") - if err != nil { - log.Error().Err(err).Msg("error unsetting config value") - } - } - // Archive - if !viper.IsSet("archive.save_as_hls") { - viper.Set("archive.save_as_hls", false) - } - // Storage template - if !viper.IsSet("storage_templates.folder_template") { - viper.Set("storage_templates.folder_template", "{{date}}-{{id}}-{{type}}-{{uuid}}") - } - if !viper.IsSet("storage_templates.file_template") { - viper.Set("storage_templates.file_template", "{{id}}") - } - // Twitch Token - if !viper.IsSet("parameters.twitch_token") { - viper.Set("parameters.twitch_token", "") - } - // Livestream - if !viper.IsSet("livestream.proxies") { - viper.Set("livestream.proxies", []ProxyListItem{ - { - URL: "https://eu.luminous.dev", - Header: "", - }, - { - URL: "https://api.ttv.lol", - Header: "x-donate-to:https://ttv.lol/donate", - }, - }) - } - if !viper.IsSet("livestream.proxy_enabled") { - viper.Set("livestream.proxy_enabled", false) - } - if !viper.IsSet("livestream.proxy_parameters") { - viper.Set("livestream.proxy_parameters", "%3Fplayer%3Dtwitchweb%26type%3Dany%26allow_source%3Dtrue%26allow_audio_only%3Dtrue%26allow_spectre%3Dfalse%26fast_bread%3Dtrue") - } - if !viper.IsSet("livestream.proxy_whitelist") { - 
viper.Set("livestream.proxy_whitelist", []string{ - "", - }) - } - if !viper.IsSet("video_check_interval_minutes") { - viper.Set("video_check_interval_minutes", 180) - } - err = unset("db_seeded") - if err != nil { - log.Error().Err(err).Msg("error unsetting config value") - } - err = unset("active_queue_items") - if err != nil { - log.Error().Err(err).Msg("error unsetting config value") +func (c *Config) setDefaults() { + c.LiveCheckInterval = 300 + c.VideoCheckInterval = 180 + c.RegistrationEnabled = true + c.Parameters.TwitchToken = "" + c.Parameters.VideoConvert = "-c:v copy -c:a copy" + c.Parameters.ChatRender = "-h 1440 -w 340 --framerate 30 --font Inter --font-size 13" + c.Parameters.StreamlinkLive = "--twitch-low-latency,--twitch-disable-hosting" + c.Archive.SaveAsHls = false + + // notifications + c.Notification.VideoSuccessWebhookUrl = "" + c.Notification.VideoSuccessTemplate = "✅ Video Archived: {{vod_title}} by {{channel_display_name}}." + c.Notification.VideoSuccessEnabled = true + c.Notification.LiveSuccessWebhookUrl = "" + c.Notification.LiveSuccessTemplate = "✅ Live Stream Archived: {{vod_title}} by {{channel_display_name}}." + c.Notification.LiveSuccessEnabled = true + c.Notification.ErrorWebhookUrl = "" + c.Notification.ErrorTemplate = "⚠️ Error: Queue {{queue_id}} failed at task {{failed_task}}." + c.Notification.ErrorEnabled = true + c.Notification.IsLiveWebhookUrl = "" + c.Notification.IsLiveTemplate = "🔴 {{channel_display_name}} is live!" + c.Notification.IsLiveEnabled = true + + // storage templates + c.StorageTemplates.FolderTemplate = "{{date}}-{{id}}-{{type}}-{{uuid}}" + c.StorageTemplates.FileTemplate = "{{id}}" + + // livestream + c.Livestream.Proxies = []ProxyListItem{ + { + URL: "https://eu.luminous.dev", + Header: "", + }, + { + URL: "https://api.ttv.lol", + Header: "x-donate-to:https://ttv.lol/donate", + }, } - + c.Livestream.ProxyEnabled = false + c.Livestream.ProxyParameters = "%3Fplayer%3Dtwitchweb%26type%3Dany%26allow_source%3Dtrue%26allow_audio_only%3Dtrue%26allow_spectre%3Dfalse%26fast_bread%3Dtrue" + c.Livestream.ProxyWhitelist = []string{} } -// unset: removes variable from config file -// https://github.com/spf13/viper/issues/632#issuecomment-869668629 -func unset(vars ...string) error { - cfg := viper.AllSettings() - vals := cfg +func UpdateConfig(newConfig *Config) error { + mutex.Lock() + defer mutex.Unlock() - for _, v := range vars { - parts := strings.Split(v, ".") - for i, k := range parts { - v, ok := vals[k] - if !ok { - // Doesn't exist no action needed - break - } + // Make a deep copy of the new config + *instance = *newConfig - switch len(parts) { - case i + 1: - // Last part so delete. 
- delete(vals, k) - default: - m, ok := v.(map[string]interface{}) - if !ok { - return fmt.Errorf("unsupported type: %T for %q", v, strings.Join(parts[0:i], ".")) - } - vals = m - } - } - } + // Call SaveConfig without the mutex + return saveConfigUnsafe() +} + +// SaveConfig saves the current configuration to the JSON file +func SaveConfig() error { + mutex.Lock() + defer mutex.Unlock() + return saveConfigUnsafe() +} - b, err := json.MarshalIndent(cfg, "", " ") +// saveConfigUnsafe saves the config without locking the mutex +func saveConfigUnsafe() error { + file, err := json.MarshalIndent(instance, "", " ") if err != nil { return err } - if err = viper.ReadConfig(bytes.NewReader(b)); err != nil { - return err - } + return os.WriteFile(configFile, file, 0644) +} - return viper.WriteConfig() +// Get returns the configuration +func Get() *Config { + return instance } diff --git a/internal/config/env.go b/internal/config/env.go new file mode 100644 index 00000000..41df39c5 --- /dev/null +++ b/internal/config/env.go @@ -0,0 +1,70 @@ +package config + +import ( + "context" + + "github.com/rs/zerolog/log" + "github.com/sethvargo/go-envconfig" +) + +type EnvApplicationConfig struct { + DB_HOST string `env:"DB_HOST, required"` + DB_PORT string `env:"DB_PORT, required"` + DB_USER string `env:"DB_USER, required"` + DB_PASS string `env:"DB_PASS, required"` + DB_NAME string `env:"DB_NAME, required"` + DB_SSL string `env:"DB_SSL, default=disable"` + DB_SSL_ROOT_CERT string `env:"DB_SSL_ROOT_CERT, default="` + JWTSecret string `env:"JWT_SECRET, required"` + JWTRefreshSecret string `env:"JWT_REFRESH_SECRET, required"` + FrontendHost string `env:"FRONTEND_HOST, required"` +} + +// EnvConfig represents the environment variables for the application +type EnvConfig struct { + // application + DEBUG bool `env:"DEBUG, default=false"` + CookieDomain string `env:"COOKIE_DOMAIN, default="` + // customizable paths + VideosDir string `env:"VIDEOS_DIR, default=/data/videos"` + TempDir string `env:"TEMP_DIR, default=/data/temp"` + ConfigDir string `env:"CONFIG_DIR, default=/data/config"` + LogsDir string `env:"LOGS_DIR, default=/data/logs"` + // platform variables + TwitchClientId string `env:"TWITCH_CLIENT_ID, default="` + TwitchClientSecret string `env:"TWITCH_CLIENT_SECRET, default="` + + // worker config + MaxChatDownloadExecutions int `env:"MAX_CHAT_DOWNLOAD_EXECUTIONS, default=3"` + MaxChatRenderExecutions int `env:"MAX_CHAT_RENDER_EXECUTIONS, default=2"` + MaxVideoDownloadExecutions int `env:"MAX_VIDEO_DOWNLOAD_EXECUTIONS, default=2"` + MaxVideoConvertExecutions int `env:"MAX_VIDEO_CONVERT_EXECUTIONS, default=3"` + + // oauth OIDC + OAuthEnabled bool `env:"OAUTH_ENABLED, default=false"` + OAuthProviderURL string `env:"OAUTH_PROVIDER_URL, default="` + OAuthClientID string `env:"OAUTH_CLIENT_ID, default="` + OAuthClientSecret string `env:"OAUTH_CLIENT_SECRET, default="` + OAuthRedirectURL string `env:"OAUTH_REDIRECT_URL, default="` +} + +// GetEnvConfig returns the environment variables for the application +func GetEnvConfig() EnvConfig { + ctx := context.Background() + + var c EnvConfig + if err := envconfig.Process(ctx, &c); err != nil { + log.Panic().Err(err).Msg("error getting env config") + } + return c +} + +func GetEnvApplicationConfig() EnvApplicationConfig { + ctx := context.Background() + + var c EnvApplicationConfig + if err := envconfig.Process(ctx, &c); err != nil { + log.Panic().Err(err).Msg("error getting env config") + } + return c +} diff --git a/internal/config/env_test.go 
b/internal/config/env_test.go new file mode 100644 index 00000000..db4ac569 --- /dev/null +++ b/internal/config/env_test.go @@ -0,0 +1,53 @@ +package config + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGetEnvConfig(t *testing.T) { + os.Setenv("VIDEOS_DIR", "/custom/videos") + + env := GetEnvConfig() + + assert.Equal(t, "/custom/videos", env.VideosDir) + + os.Unsetenv("VIDEOS_DIR") +} + +func TestGetEnvRequiredConfig(t *testing.T) { + os.Setenv("DB_HOST", "localhost") + os.Setenv("DB_PORT", "5432") + os.Setenv("DB_USER", "postgres") + os.Setenv("DB_PASS", "password") + os.Setenv("DB_NAME", "ganymede") + os.Setenv("JWT_SECRET", "secret") + os.Setenv("JWT_REFRESH_SECRET", "refresh_secret") + os.Setenv("FRONTEND_HOST", "localhost") + + env := GetEnvApplicationConfig() + + assert.Equal(t, "localhost", env.DB_HOST) + assert.Equal(t, "5432", env.DB_PORT) + assert.Equal(t, "postgres", env.DB_USER) + assert.Equal(t, "password", env.DB_PASS) + assert.Equal(t, "ganymede", env.DB_NAME) + assert.Equal(t, "secret", env.JWTSecret) + assert.Equal(t, "refresh_secret", env.JWTRefreshSecret) + assert.Equal(t, "localhost", env.FrontendHost) + + os.Unsetenv("DB_HOST") + os.Unsetenv("DB_PORT") + os.Unsetenv("DB_USER") + os.Unsetenv("DB_PASS") + os.Unsetenv("DB_NAME") + os.Unsetenv("JWT_SECRET") + os.Unsetenv("JWT_REFRESH_SECRET") + os.Unsetenv("FRONTEND_HOST") +} + +func TestGetEnvRequiredConfigMissing(t *testing.T) { + assert.Panics(t, func() { GetEnvApplicationConfig() }) +} diff --git a/internal/database/database.go b/internal/database/database.go index 85f8ba3c..a23e3218 100644 --- a/internal/database/database.go +++ b/internal/database/database.go @@ -3,8 +3,8 @@ package database import ( "context" "fmt" - "os" + "github.com/jackc/pgx/v5/pgxpool" _ "github.com/lib/pq" "github.com/rs/zerolog/log" "github.com/zibbp/ganymede/ent" @@ -14,31 +14,24 @@ import ( var db *Database -type Database struct { - Client *ent.Client +type DatabaseConnectionInput struct { + DBString string + IsWorker bool } -func InitializeDatabase(worker bool) { - log.Debug().Msg("setting up database connection") - - dbHost := os.Getenv("DB_HOST") - dbPort := os.Getenv("DB_PORT") - dbUser := os.Getenv("DB_USER") - dbPass := os.Getenv("DB_PASS") - dbName := os.Getenv("DB_NAME") - dbSSL := os.Getenv("DB_SSL") - dbSSLTRootCert := os.Getenv("DB_SSL_ROOT_CERT") - - connectionString := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=%s sslrootcert=%s", - dbHost, dbPort, dbUser, dbPass, dbName, dbSSL, dbSSLTRootCert) +type Database struct { + Client *ent.Client + ConnPool *pgxpool.Pool +} - client, err := ent.Open("postgres", connectionString) +func InitializeDatabase(input DatabaseConnectionInput) { + client, err := ent.Open("postgres", input.DBString) if err != nil { log.Fatal().Err(err).Msg("error connecting to database") } - if !worker { + if !input.IsWorker { // Run auto migration if err := client.Schema.Create(context.Background()); err != nil { log.Fatal().Err(err).Msg("error running auto migration") @@ -64,46 +57,45 @@ func DB() *Database { return db } -func NewDatabase() (*Database, error) { - log.Debug().Msg("setting up database connection") - - dbHost := os.Getenv("DB_HOST") - dbPort := os.Getenv("DB_PORT") - dbUser := os.Getenv("DB_USER") - dbPass := os.Getenv("DB_PASS") - dbName := os.Getenv("DB_NAME") - dbSSL := os.Getenv("DB_SSL") - dbSSLTRootCert := os.Getenv("DB_SSL_ROOT_CERT") - - connectionString := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=%s 
sslrootcert=%s", - dbHost, dbPort, dbUser, dbPass, dbName, dbSSL, dbSSLTRootCert) - - client, err := ent.Open("postgres", connectionString) +func NewDatabase(ctx context.Context, input DatabaseConnectionInput) *Database { + client, err := ent.Open("postgres", input.DBString) if err != nil { - return nil, err + log.Fatal().Err(err).Msg("error connecting to database") } - // Run auto migration - if err := client.Schema.Create(context.Background()); err != nil { - return nil, err + if !input.IsWorker { + // Run auto migration + if err := client.Schema.Create(context.Background()); err != nil { + log.Fatal().Err(err).Msg("error running auto migration") + } + // check if any users exist + users, err := client.User.Query().All(context.Background()) + if err != nil { + log.Panic().Err(err).Msg("error querying users") + } + // if no users exist, seed database + if len(users) == 0 { + // seed database + log.Debug().Msg("seeding database") + if err := seedDatabase(client); err != nil { + log.Panic().Err(err).Msg("error seeding database") + } + } } - // check if any users exist - users, err := client.User.Query().All(context.Background()) + connPool, err := pgxpool.New(ctx, input.DBString) if err != nil { - return nil, err + log.Panic().Err(err).Msg("error connecting to database") } - // if no users exist, seed database - if len(users) == 0 { - // seed database - log.Debug().Msg("seeding database") - if err := seedDatabase(client); err != nil { - return nil, err - } + // defer connPool.Close() + + db = &Database{ + Client: client, + ConnPool: connPool, } - return &Database{Client: client}, nil + return db } func seedDatabase(client *ent.Client) error { diff --git a/internal/database/migrate.go b/internal/database/migrate.go new file mode 100644 index 00000000..01ba3ca4 --- /dev/null +++ b/internal/database/migrate.go @@ -0,0 +1,130 @@ +package database + +import ( + "context" + "strings" + + "github.com/rs/zerolog/log" + "github.com/zibbp/ganymede/ent" + "github.com/zibbp/ganymede/internal/utils" +) + +// VideosDirMigrate migrates the videos directory if it has changed. +// It will do nothing if the videos directory has not changed. +func (db *Database) VideosDirMigrate(ctx context.Context, videosDir string) error { + // get latest video from database + video, err := db.Client.Vod.Query().WithChannel().Limit(1).Order(ent.Desc("created_at")).First(ctx) + if err != nil { + // no videos found, likely a new instance. 
Return gracefully + if _, ok := err.(*ent.NotFoundError); ok { + return nil + } else { + return err + } + } + + // get path of current videos directory + oldVideoPath := utils.GetPathBefore(video.VideoPath, video.Edges.Channel.Name) + oldVideoPath = strings.TrimRight(oldVideoPath, "/") + + // check if videos directory has changed + if oldVideoPath != "" && oldVideoPath != videosDir { + log.Info().Msg("detected new videos directory; migrating pathes to new directory") + + // update channel paths + channels, err := db.Client.Channel.Query().All(ctx) + if err != nil { + return err + } + // replace old path with new path + for _, c := range channels { + update := db.Client.Channel.UpdateOne(c) + update.SetImagePath(strings.Replace(c.ImagePath, oldVideoPath, videosDir, 1)) + + if _, err := update.Save(ctx); err != nil { + return err + } + } + + // update video paths + videos, err := db.Client.Vod.Query().WithChannel().All(ctx) + if err != nil { + return err + } + // replace old path with new path + for _, v := range videos { + update := db.Client.Vod.UpdateOneID(v.ID) + update.SetThumbnailPath(strings.Replace(v.ThumbnailPath, oldVideoPath, videosDir, 1)) + update.SetWebThumbnailPath(strings.Replace(v.WebThumbnailPath, oldVideoPath, videosDir, 1)) + update.SetVideoPath(strings.Replace(v.VideoPath, oldVideoPath, videosDir, 1)) + update.SetVideoHlsPath(strings.Replace(v.VideoHlsPath, oldVideoPath, videosDir, 1)) + update.SetChatPath(strings.Replace(v.ChatPath, oldVideoPath, videosDir, 1)) + update.SetLiveChatPath(strings.Replace(v.LiveChatPath, oldVideoPath, videosDir, 1)) + update.SetLiveChatConvertPath(strings.Replace(v.LiveChatConvertPath, oldVideoPath, videosDir, 1)) + update.SetChatVideoPath(strings.Replace(v.ChatVideoPath, oldVideoPath, videosDir, 1)) + update.SetInfoPath(strings.Replace(v.InfoPath, oldVideoPath, videosDir, 1)) + update.SetCaptionPath(strings.Replace(v.CaptionPath, oldVideoPath, videosDir, 1)) + + if _, err := update.Save(ctx); err != nil { + return err + } + } + + log.Info().Msg("finished migrating existing video directories") + } + + return nil +} + +// TempDirMigrate migrates the temp directory if it has changed. +// It will do nothing if the temp directory has not changed. +func (db *Database) TempDirMigrate(ctx context.Context, tempDir string) error { + // get latest video from database + video, err := db.Client.Vod.Query().WithChannel().Limit(1).Order(ent.Desc("created_at")).First(ctx) + if err != nil { + // no videos found, likely a new instance. 
Return gracefully + if _, ok := err.(*ent.NotFoundError); ok { + return nil + } else { + return err + } + } + + if video.TmpVideoDownloadPath == "" { + return nil + } + + // get path of current videos directory + oldTmpVideoDownloadPath := utils.GetPathBeforePartial(video.TmpVideoDownloadPath, video.ID.String()) + oldTmpVideoDownloadPath = strings.TrimRight(oldTmpVideoDownloadPath, "/") + + // check if videos directory has changed + if oldTmpVideoDownloadPath != "" && oldTmpVideoDownloadPath != tempDir { + log.Info().Msg("detected new temp path directory; migrating existing video directories") + + videos, err := db.Client.Vod.Query().WithChannel().All(ctx) + if err != nil { + return err + } + + // replace old path with new path + for _, v := range videos { + update := db.Client.Vod.UpdateOneID(v.ID) + update.SetTmpVideoDownloadPath(strings.Replace(v.TmpVideoDownloadPath, oldTmpVideoDownloadPath, tempDir, 1)) + update.SetTmpVideoConvertPath(strings.Replace(v.TmpVideoConvertPath, oldTmpVideoDownloadPath, tempDir, 1)) + update.SetTmpChatDownloadPath(strings.Replace(v.TmpChatDownloadPath, oldTmpVideoDownloadPath, tempDir, 1)) + update.SetTmpLiveChatDownloadPath(strings.Replace(v.TmpLiveChatDownloadPath, oldTmpVideoDownloadPath, tempDir, 1)) + update.SetTmpLiveChatConvertPath(strings.Replace(v.TmpLiveChatConvertPath, oldTmpVideoDownloadPath, tempDir, 1)) + update.SetTmpChatRenderPath(strings.Replace(v.TmpChatRenderPath, oldTmpVideoDownloadPath, tempDir, 1)) + update.SetTmpVideoHlsPath(strings.Replace(v.TmpVideoHlsPath, oldTmpVideoDownloadPath, tempDir, 1)) + + if _, err := update.Save(ctx); err != nil { + return err + } + } + + log.Info().Msg("finished migrating existing temp video directories") + } + + return nil +} diff --git a/internal/errors/errors.go b/internal/errors/errors.go new file mode 100644 index 00000000..6eb7f8b8 --- /dev/null +++ b/internal/errors/errors.go @@ -0,0 +1,42 @@ +package errors + +import ( + "fmt" +) + +// CustomError is the base type for all custom errors +type CustomError struct { + message string +} + +// Error implements the error interface +func (e *CustomError) Error() string { + return e.message +} + +// New creates a new CustomError +func New(message string) *CustomError { + return &CustomError{message: message} +} + +// Define specific custom errors +var ( + ErrNoChatMessages = New("not chat messages found") +) + +// Is checks if the given error is of the specified custom error type +func Is(err error, target *CustomError) bool { + customErr, ok := err.(*CustomError) + if !ok { + return false + } + return customErr.message == target.message +} + +// Wrap wraps an error with additional context +func Wrap(err error, message string) error { + if err == nil { + return nil + } + return fmt.Errorf("%s: %w", message, err) +} diff --git a/internal/exec/exec.go b/internal/exec/exec.go index f4fff45c..9d44f7d0 100644 --- a/internal/exec/exec.go +++ b/internal/exec/exec.go @@ -1,251 +1,127 @@ package exec import ( + "bufio" "bytes" "context" "encoding/json" "fmt" - "io" "net/http" "os" - "os/exec" osExec "os/exec" "strconv" "strings" "time" "github.com/rs/zerolog/log" - "github.com/spf13/viper" "github.com/zibbp/ganymede/ent" "github.com/zibbp/ganymede/internal/config" - "github.com/zibbp/ganymede/internal/database" - "github.com/zibbp/ganymede/internal/temporal" + "github.com/zibbp/ganymede/internal/errors" "github.com/zibbp/ganymede/internal/twitch" "github.com/zibbp/ganymede/internal/utils" ) -func DownloadTwitchVodVideo(v *ent.Vod) error { - - var argArr []string - 
// Check if twitch token is set - argArr = append(argArr, fmt.Sprintf("https://twitch.tv/videos/%s", v.ExtID), fmt.Sprintf("%s,best", v.Resolution), "--force-progress", "--force") - - twitchToken := viper.GetString("parameters.twitch_token") - if twitchToken != "" { - // Note: if the token is invalid, streamlink will exit with "no playable streams found on this URL" - argArr = append(argArr, fmt.Sprintf("--twitch-api-header=Authorization=OAuth %s", twitchToken)) - } - - argArr = append(argArr, "-o", v.TmpVideoDownloadPath) - - log.Debug().Msgf("running streamlink for vod video download: %s", strings.Join(argArr, " ")) - - cmd := osExec.Command("streamlink", argArr...) - - videoLogfile, err := os.Create(fmt.Sprintf("/logs/%s-video.log", v.ID)) +func DownloadTwitchVideo(ctx context.Context, video ent.Vod) error { + env := config.GetEnvConfig() + // open log file + logFilePath := fmt.Sprintf("%s/%s-video.log", env.LogsDir, video.ID.String()) + file, err := os.Create(logFilePath) if err != nil { - return fmt.Errorf("error creating video logfile: %w", err) + return fmt.Errorf("failed to open log file: %w", err) } + defer file.Close() + log.Debug().Str("video_id", video.ID.String()).Msgf("logging streamlink output to %s", logFilePath) - defer videoLogfile.Close() - cmd.Stdout = videoLogfile - cmd.Stderr = videoLogfile + var cmdArgs []string + cmdArgs = append(cmdArgs, fmt.Sprintf("https://twitch.tv/videos/%s", video.ExtID), fmt.Sprintf("%s,best", video.Resolution), "--force-progress", "--force") - if err := cmd.Run(); err != nil { - if exitError, ok := err.(*osExec.ExitError); ok { - log.Error().Err(err).Msg("error running streamlink for vod download") - return fmt.Errorf("error running streamlink for vod download with exit code %d: %w", exitError.ExitCode(), exitError) - } - return fmt.Errorf("error running streamlink for vod video download: %w", err) + // check if user has twitch token set + // if so, set token in streamlink command + twitchToken := config.Get().Parameters.TwitchToken + if twitchToken != "" { + cmdArgs = append(cmdArgs, fmt.Sprintf("--twitch-api-header=Authorization=OAuth %s", twitchToken)) } - log.Debug().Msgf("finished downloading vod video for %s", v.ExtID) - return nil -} + // output + cmdArgs = append(cmdArgs, "-o", video.TmpVideoDownloadPath) -func DownloadTwitchVodChat(v *ent.Vod) error { - cmd := osExec.Command("TwitchDownloaderCLI", "chatdownload", "--id", v.ExtID, "--embed-images", "-o", v.TmpChatDownloadPath) + log.Debug().Str("video_id", video.ID.String()).Str("cmd", strings.Join(cmdArgs, " ")).Msgf("running streamlink") - chatLogfile, err := os.Create(fmt.Sprintf("/logs/%s-chat.log", v.ID)) - if err != nil { - return fmt.Errorf("error creating chat logfile: %w", err) - } - defer chatLogfile.Close() - cmd.Stdout = chatLogfile - cmd.Stderr = chatLogfile + cmd := osExec.CommandContext(ctx, "streamlink", cmdArgs...) 
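Every rewritten helper in this file repeats the same context-aware supervision pattern around osExec.CommandContext: start the process, wait for it in a goroutine, and select on ctx.Done() so a cancelled archive workflow can kill the child process. A condensed, self-contained sketch of that pattern follows; the runWithContext name and signature are illustrative only and not part of this patch.

package exec

import (
	"context"
	"fmt"
	"os"
	osExec "os/exec"
)

// runWithContext runs name with args, writing combined output to logPath.
// It kills the child process if ctx is cancelled before the command exits.
func runWithContext(ctx context.Context, logPath string, name string, args ...string) error {
	file, err := os.Create(logPath)
	if err != nil {
		return fmt.Errorf("failed to open log file: %w", err)
	}
	defer file.Close()

	cmd := osExec.CommandContext(ctx, name, args...)
	cmd.Stdout = file
	cmd.Stderr = file

	if err := cmd.Start(); err != nil {
		return fmt.Errorf("error starting %s: %w", name, err)
	}

	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	select {
	case <-ctx.Done():
		// context cancelled: kill the process and wait for it to be reaped
		if err := cmd.Process.Kill(); err != nil {
			return fmt.Errorf("failed to kill %s process: %w", name, err)
		}
		<-done
		return ctx.Err()
	case err := <-done:
		if err != nil {
			return fmt.Errorf("error running %s: %w", name, err)
		}
		return nil
	}
}

CommandContext alone would also kill the process on cancellation once Wait is running; the explicit select additionally lets callers distinguish ctx.Err() from the command's own exit error, which is how the helpers in this file report cancellation.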
- if err := cmd.Run(); err != nil { - if exitError, ok := err.(*osExec.ExitError); ok { - log.Error().Err(err).Msg("error running TwitchDownloaderCLI for vod chat download") - return fmt.Errorf("error running TwitchDownloaderCLI for vod chat download with exit code %d: %w", exitError.ExitCode(), exitError) - } - log.Error().Err(err).Msg("error running TwitchDownloaderCLI for vod chat download") - return fmt.Errorf("error running TwitchDownloaderCLI for vod chat download: %w", err) - } + cmd.Stderr = file + cmd.Stdout = file - log.Debug().Msgf("finished downloading vod chat for %s", v.ExtID) - return nil -} - -func RenderTwitchVodChat(v *ent.Vod) (error, bool) { - // Fetch config params - chatRenderParams := viper.GetString("parameters.chat_render") - // Split supplied params into array - arr := strings.Fields(chatRenderParams) - // Generate args for exec - argArr := []string{"chatrender", "-i", v.TmpChatDownloadPath} - // add each config param to arg - argArr = append(argArr, arr...) - // add output file - argArr = append(argArr, "-o", v.TmpChatRenderPath) - log.Debug().Msgf("chat render args: %v", argArr) - // Execute chat render - cmd := osExec.Command("TwitchDownloaderCLI", argArr...) - - chatRenderLogfile, err := os.Create(fmt.Sprintf("/logs/%s-chat-render.log", v.ID)) - if err != nil { - return fmt.Errorf("error creating chat render logfile: %w", err), true + if err := cmd.Start(); err != nil { + return fmt.Errorf("error starting streamlink: %w", err) } - defer chatRenderLogfile.Close() - cmd.Stdout = chatRenderLogfile - cmd.Stderr = chatRenderLogfile - if err := cmd.Run(); err != nil { - if exitError, ok := err.(*osExec.ExitError); ok { - log.Error().Err(err).Msg("error running TwitchDownloaderCLI for vod chat render") - return fmt.Errorf("error running TwitchDownloaderCLI for vod chat render with exit code %d: %w", exitError.ExitCode(), exitError), true - } - log.Error().Err(err).Msg("error running TwitchDownloaderCLI for vod chat render") + done := make(chan error) + go func() { + done <- cmd.Wait() + }() - // Check if error is because of no messages - checkCmd := fmt.Sprintf("cat /logs/%s-chat-render.log | grep 'Sequence contains no elements'", v.ID) - _, err := osExec.Command("bash", "-c", checkCmd).Output() + // Wait for the command to finish or context to be cancelled + select { + case <-ctx.Done(): + // Context was cancelled, kill the process + if err := cmd.Process.Kill(); err != nil { + return fmt.Errorf("failed to kill streamlink process: %v", err) + } + <-done // Wait for copying to finish + return ctx.Err() + case err := <-done: + // Command finished normally if err != nil { - log.Error().Err(err).Msg("error checking chat render logfile for no messages") - return fmt.Errorf("erreor checking chat render logfile for no messages %w", err), true + if exitError, ok := err.(*osExec.ExitError); ok { + log.Error().Err(err).Str("exitCode", strconv.Itoa(exitError.ExitCode())).Str("exit_error", exitError.Error()).Msg("error running streamlink") + return fmt.Errorf("error running streamlink") + } + return fmt.Errorf("error running streamlink: %w", err) } - - // TODO: re-implment this - // log.Debug().Msg("no messages found in chat render logfile. 
setting vod and queue to reflect no chat.") - // v.Update().SetChatPath("").SetChatVideoPath("").SaveX(context.Background()) - // q.Update().SetChatProcessing(false).SetTaskChatMove(utils.Success).SaveX(context.Background()) - return nil, false - } - - log.Debug().Msgf("finished vod chat render for %s", v.ExtID) - return nil, true -} - -func ConvertTwitchVodVideo(v *ent.Vod) error { - // Fetch config params - ffmpegParams := viper.GetString("parameters.video_convert") - // Split supplied params into array - arr := strings.Fields(ffmpegParams) - // Generate args for exec - argArr := []string{"-y", "-hide_banner", "-i", v.TmpVideoDownloadPath} - // add each config param to arg - argArr = append(argArr, arr...) - // add output file - argArr = append(argArr, v.TmpVideoConvertPath) - log.Debug().Msgf("video convert args: %v", argArr) - // Execute ffmpeg - cmd := osExec.Command("ffmpeg", argArr...) - - videoConvertLogfile, err := os.Create(fmt.Sprintf("/logs/%s-video-convert.log", v.ID)) - if err != nil { - log.Error().Err(err).Msg("error creating video convert logfile") - return err } - defer videoConvertLogfile.Close() - cmd.Stdout = videoConvertLogfile - cmd.Stderr = videoConvertLogfile - if err := cmd.Run(); err != nil { - log.Error().Err(err).Msg("error running ffmpeg for vod video convert") - return err - } - - log.Debug().Msgf("finished vod video convert for %s", v.ExtID) return nil } -func ConvertToHLS(v *ent.Vod) error { - // Delete original video file to save space - log.Debug().Msgf("deleting original video file for %s to save space", v.ExtID) - if err := os.Remove(v.TmpVideoDownloadPath); err != nil { - log.Error().Err(err).Msg("error deleting original video file") - return err - } - - cmd := osExec.Command("ffmpeg", "-y", "-hide_banner", "-i", v.TmpVideoConvertPath, "-c", "copy", "-start_number", "0", "-hls_time", "10", "-hls_list_size", "0", "-hls_segment_filename", fmt.Sprintf("/tmp/%s_%s-video_hls%s/%s_segment%s.ts", v.ExtID, v.ID, "%v", v.ExtID, "%d"), "-f", "hls", fmt.Sprintf("/tmp/%s_%s-video_hls%s/%s-video.m3u8", v.ExtID, v.ID, "%v", v.ExtID)) - - videoConverLogFile, err := os.Open(fmt.Sprintf("/logs/%s-video-convert.log", v.ID)) +func DownloadTwitchLiveVideo(ctx context.Context, video ent.Vod, channel ent.Channel, startChat chan bool) error { + env := config.GetEnvConfig() + // open log file + logFilePath := fmt.Sprintf("%s/%s-video.log", env.LogsDir, video.ID.String()) + file, err := os.Create(logFilePath) if err != nil { - log.Error().Err(err).Msg("error opening video convert logfile") - return err + return fmt.Errorf("failed to open log file: %w", err) } - defer videoConverLogFile.Close() - cmd.Stdout = videoConverLogFile - cmd.Stderr = videoConverLogFile + defer file.Close() + log.Debug().Str("video_id", video.ID.String()).Msgf("logging streamlink output to %s", logFilePath) - if err := cmd.Run(); err != nil { - log.Error().Err(err).Msg("error running ffmpeg for vod video convert - hls") - return err - } - - log.Debug().Msgf("finished vod video convert - hls for %s", v.ExtID) - return nil - -} + configStreamlinkArgs := config.Get().Parameters.StreamlinkLive -func DownloadTwitchLiveVideo(ctx context.Context, v *ent.Vod, ch *ent.Channel, liveChatWorkflowId string) error { - // Fetch config params - liveStreamlinkParams := viper.GetString("parameters.streamlink_live") - // Split supplied params into array - splitStreamlinkParams := strings.Split(liveStreamlinkParams, ",") - // remove param if contains 'twith-api-header' (set by different config value) - for i, param := 
range splitStreamlinkParams { - if strings.Contains(param, "twitch-api-header") { - log.Info().Msg("twitch-api-header found in streamlink paramters. Please move your token to the dedicated 'twitch token' field.") - splitStreamlinkParams = append(splitStreamlinkParams[:i], splitStreamlinkParams[i+1:]...) - } - } + configStreamlinkArgsArr := strings.Split(configStreamlinkArgs, ",") + proxyEnabled := false proxyFound := false - streamURL := "" + streamUrl := fmt.Sprintf("https://twitch.tv/%s", channel.Name) proxyHeader := "" - // check if user has proxies enabled - proxyEnabled := viper.GetBool("livestream.proxy_enabled") - whitelistedChannels := viper.GetStringSlice("livestream.proxy_whitelist") + // check if user has proxies enable + proxyEnabled = config.Get().Livestream.ProxyEnabled + whitelistedChannels := config.Get().Livestream.ProxyWhitelist // list of channels that are whitelisted from using proxy if proxyEnabled { - // check if channel is whitelisted - if utils.Contains(whitelistedChannels, ch.Name) { - log.Debug().Msgf("channel %s is whitelisted - not using proxy", ch.Name) + if utils.Contains(whitelistedChannels, channel.Name) { + log.Debug().Str("channel_name", channel.Name).Msg("channel is whitelisted, not using proxy") } else { - // Get proxy parameters - proxyParams := viper.GetString("livestream.proxy_parameters") - // Get proxy list - proxyListString := viper.Get("livestream.proxies") - var proxyList []config.ProxyListItem - for _, proxy := range proxyListString.([]interface{}) { - proxyListItem := config.ProxyListItem{ - URL: proxy.(map[string]interface{})["url"].(string), - Header: proxy.(map[string]interface{})["header"].(string), - } - proxyList = append(proxyList, proxyListItem) - } - log.Debug().Msgf("proxy list: %v", proxyList) + proxyParams := config.Get().Livestream.ProxyParameters + proxyList := config.Get().Livestream.Proxies + + log.Debug().Str("proxy_list", fmt.Sprintf("%v", proxyList)).Msg("proxy list") // test proxies - for i, proxy := range proxyList { - proxyUrl := fmt.Sprintf("%s/playlist/%s.m3u8%s", proxy.URL, ch.Name, proxyParams) + for _, proxy := range proxyList { + proxyUrl := fmt.Sprintf("%s/playlist/%s.m3u8%s", proxy.URL, channel.Name, proxyParams) if testProxyServer(proxyUrl, proxy.Header) { - log.Debug().Msgf("proxy %d is good", i) - log.Debug().Msgf("setting stream url to %s", proxyUrl) + log.Debug().Str("channel_name", channel.Name).Str("proxy_url", proxy.URL).Msg("proxy found") proxyFound = true - // set proxy stream url (include hls:// so streamlink can download it) - streamURL = fmt.Sprintf("hls://%s", proxyUrl) - // set proxy header + streamUrl = fmt.Sprintf("hls://%s", proxyUrl) proxyHeader = proxy.Header break } @@ -255,210 +131,553 @@ func DownloadTwitchLiveVideo(ctx context.Context, v *ent.Vod, ch *ent.Channel, l twitchToken := "" // check if user has twitch token set - configTwitchToken := viper.GetString("parameters.twitch_token") + configTwitchToken := config.Get().Parameters.TwitchToken if configTwitchToken != "" { - // check token is valid - err := twitch.CheckUserAccessToken(configTwitchToken) + // check if token is valid + err := twitch.CheckUserAccessToken(ctx, configTwitchToken) if err != nil { - log.Error().Err(err).Msg("error checking twitch token") + log.Error().Err(err).Msg("invalid twitch token") } else { twitchToken = configTwitchToken } } - // if proxy not enabled, or none are working, use twitch URL - if streamURL == "" { - streamURL = fmt.Sprintf("https://twitch.tv/%s", ch.Name) - } - // streamlink livestreams do not use 
the 30 fps suffix - v.Resolution = strings.Replace(v.Resolution, "30", "", 1) + video.Resolution = strings.Replace(video.Resolution, "30", "", 1) // streamlink livestreams expect 'audio_only' instead of 'audio' - if v.Resolution == "audio" { - v.Resolution = "audio_only" + if video.Resolution == "audio" { + video.Resolution = "audio_only" } - // Generate args for exec - args := []string{"--progress=force", "--force", streamURL, fmt.Sprintf("%s,best", v.Resolution)} + var cmdArgs []string + cmdArgs = append(cmdArgs, streamUrl, fmt.Sprintf("%s,best", video.Resolution), "--force-progress", "--force") - // if proxy requires headers, pass them + // pass proxy header if proxyHeader != "" { - args = append(args, "--add-headers", proxyHeader) + cmdArgs = append(cmdArgs, "--add-headers", proxyHeader) } + // pass twitch token as header if available - // only pass if not using proxy for security reasons + // ! token is passed only if proxy is not enabled for security reasons if twitchToken != "" && !proxyFound { - args = append(args, "--http-header", fmt.Sprintf("Authorization=OAuth %s", twitchToken)) + cmdArgs = append(cmdArgs, "--http-header", fmt.Sprintf("Authorization=OAuth %s", twitchToken)) } - // pass config params - args = append(args, splitStreamlinkParams...) + // pass config args + cmdArgs = append(cmdArgs, configStreamlinkArgsArr...) - filteredArgs := make([]string, 0, len(args)) - for _, arg := range args { + filteredArgs := make([]string, 0) + for _, arg := range cmdArgs { if arg != "" { - filteredArgs = append(filteredArgs, arg) + filteredArgs = append(filteredArgs, arg) //nolint:staticcheck } } - cmdArgs := append(filteredArgs, "-o", v.TmpVideoDownloadPath) + // output + filteredArgs = append(cmdArgs, "-o", video.TmpVideoDownloadPath) + + log.Debug().Str("channel", channel.Name).Str("cmd", strings.Join(filteredArgs, " ")).Msgf("running streamlink") - log.Debug().Msgf("streamlink live args: %v", cmdArgs) - log.Debug().Msgf("running: streamlink %s", strings.Join(cmdArgs, " ")) + // start chat download + startChat <- true - // Start chat download workflow if liveChatWorkflowId is set (chat is being archived) - if liveChatWorkflowId != "" { - // Notify chat download that video download is about to start - log.Debug().Msg("notifying chat download that video download is about to start") + cmd := osExec.CommandContext(ctx, "streamlink", filteredArgs...) 
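For reference, the proxy selection earlier in this function probes each configured proxy with a playlist URL of the form <proxy.URL>/playlist/<channel>.m3u8<proxy_parameters>; the first proxy that responds is used verbatim, prefixed with hls:// so streamlink reads it as a raw HLS playlist, and its configured header is later passed via --add-headers. With made-up values (proxy URL https://eu.proxy.example.com, parameters ?allow_source=true, channel somechannel) streamlink would be pointed at:

    hls://https://eu.proxy.example.com/playlist/somechannel.m3u8?allow_source=true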
- // !send signal to workflow to start chat download - temporal.InitializeTemporalClient() - signal := utils.ArchiveTwitchLiveChatStartSignal{ - Start: true, + cmd.Stderr = file + cmd.Stdout = file + + if err := cmd.Start(); err != nil { + return fmt.Errorf("error starting streamlink: %w", err) + } + + done := make(chan error) + go func() { + done <- cmd.Wait() + }() + + // Wait for the command to finish or context to be cancelled + select { + case <-ctx.Done(): + // Context was cancelled, kill the process + if err := cmd.Process.Kill(); err != nil { + return fmt.Errorf("failed to kill streamlink process: %v", err) } - err := temporal.GetTemporalClient().Client.SignalWorkflow(ctx, liveChatWorkflowId, "", "start-chat-download", signal) + <-done // Wait for copying to finish + return ctx.Err() + case err := <-done: + // Command finished normally if err != nil { - return fmt.Errorf("error sending signal to workflow to start chat download: %w", err) + // Streamlink will error when the stream goes offline - do not return an error + log.Info().Str("channel", channel.Name).Str("exit_error", err.Error()).Msg("finished downloading live video") + // Check if log output indicates no messages + noStreams, err := checkLogForNoStreams(logFilePath) + if err == nil && noStreams { + return utils.NewLiveVideoDownloadNoStreamError("no streams found") + } + return nil } } - // Execute streamlink - cmd := osExec.Command("streamlink", cmdArgs...) + return nil +} + +func PostProcessVideo(ctx context.Context, video ent.Vod) error { + env := config.GetEnvConfig() + configFfmpegArgs := config.Get().Parameters.VideoConvert + arr := strings.Fields(configFfmpegArgs) + ffmpegArgs := []string{"-y", "-hide_banner", "-i", video.TmpVideoDownloadPath} + + ffmpegArgs = append(ffmpegArgs, arr...) + ffmpegArgs = append(ffmpegArgs, video.TmpVideoConvertPath) - videoLogfile, err := os.Create(fmt.Sprintf("/logs/%s-video.log", v.ID)) + // open log file + logFilePath := fmt.Sprintf("%s/%s-video-convert.log", env.LogsDir, video.ID.String()) + file, err := os.Create(logFilePath) if err != nil { - log.Error().Err(err).Msg("error creating video logfile") - return err + return fmt.Errorf("failed to open log file: %w", err) } - defer videoLogfile.Close() - cmd.Stderr = videoLogfile - var stdout bytes.Buffer + defer file.Close() + log.Debug().Str("video_id", video.ID.String()).Msgf("logging ffmpeg output to %s", logFilePath) - multiWriterStdout := io.MultiWriter(videoLogfile, &stdout) + log.Debug().Str("video_id", video.ID.String()).Str("cmd", strings.Join(ffmpegArgs, " ")).Msgf("running ffmpeg") - cmd.Stdout = multiWriterStdout + cmd := osExec.CommandContext(ctx, "ffmpeg", ffmpegArgs...) 
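PostProcessVideo builds its ffmpeg invocation from a fixed prefix, the whitespace-split parameters.video_convert config value, and the video's temporary paths. Assuming a config value of "-c:v copy -c:a copy" (purely illustrative), the command that ends up in the log would be roughly:

    ffmpeg -y -hide_banner -i <TmpVideoDownloadPath> -c:v copy -c:a copy <TmpVideoConvertPath>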
- if err := cmd.Run(); err != nil { - // Streamlink will error when the stream is offline - do not log this as an error - log.Debug().Msgf("finished downloading live video for %s - %s", v.ExtID, err.Error()) - log.Debug().Msgf("streamlink live stdout: %s", stdout.String()) - if strings.Contains(stdout.String(), "No playable streams found on this URL") { - log.Error().Msgf("no playable streams found on this URL for %s", v.ExtID) - return utils.NewLiveVideoDownloadNoStreamError("no playable streams found on this URL") + cmd.Stderr = file + cmd.Stdout = file + + if err := cmd.Start(); err != nil { + return fmt.Errorf("error starting ffmpeg: %w", err) + } + + done := make(chan error) + go func() { + done <- cmd.Wait() + }() + + // Wait for the command to finish or context to be cancelled + select { + case <-ctx.Done(): + // Context was cancelled, kill the process + if err := cmd.Process.Kill(); err != nil { + return fmt.Errorf("failed to kill ffmpeg process: %v", err) + } + <-done // Wait for copying to finish + return ctx.Err() + case err := <-done: + // Command finished normally + if err != nil { + log.Error().Err(err).Msg("error running ffmpeg") + return fmt.Errorf("error running ffmpeg: %w", err) } - return nil } - log.Debug().Msgf("finished downloading live video for %s", v.ExtID) return nil } -func DownloadTwitchLiveChat(ctx context.Context, v *ent.Vod, ch *ent.Channel, q *ent.Queue) error { +func ConvertVideoToHLS(ctx context.Context, video ent.Vod) error { + env := config.GetEnvConfig() + ffmpegArgs := []string{"-y", "-hide_banner", "-i", video.TmpVideoConvertPath, "-c", "copy", "-start_number", "0", "-hls_time", "10", "-hls_list_size", "0", "-hls_segment_filename", fmt.Sprintf("%s/%s_segment%s.ts", video.TmpVideoHlsPath, video.ExtID, "%d"), "-f", "hls", fmt.Sprintf("%s/%s-video.m3u8", video.TmpVideoHlsPath, video.ExtID)} - log.Debug().Msg("setting chat start time") - chatStartTime := time.Now() - _, err := database.DB().Client.Queue.UpdateOneID(q.ID).SetChatStart(chatStartTime).Save(ctx) + // open log file + logFilePath := fmt.Sprintf("%s/%s-video-convert.log", env.LogsDir, video.ID.String()) + file, err := os.Create(logFilePath) if err != nil { - log.Error().Err(err).Msg("error setting chat start time") - return err + return fmt.Errorf("failed to open log file: %w", err) + } + defer file.Close() + + log.Debug().Str("video_id", video.ID.String()).Msgf("logging ffmpeg output to %s", logFilePath) + + log.Debug().Str("video_id", video.ID.String()).Str("cmd", strings.Join(ffmpegArgs, " ")).Msgf("running ffmpeg") + + cmd := osExec.CommandContext(ctx, "ffmpeg", ffmpegArgs...) 
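ConvertVideoToHLS segments the converted file into roughly 10-second .ts chunks (-hls_time 10, unlimited playlist size) inside the video's TmpVideoHlsPath, keyed by the external Twitch ID. For a hypothetical video with ExtID 123456789 the output directory would contain:

    <TmpVideoHlsPath>/123456789_segment0.ts
    <TmpVideoHlsPath>/123456789_segment1.ts
    ...
    <TmpVideoHlsPath>/123456789-video.m3u8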
+ + cmd.Stderr = file + cmd.Stdout = file + + if err := cmd.Start(); err != nil { + return fmt.Errorf("error starting ffmpeg: %w", err) + } + + done := make(chan error) + go func() { + done <- cmd.Wait() + }() + + // Wait for the command to finish or context to be cancelled + select { + case <-ctx.Done(): + // Context was cancelled, kill the process + if err := cmd.Process.Kill(); err != nil { + return fmt.Errorf("failed to kill ffmpeg process: %v", err) + } + <-done // Wait for copying to finish + return ctx.Err() + case err := <-done: + // Command finished normally + if err != nil { + log.Error().Err(err).Msg("error running ffmpeg") + return fmt.Errorf("error running ffmpeg: %w", err) + } + } + + return nil +} + +func DownloadTwitchChat(ctx context.Context, video ent.Vod) error { + env := config.GetEnvConfig() + // open log file + logFilePath := fmt.Sprintf("%s/%s-chat.log", env.LogsDir, video.ID.String()) + file, err := os.Create(logFilePath) + if err != nil { + return fmt.Errorf("failed to open log file: %w", err) + } + defer file.Close() + log.Debug().Str("video_id", video.ID.String()).Msgf("logging streamlink output to %s", logFilePath) + + var cmdArgs []string + cmdArgs = append(cmdArgs, "chatdownload", "--id", video.ExtID, "--embed-images", "--collision", "overwrite", "-o", video.TmpChatDownloadPath) + + log.Debug().Str("video_id", video.ID.String()).Str("cmd", strings.Join(cmdArgs, " ")).Msgf("running TwitchDownloaderCLI") + + cmd := osExec.CommandContext(ctx, "TwitchDownloaderCLI", cmdArgs...) + + cmd.Stderr = file + cmd.Stdout = file + + if err := cmd.Start(); err != nil { + return fmt.Errorf("error starting TwitchDownloader: %w", err) + } + + done := make(chan error) + go func() { + done <- cmd.Wait() + }() + + // Wait for the command to finish or context to be cancelled + select { + case <-ctx.Done(): + // Context was cancelled, kill the process + if err := cmd.Process.Kill(); err != nil { + return fmt.Errorf("failed to kill TwitchDownloaderCLI process: %v", err) + } + <-done // Wait for copying to finish + return ctx.Err() + case err := <-done: + // Command finished normally + if err != nil { + if exitError, ok := err.(*osExec.ExitError); ok { + log.Error().Err(err).Msg("error running TwitchDownloaderCLI") + return fmt.Errorf("error running TwitchDownloaderCLI exit code %d: %w", exitError.ExitCode(), exitError) + } + log.Error().Err(err).Msg("error running TwitchDownloaderCLI") + return fmt.Errorf("error running TwitchDownloaderCLI: %w", err) + } } - cmd := osExec.Command("chat_downloader", fmt.Sprintf("https://twitch.tv/%s", ch.Name), "--output", v.TmpLiveChatDownloadPath, "-q") + return nil +} - chatLogfile, err := os.Create(fmt.Sprintf("/logs/%s-chat.log", v.ID)) +func DownloadTwitchLiveChat(ctx context.Context, video ent.Vod, channel ent.Channel, queue ent.Queue) error { + env := config.GetEnvConfig() + // set chat start time + chatStarTime := time.Now() + _, err := queue.Update().SetChatStart(chatStarTime).Save(ctx) if err != nil { - log.Error().Err(err).Msg("error creating chat logfile") return err } - defer chatLogfile.Close() - cmd.Stdout = chatLogfile - cmd.Stderr = chatLogfile - // Append string to chatLogFile - _, err = chatLogfile.WriteString("Chat downloader started. 
It it unlikely that you will see further output in this log.") + + // open log file + logFilePath := fmt.Sprintf("%s/%s-chat.log", env.LogsDir, video.ID.String()) + file, err := os.Create(logFilePath) if err != nil { - log.Error().Err(err).Msg("error writing to chat logfile") + return fmt.Errorf("failed to open log file: %w", err) } + defer file.Close() + log.Debug().Str("video_id", video.ID.String()).Msgf("logging chat downloader output to %s", logFilePath) + + var cmdArgs []string + cmdArgs = append(cmdArgs, fmt.Sprintf("https://twitch.tv/%s", channel.Name), "--output", video.TmpLiveChatDownloadPath, "-q") + + log.Debug().Str("video_id", video.ID.String()).Str("cmd", strings.Join(cmdArgs, " ")).Msgf("running chat_downloader") + + cmd := osExec.CommandContext(ctx, "chat_downloader", cmdArgs...) + + cmd.Stderr = file + cmd.Stdout = file if err := cmd.Start(); err != nil { - log.Error().Err(err).Msg("error starting chat_downloader for live chat download") - return err + return fmt.Errorf("error starting TwitchDownloader: %w", err) } - // Wait for the command to finish - if err := cmd.Wait(); err != nil { - // Check if the error is due to a signal - if exitErr, ok := err.(*exec.ExitError); ok { - if status, ok := exitErr.Sys().(interface{ ExitStatus() int }); ok { - if status.ExitStatus() != -1 { - fmt.Println("chat_downloader terminated by signal:", status.ExitStatus()) + done := make(chan error) + go func() { + done <- cmd.Wait() + }() + + // Wait for the command to finish or context to be cancelled + select { + case <-ctx.Done(): + // Context was cancelled, kill the process + if err := cmd.Process.Kill(); err != nil { + return fmt.Errorf("failed to kill TwitchDownloaderCLI process: %v", err) + } + <-done // Wait for copying to finish + return ctx.Err() + case err := <-done: + // Command finished normally + if err != nil { + if exitError, ok := err.(*osExec.ExitError); ok { + if status, ok := exitError.Sys().(interface{ ExitStatus() int }); ok { + if status.ExitStatus() != -1 { + fmt.Println("chat_downloader terminated - exit code:", status.ExitStatus()) + } } } + log.Error().Err(err).Msg("error running chat_downloader") + return fmt.Errorf("error running chat_downloader: %w", err) } + } - fmt.Println("error in chat_downloader for live chat download:", err) + return nil +} + +func RenderTwitchChat(ctx context.Context, video ent.Vod) error { + env := config.GetEnvConfig() + // open log file + logFilePath := fmt.Sprintf("%s/%s-chat-render.log", env.LogsDir, video.ID.String()) + file, err := os.Create(logFilePath) + if err != nil { + return fmt.Errorf("failed to open log file: %w", err) + } + defer file.Close() + log.Debug().Str("video_id", video.ID.String()).Msgf("logging chat_downloader output to %s", logFilePath) + + var cmdArgs []string + + configRenderArgs := config.Get().Parameters.ChatRender + configRenderArgsArr := strings.Fields(configRenderArgs) + + cmdArgs = append(cmdArgs, "chatrender", "-i", video.TmpChatDownloadPath, "--collision", "overwrite") + + cmdArgs = append(cmdArgs, configRenderArgsArr...) + cmdArgs = append(cmdArgs, "-o", video.TmpChatRenderPath) + + log.Debug().Str("video_id", video.ID.String()).Str("cmd", strings.Join(cmdArgs, " ")).Msgf("running TwitchDownloaderCLI") + + cmd := osExec.CommandContext(ctx, "TwitchDownloaderCLI", cmdArgs...) 
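Further down in this function, a render failure whose log contains "Sequence contains no elements" is translated into the sentinel errors.ErrNoChatMessages from the new internal/errors package, so callers can treat a chat-less VOD as a soft failure instead of a hard error. A sketch of how a caller might branch on it; the handleRender wrapper and its skip-and-continue policy are illustrative, not part of this patch.

package example

import (
	"context"

	"github.com/rs/zerolog/log"
	"github.com/zibbp/ganymede/ent"
	"github.com/zibbp/ganymede/internal/errors"
	"github.com/zibbp/ganymede/internal/exec"
)

// handleRender treats a VOD with no chat messages as a non-fatal condition.
func handleRender(ctx context.Context, video ent.Vod) error {
	if err := exec.RenderTwitchChat(ctx, video); err != nil {
		if errors.Is(err, errors.ErrNoChatMessages) {
			log.Info().Str("video_id", video.ID.String()).Msg("no chat messages; skipping chat render")
			return nil
		}
		return err
	}
	return nil
}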
+ + cmd.Stderr = file + cmd.Stdout = file + + if err := cmd.Start(); err != nil { + return fmt.Errorf("error starting TwitchDownloader: %w", err) + } + + done := make(chan error) + go func() { + done <- cmd.Wait() + }() + + // Wait for the command to finish or context to be cancelled + select { + case <-ctx.Done(): + // Context was cancelled, kill the process + if err := cmd.Process.Kill(); err != nil { + return fmt.Errorf("failed to kill TwitchDownloaderCLI process: %v", err) + } + <-done // Wait for copying to finish + return ctx.Err() + case err := <-done: + // Command finished normally + if err != nil { + if exitError, ok := err.(*osExec.ExitError); ok { + log.Error().Err(err).Msg("error running TwitchDownloaderCLI") + return fmt.Errorf("error running TwitchDownloaderCLI exit code %d: %w", exitError.ExitCode(), exitError) + } + + // Check if log output indicates no messages + noElements, err := checkLogForNoElements(logFilePath) + if err == nil && noElements { + return errors.ErrNoChatMessages + } + + log.Error().Err(err).Msg("error running TwitchDownloaderCLI") + return fmt.Errorf("error running TwitchDownloaderCLI: %w", err) + } } - log.Debug().Msgf("finished downloading live chat for %s", v.ExtID) return nil } -func GetVideoDuration(path string) (int, error) { - log.Debug().Msg("getting video duration") - cmd := osExec.Command("ffprobe", "-v", "error", "-show_entries", "format=duration", "-of", "default=noprint_wrappers=1:nokey=1", path) +// checkLogForNoElements returns true if the log file contains the expected message. +// +// Used to check if the chat render failure was caused by no messages in the chat. +func checkLogForNoElements(logFilePath string) (bool, error) { + file, err := os.Open(logFilePath) + if err != nil { + return false, fmt.Errorf("failed to open log file: %w", err) + } + defer file.Close() + + scanner := bufio.NewScanner(file) + for scanner.Scan() { + if strings.Contains(scanner.Text(), "Sequence contains no elements") { + return true, nil + } + } + + if err := scanner.Err(); err != nil { + return false, fmt.Errorf("error reading log file: %w", err) + } + + return false, nil +} + +func GetVideoDuration(ctx context.Context, path string) (int, error) { + cmd := osExec.CommandContext(ctx, "ffprobe", "-v", "error", "-show_entries", "format=duration", "-of", "default=noprint_wrappers=1:nokey=1", path) + out, err := cmd.Output() if err != nil { - log.Error().Err(err).Msg("error getting video duration") - return 1, err + return 0, fmt.Errorf("error running ffprobe: %w", err) } - durOut := strings.TrimSpace(string(out)) - durFloat, err := strconv.ParseFloat(durOut, 64) + durationOut := strings.TrimSpace(string(out)) + + duration, err := strconv.ParseFloat(durationOut, 64) if err != nil { - log.Error().Err(err).Msg("error converting video duration") - return 1, err + return 0, fmt.Errorf("error parsing duration: %w", err) } - duration := int(durFloat) - log.Debug().Msgf("video duration: %d", duration) - return duration, nil + return int(duration), nil } -func GetFfprobeData(path string) (map[string]interface{}, error) { - cmd := osExec.Command("ffprobe", "-hide_banner", "-v", "quiet", "-print_format", "json", "-show_format", "-show_streams", path) - out, err := cmd.Output() +func UpdateTwitchChat(ctx context.Context, video ent.Vod) error { + env := config.GetEnvConfig() + // open log file + logFilePath := fmt.Sprintf("%s/%s-chat-convert.log", env.LogsDir, video.ID.String()) + file, err := os.Create(logFilePath) if err != nil { - log.Error().Err(err).Msgf("error getting 
ffprobe data for %s - err: %v", path, err) - return nil, fmt.Errorf("error getting ffprobe data for %s - err: %w ", path, err) + return fmt.Errorf("failed to open log file: %w", err) } - var data map[string]interface{} - if err := json.Unmarshal(out, &data); err != nil { - log.Error().Err(err).Msg("error unmarshalling ffprobe data") - return nil, err + defer file.Close() + log.Debug().Str("video_id", video.ID.String()).Msgf("logging TwitchDownloader output to %s", logFilePath) + + var cmdArgs []string + cmdArgs = append(cmdArgs, "chatupdate", "-i", video.TmpLiveChatConvertPath, "--embed-missing", "--collision", "overwrite", "-o", video.TmpChatDownloadPath) + + log.Debug().Str("video_id", video.ID.String()).Str("cmd", strings.Join(cmdArgs, " ")).Msgf("running TwitchDownloaderCLI") + + cmd := osExec.CommandContext(ctx, "TwitchDownloaderCLI", cmdArgs...) + + cmd.Stderr = file + cmd.Stdout = file + + if err := cmd.Start(); err != nil { + return fmt.Errorf("error starting TwitchDownloader: %w", err) } - return data, nil + + done := make(chan error) + go func() { + done <- cmd.Wait() + }() + + // Wait for the command to finish or context to be cancelled + select { + case <-ctx.Done(): + // Context was cancelled, kill the process + if err := cmd.Process.Kill(); err != nil { + return fmt.Errorf("failed to kill TwitchDownloader process: %v", err) + } + <-done // Wait for copying to finish + return ctx.Err() + case err := <-done: + // Command finished normally + if err != nil { + if exitError, ok := err.(*osExec.ExitError); ok { + log.Error().Err(err).Str("exitCode", strconv.Itoa(exitError.ExitCode())).Str("exit_error", exitError.Error()).Msg("error running TwitchDownloader") + return fmt.Errorf("error running TwitchDownloader") + } + return fmt.Errorf("error running TwitchDownloader: %w", err) + } + } + + return nil } -func TwitchChatUpdate(v *ent.Vod) error { +// checkLogForNoStreams returns true if the log file contains the expected message. +// +// Used to check if live stream download failed because no streams were found. +func checkLogForNoStreams(logFilePath string) (bool, error) { + file, err := os.Open(logFilePath) + if err != nil { + return false, fmt.Errorf("failed to open log file: %w", err) + } + defer file.Close() - cmd := osExec.Command("TwitchDownloaderCLI", "chatupdate", "-i", v.TmpLiveChatConvertPath, "--embed-missing", "-o", v.TmpChatDownloadPath) + scanner := bufio.NewScanner(file) + for scanner.Scan() { + if strings.Contains(scanner.Text(), "No playable streams found on this URL") { + return true, nil + } + } - chatLogfile, err := os.Create(fmt.Sprintf("/logs/%s-chat-convert.log", v.ID)) + if err := scanner.Err(); err != nil { + return false, fmt.Errorf("error reading log file: %w", err) + } + + return false, nil +} + +func ConvertTwitchVodVideo(v *ent.Vod) error { + env := config.GetEnvConfig() + // Fetch config params + ffmpegParams := config.Get().Parameters.VideoConvert + // Split supplied params into array + arr := strings.Fields(ffmpegParams) + // Generate args for exec + argArr := []string{"-y", "-hide_banner", "-i", v.TmpVideoDownloadPath} + // add each config param to arg + argArr = append(argArr, arr...) + // add output file + argArr = append(argArr, v.TmpVideoConvertPath) + log.Debug().Msgf("video convert args: %v", argArr) + // Execute ffmpeg + cmd := osExec.Command("ffmpeg", argArr...) 
+ + videoConvertLogfile, err := os.Create(fmt.Sprintf("%s/%s-video-convert.log", env.LogsDir, v.ID)) if err != nil { - log.Error().Err(err).Msg("error creating chat convert logfile") + log.Error().Err(err).Msg("error creating video convert logfile") return err } - defer chatLogfile.Close() - cmd.Stdout = chatLogfile - cmd.Stderr = chatLogfile + defer videoConvertLogfile.Close() + cmd.Stdout = videoConvertLogfile + cmd.Stderr = videoConvertLogfile if err := cmd.Run(); err != nil { - log.Error().Err(err).Msg("error running TwitchDownloaderCLI for chat update") + log.Error().Err(err).Msg("error running ffmpeg for vod video convert") return err } - log.Debug().Msgf("finished updating chat for %s", v.ExtID) + log.Debug().Msgf("finished vod video convert for %s", v.ExtID) return nil } +func GetFfprobeData(path string) (map[string]interface{}, error) { + cmd := osExec.Command("ffprobe", "-hide_banner", "-v", "quiet", "-print_format", "json", "-show_format", "-show_streams", path) + out, err := cmd.Output() + if err != nil { + log.Error().Err(err).Msgf("error getting ffprobe data for %s - err: %v", path, err) + return nil, fmt.Errorf("error getting ffprobe data for %s - err: %w ", path, err) + } + var data map[string]interface{} + if err := json.Unmarshal(out, &data); err != nil { + log.Error().Err(err).Msg("error unmarshalling ffprobe data") + return nil, err + } + return data, nil +} + // test proxy server by making http request to proxy server // if request is successful return true // timeout after 5 seconds @@ -490,3 +709,52 @@ func testProxyServer(url string, header string) bool { log.Debug().Msg("proxy server test successful") return true } + +// GenerateStaticThumbnail generates static thumbnail for video. +// +// Resolution is optional and if not set the thumbnail will be generated at the original resolution. +func GenerateStaticThumbnail(ctx context.Context, videoPath string, position int, thumbnailPath string, resolution string) error { + log.Info().Str("videoPath", videoPath).Str("position", strconv.Itoa(position)).Str("thumbnailPath", thumbnailPath).Str("resolution", resolution).Msg("generating static thumbnail") + // placing -ss 1 before the input is faster + // https://stackoverflow.com/questions/27568254/how-to-extract-1-screenshot-for-a-video-with-ffmpeg-at-a-given-time + ffmpegArgs := []string{"-y", "-hide_banner", "-ss", strconv.Itoa(position), "-i", videoPath, "-vframes", "1", "-update", "1"} + if resolution != "" { + ffmpegArgs = append(ffmpegArgs, "-s", resolution) + } + + ffmpegArgs = append(ffmpegArgs, thumbnailPath) + + cmd := osExec.CommandContext(ctx, "ffmpeg", ffmpegArgs...) 
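GenerateStaticThumbnail places -ss before the input so ffmpeg seeks first, then grabs a single frame, optionally scaled with -s. For an illustrative call with position 30 and resolution "640x360", the assembled command is:

    ffmpeg -y -hide_banner -ss 30 -i <videoPath> -vframes 1 -update 1 -s 640x360 <thumbnailPath>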
+ + var stdout, stderr bytes.Buffer + cmd.Stdout = &stdout + cmd.Stderr = &stderr + + if err := cmd.Start(); err != nil { + return fmt.Errorf("error starting ffmpeg: %w", err) + } + + done := make(chan error) + go func() { + done <- cmd.Wait() + }() + + // Wait for the command to finish or context to be cancelled + select { + case <-ctx.Done(): + // Context was cancelled, kill the process + if err := cmd.Process.Kill(); err != nil { + return fmt.Errorf("failed to kill ffmpeg process: %v", err) + } + <-done // Wait for copying to finish + return ctx.Err() + case err := <-done: + // Command finished normally + if err != nil { + log.Error().Err(err).Str("ffmpeg_stderr", stderr.String()).Str("ffmpeg_stdout", stdout.String()).Msg("error running ffmpeg") + return fmt.Errorf("error running ffmpeg: %w", err) + } + } + + return nil +} diff --git a/internal/exec/exec_test.go b/internal/exec/exec_test.go new file mode 100644 index 00000000..10623319 --- /dev/null +++ b/internal/exec/exec_test.go @@ -0,0 +1 @@ +package exec_test diff --git a/internal/live/live.go b/internal/live/live.go index 26d5c0fd..be73d1b5 100644 --- a/internal/live/live.go +++ b/internal/live/live.go @@ -2,8 +2,10 @@ package live import ( "context" + "errors" "fmt" "regexp" + "strings" "time" "github.com/google/uuid" @@ -16,35 +18,37 @@ import ( "github.com/zibbp/ganymede/ent/livecategory" "github.com/zibbp/ganymede/ent/livetitleregex" "github.com/zibbp/ganymede/ent/queue" + entVod "github.com/zibbp/ganymede/ent/vod" "github.com/zibbp/ganymede/internal/archive" "github.com/zibbp/ganymede/internal/database" "github.com/zibbp/ganymede/internal/notification" - "github.com/zibbp/ganymede/internal/twitch" + "github.com/zibbp/ganymede/internal/platform" "github.com/zibbp/ganymede/internal/utils" ) type Service struct { Store *database.Database - TwitchService *twitch.Service ArchiveService *archive.Service + PlatformTwitch platform.Platform } type Live struct { - ID uuid.UUID `json:"id"` - WatchLive bool `json:"watch_live"` - WatchVod bool `json:"watch_vod"` - DownloadArchives bool `json:"download_archives"` - DownloadHighlights bool `json:"download_highlights"` - DownloadUploads bool `json:"download_uploads"` - IsLive bool `json:"is_live"` - ArchiveChat bool `json:"archive_chat"` - Resolution string `json:"resolution"` - LastLive time.Time `json:"last_live"` - RenderChat bool `json:"render_chat"` - DownloadSubOnly bool `json:"download_sub_only"` - Categories []string `json:"categories"` - MaxAge int64 `json:"max_age"` - TitleRegex []ent.LiveTitleRegex `json:"title_regex"` + ID uuid.UUID `json:"id"` + WatchLive bool `json:"watch_live"` + WatchVod bool `json:"watch_vod"` + DownloadArchives bool `json:"download_archives"` + DownloadHighlights bool `json:"download_highlights"` + DownloadUploads bool `json:"download_uploads"` + IsLive bool `json:"is_live"` + ArchiveChat bool `json:"archive_chat"` + Resolution string `json:"resolution"` + LastLive time.Time `json:"last_live"` + RenderChat bool `json:"render_chat"` + DownloadSubOnly bool `json:"download_sub_only"` + Categories []string `json:"categories"` + ApplyCategoriesToLive bool `json:"apply_categories_to_live"` + MaxAge int64 `json:"max_age"` + TitleRegex []ent.LiveTitleRegex `json:"title_regex"` } type ConvertChat struct { @@ -63,8 +67,8 @@ type ArchiveLive struct { RenderChat bool `json:"render_chat"` } -func NewService(store *database.Database, twitchService *twitch.Service, archiveService *archive.Service) *Service { - return &Service{Store: store, TwitchService: twitchService, 
ArchiveService: archiveService} +func NewService(store *database.Database, archiveService *archive.Service, platformTwitch platform.Platform) *Service { + return &Service{Store: store, ArchiveService: archiveService, PlatformTwitch: platformTwitch} } func (s *Service) GetLiveWatchedChannels(c echo.Context) ([]*ent.Live, error) { @@ -85,7 +89,7 @@ func (s *Service) AddLiveWatchedChannel(c echo.Context, liveDto Live) (*ent.Live return nil, fmt.Errorf("channel already watched") } - l, err := s.Store.Client.Live.Create().SetChannelID(liveDto.ID).SetWatchLive(liveDto.WatchLive).SetWatchVod(liveDto.WatchVod).SetDownloadArchives(liveDto.DownloadArchives).SetDownloadHighlights(liveDto.DownloadHighlights).SetDownloadUploads(liveDto.DownloadUploads).SetResolution(liveDto.Resolution).SetArchiveChat(liveDto.ArchiveChat).SetRenderChat(liveDto.RenderChat).SetDownloadSubOnly(liveDto.DownloadSubOnly).SetVideoAge(liveDto.MaxAge).Save(c.Request().Context()) + l, err := s.Store.Client.Live.Create().SetChannelID(liveDto.ID).SetWatchLive(liveDto.WatchLive).SetWatchVod(liveDto.WatchVod).SetDownloadArchives(liveDto.DownloadArchives).SetDownloadHighlights(liveDto.DownloadHighlights).SetDownloadUploads(liveDto.DownloadUploads).SetResolution(liveDto.Resolution).SetArchiveChat(liveDto.ArchiveChat).SetRenderChat(liveDto.RenderChat).SetDownloadSubOnly(liveDto.DownloadSubOnly).SetVideoAge(liveDto.MaxAge).SetApplyCategoriesToLive(liveDto.ApplyCategoriesToLive).Save(c.Request().Context()) if err != nil { return nil, fmt.Errorf("error adding watched channel: %v", err) } @@ -111,7 +115,7 @@ func (s *Service) AddLiveWatchedChannel(c echo.Context, liveDto Live) (*ent.Live } func (s *Service) UpdateLiveWatchedChannel(c echo.Context, liveDto Live) (*ent.Live, error) { - l, err := s.Store.Client.Live.UpdateOneID(liveDto.ID).SetWatchLive(liveDto.WatchLive).SetWatchVod(liveDto.WatchVod).SetDownloadArchives(liveDto.DownloadArchives).SetDownloadHighlights(liveDto.DownloadHighlights).SetDownloadUploads(liveDto.DownloadUploads).SetResolution(liveDto.Resolution).SetArchiveChat(liveDto.ArchiveChat).SetRenderChat(liveDto.RenderChat).SetDownloadSubOnly(liveDto.DownloadSubOnly).SetVideoAge(liveDto.MaxAge).Save(c.Request().Context()) + l, err := s.Store.Client.Live.UpdateOneID(liveDto.ID).SetWatchLive(liveDto.WatchLive).SetWatchVod(liveDto.WatchVod).SetDownloadArchives(liveDto.DownloadArchives).SetDownloadHighlights(liveDto.DownloadHighlights).SetDownloadUploads(liveDto.DownloadUploads).SetResolution(liveDto.Resolution).SetArchiveChat(liveDto.ArchiveChat).SetRenderChat(liveDto.RenderChat).SetDownloadSubOnly(liveDto.DownloadSubOnly).SetVideoAge(liveDto.MaxAge).SetApplyCategoriesToLive(liveDto.ApplyCategoriesToLive).Save(c.Request().Context()) if err != nil { return nil, fmt.Errorf("error updating watched channel: %v", err) } @@ -189,10 +193,10 @@ func (s *Service) DeleteLiveWatchedChannel(c echo.Context, lID uuid.UUID) error // s.Every(5).Minutes().Do(Check) //} -func (s *Service) Check() error { +func (s *Service) Check(ctx context.Context) error { log.Debug().Msg("checking live channels") // get live watched channels from database - liveWatchedChannels, err := s.Store.Client.Live.Query().Where(live.WatchLive(true)).WithChannel().WithTitleRegex(func(ltrq *ent.LiveTitleRegexQuery) { + liveWatchedChannels, err := s.Store.Client.Live.Query().Where(live.WatchLive(true)).WithChannel().WithCategories().WithTitleRegex(func(ltrq *ent.LiveTitleRegexQuery) { ltrq.Where(livetitleregex.ApplyToVideosEQ(false)) }).All(context.Background()) if err != nil 
{ @@ -213,32 +217,37 @@ func (s *Service) Check() error { liveWatchedChannelsSplit = append(liveWatchedChannelsSplit, liveWatchedChannels[i:end]) } - var streams []twitch.Live + var streams []platform.LiveStreamInfo + channels := make([]string, 0) // generate query string for twitch api for _, lwc := range liveWatchedChannelsSplit { - var queryString string - for i, lwc := range lwc { - if i == 0 { - queryString += "?user_login=" + lwc.Edges.Channel.Name - } else { - queryString += "&user_login=" + lwc.Edges.Channel.Name - } + for _, lwc := range lwc { + channels = append(channels, lwc.Edges.Channel.Name) } - twitchStreams, err := s.TwitchService.GetStreams(queryString) + log.Debug().Str("channels", strings.Join(channels, ", ")).Msg("checking live streams") + + twitchStreams, err := s.PlatformTwitch.GetLiveStreams(ctx, channels) if err != nil { - log.Error().Err(err).Msg("error getting twitch streams") + if errors.Is(err, &platform.ErrorNoStreamsFound{}) { + log.Debug().Msg("no streams found") + continue + } else { + return fmt.Errorf("error getting live streams: %v", err) + } } - streams = append(streams, twitchStreams.Data...) + + streams = append(streams, twitchStreams...) } // check if live stream is online OUTER: for _, lwc := range liveWatchedChannels { // Check if LWC is in twitchStreams.Data - stream := stringInSlice(lwc.Edges.Channel.Name, streams) + stream := channelInLiveStreamInfo(lwc.Edges.Channel.Name, streams) if len(stream.ID) > 0 { if !lwc.IsLive { // stream is live + log.Debug().Str("channel", lwc.Edges.Channel.Name).Msg("stream is live; checking for restrictions before archiving") // check for any user-constraints before archiving if lwc.Edges.TitleRegex != nil && len(lwc.Edges.TitleRegex) > 0 { // run regexes against title @@ -263,6 +272,28 @@ OUTER: } } + tmpCategoryNames := make([]string, 0) + for _, category := range lwc.Edges.Categories { + tmpCategoryNames = append(tmpCategoryNames, category.Name) + } + + // check for category restrictions + if lwc.ApplyCategoriesToLive && len(lwc.Edges.Categories) > 0 { + found := false + for _, category := range lwc.Edges.Categories { + if strings.EqualFold(category.Name, stream.GameName) { + log.Debug().Str("category", stream.GameName).Str("category_restrictions", strings.Join(tmpCategoryNames, ", ")).Msgf("%s matches category restrictions", lwc.Edges.Channel.Name) + found = true + break + } + } + + if !found { + log.Debug().Str("category", stream.GameName).Str("category_restrictions", strings.Join(tmpCategoryNames, ", ")).Msgf("%s does not match category restrictions", lwc.Edges.Channel.Name) + continue + } + } + log.Debug().Msgf("%s is now live", lwc.Edges.Channel.Name) // Stream is online, update database _, err := s.Store.Client.Live.UpdateOneID(lwc.ID).SetIsLive(true).Save(context.Background()) @@ -281,13 +312,23 @@ OUTER: } } // Archive stream - archiveResp, err := s.ArchiveService.ArchiveTwitchLive(lwc, stream) + err = s.ArchiveService.ArchiveLivestream(ctx, archive.ArchiveVideoInput{ + ChannelId: lwc.Edges.Channel.ID, + Quality: utils.VodQuality(lwc.Resolution), + ArchiveChat: lwc.ArchiveChat, + RenderChat: lwc.RenderChat, + }) if err != nil { - log.Error().Err(err).Msg("error archiving twitch live") + log.Error().Err(err).Msg("error archiving twitch livestream") } // Notification // Fetch channel for notification - go notification.SendLiveNotification(lwc.Edges.Channel, archiveResp.VOD, archiveResp.Queue) + vod, err := 
s.Store.Client.Vod.Query().Where(entVod.ExtStreamID(stream.ID)).WithChannel().WithQueue().Order(entVod.ByCreatedAt()).Limit(1).First(ctx) + if err != nil { + log.Error().Err(err).Msg("error getting vod") + continue + } + go notification.SendLiveNotification(lwc.Edges.Channel, vod, vod.Edges.Queue) } } else { if lwc.IsLive { @@ -300,68 +341,15 @@ OUTER: } } } - - return nil -} - -// func (s *Service) ConvertChat(c echo.Context, convertChatDto ConvertChat) error { -// i, err := strconv.ParseInt(convertChatDto.ChatStart, 10, 64) -// if err != nil { -// return fmt.Errorf("error parsing chat start: %v", err) -// } -// tm := time.Unix(i, 0) -// err = utils.ConvertTwitchLiveChatToVodChat( -// fmt.Sprintf("/tmp/%s", convertChatDto.FileName), -// convertChatDto.ChannelName, -// convertChatDto.VodID, -// convertChatDto.VodExternalID, -// convertChatDto.ChannelID, -// tm, -// ) -// if err != nil { -// return fmt.Errorf("error converting chat: %v", err) -// } -// return nil -// } - -func (s *Service) ArchiveLiveChannel(c echo.Context, archiveLiveChannelDto ArchiveLive) error { - // fetch channel - channel, err := s.Store.Client.Channel.Query().Where(channel.ID(archiveLiveChannelDto.ChannelID)).Only(c.Request().Context()) - if err != nil { - if _, ok := err.(*ent.NotFoundError); ok { - return fmt.Errorf("channel not found") - } - return fmt.Errorf("error fetching channel: %v", err) - } - - // check if channel is live - queryString := "?user_login=" + channel.Name - twitchStream, err := s.TwitchService.GetStreams(queryString) - if err != nil { - return fmt.Errorf("error getting twitch streams: %v", err) - } - if len(twitchStream.Data) == 0 { - return fmt.Errorf("channel is not live") - } - // create a temp live watched channel - lwc := &ent.Live{ - ArchiveChat: archiveLiveChannelDto.ArchiveChat, - RenderChat: archiveLiveChannelDto.RenderChat, - Resolution: archiveLiveChannelDto.Resolution, - } - _, err = s.ArchiveService.ArchiveTwitchLive(lwc, twitchStream.Data[0]) - if err != nil { - log.Error().Err(err).Msg("error archiving twitch livestream") - } - return nil } -func stringInSlice(a string, list []twitch.Live) twitch.Live { +// channelInLiveStreamInfo searches for a string in a slice of LiveStreamInfo and returns the first match. 
+func channelInLiveStreamInfo(a string, list []platform.LiveStreamInfo) platform.LiveStreamInfo { for _, b := range list { if b.UserLogin == a { return b } } - return twitch.Live{} + return platform.LiveStreamInfo{} } diff --git a/internal/live/vod.go b/internal/live/vod.go index d8f7dd59..cf66dc28 100644 --- a/internal/live/vod.go +++ b/internal/live/vod.go @@ -6,14 +6,17 @@ import ( "strings" "time" + "github.com/rs/zerolog" "github.com/rs/zerolog/log" - "github.com/spf13/viper" "github.com/zibbp/ganymede/ent" "github.com/zibbp/ganymede/ent/channel" "github.com/zibbp/ganymede/ent/live" "github.com/zibbp/ganymede/ent/livetitleregex" "github.com/zibbp/ganymede/ent/vod" - "github.com/zibbp/ganymede/internal/twitch" + "github.com/zibbp/ganymede/internal/archive" + "github.com/zibbp/ganymede/internal/config" + "github.com/zibbp/ganymede/internal/platform" + "github.com/zibbp/ganymede/internal/utils" ) type TwitchVideoResponse struct { @@ -55,20 +58,22 @@ type UserName string type Viewable string -func (s *Service) CheckVodWatchedChannels() { +func (s *Service) CheckVodWatchedChannels(ctx context.Context, logger zerolog.Logger) error { // Get channels from DB channels, err := s.Store.Client.Live.Query().Where(live.WatchVod(true)).WithChannel().WithCategories().WithTitleRegex(func(ltrq *ent.LiveTitleRegexQuery) { ltrq.Where(livetitleregex.ApplyToVideosEQ(true)) }).All(context.Background()) if err != nil { - log.Debug().Err(err).Msg("error getting channels") - return + return err } + if len(channels) == 0 { - log.Debug().Msg("No channels to check") - return + logger.Info().Msg("no channels to check") + return nil } - log.Info().Msgf("Checking %d channels for new videos", len(channels)) + + logger.Info().Msgf("checking %d channels for new videos", len(channels)) + for _, watch := range channels { // Check if channel has category restrictions var channelVideoCategories []string @@ -76,33 +81,33 @@ func (s *Service) CheckVodWatchedChannels() { for _, category := range watch.Edges.Categories { channelVideoCategories = append(channelVideoCategories, category.Name) } - log.Debug().Msgf("Channel %s has category restrictions: %s", watch.Edges.Channel.Name, strings.Join(channelVideoCategories, ", ")) + logger.Debug().Msgf("channel %s has category restrictions: %s", watch.Edges.Channel.Name, strings.Join(channelVideoCategories, ", ")) } - var videos []twitch.Video + var videos []platform.VideoInfo // If archives is enabled, fetch all videos if watch.DownloadArchives { - tmpVideos, err := twitch.GetVideosByUser(watch.Edges.Channel.ExtID, "archive") + tmpVideos, err := s.PlatformTwitch.GetVideos(ctx, watch.Edges.Channel.ExtID, platform.VideoTypeArchive, false, false) if err != nil { - log.Error().Err(err).Msg("error getting videos") + logger.Error().Str("channel", watch.Edges.Channel.Name).Err(err).Msg("error getting videos") continue } videos = append(videos, tmpVideos...) } // If highlights is enabled, fetch all videos if watch.DownloadHighlights { - tmpVideos, err := twitch.GetVideosByUser(watch.Edges.Channel.ExtID, "highlight") + tmpVideos, err := s.PlatformTwitch.GetVideos(ctx, watch.Edges.Channel.ExtID, platform.VideoTypeHighlight, false, false) if err != nil { - log.Error().Err(err).Msg("error getting videos") + logger.Error().Str("channel", watch.Edges.Channel.Name).Err(err).Msg("error getting videos") continue } videos = append(videos, tmpVideos...) 
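The channelInLiveStreamInfo helper above is a plain linear scan keyed on UserLogin. Purely as an illustration (not part of this patch), the same lookup can be expressed with the standard library's slices.IndexFunc on Go 1.21+; the liveStreamInfo struct below is a simplified stand-in for platform.LiveStreamInfo so the sketch compiles on its own. The match is exact and case-sensitive, same as the helper in the patch.

```go
package main

import (
	"fmt"
	"slices"
)

// Simplified stand-in for platform.LiveStreamInfo; only the fields needed here.
type liveStreamInfo struct {
	UserLogin string
	Title     string
}

// findStream returns the first stream whose UserLogin matches login, or the zero
// value if none matches, mirroring what channelInLiveStreamInfo does with a manual loop.
func findStream(login string, streams []liveStreamInfo) liveStreamInfo {
	if i := slices.IndexFunc(streams, func(s liveStreamInfo) bool {
		return s.UserLogin == login
	}); i >= 0 {
		return streams[i]
	}
	return liveStreamInfo{}
}

func main() {
	streams := []liveStreamInfo{
		{UserLogin: "channel_a", Title: "First stream"},
		{UserLogin: "channel_b", Title: "Second stream"},
	}
	fmt.Println(findStream("channel_b", streams).Title) // Second stream
	fmt.Println(findStream("missing", streams).Title)   // empty string (zero value)
}
```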
} // If uploads is enabled, fetch all videos if watch.DownloadUploads { - tmpVideos, err := twitch.GetVideosByUser(watch.Edges.Channel.ExtID, "upload") + tmpVideos, err := s.PlatformTwitch.GetVideos(ctx, watch.Edges.Channel.ExtID, platform.VideoTypeUpload, false, false) if err != nil { - log.Error().Err(err).Msg("error getting videos") + logger.Error().Str("channel", watch.Edges.Channel.Name).Err(err).Msg("error getting videos") continue } videos = append(videos, tmpVideos...) @@ -111,7 +116,7 @@ func (s *Service) CheckVodWatchedChannels() { // Fetch all videos from DB dbVideos, err := s.Store.Client.Vod.Query().Where(vod.HasChannelWith(channel.ID(watch.Edges.Channel.ID))).All(context.Background()) if err != nil { - log.Error().Err(err).Msg("error getting videos from DB") + logger.Error().Str("channel", watch.Edges.Channel.Name).Err(err).Msg("error getting videos from database") continue } // Check if video is already in DB @@ -119,13 +124,18 @@ func (s *Service) CheckVodWatchedChannels() { for _, video := range videos { // Video is not in DB if !contains(dbVideos, video.ID) { + platformVideo, err := s.PlatformTwitch.GetVideo(ctx, video.ID, true, true) + if err != nil { + logger.Error().Str("channel", watch.Edges.Channel.Name).Err(err).Msg("error getting video") + continue + } // check if there are any title regexes that need to be tested if watch.Edges.TitleRegex != nil && len(watch.Edges.TitleRegex) > 0 { // run regexes against title for _, titleRegex := range watch.Edges.TitleRegex { regex, err := regexp.Compile(titleRegex.Regex) if err != nil { - log.Error().Err(err).Msg("error compiling regex for watched channel check, skipping this regex") + logger.Error().Err(err).Msgf("error compiling regex %s", titleRegex.Regex) continue } matches := regex.FindAllString(video.Title, -1) @@ -138,66 +148,51 @@ func (s *Service) CheckVodWatchedChannels() { continue } - log.Debug().Str("regex", titleRegex.Regex).Str("title", video.Title).Msgf("no regex matches for video") + logger.Debug().Str("regex", titleRegex.Regex).Str("title", video.Title).Msgf("no regex matches for video") continue OUTER } } - // Query the video using Twitch's GraphQL API to check for restrictions - gqlVideo, err := twitch.GQLGetVideo(video.ID) - if err != nil { - log.Error().Err(err).Msgf("error getting video %s from GraphQL API", video.ID) - continue - } - // check if video is too old if watch.VideoAge > 0 { - parsedTime, err := time.Parse(time.RFC3339, video.CreatedAt) - if err != nil { - log.Error().Err(err).Msgf("error parsing video %s created_at", video.ID) - continue - } currentTime := time.Now() ageDuration := time.Duration(watch.VideoAge) * 24 * time.Hour ageCutOff := currentTime.Add(-ageDuration) - if parsedTime.Before(ageCutOff) { - log.Debug().Msgf("skipping video %s. 
video is older than %d days.", video.ID, watch.VideoAge) + if platformVideo.CreatedAt.Before(ageCutOff) { + logger.Debug().Str("video_id", video.ID).Msgf("skipping video; video is older than %d days.", watch.VideoAge) continue } } // Get video chapters - gqlVideoChapters, err := twitch.GQLGetChapters(video.ID) - if err != nil { - log.Error().Err(err).Msgf("error getting video %s chapters from GraphQL API", video.ID) - continue - } var videoChapters []string - if len(gqlVideoChapters.Data.Video.Moments.Edges) > 0 { - for _, chapter := range gqlVideoChapters.Data.Video.Moments.Edges { - videoChapters = append(videoChapters, chapter.Node.Details.Game.DisplayName) + if len(platformVideo.Chapters) > 0 { + for _, chapter := range platformVideo.Chapters { + videoChapters = append(videoChapters, chapter.Title) } - log.Debug().Msgf("Video %s has chapters: %s", video.ID, strings.Join(videoChapters, ", ")) + logger.Debug().Str("video_id", video.ID).Str("chapters", strings.Join(videoChapters, ", ")).Msg("video has chapters") } // Append chapters and video category to video categories var videoCategories []string videoCategories = append(videoCategories, videoChapters...) - videoCategories = append(videoCategories, gqlVideo.Data.Video.Game.Name) + if platformVideo.Category != nil { + videoCategories = append(videoCategories, *platformVideo.Category) + } // Check if video is sub only restricted - if strings.Contains(gqlVideo.Data.Video.ResourceRestriction.Type, "SUB") { + if video.Restriction != nil && *video.Restriction == string(platform.VideoRestrictionSubscriber) { // Skip if sub only is disabled if !watch.DownloadSubOnly { - log.Info().Msgf("skipping sub only video %s.", video.ID) + logger.Info().Str("video_id", video.ID).Msgf("skipping subscriber-only video") continue } // Skip if Twitch token is not set - if viper.GetString("parameters.twitch_token") == "" { - log.Info().Msgf("skipping sub only video %s. Twitch token is not set.", video.ID) + if config.Get().Parameters.TwitchToken == "" { + logger.Info().Str("video_id", video.ID).Msg("skipping sub only video; Twitch token is not set") continue } } @@ -214,22 +209,28 @@ func (s *Service) CheckVodWatchedChannels() { } } if !found { - log.Info().Msgf("skipping video %s. 
video has categories of %s when the restriction requires %s.", video.ID, strings.Join(videoCategories, ", "), strings.Join(channelVideoCategories, ", ")) + logger.Info().Str("video_id", video.ID).Str("categories", strings.Join(videoCategories, ", ")).Str("expected_categories", strings.Join(channelVideoCategories, ", ")).Msg("video does not match category restrictions") continue } } // archive the video - _, err = s.ArchiveService.ArchiveTwitchVod(video.ID, watch.Resolution, watch.ArchiveChat, watch.RenderChat) + input := archive.ArchiveVideoInput{ + VideoId: video.ID, + Quality: utils.VodQuality(watch.Resolution), + ArchiveChat: watch.ArchiveChat, + RenderChat: watch.RenderChat, + } + err = s.ArchiveService.ArchiveVideo(ctx, input) if err != nil { - log.Error().Err(err).Msgf("Error archiving video %s", video.ID) + log.Error().Err(err).Str("video_id", video.ID).Msgf("error archiving video") continue } - log.Info().Msgf("[Channel Watch] starting archive for video %s", video.ID) + logger.Info().Str("video_id", video.ID).Msgf("archiving video") } } } - log.Info().Msg("Finished checking channels for new videos") + return nil } func contains(videos []*ent.Vod, id string) bool { diff --git a/internal/metrics/metrics.go b/internal/metrics/metrics.go index a0cf1f68..84c6af59 100644 --- a/internal/metrics/metrics.go +++ b/internal/metrics/metrics.go @@ -2,107 +2,242 @@ package metrics import ( "context" + "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/client_golang/prometheus/promauto" + "github.com/riverqueue/river" + "github.com/riverqueue/river/rivertype" "github.com/rs/zerolog/log" "github.com/zibbp/ganymede/ent/queue" "github.com/zibbp/ganymede/internal/database" + tasks_client "github.com/zibbp/ganymede/internal/tasks/client" ) type Service struct { - Store *database.Database + Store *database.Database + riverClient *tasks_client.RiverClient + metrics *Metrics + Registry *prometheus.Registry } -func NewService(store *database.Database) *Service { - return &Service{Store: store} +type Metrics struct { + totalVods prometheus.Gauge + totalChannels prometheus.Gauge + totalUsers prometheus.Gauge + totalLiveWatchedChannels prometheus.Gauge + channelVodCount *prometheus.GaugeVec + totalVodsInQueue prometheus.Gauge + riverTotalPendingJobs prometheus.Gauge + riverTotalScheduledJobs prometheus.Gauge + riverTotalAvailableJobs prometheus.Gauge + riverTotalRunningJobs prometheus.Gauge + riverTotalRetryableJobs prometheus.Gauge + riverTotalCancelledJobs prometheus.Gauge + riverTotalDiscardedJobs prometheus.Gauge + riverTotalCompletedJobs prometheus.Gauge } -// Define metrics -var ( - totalVods = promauto.NewGauge(prometheus.GaugeOpts{ - Name: "total_vods", - Help: "Total number of vods", - }) - totalChannels = promauto.NewGauge(prometheus.GaugeOpts{ - Name: "total_channels", - Help: "Total number of channels", - }) - totalUsers = promauto.NewGauge(prometheus.GaugeOpts{ - Name: "total_users", - Help: "Total number of users", - }) - totalLiveWatchedChannels = promauto.NewGauge(prometheus.GaugeOpts{ - Name: "total_live_watched_channels", - Help: "Total number of live watched channels", - }) - channelVodCount = promauto.NewGaugeVec(prometheus.GaugeOpts{ - Name: "channel_vod_count", - Help: "Number of vods per channel", - }, []string{"channel"}) - totalVodsInQueue = promauto.NewGauge(prometheus.GaugeOpts{ - Name: "total_vods_in_queue", - Help: "Total number of vods in queue", - }) -) +func NewService(store *database.Database, riverClient *tasks_client.RiverClient) *Service { + registry 
:= prometheus.NewRegistry() + metrics := &Metrics{ + totalVods: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "total_vods", + Help: "Total number of vods", + }), + totalChannels: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "total_channels", + Help: "Total number of channels", + }), + totalUsers: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "total_users", + Help: "Total number of users", + }), + totalLiveWatchedChannels: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "total_live_watched_channels", + Help: "Total number of live watched channels", + }), + channelVodCount: prometheus.NewGaugeVec(prometheus.GaugeOpts{ + Name: "channel_vod_count", + Help: "Number of vods per channel", + }, []string{"channel"}), + totalVodsInQueue: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "total_vods_in_queue", + Help: "Total number of vods in queue", + }), + riverTotalPendingJobs: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "river_total_pending_jobs", + Help: "Total number of pending jobs", + }), + riverTotalScheduledJobs: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "river_total_scheduled_jobs", + Help: "Total number of scheduled jobs", + }), + riverTotalAvailableJobs: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "river_total_available_jobs", + Help: "Total number of available jobs", + }), + riverTotalRunningJobs: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "river_total_running_jobs", + Help: "Total number of running jobs", + }), + riverTotalRetryableJobs: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "river_total_retryable_jobs", + Help: "Total number of retryable jobs", + }), + riverTotalCancelledJobs: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "river_total_cancelled_jobs", + Help: "Total number of cancelled jobs", + }), + riverTotalDiscardedJobs: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "river_total_discarded_jobs", + Help: "Total number of discarded jobs", + }), + riverTotalCompletedJobs: prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "river_total_completed_jobs", + Help: "Total number of completed jobs", + }), + } + + registry.MustRegister( + metrics.totalVods, + metrics.totalChannels, + metrics.totalUsers, + metrics.totalLiveWatchedChannels, + metrics.channelVodCount, + metrics.totalVodsInQueue, + metrics.riverTotalPendingJobs, + metrics.riverTotalScheduledJobs, + metrics.riverTotalAvailableJobs, + metrics.riverTotalRunningJobs, + metrics.riverTotalRetryableJobs, + metrics.riverTotalCancelledJobs, + metrics.riverTotalDiscardedJobs, + metrics.riverTotalCompletedJobs, + ) + + return &Service{Store: store, riverClient: riverClient, metrics: metrics, Registry: registry} +} + +func (s *Service) gatherRiverJobMetrics() error { + pendingJobsParams := river.NewJobListParams().States(rivertype.JobStatePending).First(10000) + pendingJobs, err := s.riverClient.JobList(context.Background(), pendingJobsParams) + if err != nil { + return err + } + s.metrics.riverTotalPendingJobs.Set(float64(len(pendingJobs.Jobs))) + + scheduledJobsParams := river.NewJobListParams().States(rivertype.JobStateScheduled).First(10000) + scheduledJobs, err := s.riverClient.JobList(context.Background(), scheduledJobsParams) + if err != nil { + return err + } + s.metrics.riverTotalScheduledJobs.Set(float64(len(scheduledJobs.Jobs))) + + availableJobsParams := river.NewJobListParams().States(rivertype.JobStateAvailable).First(10000) + availableJobs, err := s.riverClient.JobList(context.Background(), availableJobsParams) + if err != nil { + return err + } + 
s.metrics.riverTotalAvailableJobs.Set(float64(len(availableJobs.Jobs))) + + runningJobsParams := river.NewJobListParams().States(rivertype.JobStateRunning).First(10000) + runningJobs, err := s.riverClient.JobList(context.Background(), runningJobsParams) + if err != nil { + return err + } + s.metrics.riverTotalRunningJobs.Set(float64(len(runningJobs.Jobs))) + + retryableJobsParams := river.NewJobListParams().States(rivertype.JobStateRetryable).First(10000) + retryableJobs, err := s.riverClient.JobList(context.Background(), retryableJobsParams) + if err != nil { + return err + } + s.metrics.riverTotalRetryableJobs.Set(float64(len(retryableJobs.Jobs))) -func (s *Service) GatherMetrics() *prometheus.Registry { + cancelledJobsParams := river.NewJobListParams().States(rivertype.JobStateCancelled).First(10000) + cancelledJobs, err := s.riverClient.JobList(context.Background(), cancelledJobsParams) + if err != nil { + return err + } + s.metrics.riverTotalCancelledJobs.Set(float64(len(cancelledJobs.Jobs))) + + discardedJobsParams := river.NewJobListParams().States(rivertype.JobStateDiscarded).First(10000) + discardedJobs, err := s.riverClient.JobList(context.Background(), discardedJobsParams) + if err != nil { + return err + } + s.metrics.riverTotalDiscardedJobs.Set(float64(len(discardedJobs.Jobs))) + + completedJobsParams := river.NewJobListParams().States(rivertype.JobStateCompleted).First(10000) + completedJobs, err := s.riverClient.JobList(context.Background(), completedJobsParams) + if err != nil { + return err + } + s.metrics.riverTotalCompletedJobs.Set(float64(len(completedJobs.Jobs))) + + return nil +} + +func (s *Service) GatherMetrics() (*prometheus.Registry, error) { // Gather metric data // Total number of Vods vCount, err := s.Store.Client.Vod.Query().Count(context.Background()) if err != nil { log.Error().Err(err).Msg("error getting total vods") - totalVods.Set(0) + s.metrics.totalVods.Set(0) } + s.metrics.totalVods.Set(float64(vCount)) // Total number of Channels cCount, err := s.Store.Client.Channel.Query().Count(context.Background()) if err != nil { log.Error().Err(err).Msg("error getting total channels") - totalChannels.Set(0) + s.metrics.totalChannels.Set(0) } + s.metrics.totalChannels.Set(float64(cCount)) // Total number of Users uCount, err := s.Store.Client.User.Query().Count(context.Background()) if err != nil { log.Error().Err(err).Msg("error getting total users") - totalUsers.Set(0) + s.metrics.totalUsers.Set(0) } + s.metrics.totalUsers.Set(float64(uCount)) // Total number of Live Watched Channels lwCount, err := s.Store.Client.Live.Query().Count(context.Background()) if err != nil { log.Error().Err(err).Msg("error getting total live watched channels") - totalLiveWatchedChannels.Set(0) + s.metrics.totalLiveWatchedChannels.Set(0) } + s.metrics.totalLiveWatchedChannels.Set(float64(lwCount)) // Get all channels and the number of VODs they have channels, err := s.Store.Client.Channel.Query().WithVods().All(context.Background()) if err != nil { log.Error().Err(err).Msg("error getting all channels") - return nil + return nil, err } for _, channel := range channels { cVCount := len(channel.Edges.Vods) - channelVodCount.With(prometheus.Labels{"channel": channel.Name}).Set(float64(cVCount))
- + s.metrics.channelVodCount.With(prometheus.Labels{"channel": channel.Name}).Set(float64(cVCount)) } // Total VODs in queue qCount, err := s.Store.Client.Queue.Query().Where(queue.Processing(true)).Count(context.Background()) if err != nil { log.Error().Err(err).Msg("error getting total vods in queue") - totalVodsInQueue.Set(0) - } - - // Set metric data - totalVods.Set(float64(vCount)) - totalChannels.Set(float64(cCount)) - totalUsers.Set(float64(uCount)) - totalLiveWatchedChannels.Set(float64(lwCount)) - totalVodsInQueue.Set(float64(qCount)) - - // Create registry - r := prometheus.NewRegistry() - r.MustRegister(totalVods) - r.MustRegister(totalChannels) - r.MustRegister(totalUsers) - r.MustRegister(totalLiveWatchedChannels) - r.MustRegister(channelVodCount) - r.MustRegister(totalVodsInQueue) - return r + s.metrics.totalVodsInQueue.Set(0) + } + s.metrics.totalVodsInQueue.Set(float64(qCount)) + + // gather River job metrics + err = s.gatherRiverJobMetrics() + if err != nil { + log.Error().Err(err).Msg("error gathering river job metrics") + return nil, err + } + + return s.Registry, nil } diff --git a/internal/notification/notification.go b/internal/notification/notification.go index 94fcb2ec..9a47cdb3 100644 --- a/internal/notification/notification.go +++ b/internal/notification/notification.go @@ -9,8 +9,8 @@ import ( "strings" "github.com/rs/zerolog/log" - "github.com/spf13/viper" "github.com/zibbp/ganymede/ent" + "github.com/zibbp/ganymede/internal/config" ) var ( @@ -47,9 +47,9 @@ func sendWebhook(url string, body []byte) error { func SendVideoArchiveSuccessNotification(channelItem *ent.Channel, vodItem *ent.Vod, qItem *ent.Queue) { // Get notification settings - videoSuccessWebhookUrl := viper.GetString("notifications.video_success_webhook_url") - videoSuccessTemplate := viper.GetString("notifications.video_success_template") - videoSuccessEnabled := viper.GetBool("notifications.video_success_enabled") + videoSuccessWebhookUrl := config.Get().Notification.VideoSuccessWebhookUrl + videoSuccessTemplate := config.Get().Notification.VideoSuccessTemplate + videoSuccessEnabled := config.Get().Notification.VideoSuccessEnabled if (!videoSuccessEnabled) || (videoSuccessWebhookUrl == "") || (videoSuccessTemplate == "") { log.Debug().Msg("Video archive success notification is disabled") @@ -91,9 +91,9 @@ func SendVideoArchiveSuccessNotification(channelItem *ent.Channel, vodItem *ent. 
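The metrics refactor above moves from promauto's implicit default registry to a Service-owned *prometheus.Registry that GatherMetrics now returns alongside an error. How that registry is mounted on an HTTP route is not part of this hunk (the app presumably wires it through its existing handler layer), so the snippet below is only a minimal sketch of the usual pattern with promhttp.HandlerFor; the route, port, and example gauge are placeholders.

```go
package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	// Stand-in for the registry built in metrics.NewService.
	registry := prometheus.NewRegistry()

	upGauge := prometheus.NewGauge(prometheus.GaugeOpts{
		Name: "example_up",
		Help: "Example gauge registered on a custom (non-default) registry",
	})
	registry.MustRegister(upGauge)
	upGauge.Set(1)

	// Serve only the metrics registered on this registry, not the global defaults.
	http.Handle("/metrics", promhttp.HandlerFor(registry, promhttp.HandlerOpts{}))
	_ = http.ListenAndServe(":2112", nil)
}
```

Using a dedicated registry keeps the scrape output limited to the gauges registered in NewService, which is the design choice the diff above makes explicit.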
func SendLiveArchiveSuccessNotification(channelItem *ent.Channel, vodItem *ent.Vod, qItem *ent.Queue) { // Get notification settings - liveSuccessWebhookUrl := viper.GetString("notifications.live_success_webhook_url") - liveSuccessTemplate := viper.GetString("notifications.live_success_template") - liveSuccessEnabled := viper.GetBool("notifications.live_success_enabled") + liveSuccessWebhookUrl := config.Get().Notification.LiveSuccessWebhookUrl + liveSuccessTemplate := config.Get().Notification.LiveSuccessTemplate + liveSuccessEnabled := config.Get().Notification.LiveSuccessEnabled if (!liveSuccessEnabled) || (liveSuccessWebhookUrl == "") || (liveSuccessTemplate == "") { log.Debug().Msg("Live archive success notification is disabled") @@ -135,9 +135,9 @@ func SendLiveArchiveSuccessNotification(channelItem *ent.Channel, vodItem *ent.V func SendErrorNotification(channelItem *ent.Channel, vodItem *ent.Vod, qItem *ent.Queue, failedTask string) { // Get notification settings - errorWebhookUrl := viper.GetString("notifications.error_webhook_url") - errorTemplate := viper.GetString("notifications.error_template") - errorEnabled := viper.GetBool("notifications.error_enabled") + errorWebhookUrl := config.Get().Notification.ErrorWebhookUrl + errorTemplate := config.Get().Notification.ErrorTemplate + errorEnabled := config.Get().Notification.ErrorEnabled if (!errorEnabled) || (errorWebhookUrl == "") || (errorTemplate == "") { log.Debug().Msg("Error notification is disabled") @@ -179,9 +179,9 @@ func SendErrorNotification(channelItem *ent.Channel, vodItem *ent.Vod, qItem *en func SendLiveNotification(channelItem *ent.Channel, vodItem *ent.Vod, qItem *ent.Queue) { // Get notification settings - liveWebhookUrl := viper.GetString("notifications.is_live_webhook_url") - liveTemplate := viper.GetString("notifications.is_live_template") - liveEnabled := viper.GetBool("notifications.is_live_enabled") + liveWebhookUrl := config.Get().Notification.IsLiveWebhookUrl + liveTemplate := config.Get().Notification.IsLiveTemplate + liveEnabled := config.Get().Notification.IsLiveEnabled if (!liveEnabled) || (liveWebhookUrl == "") || (liveTemplate == "") { log.Debug().Msg("Live notification is disabled") diff --git a/internal/platform/badge.go b/internal/platform/badge.go new file mode 100644 index 00000000..90088e12 --- /dev/null +++ b/internal/platform/badge.go @@ -0,0 +1,18 @@ +package platform + +type Badges struct { + Badges []Badge `json:"badges"` +} + +type Badge struct { + Version string `json:"version"` + Name string `json:"name"` + IamgeUrl string `json:"image_url"` + ImageUrl1X string `json:"image_url_1x"` + ImageUrl2X string `json:"image_url_2x"` + ImageUrl4X string `json:"image_url_4x"` + Description string `json:"description"` + Title string `json:"title"` + ClickAction string `json:"click_action"` + ClickUrl string `json:"click_url"` +} diff --git a/internal/platform/emote.go b/internal/platform/emote.go new file mode 100644 index 00000000..6dd36819 --- /dev/null +++ b/internal/platform/emote.go @@ -0,0 +1,31 @@ +package platform + +type Emotes struct { + Emotes []Emote `json:"emotes"` +} + +type Emote struct { + ID string `json:"id"` + Name string `json:"name"` + URL string `json:"url"` + Format EmoteFormat `json:"format"` + Type EmoteType `json:"type"` + Scale string `json:"scale"` + Source string `json:"source"` + Width int64 `json:"width"` + Height int64 `json:"height"` +} + +type EmoteFormat string + +const ( + EmoteFormatStatic EmoteFormat = "static" + EmoteFormatAnimated EmoteFormat = "animated" +) 
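EmoteFormatStatic and EmoteFormatAnimated map onto the "format" array Helix returns per emote; later in this patch, GetGlobalEmotes and GetChannelEmotes prefer the animated variant when it is offered. As a small standalone sketch of that selection rule (using slices.Contains instead of the repo's utils.Contains, purely to keep the example dependency-free):

```go
package main

import (
	"fmt"
	"slices"
)

type EmoteFormat string

const (
	EmoteFormatStatic   EmoteFormat = "static"
	EmoteFormatAnimated EmoteFormat = "animated"
)

// pickEmoteFormat prefers the animated variant when the Helix "format" array offers it,
// falling back to static otherwise.
func pickEmoteFormat(formats []string) EmoteFormat {
	if slices.Contains(formats, "animated") {
		return EmoteFormatAnimated
	}
	return EmoteFormatStatic
}

func main() {
	fmt.Println(pickEmoteFormat([]string{"static"}))             // static
	fmt.Println(pickEmoteFormat([]string{"static", "animated"})) // animated
}
```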
+ +type EmoteType string + +const ( + EmoteTypeGlobal EmoteType = "global" + EmoteTypeSubscription EmoteType = "subscription" +) diff --git a/internal/platform/errors.go b/internal/platform/errors.go new file mode 100644 index 00000000..ff79d822 --- /dev/null +++ b/internal/platform/errors.go @@ -0,0 +1,7 @@ +package platform + +type ErrorNoStreamsFound struct{} + +func (e ErrorNoStreamsFound) Error() string { + return "no streams found" +} diff --git a/internal/platform/interfaces.go b/internal/platform/interfaces.go new file mode 100644 index 00000000..dda9db90 --- /dev/null +++ b/internal/platform/interfaces.go @@ -0,0 +1,108 @@ +package platform + +import ( + "context" + "time" + + "github.com/zibbp/ganymede/internal/chapter" +) + +type VideoInfo struct { + ID string `json:"id"` + StreamID string `json:"stream_id"` + UserID string `json:"user_id"` + UserLogin string `json:"user_login"` + UserName string `json:"user_name"` + Title string `json:"title"` + Description string `json:"description"` + CreatedAt time.Time `json:"created_at"` + PublishedAt time.Time `json:"published_at"` + URL string `json:"url"` + ThumbnailURL string `json:"thumbnail_url"` + Viewable string `json:"viewable"` + ViewCount int64 `json:"view_count"` + Language string `json:"language"` + Type string `json:"type"` + Duration time.Duration `json:"duration"` + Category *string `json:"category"` // the default/main category of the video + Restriction *string `json:"restriction"` // video restriction + Chapters []chapter.Chapter `json:"chapters"` + MutedSegments []MutedSegment `json:"muted_segments"` +} + +type VideoRestriction string + +const ( + VideoRestrictionSubscriber VideoRestriction = "subscriber" +) + +type LiveStreamInfo struct { + ID string `json:"id"` + UserID string `json:"user_id"` + UserLogin string `json:"user_login"` + UserName string `json:"user_name"` + GameID string `json:"game_id"` + GameName string `json:"game_name"` + Type string `json:"type"` + Title string `json:"title"` + ViewerCount int64 `json:"viewer_count"` + StartedAt time.Time `json:"started_at"` + Language string `json:"language"` + ThumbnailURL string `json:"thumbnail_url"` +} + +type ChannelInfo struct { + ID string `json:"id"` + Login string `json:"login"` + DisplayName string `json:"display_name"` + Type string `json:"type"` + BroadcasterType string `json:"broadcaster_type"` + Description string `json:"description"` + ProfileImageURL string `json:"profile_image_url"` + OfflineImageURL string `json:"offline_image_url"` + ViewCount int64 `json:"view_count"` + CreatedAt time.Time `json:"created_at"` +} + +type Category struct { + ID string `json:"id"` + Name string `json:"name"` +} + +type ConnectionInfo struct { + ClientId string + ClientSecret string + AccessToken string +} + +type VideoType string + +const ( + VideoTypeArchive VideoType = "archive" + VideoTypeHighlight VideoType = "highlight" + VideoTypeUpload VideoType = "upload" +) + +type MutedSegment struct { + Duration int `json:"duration"` + Offset int `json:"offset"` +} + +const ( + maxRetryAttempts = 3 + retryDelay = 5 * time.Second +) + +type Platform interface { + Authenticate(ctx context.Context) (*ConnectionInfo, error) + GetVideo(ctx context.Context, id string, withChapters bool, withMutedSegments bool) (*VideoInfo, error) + GetLiveStream(ctx context.Context, channelName string) (*LiveStreamInfo, error) + GetLiveStreams(ctx context.Context, channelNames []string) ([]LiveStreamInfo, error) + GetChannel(ctx context.Context, channelName string) (*ChannelInfo, error) + 
GetVideos(ctx context.Context, channelId string, videoType VideoType, withChapters bool, withMutedSegments bool) ([]VideoInfo, error) + GetCategories(ctx context.Context) ([]Category, error) + GetGlobalBadges(ctx context.Context) ([]Badge, error) + GetChannelBadges(ctx context.Context, channelId string) ([]Badge, error) + GetGlobalEmotes(ctx context.Context) ([]Emote, error) + GetChannelEmotes(ctx context.Context, channelId string) ([]Emote, error) +} diff --git a/internal/platform/platform.go b/internal/platform/platform.go new file mode 100644 index 00000000..0d3b65ce --- /dev/null +++ b/internal/platform/platform.go @@ -0,0 +1 @@ +package platform diff --git a/internal/platform/twitch.go b/internal/platform/twitch.go new file mode 100644 index 00000000..df4a34de --- /dev/null +++ b/internal/platform/twitch.go @@ -0,0 +1,603 @@ +package platform + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "strconv" + "strings" + "time" + + "github.com/zibbp/ganymede/internal/chapter" + "github.com/zibbp/ganymede/internal/dto" + "github.com/zibbp/ganymede/internal/utils" +) + +// GetVideo implements the Platform interface to get video information from Twitch. Optional parameters are chapters and muted segments. These use the undocumented Twitch GraphQL API. +func (c *TwitchConnection) GetVideo(ctx context.Context, id string, withChapters bool, withMutedSegments bool) (*VideoInfo, error) { + params := url.Values{ + "id": []string{id}, + } + body, err := c.twitchMakeHTTPRequest("GET", "videos", params, nil) + if err != nil { + return nil, err + } + + var videoResponse TwitchGetVideosResponse + err = json.Unmarshal(body, &videoResponse) + if err != nil { + return nil, err + } + + if len(videoResponse.Data) == 0 { + return nil, fmt.Errorf("video not found") + } + + // TODO: fix for restriction (sub-only) + gqlVideo, err := c.TwitchGQLGetVideo(id) + if err != nil { + return nil, err + } + + // parse dates + createdAt, err := time.Parse(time.RFC3339, videoResponse.Data[0].CreatedAt) + if err != nil { + return nil, err + } + publishedAt, err := time.Parse(time.RFC3339, videoResponse.Data[0].PublishedAt) + if err != nil { + return nil, err + } + + // get duration + duration, err := time.ParseDuration(videoResponse.Data[0].Duration) + if err != nil { + return nil, fmt.Errorf("error parsing duration: %v", err) + } + + info := VideoInfo{ + ID: videoResponse.Data[0].ID, + StreamID: videoResponse.Data[0].StreamID, + UserID: videoResponse.Data[0].UserID, + UserLogin: videoResponse.Data[0].UserLogin, + UserName: videoResponse.Data[0].UserName, + Title: videoResponse.Data[0].Title, + Description: videoResponse.Data[0].Description, + CreatedAt: createdAt, + PublishedAt: publishedAt, + URL: videoResponse.Data[0].URL, + ThumbnailURL: videoResponse.Data[0].ThumbnailURL, + Viewable: videoResponse.Data[0].Viewable, + ViewCount: videoResponse.Data[0].ViewCount, + Language: videoResponse.Data[0].Language, + Type: videoResponse.Data[0].Type, + Duration: duration, + Category: &gqlVideo.Game.Name, + } + + // get chapters + if withChapters { + gqlChapters, err := c.TwitchGQLGetChapters(info.ID) + if err != nil { + return nil, err + } + + var chapters []chapter.Chapter + convertedChapters, err := convertTwitchChaptersToChapters(gqlChapters, int(info.Duration.Seconds())) + if err != nil { + return &info, err + } + chapters = append(chapters, convertedChapters...) 
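The Platform interface defined above is the seam the rest of this patch codes against (the live and VOD checks call GetLiveStreams, GetVideos, and so on through it). One practical payoff is that callers can depend on a narrow subset of the interface and substitute a fake in tests. The sketch below trims the interface to a single method and uses local stand-ins for the platform types so it compiles outside the repo; the fake and function names are illustrative, not from the patch.

```go
package main

import (
	"context"
	"fmt"
)

// Trimmed-down stand-in for platform.LiveStreamInfo.
type LiveStreamInfo struct {
	UserLogin string
	Title     string
}

// liveStreamLister is a narrow view of the Platform interface: just what this caller needs.
type liveStreamLister interface {
	GetLiveStreams(ctx context.Context, channelNames []string) ([]LiveStreamInfo, error)
}

// fakePlatform is a test double; in the patch the real implementation is TwitchConnection.
type fakePlatform struct {
	streams []LiveStreamInfo
}

func (f *fakePlatform) GetLiveStreams(ctx context.Context, channelNames []string) ([]LiveStreamInfo, error) {
	return f.streams, nil
}

// liveLogins returns the logins that are currently live, using only the interface.
func liveLogins(ctx context.Context, p liveStreamLister, channels []string) ([]string, error) {
	streams, err := p.GetLiveStreams(ctx, channels)
	if err != nil {
		return nil, err
	}
	logins := make([]string, 0, len(streams))
	for _, s := range streams {
		logins = append(logins, s.UserLogin)
	}
	return logins, nil
}

func main() {
	p := &fakePlatform{streams: []LiveStreamInfo{{UserLogin: "channel_a", Title: "live now"}}}
	logins, _ := liveLogins(context.Background(), p, []string{"channel_a", "channel_b"})
	fmt.Println(logins) // [channel_a]
}
```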
+ info.Chapters = chapters + } + + // get muted segments + if withMutedSegments { + gqlMutedSegments, err := c.TwitchGQLGetMutedSegments(info.ID) + if err != nil { + return nil, err + } + + var mutedSegments []MutedSegment + + for _, segment := range gqlMutedSegments { + mutedSegment := MutedSegment{ + Duration: segment.Duration, + Offset: segment.Offset, + } + mutedSegments = append(mutedSegments, mutedSegment) + } + info.MutedSegments = mutedSegments + } + + return &info, nil +} + +func (c *TwitchConnection) GetLiveStream(ctx context.Context, channelName string) (*LiveStreamInfo, error) { + params := url.Values{ + "user_login": []string{channelName}, + } + body, err := c.twitchMakeHTTPRequest("GET", "streams", params, nil) + if err != nil { + return nil, err + } + + var resp TwitchLiveStreamsRepsponse + err = json.Unmarshal(body, &resp) + if err != nil { + return nil, err + } + + if len(resp.Data) == 0 { + return nil, fmt.Errorf("no streams found") + } + + startedAt, err := time.Parse(time.RFC3339, resp.Data[0].StartedAt) + if err != nil { + return nil, err + } + + info := LiveStreamInfo{ + ID: resp.Data[0].ID, + UserID: resp.Data[0].UserID, + UserLogin: resp.Data[0].UserLogin, + UserName: resp.Data[0].UserName, + GameID: resp.Data[0].GameID, + GameName: resp.Data[0].GameName, + Type: resp.Data[0].Type, + Title: resp.Data[0].Title, + ViewerCount: resp.Data[0].ViewerCount, + StartedAt: startedAt, + Language: resp.Data[0].Language, + ThumbnailURL: resp.Data[0].ThumbnailURL, + } + + return &info, nil +} + +func (c *TwitchConnection) GetLiveStreams(ctx context.Context, channelNames []string) ([]LiveStreamInfo, error) { + params := url.Values{} + for _, channel := range channelNames { + params.Add("user_login", channel) + } + + body, err := c.twitchMakeHTTPRequest("GET", "streams", params, nil) + if err != nil { + return nil, err + } + + var resp TwitchLiveStreamsRepsponse + err = json.Unmarshal(body, &resp) + if err != nil { + return nil, err + } + + if len(resp.Data) == 0 { + return nil, &ErrorNoStreamsFound{} + } + + streams := make([]LiveStreamInfo, 0, len(resp.Data)) + for _, stream := range resp.Data { + startedAt, err := time.Parse(time.RFC3339, stream.StartedAt) + if err != nil { + return nil, err + } + + streams = append(streams, LiveStreamInfo{ + ID: stream.ID, + UserID: stream.UserID, + UserLogin: stream.UserLogin, + UserName: stream.UserName, + GameID: stream.GameID, + GameName: stream.GameName, + Type: stream.Type, + Title: stream.Title, + ViewerCount: stream.ViewerCount, + StartedAt: startedAt, + Language: stream.Language, + ThumbnailURL: stream.ThumbnailURL, + }) + } + + return streams, nil +} + +func (c *TwitchConnection) GetChannel(ctx context.Context, channelName string) (*ChannelInfo, error) { + params := url.Values{ + "login": []string{channelName}, + } + body, err := c.twitchMakeHTTPRequest("GET", "users", params, nil) + if err != nil { + return nil, err + } + + var resp TwitchChannelResponse + err = json.Unmarshal(body, &resp) + if err != nil { + return nil, err + } + + if len(resp.Data) == 0 { + return nil, fmt.Errorf("channel not found") + } + + createdAt, err := time.Parse(time.RFC3339, resp.Data[0].CreatedAt) + if err != nil { + return nil, err + } + + info := ChannelInfo{ + ID: resp.Data[0].ID, + Login: resp.Data[0].Login, + DisplayName: resp.Data[0].DisplayName, + Type: resp.Data[0].Type, + BroadcasterType: resp.Data[0].BroadcasterType, + Description: resp.Data[0].Description, + ProfileImageURL: resp.Data[0].ProfileImageURL, + OfflineImageURL: 
resp.Data[0].OfflineImageURL, + ViewCount: resp.Data[0].ViewCount, + CreatedAt: createdAt, + } + + return &info, nil +} + +func (c *TwitchConnection) GetVideos(ctx context.Context, channelId string, videoType VideoType, withChapters bool, withMutedSegments bool) ([]VideoInfo, error) { + params := url.Values{ + "user_id": []string{channelId}, + "first": []string{"100"}, + "type": []string{string(videoType)}, + } + body, err := c.twitchMakeHTTPRequest("GET", "videos", params, nil) + if err != nil { + return nil, err + } + + var resp TwitchGetVideosResponse + err = json.Unmarshal(body, &resp) + if err != nil { + return nil, err + } + + var videos []TwitchVideoInfo + videos = append(videos, resp.Data...) + + // pagination + cursor := resp.Pagination.Cursor + for cursor != "" { + params.Del("after") + params.Set("after", cursor) + body, err = c.twitchMakeHTTPRequest("GET", "videos", params, nil) + if err != nil { + return nil, err + } + var resp TwitchGetVideosResponse + err = json.Unmarshal(body, &resp) + if err != nil { + return nil, err + } + videos = append(videos, resp.Data...) + cursor = resp.Pagination.Cursor + } + + var info []VideoInfo + for _, video := range videos { + // if withChapters or withMutedSegments is true, get the video from the GetVideo function which fetches extra information + // else just use the video from the API response + if withChapters || withMutedSegments { + video, err := c.GetVideo(ctx, video.ID, withChapters, withMutedSegments) + if err != nil { + return nil, err + } + + info = append(info, *video) + } else { + + // parse dates + createdAt, err := time.Parse(time.RFC3339, video.CreatedAt) + if err != nil { + return nil, err + } + publishedAt, err := time.Parse(time.RFC3339, video.PublishedAt) + if err != nil { + return nil, err + } + // get duration + duration, err := time.ParseDuration(video.Duration) + if err != nil { + return nil, fmt.Errorf("error parsing duration: %v", err) + } + + info = append(info, VideoInfo{ + ID: video.ID, + StreamID: video.StreamID, + UserID: video.UserID, + UserLogin: video.UserLogin, + UserName: video.UserName, + Title: video.Title, + Description: video.Description, + CreatedAt: createdAt, + PublishedAt: publishedAt, + URL: video.URL, + ThumbnailURL: video.ThumbnailURL, + Viewable: video.Viewable, + ViewCount: video.ViewCount, + Language: video.Language, + Type: video.Type, + Duration: duration, + }) + } + } + + return info, nil +} + +func (c *TwitchConnection) GetCategories(ctx context.Context) ([]Category, error) { + params := url.Values{} + body, err := c.twitchMakeHTTPRequest("GET", "games/top", params, nil) + if err != nil { + return nil, err + } + + var resp TwitchCategoryResponse + err = json.Unmarshal(body, &resp) + if err != nil { + return nil, err + } + + var categories []TwitchCategory + categories = append(categories, resp.Data...) + + // pagination + cursor := resp.Pagination.Cursor + for cursor != "" { + params.Del("after") + params.Set("after", cursor) + body, err = c.twitchMakeHTTPRequest("GET", "games/top", params, nil) + if err != nil { + return nil, err + } + var resp TwitchCategoryResponse + err = json.Unmarshal(body, &resp) + if err != nil { + return nil, err + } + categories = append(categories, resp.Data...) 
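Both GetVideos and GetCategories page through Helix results by re-issuing the request with the "after" cursor until the response comes back with an empty cursor. The standalone sketch below shows just that loop against a fake pager; the patch fetches the first page before entering the loop, while this version folds it into one loop, but the termination condition is the same. All names here (page, fetchPage, the cursor values) are illustrative.

```go
package main

import "fmt"

type page struct {
	Items  []string
	Cursor string // an empty cursor means there are no further pages
}

// fetchPage simulates one Helix-style request; "after" is the cursor from the previous page.
func fetchPage(after string) page {
	switch after {
	case "":
		return page{Items: []string{"video-1", "video-2"}, Cursor: "cursor-1"}
	case "cursor-1":
		return page{Items: []string{"video-3"}, Cursor: "cursor-2"}
	default:
		return page{Items: []string{"video-4"}, Cursor: ""} // last page
	}
}

// fetchAll keeps requesting pages until the API stops returning a cursor.
func fetchAll() []string {
	var all []string
	cursor := ""
	for {
		p := fetchPage(cursor)
		all = append(all, p.Items...)
		if p.Cursor == "" {
			break
		}
		cursor = p.Cursor
	}
	return all
}

func main() {
	fmt.Println(fetchAll()) // [video-1 video-2 video-3 video-4]
}
```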
+ cursor = resp.Pagination.Cursor + } + + var info []Category + for _, category := range categories { + info = append(info, Category{ + ID: category.ID, + Name: category.Name, + }) + } + + return info, nil +} + +func (c *TwitchConnection) GetGlobalBadges(ctx context.Context) ([]Badge, error) { + body, err := c.twitchMakeHTTPRequest("GET", "chat/badges/global", nil, nil) + if err != nil { + return nil, err + } + + var twitchGlobalBadges TwitchGlobalBadgeResponse + err = json.Unmarshal(body, &twitchGlobalBadges) + if err != nil { + return nil, err + } + + if len(twitchGlobalBadges.Data) == 0 { + return nil, fmt.Errorf("badges not found") + } + + var badges []Badge + + for _, v := range twitchGlobalBadges.Data { + for _, b := range v.Versions { + badges = append(badges, Badge{ + Version: b.ID, + Name: v.SetID, + IamgeUrl: b.ImageURL4X, + ImageUrl1X: b.ImageURL1X, + ImageUrl2X: b.ImageURL2X, + ImageUrl4X: b.ImageURL4X, + Description: b.Description, + Title: b.Title, + ClickAction: b.ClickAction, + ClickUrl: b.ClickURL, + }) + } + } + + return badges, nil +} + +func (c *TwitchConnection) GetChannelBadges(ctx context.Context, channelId string) ([]Badge, error) { + params := url.Values{ + "broadcaster_id": []string{channelId}, + } + body, err := c.twitchMakeHTTPRequest("GET", "chat/badges", params, nil) + if err != nil { + return nil, err + } + + var twitchGlobalBadges TwitchGlobalBadgeResponse + err = json.Unmarshal(body, &twitchGlobalBadges) + if err != nil { + return nil, err + } + + if len(twitchGlobalBadges.Data) == 0 { + return nil, fmt.Errorf("badges not found") + } + + var badges []Badge + + for _, v := range twitchGlobalBadges.Data { + for _, b := range v.Versions { + badges = append(badges, Badge{ + Version: b.ID, + Name: v.SetID, + IamgeUrl: b.ImageURL4X, + ImageUrl1X: b.ImageURL1X, + ImageUrl2X: b.ImageURL2X, + ImageUrl4X: b.ImageURL4X, + Description: b.Description, + Title: b.Title, + ClickAction: b.ClickAction, + ClickUrl: b.ClickURL, + }) + } + } + + return badges, nil +} + +func (c *TwitchConnection) GetGlobalEmotes(ctx context.Context) ([]Emote, error) { + body, err := c.twitchMakeHTTPRequest("GET", "chat/emotes/global", nil, nil) + if err != nil { + return nil, err + } + + var twitchGlobalEmotes TwitchGlobalEmoteResponse + err = json.Unmarshal(body, &twitchGlobalEmotes) + if err != nil { + return nil, err + } + + if len(twitchGlobalEmotes.Data) == 0 { + return nil, fmt.Errorf("emotes not found") + } + + var emotes []Emote + + // https://dev.twitch.tv/docs/api/reference/#get-global-emotes + for _, e := range twitchGlobalEmotes.Data { + emote := Emote{ + ID: e.ID, + Name: e.Name, + Source: "twitch", + Type: EmoteTypeGlobal, + } + + // check if emote is static or animated + // format can be static or animated + if utils.Contains(e.Format, "animated") { + emote.Format = EmoteFormatAnimated + } else { + emote.Format = EmoteFormatStatic + } + + emote.Scale = twitchEmoteGetLargestScale(e.Scale) + + emote.URL = twitchTemplateEmoteURL(e.ID, string(emote.Format), "dark", emote.Scale) + + emotes = append(emotes, emote) + } + + return emotes, nil +} + +func (c *TwitchConnection) GetChannelEmotes(ctx context.Context, channelId string) ([]Emote, error) { + params := url.Values{ + "broadcaster_id": []string{channelId}, + } + body, err := c.twitchMakeHTTPRequest("GET", "chat/emotes", params, nil) + if err != nil { + return nil, err + } + + var twitchGlobalEmotes TwitchGlobalEmoteResponse + err = json.Unmarshal(body, &twitchGlobalEmotes) + if err != nil { + return nil, err + } + + if 
len(twitchGlobalEmotes.Data) == 0 { + return nil, fmt.Errorf("emotes not found") + } + + var emotes []Emote + + // https://dev.twitch.tv/docs/api/reference/#get-global-emotes + for _, e := range twitchGlobalEmotes.Data { + emote := Emote{ + ID: e.ID, + Name: e.Name, + Source: "twitch", + Type: EmoteTypeSubscription, + } + + // check if emote is static or animated + // format can be static or animated + if utils.Contains(e.Format, "animated") { + emote.Format = EmoteFormatAnimated + } else { + emote.Format = EmoteFormatStatic + } + + emote.Scale = twitchEmoteGetLargestScale(e.Scale) + + emote.URL = twitchTemplateEmoteURL(e.ID, string(emote.Format), "dark", emote.Scale) + + emotes = append(emotes, emote) + } + + return emotes, nil +} + +// twitchEmoteGetLargestScale returns the largest scale of the given values +// +// https://dev.twitch.tv/docs/api/reference/#get-global-emotes +func twitchEmoteGetLargestScale(values []string) string { + if len(values) == 0 { + return "0" + } + + highest, err := strconv.ParseFloat(values[0], 64) + if err != nil { + return "0" + } + + for _, v := range values[1:] { + current, err := strconv.ParseFloat(v, 64) + if err != nil { + continue + } + if current > highest { + highest = current + } + } + + return strconv.FormatFloat(highest, 'f', 1, 64) +} + +// twitchTemplateEmoteURL returns the URL of an emote +// +// https://dev.twitch.tv/docs/api/reference/#get-global-emotes +// +// Twitch recommends using the template URL rather than the raw URL +func twitchTemplateEmoteURL(id, format, themeMode string, scale string) string { + template := "https://static-cdn.jtvnw.net/emoticons/v2/{{id}}/{{format}}/{{theme_mode}}/{{scale}}" + + replacements := map[string]string{ + "{{id}}": id, + "{{format}}": format, + "{{theme_mode}}": themeMode, + "{{scale}}": scale, + } + + for placeholder, value := range replacements { + template = strings.Replace(template, placeholder, value, 1) + } + + return template +} + +func ArchiveVideoActivity(ctx context.Context, input dto.ArchiveVideoInput) error { + return nil +} diff --git a/internal/platform/twitch_api.go b/internal/platform/twitch_api.go new file mode 100644 index 00000000..16ca8ff9 --- /dev/null +++ b/internal/platform/twitch_api.go @@ -0,0 +1,226 @@ +package platform + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "time" + + "github.com/zibbp/ganymede/internal/chapter" +) + +var ( + TwitchApiUrl = "https://api.twitch.tv/helix" +) + +// authentication response +type AuthTokenResponse struct { + AccessToken string `json:"access_token"` + ExpiresIn int `json:"expires_in"` + TokenType string `json:"token_type"` +} + +type TwitchGetVideosResponse struct { + Data []TwitchVideoInfo `json:"data"` + Pagination TwitchPagination `json:"pagination"` +} + +type TwitchVideoInfo struct { + ID string `json:"id"` + StreamID string `json:"stream_id"` + UserID string `json:"user_id"` + UserLogin string `json:"user_login"` + UserName string `json:"user_name"` + Title string `json:"title"` + Description string `json:"description"` + CreatedAt string `json:"created_at"` + PublishedAt string `json:"published_at"` + URL string `json:"url"` + ThumbnailURL string `json:"thumbnail_url"` + Viewable string `json:"viewable"` + ViewCount int64 `json:"view_count"` + Language string `json:"language"` + Type string `json:"type"` + Duration string `json:"duration"` + MutedSegments interface{} `json:"muted_segments"` + Chapters []chapter.Chapter `json:"chapters"` +} + +type TwitchLivestreamInfo struct { + ID string `json:"id"` + UserID 
string `json:"user_id"` + UserLogin string `json:"user_login"` + UserName string `json:"user_name"` + GameID string `json:"game_id"` + GameName string `json:"game_name"` + Type string `json:"type"` + Title string `json:"title"` + ViewerCount int64 `json:"viewer_count"` + StartedAt string `json:"started_at"` + Language string `json:"language"` + ThumbnailURL string `json:"thumbnail_url"` + TagIDS []string `json:"tag_ids"` + IsMature bool `json:"is_mature"` +} + +type TwitchChannelResponse struct { + Data []TwitchChannel `json:"data"` +} + +type TwitchChannel struct { + ID string `json:"id"` + Login string `json:"login"` + DisplayName string `json:"display_name"` + Type string `json:"type"` + BroadcasterType string `json:"broadcaster_type"` + Description string `json:"description"` + ProfileImageURL string `json:"profile_image_url"` + OfflineImageURL string `json:"offline_image_url"` + ViewCount int64 `json:"view_count"` + CreatedAt string `json:"created_at"` +} + +type TwitchLiveStreamsRepsponse struct { + Data []TwitchLivestreamInfo `json:"data"` + Pagination TwitchPagination `json:"pagination"` +} + +type TwitchCategoryResponse struct { + Data []TwitchCategory `json:"data"` + Pagination TwitchPagination `json:"pagination"` +} + +type TwitchCategory struct { + ID string `json:"id"` + Name string `json:"name"` + BoxArtURL string `json:"box_art_url"` + IgdbID string `json:"igdb_id"` +} + +type TwitchPagination struct { + Cursor string `json:"cursor"` +} + +type TwitchGlobalBadgeResponse struct { + Data []struct { + SetID string `json:"set_id"` + Versions []struct { + ID string `json:"id"` + ImageURL1X string `json:"image_url_1x"` + ImageURL2X string `json:"image_url_2x"` + ImageURL4X string `json:"image_url_4x"` + Title string `json:"title"` + Description string `json:"description"` + ClickAction string `json:"click_action"` + ClickURL string `json:"click_url"` + } `json:"versions"` + } `json:"data"` +} + +type TwitchGlobalEmoteResponse struct { + Data []struct { + ID string `json:"id"` + Name string `json:"name"` + Images struct { + URL1X string `json:"url_1x"` + URL2X string `json:"url_2x"` + URL4X string `json:"url_4x"` + } `json:"images"` + Format []string `json:"format"` + Scale []string `json:"scale"` + ThemeMode []string `json:"theme_mode"` + EmoteType string `json:"emote_type"` + } `json:"data"` + Template string `json:"template"` +} + +// authenticate sends a POST request to Twitch for authentication using client credentials. An AuthenTokenResponse is returned on success containing the access token. 
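The structs in this file only describe JSON shapes, so a quick standalone decode makes the mapping concrete: a Helix-style videos page unmarshals into Data plus Pagination.Cursor, and the duration string it carries is valid input for time.ParseDuration, which is how GetVideo and GetVideos above turn it into a time.Duration. The payload and the trimmed-down struct copies below are illustrative only, not captured from the real API.

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// Trimmed-down copies of TwitchGetVideosResponse / TwitchVideoInfo with only a few
// fields, so the example compiles without the repo's internal packages.
type videoInfo struct {
	ID       string `json:"id"`
	Title    string `json:"title"`
	Duration string `json:"duration"`
}

type getVideosResponse struct {
	Data       []videoInfo `json:"data"`
	Pagination struct {
		Cursor string `json:"cursor"`
	} `json:"pagination"`
}

func main() {
	// Illustrative payload in the Helix "Get Videos" shape.
	payload := []byte(`{
		"data": [{"id": "123456789", "title": "Example broadcast", "duration": "3h8m33s"}],
		"pagination": {"cursor": "example-cursor"}
	}`)

	var resp getVideosResponse
	if err := json.Unmarshal(payload, &resp); err != nil {
		panic(err)
	}

	// Helix durations like "3h8m33s" parse directly with time.ParseDuration.
	d, err := time.ParseDuration(resp.Data[0].Duration)
	if err != nil {
		panic(err)
	}

	fmt.Println(resp.Data[0].Title, d.Seconds(), "next cursor:", resp.Pagination.Cursor)
}
```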
+func twitchAuthenticate(clientId string, clientSecret string) (*AuthTokenResponse, error) { + client := &http.Client{} + + req, err := http.NewRequest("POST", "https://id.twitch.tv/oauth2/token", nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %v", err) + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + + q := url.Values{} + q.Set("client_id", clientId) + q.Set("client_secret", clientSecret) + q.Set("grant_type", "client_credentials") + req.URL.RawQuery = q.Encode() + + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to authenticate: %v", err) + } + + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("failed to authenticate: %v", resp) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response body: %v", err) + } + + var authTokenResponse AuthTokenResponse + err = json.Unmarshal(body, &authTokenResponse) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal response: %v", err) + } + + return &authTokenResponse, nil +} + +func (c *TwitchConnection) twitchMakeHTTPRequest(method, url string, queryParams url.Values, headers map[string]string) ([]byte, error) { + client := &http.Client{} + + for attempt := 0; attempt < maxRetryAttempts; attempt++ { + req, err := http.NewRequest(method, fmt.Sprintf("%s/%s", TwitchApiUrl, url), nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %v", err) + } + + // Set headers + for key, value := range headers { + req.Header.Set(key, value) + } + + // Set auth headers + req.Header.Set("Client-ID", c.ClientId) + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", c.AccessToken)) + + // Set query parameters + req.URL.RawQuery = queryParams.Encode() + + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to make request: %v", err) + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response body: %v", err) + } + + if resp.StatusCode == http.StatusTooManyRequests { + if attempt < maxRetryAttempts-1 { + time.Sleep(retryDelay) + continue + } + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("unexpected status code %d: %s", resp.StatusCode, body) + } + + return body, nil + } + + return nil, fmt.Errorf("max retry attempts reached") +} diff --git a/internal/platform/twitch_connection.go b/internal/platform/twitch_connection.go new file mode 100644 index 00000000..22bd807a --- /dev/null +++ b/internal/platform/twitch_connection.go @@ -0,0 +1,26 @@ +package platform + +import "context" + +type TwitchConnection struct { + ClientId string + ClientSecret string + AccessToken string +} + +func (c *TwitchConnection) Authenticate(ctx context.Context) (*ConnectionInfo, error) { + + info := ConnectionInfo{ + ClientId: c.ClientId, + ClientSecret: c.ClientSecret, + } + + authResponse, err := twitchAuthenticate(c.ClientId, c.ClientSecret) + if err != nil { + return nil, err + } + info.AccessToken = authResponse.AccessToken + c.AccessToken = authResponse.AccessToken + + return &info, nil +} diff --git a/internal/platform/twitch_gql.go b/internal/platform/twitch_gql.go new file mode 100644 index 00000000..b7f08243 --- /dev/null +++ b/internal/platform/twitch_gql.go @@ -0,0 +1,230 @@ +package platform + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + + "github.com/zibbp/ganymede/internal/chapter" +) + +type TwitchGQLVideoResponse 
struct { + Data TwitchGQLVideoData `json:"data"` + Extensions TwitchExtensions `json:"extensions"` +} + +type TwitchGQLVideoData struct { + Video TwitchGQLVideo `json:"video"` +} + +type TwitchGQLVideo struct { + BroadcastType string `json:"broadcastType"` + ResourceRestriction TwitchResourceRestriction `json:"resourceRestriction"` + Game TwitchGQLGame `json:"game"` + Title string `json:"title"` + CreatedAt string `json:"createdAt"` +} + +type TwitchGQLGame struct { + ID string `json:"id"` + Name string `json:"name"` +} + +type TwitchResourceRestriction struct { + ID string `json:"id"` + Type string `json:"type"` +} + +type TwitchExtensions struct { + DurationMilliseconds int64 `json:"durationMilliseconds"` + RequestID string `json:"requestID"` +} + +type TwitchGQLMutedSegmentsResponse struct { + Data TwitchGQLMutedSegmentsData `json:"data"` + Extensions TwitchExtensions `json:"extensions"` +} + +type TwitchGQLMutedSegmentsData struct { + Video TwitchGQLMutedSegmentsVideo `json:"video"` +} + +type TwitchGQLMutedSegmentsVideo struct { + ID string `json:"id"` + MuteInfo TwitchMuteInfo `json:"muteInfo"` +} + +type TwitchMuteInfo struct { + MutedSegmentConnection TwitchGQLMutedSegmentConnection `json:"mutedSegmentConnection"` + TypeName string `json:"__typename"` +} + +type TwitchGQLMutedSegmentConnection struct { + Nodes []TwitchGQLMutedSegment `json:"nodes"` +} + +type TwitchGQLMutedSegment struct { + Duration int `json:"duration"` + Offset int `json:"offset"` + TypeName string `json:"__typename"` +} + +type TwitchGQLChaptersResponse struct { + Data TwitchGQLChaptersData `json:"data"` + Extensions TwitchExtensions `json:"extensions"` +} + +type TwitchGQLChaptersData struct { + Video TwitchGQLChaptersVideo `json:"video"` +} + +type TwitchGQLChaptersVideo struct { + ID string `json:"id"` + Moments TwitchGQLMoments `json:"moments"` + Typename string `json:"__typename"` +} + +type TwitchGQLChapter struct { + Moments TwitchGQLMoments `json:"moments"` + ID string `json:"id"` + DurationMilliseconds int64 `json:"durationMilliseconds"` + PositionMilliseconds int64 `json:"positionMilliseconds"` + Type string `json:"type"` + Description string `json:"description"` + SubDescription string `json:"subDescription"` + ThumbnailURL string `json:"thumbnailURL"` + Details TwitchGQLDetails `json:"details"` + Video TwitchGQLNodeVideo `json:"video"` + Typename string `json:"__typename"` +} + +type TwitchGQLChapterEdge struct { + Node TwitchGQLChapter `json:"node"` + Typename string `json:"__typename"` +} + +type TwitchGQLMoments struct { + Edges []TwitchGQLChapterEdge `json:"edges"` + Typename string `json:"__typename"` +} + +type TwitchGQLDetails struct { + Game TwitchGQLGameInfo `json:"game"` + Typename string `json:"__typename"` +} + +type TwitchGQLGameInfo struct { + ID string `json:"id"` + DisplayName string `json:"displayName"` + BoxArtURL string `json:"boxArtURL"` + Typename string `json:"__typename"` +} + +type TwitchGQLNodeVideo struct { + ID string `json:"id"` + LengthSeconds int64 `json:"lengthSeconds"` + Typename string `json:"__typename"` +} + +// GQLRequest sends a generic GQL request and returns the response. 
+func twitchGQLRequest(body string) ([]byte, error) { + client := &http.Client{} + req, err := http.NewRequest("POST", "https://gql.twitch.tv/gql", strings.NewReader(body)) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + + req.Header.Set("Client-ID", "kimne78kx3ncx6brgo4mv6wki5h1ko") + req.Header.Set("Content-Type", "text/plain;charset=UTF-8") + req.Header.Set("Origin", "https://www.twitch.tv") + req.Header.Set("Referer", "https://www.twitch.tv/") + req.Header.Set("Sec-Fetch-Mode", "cors") + req.Header.Set("Sec-Fetch-Site", "same-site") + // req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36") + + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + defer resp.Body.Close() + + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + + return bodyBytes, nil +} + +func (c *TwitchConnection) TwitchGQLGetMutedSegments(id string) ([]TwitchGQLMutedSegment, error) { + body := fmt.Sprintf(`{"operationName":"VideoPlayer_MutedSegmentsAlertOverlay","variables":{"vodID":"%s","includePrivate":false},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"c36e7400657815f4704e6063d265dff766ed8fc1590361c6d71e4368805e0b49"}}}`, id) + respBytes, err := twitchGQLRequest(body) + if err != nil { + return nil, fmt.Errorf("error getting video muted segments: %w", err) + } + + var resp TwitchGQLMutedSegmentsResponse + err = json.Unmarshal(respBytes, &resp) + if err != nil { + return nil, fmt.Errorf("error unmarshalling response: %w", err) + } + + return resp.Data.Video.MuteInfo.MutedSegmentConnection.Nodes, nil +} + +func (c *TwitchConnection) TwitchGQLGetVideo(id string) (*TwitchGQLVideo, error) { + body := fmt.Sprintf(`{"query": "query{video(id:\"%s\"){broadcastType,resourceRestriction{id,type},game{id,name},title,createdAt}}"}`, id) + respBytes, err := twitchGQLRequest(body) + if err != nil { + return nil, fmt.Errorf("error getting video muted segments: %w", err) + } + + var resp TwitchGQLVideoResponse + err = json.Unmarshal(respBytes, &resp) + if err != nil { + return nil, fmt.Errorf("error unmarshalling response: %w", err) + } + + return &resp.Data.Video, nil +} + +func (c *TwitchConnection) TwitchGQLGetChapters(id string) ([]TwitchGQLChapterEdge, error) { + body := fmt.Sprintf(`{"operationName":"VideoPlayer_ChapterSelectButtonVideo","variables":{"videoID":"%s","includePrivate":false},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"8d2793384aac3773beab5e59bd5d6f585aedb923d292800119e03d40cd0f9b41"}}}`, id) + respBytes, err := twitchGQLRequest(body) + if err != nil { + return nil, fmt.Errorf("error getting video chapters: %w", err) + } + + var resp TwitchGQLChaptersResponse + err = json.Unmarshal(respBytes, &resp) + if err != nil { + return nil, fmt.Errorf("error unmarshalling response: %w", err) + } + + return resp.Data.Video.Moments.Edges, nil +} + +// convertTwitchChaptersToChapters converts Twitch chapters to chapters. Twitch chapters are in milliseconds. 
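twitchGQLRequest and its callers above build persisted-query bodies by hand with fmt.Sprintf (an operationName, a variables object, and a persistedQuery sha256Hash under extensions). As a hedged alternative sketch, the same shape can be produced with encoding/json, which sidesteps escaping concerns if an ID ever contains characters that need quoting; the operation name and hash below are copied from TwitchGQLGetChapters, the video ID is a placeholder, and the struct names are invented for this example. The marshaled body is equivalent in content, though key order inside "variables" may differ.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Shape of a Twitch GQL persisted-query request, matching the string literal
// used by TwitchGQLGetChapters above.
type gqlPersistedQuery struct {
	OperationName string         `json:"operationName"`
	Variables     map[string]any `json:"variables"`
	Extensions    gqlExtensions  `json:"extensions"`
}

type gqlExtensions struct {
	PersistedQuery gqlPersistedQueryHash `json:"persistedQuery"`
}

type gqlPersistedQueryHash struct {
	Version    int    `json:"version"`
	Sha256Hash string `json:"sha256Hash"`
}

// chaptersRequestBody builds the request body for the chapters persisted query.
func chaptersRequestBody(videoID string) ([]byte, error) {
	return json.Marshal(gqlPersistedQuery{
		OperationName: "VideoPlayer_ChapterSelectButtonVideo",
		Variables:     map[string]any{"videoID": videoID, "includePrivate": false},
		Extensions: gqlExtensions{
			PersistedQuery: gqlPersistedQueryHash{
				Version:    1,
				Sha256Hash: "8d2793384aac3773beab5e59bd5d6f585aedb923d292800119e03d40cd0f9b41",
			},
		},
	})
}

func main() {
	body, err := chaptersRequestBody("123456789")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}
```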
+func convertTwitchChaptersToChapters(chapters []TwitchGQLChapterEdge, duration int) ([]chapter.Chapter, error) { + if len(chapters) == 0 { + return []chapter.Chapter{}, nil + } + + convertedChapters := make([]chapter.Chapter, len(chapters)) + for i := 0; i < len(chapters); i++ { + convertedChapters[i].ID = chapters[i].Node.ID + convertedChapters[i].Title = chapters[i].Node.Description + convertedChapters[i].Type = string(chapters[i].Node.Type) + convertedChapters[i].Start = int(chapters[i].Node.PositionMilliseconds / 1000) + + if i+1 < len(chapters) { + convertedChapters[i].End = int(chapters[i+1].Node.PositionMilliseconds / 1000) + } else { + convertedChapters[i].End = duration + } + } + + return convertedChapters, nil +} diff --git a/internal/playback/playback.go b/internal/playback/playback.go index e9cc7c1d..b9211c9c 100644 --- a/internal/playback/playback.go +++ b/internal/playback/playback.go @@ -32,6 +32,8 @@ type GetPlayback struct { Vod *ent.Vod `json:"vod"` } +var ErrorPlaybackNotFound = fmt.Errorf("playback not found") + func (s *Service) UpdateProgress(c *auth.CustomContext, vID uuid.UUID, time int) error { uID := c.User.ID @@ -64,7 +66,7 @@ func (s *Service) GetProgress(c *auth.CustomContext, vID uuid.UUID) (*ent.Playba playbackEntry, err := s.Store.Client.Playback.Query().Where(playback.UserID(uID)).Where(playback.VodID(vID)).Only(c.Request().Context()) if err != nil { if _, ok := err.(*ent.NotFoundError); ok { - return nil, fmt.Errorf("playback not found") + return nil, ErrorPlaybackNotFound } return nil, fmt.Errorf("error getting playback: %v", err) } diff --git a/internal/playlist/playlist.go b/internal/playlist/playlist.go index cedb0e9b..82623e05 100644 --- a/internal/playlist/playlist.go +++ b/internal/playlist/playlist.go @@ -67,7 +67,9 @@ func (s *Service) GetPlaylists(c echo.Context) ([]*ent.Playlist, error) { } func (s *Service) GetPlaylist(c echo.Context, playlistID uuid.UUID) (*ent.Playlist, error) { - rPlaylist, err := s.Store.Client.Playlist.Query().Where(playlist.ID(playlistID)).WithVods().Order(ent.Desc(playlist.FieldCreatedAt)).Only(c.Request().Context()) + rPlaylist, err := s.Store.Client.Playlist.Query().Where(playlist.ID(playlistID)).WithVods(func(q *ent.VodQuery) { + q.WithChannel() + }).Order(ent.Desc(playlist.FieldCreatedAt)).Only(c.Request().Context()) // Order VODs by date streamed tmpVods := rPlaylist.Edges.Vods sort.Slice(tmpVods, func(i, j int) bool { diff --git a/internal/queue/queue.go b/internal/queue/queue.go index 3dd557ac..5de5b7cc 100644 --- a/internal/queue/queue.go +++ b/internal/queue/queue.go @@ -3,17 +3,20 @@ package queue import ( "context" "fmt" - "os/exec" - "strings" "time" "github.com/google/uuid" "github.com/labstack/echo/v4" + "github.com/riverqueue/river" + "github.com/riverqueue/river/rivertype" "github.com/rs/zerolog/log" "github.com/zibbp/ganymede/ent" "github.com/zibbp/ganymede/ent/queue" "github.com/zibbp/ganymede/internal/channel" + "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/database" + "github.com/zibbp/ganymede/internal/tasks" + tasks_client "github.com/zibbp/ganymede/internal/tasks/client" "github.com/zibbp/ganymede/internal/utils" "github.com/zibbp/ganymede/internal/vod" ) @@ -22,10 +25,17 @@ type Service struct { Store *database.Database VodService *vod.Service ChannelService *channel.Service + RiverClient *tasks_client.RiverClient } -func NewService(store *database.Database, vodService *vod.Service, channelService *channel.Service) *Service { - return &Service{Store: store, 
VodService: vodService, ChannelService: channelService} +type StartQueueTaskInput struct { + QueueId uuid.UUID + TaskName string + Continue bool +} + +func NewService(store *database.Database, vodService *vod.Service, channelService *channel.Service, riverClient *tasks_client.RiverClient) *Service { + return &Service{Store: store, VodService: vodService, ChannelService: channelService, RiverClient: riverClient} } type Queue struct { @@ -45,13 +55,15 @@ type Queue struct { TaskChatConvert utils.TaskStatus `json:"task_chat_convert"` TaskChatRender utils.TaskStatus `json:"task_chat_render"` TaskChatMove utils.TaskStatus `json:"task_chat_move"` + ArchiveChat bool `json:"archive_chat"` + RenderChat bool `json:"render_chat"` UpdatedAt time.Time `json:"updated_at"` CreatedAt time.Time `json:"created_at"` } func (s *Service) CreateQueueItem(queueDto Queue, vID uuid.UUID) (*ent.Queue, error) { if queueDto.LiveArchive { - q, err := s.Store.Client.Queue.Create().SetVodID(vID).SetLiveArchive(true).Save(context.Background()) + q, err := s.Store.Client.Queue.Create().SetVodID(vID).SetLiveArchive(true).SetArchiveChat(queueDto.ArchiveChat).SetRenderChat(queueDto.RenderChat).Save(context.Background()) if err != nil { if _, ok := err.(*ent.ConstraintError); ok { return nil, fmt.Errorf("queue item exists for vod or vod does not exist") @@ -61,7 +73,7 @@ func (s *Service) CreateQueueItem(queueDto Queue, vID uuid.UUID) (*ent.Queue, er } return q, nil } else { - q, err := s.Store.Client.Queue.Create().SetVodID(vID).Save(context.Background()) + q, err := s.Store.Client.Queue.Create().SetVodID(vID).SetArchiveChat(queueDto.ArchiveChat).SetRenderChat(queueDto.RenderChat).Save(context.Background()) if err != nil { if _, ok := err.(*ent.ConstraintError); ok { return nil, fmt.Errorf("queue item exists for vod or vod does not exist") @@ -75,7 +87,7 @@ func (s *Service) CreateQueueItem(queueDto Queue, vID uuid.UUID) (*ent.Queue, er } func (s *Service) UpdateQueueItem(queueDto Queue, qID uuid.UUID) (*ent.Queue, error) { - q, err := s.Store.Client.Queue.UpdateOneID(qID).SetLiveArchive(queueDto.LiveArchive).SetOnHold(queueDto.OnHold).SetVideoProcessing(queueDto.VideoProcessing).SetChatProcessing(queueDto.ChatProcessing).SetProcessing(queueDto.Processing).SetTaskVodCreateFolder(queueDto.TaskVodCreateFolder).SetTaskVodDownloadThumbnail(queueDto.TaskVodDownloadThumbnail).SetTaskVodSaveInfo(queueDto.TaskVodSaveInfo).SetTaskVideoDownload(queueDto.TaskVideoDownload).SetTaskVideoConvert(queueDto.TaskVideoConvert).SetTaskVideoMove(queueDto.TaskVideoMove).SetTaskChatDownload(queueDto.TaskChatDownload).SetTaskChatConvert(queueDto.TaskChatConvert).SetTaskChatRender(queueDto.TaskChatRender).SetTaskChatMove(queueDto.TaskChatMove).Save(context.Background()) + q, err := 
s.Store.Client.Queue.UpdateOneID(qID).SetLiveArchive(queueDto.LiveArchive).SetOnHold(queueDto.OnHold).SetVideoProcessing(queueDto.VideoProcessing).SetChatProcessing(queueDto.ChatProcessing).SetProcessing(queueDto.Processing).SetTaskVodCreateFolder(queueDto.TaskVodCreateFolder).SetTaskVodDownloadThumbnail(queueDto.TaskVodDownloadThumbnail).SetTaskVodSaveInfo(queueDto.TaskVodSaveInfo).SetTaskVideoDownload(queueDto.TaskVideoDownload).SetTaskVideoConvert(queueDto.TaskVideoConvert).SetTaskVideoMove(queueDto.TaskVideoMove).SetTaskChatDownload(queueDto.TaskChatDownload).SetTaskChatConvert(queueDto.TaskChatConvert).SetArchiveChat(queueDto.ArchiveChat).SetRenderChat(queueDto.RenderChat).SetTaskChatRender(queueDto.TaskChatRender).SetTaskChatMove(queueDto.TaskChatMove).Save(context.Background()) if err != nil { return nil, fmt.Errorf("error updating queue: %v", err) } @@ -83,14 +95,18 @@ func (s *Service) UpdateQueueItem(queueDto Queue, qID uuid.UUID) (*ent.Queue, er } func (s *Service) GetQueueItems(c echo.Context) ([]*ent.Queue, error) { - q, err := s.Store.Client.Queue.Query().WithVod().Order(ent.Desc(queue.FieldCreatedAt)).All(c.Request().Context()) + q, err := s.Store.Client.Queue.Query().WithVod(func(q *ent.VodQuery) { + q.WithChannel() + }).Order(ent.Desc(queue.FieldCreatedAt)).All(c.Request().Context()) if err != nil { return nil, fmt.Errorf("error getting queue task: %v", err) } return q, nil } func (s *Service) GetQueueItemsFilter(c echo.Context, processing bool) ([]*ent.Queue, error) { - q, err := s.Store.Client.Queue.Query().Where(queue.Processing(processing)).WithVod().Order(ent.Asc(queue.FieldCreatedAt)).All(c.Request().Context()) + q, err := s.Store.Client.Queue.Query().Where(queue.Processing(processing)).WithVod(func(q *ent.VodQuery) { + q.WithChannel() + }).Order(ent.Asc(queue.FieldCreatedAt)).All(c.Request().Context()) if err != nil { return nil, fmt.Errorf("error getting queue task: %v", err) } @@ -114,11 +130,12 @@ func (s *Service) GetQueueItem(qID uuid.UUID) (*ent.Queue, error) { } func (s *Service) ReadLogFile(c echo.Context, qID uuid.UUID, logType string) ([]byte, error) { + env := config.GetEnvConfig() q, err := s.GetQueueItem(qID) if err != nil { return nil, err } - path := fmt.Sprintf("/logs/%s-%s.log", q.Edges.Vod.ID, logType) + path := fmt.Sprintf("%s/%s-%s.log", env.LogsDir, q.Edges.Vod.ID, logType) logLines, err := utils.ReadLastLines(path, 20) if err != nil { return nil, err @@ -134,33 +151,112 @@ func (s *Service) ArchiveGetQueueItem(qID uuid.UUID) (*ent.Queue, error) { return q, nil } -// StopQueueItem -// kills the streamlink process for a queue item -// which in turn will stop the chat download and proceed to post processing -func (s *Service) StopQueueItem(c echo.Context, id uuid.UUID) error { - // get vod - v, err := database.DB().Client.Queue.Query().Where(queue.ID(id)).WithVod().First(c.Request().Context()) +// StopQueueItem stops a queue item's tasks by canceling each job's context +func (s *Service) StopQueueItem(ctx context.Context, id uuid.UUID) error { + + err := s.RiverClient.CancelJobsForQueueId(ctx, id) if err != nil { - return fmt.Errorf("error getting queue item: %v", err) + return err } - log.Debug().Msgf("running: pgrep -f 'streamlink.*%s' | xargs kill\n", v.Edges.Vod.ExtID) - // get pid using the vod id - getPid := exec.Command("pgrep", "-f", fmt.Sprintf("streamlink.*%s", v.Edges.Vod.ExtID)) - // kill pid - killPid := exec.Command("xargs", "kill", "-INT") - getPidOutput, err := getPid.Output() + + return nil +} + +func (s *Service) 
StartQueueTask(ctx context.Context, input StartQueueTaskInput) (*rivertype.JobRow, error) { + + // ensure queue exists + _, err := s.GetQueueItem(input.QueueId) if err != nil { - log.Error().Err(err).Msgf("error getting pid for queue item: %v", err) - return fmt.Errorf("error getting pid queue item: %v", err) + return nil, err + } + + var task river.JobArgs + + taskInput := tasks.ArchiveVideoInput{ + QueueId: input.QueueId, } - killPid.Stdin = strings.NewReader(string(getPidOutput)) + switch input.TaskName { + case "task_vod_create_folder": + task = tasks.CreateDirectoryArgs{ + Continue: input.Continue, + Input: taskInput, + } + + case "task_vod_download_thumbnail": + task = tasks.DownloadThumbnailArgs{ + Continue: input.Continue, + Input: taskInput, + } + + case "task_vod_save_info": + task = tasks.SaveVideoInfoArgs{ + Continue: input.Continue, + Input: taskInput, + } - err = killPid.Run() + case "task_video_download": + task = tasks.DownloadVideoArgs{ + Continue: input.Continue, + Input: taskInput, + } + + case "task_video_convert": + task = tasks.PostProcessVideoArgs{ + Continue: input.Continue, + Input: taskInput, + } + + case "task_video_move": + task = tasks.MoveVideoArgs{ + Continue: input.Continue, + Input: taskInput, + } + + case "task_chat_download": + task = tasks.DownloadChatArgs{ + Continue: input.Continue, + Input: taskInput, + } + + case "task_chat_convert": + task = tasks.ConvertLiveChatArgs{ + Continue: input.Continue, + Input: taskInput, + } + + case "task_chat_render": + task = tasks.RenderChatArgs{ + Continue: input.Continue, + Input: taskInput, + } + + case "task_chat_move": + task = tasks.MoveChatArgs{ + Continue: input.Continue, + Input: taskInput, + } + + case "task_live_chat_download": + task = tasks.DownloadLiveChatArgs{ + Continue: input.Continue, + Input: taskInput, + } + + case "task_live_video_download": + task = tasks.DownloadLiveVideoArgs{ + Continue: input.Continue, + Input: taskInput, + } + + default: + return nil, fmt.Errorf("unknown task: %s", input.TaskName) + } + + job, err := s.RiverClient.Client.Insert(ctx, task, nil) if err != nil { - log.Error().Err(err).Msgf("error killing pid for queue item: %v", err) - return fmt.Errorf("error killing pid queue item: %v", err) + return nil, err } - return nil + return job.Job, err } diff --git a/internal/scheduler/scheduler.go b/internal/scheduler/scheduler.go index 37f824b4..f25a9671 100644 --- a/internal/scheduler/scheduler.go +++ b/internal/scheduler/scheduler.go @@ -1,17 +1,14 @@ package scheduler import ( - "os" + "context" "time" "github.com/go-co-op/gocron" "github.com/rs/zerolog/log" - "github.com/spf13/viper" "github.com/zibbp/ganymede/internal/archive" - "github.com/zibbp/ganymede/internal/auth" + "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/live" - "github.com/zibbp/ganymede/internal/task" - "github.com/zibbp/ganymede/internal/twitch" ) type Service struct { @@ -23,14 +20,6 @@ func NewService(liveService *live.Service, archiveService *archive.Service) *Ser return &Service{LiveService: liveService, ArchiveService: archiveService} } -func (s *Service) StartAppScheduler() { - scheduler := gocron.NewScheduler(time.UTC) - - s.twitchAuthSchedule(scheduler) - - scheduler.StartAsync() -} - func (s *Service) StartLiveScheduler() { time.Sleep(time.Second * 5) scheduler := gocron.NewScheduler(time.UTC) @@ -40,85 +29,14 @@ func (s *Service) StartLiveScheduler() { scheduler.StartAsync() } -func (s *Service) StartWatchVideoScheduler() { - time.Sleep(time.Second * 5) - // get tz - 
var tz string - tz = os.Getenv("TZ") - if tz == "" { - tz = "UTC" - } - loc, err := time.LoadLocation(tz) - if err != nil { - log.Info().Err(err).Msg("failed to load location, defaulting to UTC") - loc = time.UTC - } - scheduler := gocron.NewScheduler(loc) - - s.checkWatchedChannelVideos(scheduler) - - scheduler.StartAsync() -} - -func (s *Service) StartJwksScheduler() { - time.Sleep(time.Second * 5) - scheduler := gocron.NewScheduler(time.UTC) - - s.fetchJwksSchedule(scheduler) - - scheduler.StartAsync() -} - -func (s *Service) StartTwitchCategoriesScheduler() { - time.Sleep(time.Second * 5) - scheduler := gocron.NewScheduler(time.UTC) - - s.setTwitchCategoriesSchedule(scheduler) - - scheduler.StartAsync() -} - -func (s *Service) StartPruneVideoScheduler() { - time.Sleep(time.Second * 5) - // get tz - var tz string - tz = os.Getenv("TZ") - if tz == "" { - tz = "UTC" - } - loc, err := time.LoadLocation(tz) - if err != nil { - log.Info().Err(err).Msg("failed to load location, defaulting to UTC") - loc = time.UTC - } - scheduler := gocron.NewScheduler(loc) - - s.pruneVideoSchedule(scheduler) - - scheduler.StartAsync() -} - -func (s *Service) twitchAuthSchedule(scheduler *gocron.Scheduler) { - log.Debug().Msg("setting up twitch auth schedule") - _, err := scheduler.Every(7).Days().Do(func() { - log.Debug().Msg("running twitch auth schedule") - err := twitch.Authenticate() - if err != nil { - log.Error().Err(err).Msg("failed to authenticate with twitch") - } - }) - if err != nil { - log.Error().Err(err).Msg("failed to set up twitch auth schedule") - } -} - func (s *Service) checkLiveStreamSchedule(scheduler *gocron.Scheduler) { log.Debug().Msg("setting up check live stream schedule") - configLiveCheckInterval := viper.GetInt("live_check_interval_seconds") - log.Debug().Msgf("setting live check interval to run every %d seconds", configLiveCheckInterval) - _, err := scheduler.Every(configLiveCheckInterval).Seconds().Do(func() { + config := config.Get() + log.Debug().Msgf("setting live check interval to run every %d seconds", config.LiveCheckInterval) + _, err := scheduler.Every(config.LiveCheckInterval).Seconds().Do(func() { + ctx := context.Background() log.Debug().Msg("running check live stream schedule") - err := s.LiveService.Check() + err := s.LiveService.Check(ctx) if err != nil { log.Error().Err(err).Msg("failed to check live streams") } @@ -127,56 +45,3 @@ func (s *Service) checkLiveStreamSchedule(scheduler *gocron.Scheduler) { log.Error().Err(err).Msg("failed to set up check live stream schedule") } } - -func (s *Service) checkWatchedChannelVideos(schedule *gocron.Scheduler) { - log.Info().Msg("setting up check watched channel videos schedule") - - configCheckVideoInterval := viper.GetInt("video_check_interval_minutes") - log.Debug().Msgf("setting video check interval to run every %d minutes", configCheckVideoInterval) - _, err := schedule.Every(configCheckVideoInterval).Minutes().Do(func() { - log.Info().Msg("running check watched channel videos schedule") - s.LiveService.CheckVodWatchedChannels() - }) - if err != nil { - log.Error().Err(err).Msg("failed to set up check watched channel videos schedule") - } -} - -func (s *Service) fetchJwksSchedule(scheduler *gocron.Scheduler) { - log.Debug().Msg("setting up fetch jwks schedule") - _, err := scheduler.Every(1).Days().Do(func() { - log.Debug().Msg("running fetch jwks schedule") - err := auth.FetchJWKS() - if err != nil { - log.Error().Err(err).Msg("failed to fetch jwks") - } - }) - if err != nil { - log.Error().Err(err).Msg("failed to 
set up fetch jwks schedule") - } -} - -func (s *Service) setTwitchCategoriesSchedule(scheduler *gocron.Scheduler) { - log.Debug().Msg("setting up twitch categories schedule") - _, err := scheduler.Every(7).Days().Do(func() { - log.Debug().Msg("running set twitch categories schedule") - err := twitch.SetTwitchCategories() - if err != nil { - log.Error().Err(err).Msg("failed to set twitch categories") - } - }) - if err != nil { - log.Error().Err(err).Msg("failed to set up set twitch categories schedule") - } -} - -func (s *Service) pruneVideoSchedule(scheduler *gocron.Scheduler) { - log.Debug().Msg("setting up prune video schedule") - _, err := scheduler.Every(1).Day().At("01:00").Do(func() { - log.Info().Msg("running prune videos task") - task.PruneVideos() - }) - if err != nil { - log.Error().Err(err).Msg("failed to set up prune videos schedule") - } -} diff --git a/internal/server/server.go b/internal/server/server.go new file mode 100644 index 00000000..348b4855 --- /dev/null +++ b/internal/server/server.go @@ -0,0 +1,169 @@ +package server + +import ( + "context" + "fmt" + + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" + "github.com/rs/zerolog/pkgerrors" + "github.com/zibbp/ganymede/internal/admin" + "github.com/zibbp/ganymede/internal/archive" + "github.com/zibbp/ganymede/internal/auth" + "github.com/zibbp/ganymede/internal/blocked" + "github.com/zibbp/ganymede/internal/category" + "github.com/zibbp/ganymede/internal/channel" + "github.com/zibbp/ganymede/internal/chapter" + "github.com/zibbp/ganymede/internal/config" + "github.com/zibbp/ganymede/internal/database" + _ "github.com/zibbp/ganymede/internal/kv" + "github.com/zibbp/ganymede/internal/live" + "github.com/zibbp/ganymede/internal/metrics" + "github.com/zibbp/ganymede/internal/platform" + "github.com/zibbp/ganymede/internal/playback" + "github.com/zibbp/ganymede/internal/playlist" + "github.com/zibbp/ganymede/internal/queue" + "github.com/zibbp/ganymede/internal/scheduler" + "github.com/zibbp/ganymede/internal/task" + tasks_client "github.com/zibbp/ganymede/internal/tasks/client" + transportHttp "github.com/zibbp/ganymede/internal/transport/http" + "github.com/zibbp/ganymede/internal/user" + "github.com/zibbp/ganymede/internal/vod" +) + +type Application struct { + EnvConfig config.EnvConfig + Database *database.Database + Store *database.Database + ArchiveService *archive.Service + PlatformTwitch platform.Platform + AdminService *admin.Service + AuthService *auth.Service + ChannelService *channel.Service + VodService *vod.Service + QueueService *queue.Service + UserService *user.Service + LiveService *live.Service + SchedulerService *scheduler.Service + PlaybackService *playback.Service + MetricsService *metrics.Service + PlaylistService *playlist.Service + TaskService *task.Service + ChapterService *chapter.Service + CategoryService *category.Service + BlockedVodService *blocked.Service +} + +func SetupApplication(ctx context.Context) (*Application, error) { + envConfig := config.GetEnvConfig() + envAppConfig := config.GetEnvApplicationConfig() + _, err := config.Init() + if err != nil { + log.Panic().Err(err).Msg("error getting config") + } + + zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack + if envConfig.DEBUG { + log.Info().Msg("debug mode enabled") + zerolog.SetGlobalLevel(zerolog.DebugLevel) + } else { + zerolog.SetGlobalLevel(zerolog.InfoLevel) + } + + dbString := fmt.Sprintf("user=%s password=%s host=%s port=%s dbname=%s sslmode=%s", envAppConfig.DB_USER, envAppConfig.DB_PASS, envAppConfig.DB_HOST, 
envAppConfig.DB_PORT, envAppConfig.DB_NAME, envAppConfig.DB_SSL) + + db := database.NewDatabase(ctx, database.DatabaseConnectionInput{ + DBString: dbString, + IsWorker: false, + }) + + // application migrations + // check if VideosDir or TempDir changed + if err := db.VideosDirMigrate(ctx, envConfig.VideosDir); err != nil { + return nil, fmt.Errorf("error migrating videos dir: %v", err) + } + if err := db.TempDirMigrate(ctx, envConfig.TempDir); err != nil { + return nil, fmt.Errorf("error migrating temp dir: %v", err) + } + + // Initialize river client + riverClient, err := tasks_client.NewRiverClient(tasks_client.RiverClientInput{ + DB_URL: dbString, + }) + if err != nil { + return nil, fmt.Errorf("error creating river client: %v", err) + } + + err = riverClient.RunMigrations() + if err != nil { + return nil, fmt.Errorf("error running migrations: %v", err) + } + + var platformTwitch platform.Platform + // setup twitch platform + if envConfig.TwitchClientId != "" && envConfig.TwitchClientSecret != "" { + platformTwitch = &platform.TwitchConnection{ + ClientId: envConfig.TwitchClientId, + ClientSecret: envConfig.TwitchClientSecret, + } + _, err = platformTwitch.Authenticate(ctx) + if err != nil { + log.Panic().Err(err).Msg("Error authenticating to Twitch") + } + } + + authService := auth.NewService(db) + channelService := channel.NewService(db, platformTwitch) + vodService := vod.NewService(db, riverClient, platformTwitch) + queueService := queue.NewService(db, vodService, channelService, riverClient) + blockedVodService := blocked.NewService(db) + archiveService := archive.NewService(db, channelService, vodService, queueService, blockedVodService, riverClient, platformTwitch) + adminService := admin.NewService(db) + userService := user.NewService(db) + liveService := live.NewService(db, archiveService, platformTwitch) + schedulerService := scheduler.NewService(liveService, archiveService) + playbackService := playback.NewService(db) + metricsService := metrics.NewService(db, riverClient) + playlistService := playlist.NewService(db) + taskService := task.NewService(db, liveService, riverClient) + chapterService := chapter.NewService(db) + categoryService := category.NewService(db) + + return &Application{ + EnvConfig: envConfig, + Database: db, + AuthService: authService, + ChannelService: channelService, + VodService: vodService, + QueueService: queueService, + BlockedVodService: blockedVodService, + ArchiveService: archiveService, + AdminService: adminService, + UserService: userService, + LiveService: liveService, + SchedulerService: schedulerService, + PlaybackService: playbackService, + MetricsService: metricsService, + PlaylistService: playlistService, + TaskService: taskService, + ChapterService: chapterService, + CategoryService: categoryService, + PlatformTwitch: platformTwitch, + }, nil +} + +func Run(ctx context.Context) error { + + app, err := SetupApplication(ctx) + if err != nil { + return err + } + + httpHandler := transportHttp.NewHandler(app.AuthService, app.ChannelService, app.VodService, app.QueueService, app.ArchiveService, app.AdminService, app.UserService, app.LiveService, app.SchedulerService, app.PlaybackService, app.MetricsService, app.PlaylistService, app.TaskService, app.ChapterService, app.CategoryService, app.BlockedVodService, app.PlatformTwitch) + + if err := httpHandler.Serve(ctx); err != nil { + return err + } + + return nil +} diff --git a/internal/task/task.go b/internal/task/task.go index a19dfbdc..b6064e84 100644 --- a/internal/task/task.go +++ 
b/internal/task/task.go @@ -3,70 +3,84 @@ package task import ( "context" "fmt" - "net/http" "os" "path" "strings" - "time" - "github.com/labstack/echo/v4" + "github.com/google/uuid" "github.com/rs/zerolog/log" - "github.com/zibbp/ganymede/ent/channel" - entChannel "github.com/zibbp/ganymede/ent/channel" - entVod "github.com/zibbp/ganymede/ent/vod" "github.com/zibbp/ganymede/internal/archive" - "github.com/zibbp/ganymede/internal/auth" + "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/database" "github.com/zibbp/ganymede/internal/live" - "github.com/zibbp/ganymede/internal/twitch" - "github.com/zibbp/ganymede/internal/vod" + "github.com/zibbp/ganymede/internal/tasks" + tasks_client "github.com/zibbp/ganymede/internal/tasks/client" + tasks_periodic "github.com/zibbp/ganymede/internal/tasks/periodic" ) type Service struct { - Store *database.Database - LiveService *live.Service - ArchiveService *archive.Service + Store *database.Database + LiveService *live.Service + RiverClient *tasks_client.RiverClient } -func NewService(store *database.Database, liveService *live.Service, archiveService *archive.Service) *Service { - return &Service{Store: store, LiveService: liveService, ArchiveService: archiveService} +func NewService(store *database.Database, liveService *live.Service, riverClient *tasks_client.RiverClient) *Service { + return &Service{Store: store, LiveService: liveService, RiverClient: riverClient} } -func (s *Service) StartTask(c echo.Context, task string) error { - log.Info().Msgf("Manually starting task %s", task) +func (s *Service) StartTask(ctx context.Context, task string) error { + log.Info().Msgf("manually starting task %s", task) switch task { case "check_live": - err := s.LiveService.Check() + err := s.LiveService.Check(ctx) if err != nil { return fmt.Errorf("error checking live: %v", err) } case "check_vod": - go s.LiveService.CheckVodWatchedChannels() - - case "get_jwks": - err := auth.FetchJWKS() + task, err := s.RiverClient.Client.Insert(ctx, tasks_periodic.CheckChannelsForNewVideosArgs{}, nil) if err != nil { - return fmt.Errorf("error fetching jwks: %v", err) + return fmt.Errorf("error inserting task: %v", err) } + log.Info().Str("task_id", fmt.Sprintf("%d", task.Job.ID)).Msgf("task created") - case "twitch_auth": - err := twitch.Authenticate() + case "get_jwks": + task, err := s.RiverClient.Client.Insert(ctx, tasks_periodic.FetchJWKSArgs{}, nil) if err != nil { - return fmt.Errorf("error authenticating twitch: %v", err) + return fmt.Errorf("error inserting task: %v", err) } + log.Info().Str("task_id", fmt.Sprintf("%d", task.Job.ID)).Msgf("task created") case "storage_migration": go func() { err := s.StorageMigration() if err != nil { - log.Error().Err(err).Msg("Error migrating storage") + log.Error().Err(err).Msg("error migrating storage") } }() case "prune_videos": - go PruneVideos() + task, err := s.RiverClient.Client.Insert(ctx, tasks_periodic.PruneVideosArgs{}, nil) + if err != nil { + return fmt.Errorf("error inserting task: %v", err) + } + log.Info().Str("task_id", fmt.Sprintf("%d", task.Job.ID)).Msgf("task created") + + case "save_chapters": + task, err := s.RiverClient.Client.Insert(ctx, tasks_periodic.SaveVideoChaptersArgs{}, nil) + if err != nil { + return fmt.Errorf("error inserting task: %v", err) + } + log.Info().Str("task_id", fmt.Sprintf("%d", task.Job.ID)).Msgf("task created") + + case "update_stream_vod_ids": + task, err := s.RiverClient.Client.Insert(ctx, tasks.UpdateStreamVideoIdArgs{Input: 
tasks.ArchiveVideoInput{QueueId: uuid.Nil}}, nil) + if err != nil { + return fmt.Errorf("error inserting task: %v", err) + } + log.Info().Str("task_id", fmt.Sprintf("%d", task.Job.ID)).Msgf("task created") + } return nil @@ -83,19 +97,20 @@ func (s *Service) StorageMigration() error { for _, video := range videos { // Populate templates - vDto := twitch.Vod{ - ID: video.ExtID, - UserLogin: video.Edges.Channel.Name, - Title: video.Title, - Type: string(video.Type), - CreatedAt: video.StreamedAt.Format(time.RFC3339), + storageTemplateInput := archive.StorageTemplateInput{ + UUID: video.ID, + ID: video.ExtID, + Channel: video.Edges.Channel.Name, + Title: video.Title, + Type: string(video.Type), + Date: video.CreatedAt.Format("2006-01-02"), } - folderName, err := archive.GetFolderName(video.ID, vDto) + folderName, err := archive.GetFolderName(video.ID, storageTemplateInput) if err != nil { log.Error().Err(err).Msgf("Error getting folder name for video %s", video.ID) continue } - fileName, err := archive.GetFileName(video.ID, vDto) + fileName, err := archive.GetFileName(video.ID, storageTemplateInput) if err != nil { log.Error().Err(err).Msgf("Error getting file name for video %s", video.ID) continue @@ -107,7 +122,8 @@ func (s *Service) StorageMigration() error { // Add array of strings together seperated by / oldRootFolderPath := strings.Join(tmpRootFolder, "/") - newRootFolderPath := fmt.Sprintf("/vods/%s/%s", video.Edges.Channel.Name, folderName) + envConfig := config.GetEnvConfig() + newRootFolderPath := fmt.Sprintf("/%s/%s/%s", envConfig.VideosDir, video.Edges.Channel.Name, folderName) // Rename files first // Video @@ -248,49 +264,3 @@ func (s *Service) StorageMigration() error { return nil } - -func PruneVideos() { - // setup - vodService := &vod.Service{Store: database.DB()} - req := &http.Request{} - ctx := context.Background() - echoCtx := echo.New().NewContext(req, nil) - echoCtx.SetRequest(req.WithContext(ctx)) - - // fetch all channels that have retention enable - channels, err := database.DB().Client.Channel.Query().Where(channel.Retention(true)).All(context.Background()) - if err != nil { - log.Error().Err(err).Msg("Error fetching channels") - return - } - log.Debug().Msgf("Found %d channels with retention enabled", len(channels)) - - // loop over channels - for _, channel := range channels { - log.Debug().Msgf("Processing channel %s", channel.ID) - // fetch all videos for channel - videos, err := database.DB().Client.Vod.Query().Where(entVod.HasChannelWith(entChannel.ID(channel.ID))).All(context.Background()) - if err != nil { - log.Error().Err(err).Msgf("Error fetching videos for channel %s", channel.ID) - continue - } - - // loop over videos - for _, video := range videos { - // check if video is locked - if video.Locked { - continue - } - // check if video is older than retention - if video.CreatedAt.Add(time.Duration(channel.RetentionDays) * 24 * time.Hour).Before(time.Now()) { - // delete video - err := vodService.DeleteVod(echoCtx, video.ID, true) - if err != nil { - log.Error().Err(err).Msgf("Error deleting video %s", video.ID) - continue - } - } - } - - } -} diff --git a/internal/tasks/chat.go b/internal/tasks/chat.go new file mode 100644 index 00000000..ef7c2896 --- /dev/null +++ b/internal/tasks/chat.go @@ -0,0 +1,316 @@ +package tasks + +import ( + "context" + "time" + + "github.com/jackc/pgx/v5" + "github.com/riverqueue/river" + "github.com/zibbp/ganymede/internal/database" + "github.com/zibbp/ganymede/internal/errors" + "github.com/zibbp/ganymede/internal/exec" + 
"github.com/zibbp/ganymede/internal/utils" +) + +// ////////////////////// +// Download Chat (VOD) // +// ////////////////////// +type DownloadChatArgs struct { + Continue bool `json:"continue"` + Input ArchiveVideoInput `json:"input"` +} + +func (DownloadChatArgs) Kind() string { return string(utils.TaskDownloadChat) } + +func (args DownloadChatArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + Queue: QueueChatDownload, + Tags: []string{"archive"}, + } +} + +func (w DownloadChatArgs) Timeout(job *river.Job[DownloadChatArgs]) time.Duration { + return 49 * time.Hour +} + +type DownloadChatWorker struct { + river.WorkerDefaults[DownloadChatArgs] +} + +func (w DownloadChatWorker) Work(ctx context.Context, job *river.Job[DownloadChatArgs]) error { + // get store from context + store, err := StoreFromContext(ctx) + if err != nil { + return err + } + + // set queue status to running + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Running, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskDownloadChat, + }) + if err != nil { + return err + } + + // start task heartbeat + go startHeartBeatForTask(ctx, HeartBeatInput{ + TaskId: job.ID, + conn: store.ConnPool, + }) + + dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId) + if err != nil { + return err + } + + // download video + err = exec.DownloadTwitchChat(ctx, dbItems.Video) + if err != nil { + return err + } + + // set queue status to completed + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Success, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskDownloadChat, + }) + if err != nil { + return err + } + + // continue with next job + if job.Args.Continue { + client := river.ClientFromContext[pgx.Tx](ctx) + if dbItems.Queue.RenderChat { + _, err = client.Insert(ctx, &RenderChatArgs{ + Continue: true, + Input: job.Args.Input, + }, nil) + if err != nil { + return err + } + } else { + _, err = client.Insert(ctx, &MoveChatArgs{ + Continue: true, + Input: job.Args.Input, + }, nil) + if err != nil { + return err + } + } + } + + // check if tasks are done + if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil { + return err + } + + return nil +} + +// //////////////////// +// Render Chat (VOD) // +// //////////////////// +type RenderChatArgs struct { + Continue bool `json:"continue"` + Input ArchiveVideoInput `json:"input"` +} + +func (RenderChatArgs) Kind() string { return string(utils.TaskRenderChat) } + +func (args RenderChatArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + Queue: QueueChatRender, + Tags: []string{"archive"}, + } +} + +func (w RenderChatArgs) Timeout(job *river.Job[RenderChatArgs]) time.Duration { + return 49 * time.Hour +} + +type RenderChatWorker struct { + river.WorkerDefaults[RenderChatArgs] +} + +func (w RenderChatWorker) Work(ctx context.Context, job *river.Job[RenderChatArgs]) error { + // get store from context + store, err := StoreFromContext(ctx) + if err != nil { + return err + } + + // set queue status to running + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Running, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskRenderChat, + }) + if err != nil { + return err + } + + // start task heartbeat + go startHeartBeatForTask(ctx, HeartBeatInput{ + TaskId: job.ID, + conn: store.ConnPool, + }) + + dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId) + if err != nil { + return err + } + + 
continueArchive := true + + // render chat + err = exec.RenderTwitchChat(ctx, dbItems.Video) + if err != nil { + + // check if chat render has no messages + // not a real error - continue with next job + if errors.Is(err, errors.ErrNoChatMessages) { + continueArchive = false + // set video chat path to empty + _, err = database.DB().Client.Vod.UpdateOneID(dbItems.Video.ID).SetChatPath("").SetChatVideoPath("").Save(ctx) + if err != nil { + return err + } + // set queue chat to completed + _, err = database.DB().Client.Queue.UpdateOneID(job.Args.Input.QueueId).SetChatProcessing(false).SetTaskChatMove(utils.Success).Save(ctx) + if err != nil { + return err + } + } else { + return err + } + } + + // set queue status to completed + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Success, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskRenderChat, + }) + if err != nil { + return err + } + + // continue with next job + if job.Args.Continue && continueArchive { + client := river.ClientFromContext[pgx.Tx](ctx) + _, err := client.Insert(ctx, &MoveChatArgs{ + Continue: true, + Input: job.Args.Input, + }, nil) + if err != nil { + return err + } + } + + // check if tasks are done + if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil { + return err + } + + return nil +} + +// //////////// +// Move Chat // +// /////////// +type MoveChatArgs struct { + Continue bool `json:"continue"` + Input ArchiveVideoInput `json:"input"` +} + +func (MoveChatArgs) Kind() string { return string(utils.TaskMoveChat) } + +func (args MoveChatArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + Tags: []string{"archive"}, + } +} + +func (w MoveChatArgs) Timeout(job *river.Job[MoveChatArgs]) time.Duration { + return 49 * time.Hour +} + +type MoveChatWorker struct { + river.WorkerDefaults[MoveChatArgs] +} + +func (w MoveChatWorker) Work(ctx context.Context, job *river.Job[MoveChatArgs]) error { + // get store from context + store, err := StoreFromContext(ctx) + if err != nil { + return err + } + + // set queue status to running + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Running, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskMoveChat, + }) + if err != nil { + return err + } + + // start task heartbeat + go startHeartBeatForTask(ctx, HeartBeatInput{ + TaskId: job.ID, + conn: store.ConnPool, + }) + + dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId) + if err != nil { + return err + } + + err = utils.MoveFile(ctx, dbItems.Video.TmpChatDownloadPath, dbItems.Video.ChatPath) + if err != nil { + return err + } + + if dbItems.Queue.LiveArchive { + err = utils.MoveFile(ctx, dbItems.Video.TmpLiveChatDownloadPath, dbItems.Video.LiveChatPath) + if err != nil { + return err + } + err = utils.MoveFile(ctx, dbItems.Video.TmpLiveChatConvertPath, dbItems.Video.LiveChatConvertPath) + if err != nil { + return err + } + } + + if dbItems.Queue.RenderChat { + err = utils.MoveFile(ctx, dbItems.Video.TmpChatRenderPath, dbItems.Video.ChatVideoPath) + if err != nil { + return err + } + } + + // set queue status to completed + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Success, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskMoveChat, + }) + if err != nil { + return err + } + + // check if tasks are done + if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil { + return err + } + + return nil +} diff --git a/internal/tasks/client/client.go 
b/internal/tasks/client/client.go new file mode 100644 index 00000000..0f062111 --- /dev/null +++ b/internal/tasks/client/client.go @@ -0,0 +1,124 @@ +package tasks_client + +import ( + "context" + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" + "github.com/riverqueue/river" + "github.com/riverqueue/river/riverdriver/riverpgxv5" + "github.com/riverqueue/river/rivermigrate" + "github.com/riverqueue/river/rivertype" + "github.com/rs/zerolog/log" + "github.com/zibbp/ganymede/internal/tasks" + "github.com/zibbp/ganymede/internal/utils" +) + +type RiverClientInput struct { + DB_URL string +} + +type RiverClient struct { + Ctx context.Context + PgxPool *pgxpool.Pool + RiverPgxDriver *riverpgxv5.Driver + Client *river.Client[pgx.Tx] +} + +func NewRiverClient(input RiverClientInput) (*RiverClient, error) { + rc := &RiverClient{} + rc.Ctx = context.Background() + + // create postgres pool connection + pool, err := pgxpool.New(rc.Ctx, input.DB_URL) + if err != nil { + return rc, err + } + rc.PgxPool = pool + + // create river pgx driver + rc.RiverPgxDriver = riverpgxv5.New(rc.PgxPool) + + // periodicJobs := setupPeriodicJobs() + + // create river client + riverClient, err := river.NewClient(rc.RiverPgxDriver, &river.Config{ + JobTimeout: -1, + RescueStuckJobsAfter: 49 * time.Hour, + // PeriodicJobs: periodicJobs, + }) + if err != nil { + return rc, err + } + + rc.Client = riverClient + + return rc, nil +} + +func (rc *RiverClient) Stop() error { + if err := rc.Client.Stop(rc.Ctx); err != nil { + return err + } + return nil +} + +// Run river database migrations +func (rc *RiverClient) RunMigrations() error { + migrator := rivermigrate.New(rc.RiverPgxDriver, nil) + + _, err := migrator.Migrate(rc.Ctx, rivermigrate.DirectionUp, &rivermigrate.MigrateOpts{}) + if err != nil { + return fmt.Errorf("error running river migrations: %v", err) + } + + log.Info().Msg("successfully applied river migrations") + + return nil +} + +// params := river.NewJobListParams().States(rivertype.JobStateRunning).First(10000) +func (rc *RiverClient) JobList(ctx context.Context, params *river.JobListParams) (*river.JobListResult, error) { + // fetch jobs + jobs, err := rc.Client.JobList(ctx, params) + if err != nil { + return nil, err + } + + return jobs, nil +} + +// CancelJobsForQueueId cancels all jobs for a queue. 
This fetches all jobs and chekc if the queue id of the job matches by unmarshalling the job args +func (rc *RiverClient) CancelJobsForQueueId(ctx context.Context, queueId uuid.UUID) error { + params := river.NewJobListParams().States(rivertype.JobStateRunning, rivertype.JobStatePending, rivertype.JobStateScheduled, rivertype.JobStateRetryable).First(10000) + jobs, err := rc.Client.JobList(ctx, params) + if err != nil { + return err + } + + // check jobs + for _, job := range jobs.Jobs { + // only check archive jobs + if utils.Contains(job.Tags, "archive") { + // unmarshal args + var args tasks.RiverJobArgs + + if err := json.Unmarshal(job.EncodedArgs, &args); err != nil { + return err + } + + if args.Input.QueueId == queueId { + _, err := rc.Client.JobCancel(ctx, job.ID) + if err != nil { + return err + } + } + } + } + + return nil +} diff --git a/internal/tasks/common.go b/internal/tasks/common.go new file mode 100644 index 00000000..14651b67 --- /dev/null +++ b/internal/tasks/common.go @@ -0,0 +1,593 @@ +package tasks + +import ( + "context" + "fmt" + "time" + + "github.com/google/uuid" + "github.com/jackc/pgx/v5" + "github.com/riverqueue/river" + "github.com/rs/zerolog/log" + "github.com/zibbp/ganymede/ent" + entChannel "github.com/zibbp/ganymede/ent/channel" + "github.com/zibbp/ganymede/ent/vod" + "github.com/zibbp/ganymede/internal/chapter" + "github.com/zibbp/ganymede/internal/config" + "github.com/zibbp/ganymede/internal/platform" + "github.com/zibbp/ganymede/internal/utils" +) + +// //////////////////// +// Create Directory // +// /////////////////// +type CreateDirectoryArgs struct { + Continue bool `json:"continue"` + Input ArchiveVideoInput `json:"input"` +} + +func (CreateDirectoryArgs) Kind() string { return string(utils.TaskCreateFolder) } + +func (w CreateDirectoryArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + Queue: "default", + Tags: []string{"archive"}, + } +} + +func (w CreateDirectoryArgs) Timeout(job *river.Job[CreateDirectoryArgs]) time.Duration { + return 1 * time.Minute +} + +type CreateDirectoryWorker struct { + river.WorkerDefaults[CreateDirectoryArgs] +} + +func (w CreateDirectoryWorker) Work(ctx context.Context, job *river.Job[CreateDirectoryArgs]) error { + // get store from context + store, err := StoreFromContext(ctx) + if err != nil { + return err + } + + // set queue status to running + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Running, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskCreateFolder, + }) + if err != nil { + return err + } + + // start task heartbeat + go startHeartBeatForTask(ctx, HeartBeatInput{ + TaskId: job.ID, + conn: store.ConnPool, + }) + + dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId) + if err != nil { + return err + } + + // create directory + // uses the videos directory from the the environment config + c := config.GetEnvConfig() + path := fmt.Sprintf("%s/%s/%s", c.VideosDir, dbItems.Channel.Name, dbItems.Video.FolderName) + err = utils.CreateDirectory(path) + if err != nil { + return err + } + + // set queue status to completed + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Success, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskCreateFolder, + }) + if err != nil { + return err + } + + // continue with next job + if job.Args.Continue { + client := river.ClientFromContext[pgx.Tx](ctx) + _, err := client.Insert(ctx, &SaveVideoInfoArgs{ + Continue: true, + Input: job.Args.Input, + }, nil) + if err 
!= nil { + return err + } + } + + // check if tasks are done + if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil { + return err + } + + return nil +} + +// ////////////////// +// Save Video Info // +// ////////////////// +type SaveVideoInfoArgs struct { + Continue bool `json:"continue"` + Input ArchiveVideoInput `json:"input"` +} + +func (SaveVideoInfoArgs) Kind() string { return string(utils.TaskSaveInfo) } + +func (args SaveVideoInfoArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + Queue: "default", + Tags: []string{"archive"}, + } +} + +func (w SaveVideoInfoArgs) Timeout(job *river.Job[SaveVideoInfoArgs]) time.Duration { + return 1 * time.Minute +} + +type SaveVideoInfoWorker struct { + river.WorkerDefaults[SaveVideoInfoArgs] +} + +func (w SaveVideoInfoWorker) Work(ctx context.Context, job *river.Job[SaveVideoInfoArgs]) error { + // get store from context + store, err := StoreFromContext(ctx) + if err != nil { + return err + } + + // set queue status to running + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Running, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskSaveInfo, + }) + if err != nil { + return err + } + + // start task heartbeat + go startHeartBeatForTask(ctx, HeartBeatInput{ + TaskId: job.ID, + conn: store.ConnPool, + }) + + dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId) + if err != nil { + return err + } + + platformService, err := PlatformFromContext(ctx) + if err != nil { + return err + } + + var info interface{} + + if dbItems.Queue.LiveArchive { + info, err = platformService.GetLiveStream(ctx, dbItems.Channel.Name) + if err != nil { + return err + } + } else { + videoInfo, err := platformService.GetVideo(ctx, dbItems.Video.ExtID, true, true) + if err != nil { + return err + } + + // add chapters to database + chapterService := chapter.NewService(store) + for _, chapter := range videoInfo.Chapters { + _, err = chapterService.CreateChapter(chapter, dbItems.Video.ID) + if err != nil { + return err + } + } + + // add muted segments to database + for _, segment := range videoInfo.MutedSegments { + // parse twitch duration + segmentEnd := segment.Offset + segment.Duration + if segmentEnd > int(videoInfo.Duration.Seconds()) { + segmentEnd = int(videoInfo.Duration.Seconds()) + } + // insert into database + _, err := store.Client.MutedSegment.Create().SetStart(segment.Offset).SetEnd(segmentEnd).SetVod(&dbItems.Video).Save(ctx) + if err != nil { + return err + } + } + + info = videoInfo + } + + // write info to file + err = utils.WriteJsonFile(info, fmt.Sprintf("%s/%s/%s/%s-info.json", config.GetEnvConfig().VideosDir, dbItems.Channel.Name, dbItems.Video.FolderName, dbItems.Video.FileName)) + if err != nil { + return err + } + + // set queue status to completed + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Success, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskSaveInfo, + }) + if err != nil { + return err + } + + // continue with next job + if job.Args.Continue { + client := river.ClientFromContext[pgx.Tx](ctx) + _, err := client.Insert(ctx, &DownloadThumbnailArgs{ + Continue: true, + Input: job.Args.Input, + }, nil) + if err != nil { + return err + } + } + + // check if tasks are done + if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil { + return err + } + + return nil +} + +// ////////////////////// +// Download Thumbnails // +// ////////////////////// +type DownloadThumbnailArgs struct { + 
Continue bool `json:"continue"` + Input ArchiveVideoInput `json:"input"` +} + +func (DownloadThumbnailArgs) Kind() string { return string(utils.TaskDownloadThumbnail) } + +func (args DownloadThumbnailArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + Queue: "default", + Tags: []string{"archive"}, + } +} + +func (w DownloadThumbnailArgs) Timeout(job *river.Job[DownloadThumbnailArgs]) time.Duration { + return 1 * time.Minute +} + +type DownloadTumbnailsWorker struct { + river.WorkerDefaults[DownloadThumbnailArgs] +} + +func (w DownloadTumbnailsWorker) Work(ctx context.Context, job *river.Job[DownloadThumbnailArgs]) error { + // get store from context + store, err := StoreFromContext(ctx) + if err != nil { + return err + } + + // set queue status to running + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Running, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskDownloadThumbnail, + }) + if err != nil { + return err + } + + // start task heartbeat + go startHeartBeatForTask(ctx, HeartBeatInput{ + TaskId: job.ID, + conn: store.ConnPool, + }) + + dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId) + if err != nil { + return err + } + + platformService, err := PlatformFromContext(ctx) + if err != nil { + return err + } + + var thumbnailUrl string + + if dbItems.Queue.LiveArchive { + info, err := platformService.GetLiveStream(ctx, dbItems.Channel.Name) + if err != nil { + return err + } + thumbnailUrl = info.ThumbnailURL + + } else { + info, err := platformService.GetVideo(ctx, dbItems.Video.ExtID, false, false) + if err != nil { + return err + } + thumbnailUrl = info.ThumbnailURL + } + + fullResThumbnailUrl := replaceThumbnailPlaceholders(thumbnailUrl, "1920", "1080", dbItems.Queue.LiveArchive) + webResThumbnailUrl := replaceThumbnailPlaceholders(thumbnailUrl, "640", "360", dbItems.Queue.LiveArchive) + + err = utils.DownloadAndSaveFile(fullResThumbnailUrl, dbItems.Video.ThumbnailPath) + if err != nil { + return err + } + err = utils.DownloadAndSaveFile(webResThumbnailUrl, dbItems.Video.WebThumbnailPath) + if err != nil { + return err + } + + // set queue status to completed + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Success, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskDownloadThumbnail, + }) + if err != nil { + return err + } + + // continue with next jobs + if job.Args.Continue { + client := river.ClientFromContext[pgx.Tx](ctx) + if dbItems.Queue.LiveArchive { + _, err := client.Insert(ctx, &DownloadLiveVideoArgs{ + Continue: true, + Input: job.Args.Input, + }, nil) + if err != nil { + return err + } + + _, err = client.Insert(ctx, &DownloadThumbnailsMinimalArgs{ + Continue: false, + Input: job.Args.Input, + }, &river.InsertOpts{ + ScheduledAt: time.Now().Add(10 * time.Minute), + }) + if err != nil { + return err + } + + } else { + _, err = client.Insert(ctx, &DownloadVideoArgs{ + Continue: true, + Input: job.Args.Input, + }, nil) + if err != nil { + return err + } + + // download chat if needed + if dbItems.Queue.ArchiveChat { + _, err = client.Insert(ctx, &DownloadChatArgs{ + Continue: true, + Input: job.Args.Input, + }, nil) + if err != nil { + return err + } + } + } + } + + // check if tasks are done + if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil { + return err + } + + return nil +} + +// ////////////////////////////// +// Minimal Download Thumbnails // +// ////////////////////////////// +// +// Minimal version of the 
DownloadThumbnails task that is run X minutes after a live stream is archived. +// +// This is used to prevent a blank thumbnail as Twitch is slow at generating them when the stream goes live. +type DownloadThumbnailsMinimalArgs struct { + Continue bool `json:"continue"` + Input ArchiveVideoInput `json:"input"` +} + +func (DownloadThumbnailsMinimalArgs) Kind() string { return string(utils.TaskDownloadThumbnail) } + +func (args DownloadThumbnailsMinimalArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + Tags: []string{archive_tag, allow_fail_tag}, + } +} + +func (w DownloadThumbnailsMinimalArgs) Timeout(job *river.Job[DownloadThumbnailsMinimalArgs]) time.Duration { + return 1 * time.Minute +} + +type DownloadThumbnailsMinimalWorker struct { + river.WorkerDefaults[DownloadThumbnailsMinimalArgs] +} + +func (w DownloadThumbnailsMinimalWorker) Work(ctx context.Context, job *river.Job[DownloadThumbnailsMinimalArgs]) error { + // get store from context + store, err := StoreFromContext(ctx) + if err != nil { + return err + } + + dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId) + if err != nil { + return err + } + + platformService, err := PlatformFromContext(ctx) + if err != nil { + return err + } + + var thumbnailUrl string + + if dbItems.Queue.LiveArchive { + info, err := platformService.GetLiveStream(ctx, dbItems.Channel.Name) + if err != nil { + return err + } + thumbnailUrl = info.ThumbnailURL + + } else { + info, err := platformService.GetVideo(ctx, dbItems.Video.ExtID, false, false) + if err != nil { + return err + } + thumbnailUrl = info.ThumbnailURL + } + + fullResThumbnailUrl := replaceThumbnailPlaceholders(thumbnailUrl, "1920", "1080", dbItems.Queue.LiveArchive) + webResThumbnailUrl := replaceThumbnailPlaceholders(thumbnailUrl, "640", "360", dbItems.Queue.LiveArchive) + + err = utils.DownloadAndSaveFile(fullResThumbnailUrl, dbItems.Video.ThumbnailPath) + if err != nil { + return err + } + err = utils.DownloadAndSaveFile(webResThumbnailUrl, dbItems.Video.WebThumbnailPath) + if err != nil { + return err + } + + return nil +} + +// UpdateStreamVideoId is scheduled to run after a livestream archive finishes. It will attempt to update the external ID of the stream video (vod). 
+// +// Has two use modes: +// - Supply a Queue ID to update the video ID of the video related to the queue +// - Do not supply a Queue ID (set to uuid.Nil) to update the video IDs of all videos +type UpdateStreamVideoIdArgs struct { + Input ArchiveVideoInput `json:"input"` +} + +func (UpdateStreamVideoIdArgs) Kind() string { return TaskUpdateStreamVideoId } + +func (args UpdateStreamVideoIdArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 2, + Queue: "default", + Tags: []string{"archive"}, + } +} + +func (w UpdateStreamVideoIdArgs) Timeout(job *river.Job[UpdateStreamVideoIdArgs]) time.Duration { + return 10 * time.Minute +} + +type UpdateStreamVideoIdWorker struct { + river.WorkerDefaults[UpdateStreamVideoIdArgs] +} + +func (w UpdateStreamVideoIdWorker) Work(ctx context.Context, job *river.Job[UpdateStreamVideoIdArgs]) error { + logger := log.With().Str("task", job.Kind).Str("job_id", fmt.Sprintf("%d", job.ID)).Logger() + logger.Info().Msg("starting task") + + // get store from context + store, err := StoreFromContext(ctx) + if err != nil { + return err + } + + // start task heartbeat + go startHeartBeatForTask(ctx, HeartBeatInput{ + TaskId: job.ID, + conn: store.ConnPool, + }) + + platformService, err := PlatformFromContext(ctx) + if err != nil { + return err + } + + var channels []*ent.Channel + var videos []*ent.Vod + + // check if queue id is set and only one video needs to be updated + if job.Args.Input.QueueId != uuid.Nil { + dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId) + if err != nil { + return err + } + channels = []*ent.Channel{&dbItems.Channel} + videos = []*ent.Vod{&dbItems.Video} + } + + if len(channels) == 0 { + channels, err = store.Client.Channel.Query().All(ctx) + if err != nil { + return err + } + } + + // loop over each channel and get all channel videos + // this is necessary because the 'streamid' is not an id we can query from APIs + for _, channel := range channels { + logger.Info().Str("channel", channel.Name).Msg("fetching channel videos") + + // only get videos if no queue id is set + if len(videos) == 0 { + videos, err = store.Client.Vod.Query().Where(vod.HasChannelWith(entChannel.ID(channel.ID))).All(ctx) + if err != nil { + return err + } + } + + // get all channel videos from platform + platformVideos, err := platformService.GetVideos(ctx, channel.ExtID, platform.VideoTypeArchive, false, false) + if err != nil { + return err + } + + logger.Info().Str("channel", channel.Name).Msgf("found %d videos in platform", len(platformVideos)) + + for _, video := range videos { + if video.Type != utils.Live { + continue + } + if video.ExtID == "" { + continue + } + + // attempt to find video in list of platform videos + for _, platformVideo := range platformVideos { + if platformVideo.StreamID == video.ExtStreamID { + logger.Info().Str("channel", channel.Name).Str("video_id", video.ID.String()).Msg("found video in platform") + _, err := store.Client.Vod.UpdateOneID(video.ID).SetExtID(platformVideo.ID).Save(ctx) + if err != nil { + return err + } + // TODO: kick off job to save chapters and muted segments? 
+                    break
+                }
+            }
+
+        }
+
+    }
+
+    logger.Info().Msg("task completed")
+
+    return nil
+}
diff --git a/internal/tasks/heartbeat.go b/internal/tasks/heartbeat.go
new file mode 100644
index 00000000..3c1560b3
--- /dev/null
+++ b/internal/tasks/heartbeat.go
@@ -0,0 +1,131 @@
+package tasks
+
+import (
+    "context"
+    "encoding/json"
+    "errors"
+    "fmt"
+    "time"
+
+    "github.com/jackc/pgx/v5"
+    "github.com/jackc/pgx/v5/pgxpool"
+    "github.com/rs/zerolog/log"
+)
+
+type RiverJobRow struct {
+    ID    int64
+    State string
+    Args  RiverJobArgs
+}
+
+type RiverJobArgs struct {
+    Input    ArchiveVideoInput `json:"input"`
+    Continue bool              `json:"continue"`
+}
+
+type HeartBeatInput struct {
+    TaskId int64
+    conn   *pgxpool.Pool
+}
+
+func startHeartBeatForTask(ctx context.Context, input HeartBeatInput) {
+    logger := log.With().Str("task_id", fmt.Sprintf("%d", input.TaskId)).Logger()
+    logger.Debug().Msg("starting heartbeat")
+
+    // perform one-time update before starting the ticker
+    if err := updateHeartbeat(ctx, input); err != nil {
+        logger.Error().Err(err).Msg("failed to update heartbeat")
+        return
+    }
+
+    ticker := time.NewTicker(1 * time.Minute)
+    defer ticker.Stop()
+
+    for {
+        select {
+        case <-ctx.Done():
+            logger.Debug().Msg("heartbeat stopped due to context cancellation")
+            return
+        case <-ticker.C:
+            if err := updateHeartbeat(ctx, input); err != nil {
+                logger.Error().Err(err).Msg("failed to update heartbeat")
+                return
+            }
+            logger.Debug().Msg("heartbeat updated")
+        }
+    }
+}
+
+func updateHeartbeat(ctx context.Context, input HeartBeatInput) error {
+
+    if ctx.Err() == context.Canceled {
+        return nil
+    }
+
+    jobRow, err := getRiverJobById(ctx, input.conn, input.TaskId)
+    if err != nil {
+        if err == context.Canceled || errors.Is(err, context.Canceled) {
+            return nil
+        }
+        return fmt.Errorf("failed to get river job: %w", err)
+    }
+
+    jobRow.Args.Input.HeartBeatTime = time.Now()
+    err = updateRiverJobArgs(ctx, input.conn, input.TaskId, jobRow.Args)
+    if err != nil {
+        if err == context.Canceled || errors.Is(err, context.Canceled) {
+            return nil
+        }
+        return fmt.Errorf("failed to update river job args: %w", err)
+    }
+
+    return nil
+}
+
+func getRiverJobById(ctx context.Context, conn *pgxpool.Pool, id int64) (*RiverJobRow, error) {
+    query := `
+        SELECT id, state, args
+        FROM river_job
+        WHERE id = $1
+    `
+
+    var job RiverJobRow
+    err := conn.QueryRow(ctx, query, id).Scan(
+        &job.ID,
+        &job.State,
+        &job.Args,
+    )
+
+    if err != nil {
+        if errors.Is(err, pgx.ErrNoRows) {
+            return nil, fmt.Errorf("no river job found with id %d", id)
+        }
+        return nil, fmt.Errorf("error querying for river job: %w", err)
+    }
+
+    return &job, nil
+}
+
+func updateRiverJobArgs(ctx context.Context, conn *pgxpool.Pool, id int64, args RiverJobArgs) error {
+    jsonBytes, err := json.Marshal(args)
+    if err != nil {
+        return fmt.Errorf("error marshalling args: %w", err)
+    }
+
+    query := `
+        UPDATE river_job
+        SET args = $1
+        WHERE id = $2
+    `
+
+    r, err := conn.Exec(ctx, query, jsonBytes, id)
+    if err != nil {
+        return fmt.Errorf("error updating river job: %w", err)
+    }
+
+    if r.RowsAffected() == 0 {
+        return fmt.Errorf("no river job found with id %d", id)
+    }
+
+    return nil
+}
diff --git a/internal/tasks/live_chat.go b/internal/tasks/live_chat.go
new file mode 100644
index 00000000..9ddac9a3
--- /dev/null
+++ b/internal/tasks/live_chat.go
@@ -0,0 +1,278 @@
+package tasks
+
+import (
+    "context"
+    "errors"
+    "fmt"
+    "strconv"
+    "time"
+
+    "github.com/jackc/pgx/v5"
+    "github.com/riverqueue/river"
+    "github.com/rs/zerolog/log"
+    "github.com/zibbp/ganymede/internal/exec"
+    "github.com/zibbp/ganymede/internal/utils"
+)
+
+// //////////////////////
+// Download Live Chat  //
+// //////////////////////
+type DownloadLiveChatArgs struct {
+    Continue bool              `json:"continue"`
+    Input    ArchiveVideoInput `json:"input"`
+}
+
+func (DownloadLiveChatArgs) Kind() string { return string(utils.TaskDownloadLiveChat) }
+
+func (args DownloadLiveChatArgs) InsertOpts() river.InsertOpts {
+    return river.InsertOpts{
+        MaxAttempts: 1,
+        Tags:        []string{"archive"},
+    }
+}
+
+func (w DownloadLiveChatArgs) Timeout(job *river.Job[DownloadLiveChatArgs]) time.Duration {
+    return 49 * time.Hour
+}
+
+type DownloadLiveChatWorker struct {
+    river.WorkerDefaults[DownloadLiveChatArgs]
+}
+
+func (w DownloadLiveChatWorker) Work(ctx context.Context, job *river.Job[DownloadLiveChatArgs]) error {
+    // get store from context
+    store, err := StoreFromContext(ctx)
+    if err != nil {
+        return err
+    }
+    client := river.ClientFromContext[pgx.Tx](ctx)
+
+    // set queue status to running
+    err = setQueueStatus(ctx, store.Client, QueueStatusInput{
+        Status:  utils.Running,
+        QueueId: job.Args.Input.QueueId,
+        Task:    utils.TaskDownloadChat,
+    })
+    if err != nil {
+        return err
+    }
+
+    // start task heartbeat
+    go startHeartBeatForTask(ctx, HeartBeatInput{
+        TaskId: job.ID,
+        conn:   store.ConnPool,
+    })
+
+    dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId)
+    if err != nil {
+        return err
+    }
+
+    // download live chat
+    err = exec.DownloadTwitchLiveChat(ctx, dbItems.Video, dbItems.Channel, dbItems.Queue)
+    if err != nil {
+        if errors.Is(err, context.Canceled) {
+            // create new context to finish the task
+            ctx = context.Background()
+        } else {
+            return err
+        }
+    }
+
+    // set queue status to completed
+    err = setQueueStatus(ctx, store.Client, QueueStatusInput{
+        Status:  utils.Success,
+        QueueId: job.Args.Input.QueueId,
+        Task:    utils.TaskDownloadChat,
+    })
+    if err != nil {
+        return err
+    }
+
+    // continue with next job
+    if job.Args.Continue {
+        _, err := client.Insert(ctx, &ConvertLiveChatArgs{
+            Continue: true,
+            Input:    job.Args.Input,
+        }, nil)
+        if err != nil {
+            return err
+        }
+    }
+
+    // check if tasks are done
+    if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil {
+        return err
+    }
+
+    return nil
+}
+
+// ////////////////////
+// Convert Live Chat //
+// ///////////////////
+type ConvertLiveChatArgs struct {
+    Continue bool              `json:"continue"`
+    Input    ArchiveVideoInput `json:"input"`
+}
+
+func (ConvertLiveChatArgs) Kind() string { return string(utils.TaskConvertChat) }
+
+func (args ConvertLiveChatArgs) InsertOpts() river.InsertOpts {
+    return river.InsertOpts{
+        MaxAttempts: 5,
+        Tags:        []string{"archive"},
+    }
+}
+
+func (w ConvertLiveChatArgs) Timeout(job *river.Job[ConvertLiveChatArgs]) time.Duration {
+    return 49 * time.Hour
+}
+
+type ConvertLiveChatWorker struct {
+    river.WorkerDefaults[ConvertLiveChatArgs]
+}
+
+func (w ConvertLiveChatWorker) Work(ctx context.Context, job *river.Job[ConvertLiveChatArgs]) error {
+    // get store from context
+    store, err := StoreFromContext(ctx)
+    if err != nil {
+        return err
+    }
+
+    // set queue status to running
+    err = setQueueStatus(ctx, store.Client, QueueStatusInput{
+        Status:  utils.Running,
+        QueueId: job.Args.Input.QueueId,
+        Task:    utils.TaskConvertChat,
+    })
+    if err != nil {
+        return err
+    }
+
+    // start task heartbeat
+    go startHeartBeatForTask(ctx, HeartBeatInput{
+        TaskId: job.ID,
+        conn:   store.ConnPool,
+    })
+
+    dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId)
+    if err != nil {
+        return err
+    }
+
+    // check that the chat file exists
+    if !utils.FileExists(dbItems.Video.TmpLiveChatDownloadPath) {
+        log.Info().Str("task_id", fmt.Sprintf("%d", job.ID)).Msg("chat file does not exist; setting chat status to complete")
+
+        // set queue status to completed
+        _, err := dbItems.Queue.Update().SetTaskChatConvert(utils.Success).SetTaskChatRender(utils.Success).SetTaskChatMove(utils.Success).Save(ctx)
+        if err != nil {
+            return err
+        }
+
+        // set video chat to empty
+        _, err = dbItems.Video.Update().SetChatPath("").SetChatVideoPath("").Save(ctx)
+        if err != nil {
+            return err
+        }
+
+        return nil
+    }
+
+    // get channel
+    platform, err := PlatformFromContext(ctx)
+    if err != nil {
+        return err
+    }
+    channel, err := platform.GetChannel(ctx, dbItems.Channel.Name)
+    if err != nil {
+        return err
+    }
+    channelIdInt, err := strconv.Atoi(channel.ID)
+    if err != nil {
+        return err
+    }
+
+    // need the ID of a previous video for channel emotes and badges
+    videos, err := platform.GetVideos(ctx, channel.ID, "archive", false, false)
+    if err != nil {
+        return err
+    }
+
+    // TODO: replace with something else?
+    // attempt to find video of current livestream
+    var previousVideoID string
+    for _, video := range videos {
+        if video.ID == dbItems.Video.ExtID {
+            previousVideoID = video.ID
+            // update the video item in the database
+            _, err = dbItems.Video.Update().SetExtID(video.ID).Save(ctx)
+            if err != nil {
+                return err
+            }
+            break
+        }
+    }
+
+    // if no previous video, use the first video
+    if previousVideoID == "" && len(videos) > 0 {
+        previousVideoID = videos[0].ID
+        // if no videos at all, use a random id
+    } else if previousVideoID == "" {
+        previousVideoID = "132195945"
+    }
+
+    // convert chat
+    err = utils.ConvertTwitchLiveChatToTDLChat(dbItems.Video.TmpLiveChatDownloadPath, dbItems.Video.TmpLiveChatConvertPath, dbItems.Channel.Name, dbItems.Video.ID.String(), dbItems.Video.ExtID, channelIdInt, dbItems.Queue.ChatStart, string(previousVideoID))
+    if err != nil {
+        return err
+    }
+
+    // run TwitchDownloader "chatupdate" to embed emotes and badges
+    err = exec.UpdateTwitchChat(ctx, dbItems.Video)
+    if err != nil {
+        return err
+    }
+
+    // set queue status to completed
+    err = setQueueStatus(ctx, store.Client, QueueStatusInput{
+        Status:  utils.Success,
+        QueueId: job.Args.Input.QueueId,
+        Task:    utils.TaskConvertChat,
+    })
+    if err != nil {
+        return err
+    }
+
+    // continue with next job
+    if job.Args.Continue {
+        client := river.ClientFromContext[pgx.Tx](ctx)
+        // render chat if needed
+        if dbItems.Queue.TaskChatRender != utils.Success {
+            _, err := client.Insert(ctx, &RenderChatArgs{
+                Continue: true,
+                Input:    job.Args.Input,
+            }, nil)
+            if err != nil {
+                return err
+            }
+            // else move chat as rendering is not needed
+        } else {
+            _, err := client.Insert(ctx, &MoveChatArgs{
+                Continue: true,
+                Input:    job.Args.Input,
+            }, nil)
+            if err != nil {
+                return err
+            }
+        }
+    }
+
+    // check if tasks are done
+    if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil {
+        return err
+    }
+
+    return nil
+}
diff --git a/internal/tasks/live_video.go b/internal/tasks/live_video.go
new file mode 100644
index 00000000..f6dc86b5
--- /dev/null
+++ b/internal/tasks/live_video.go
@@ -0,0 +1,167 @@
+package tasks
+
+import (
+    "context"
+    "errors"
+    "time"
+
+    "github.com/jackc/pgx/v5"
+    "github.com/riverqueue/river"
+    "github.com/riverqueue/river/rivertype"
+    "github.com/rs/zerolog/log"
+    "github.com/zibbp/ganymede/internal/exec"
+    "github.com/zibbp/ganymede/internal/utils"
+)
+
+// //////////////////////
+// Download Live Video //
+// //////////////////////
+// This task is special: if it is cancelled, it creates its own context so the rest of the task can still complete.
+type DownloadLiveVideoArgs struct {
+    Continue bool              `json:"continue"`
+    Input    ArchiveVideoInput `json:"input"`
+}
+
+func (DownloadLiveVideoArgs) Kind() string { return string(utils.TaskDownloadLiveVideo) }
+
+func (args DownloadLiveVideoArgs) InsertOpts() river.InsertOpts {
+    return river.InsertOpts{
+        MaxAttempts: 1,
+        Tags:        []string{"archive"},
+    }
+}
+
+func (w DownloadLiveVideoArgs) Timeout(job *river.Job[DownloadLiveVideoArgs]) time.Duration {
+    return 49 * time.Hour
+}
+
+type DownloadLiveVideoWorker struct {
+    river.WorkerDefaults[DownloadLiveVideoArgs]
+}
+
+func (w DownloadLiveVideoWorker) Work(ctx context.Context, job *river.Job[DownloadLiveVideoArgs]) error {
+    // get store from context
+    store, err := StoreFromContext(ctx)
+    if err != nil {
+        return err
+    }
+
+    err = setQueueStatus(ctx, store.Client, QueueStatusInput{
+        Status:  utils.Running,
+        QueueId: job.Args.Input.QueueId,
+        Task:    utils.TaskDownloadVideo,
+    })
+    if err != nil {
+        return err
+    }
+    client := river.ClientFromContext[pgx.Tx](ctx)
+
+    // start task heartbeat
+    go startHeartBeatForTask(ctx, HeartBeatInput{
+        TaskId: job.ID,
+        conn:   store.ConnPool,
+    })
+
+    dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId)
+    if err != nil {
+        return err
+    }
+
+    startChatDownload := make(chan bool)
+
+    go func() {
+        for {
+            select {
+            case <-startChatDownload:
+                // start chat download if requested
+                if dbItems.Queue.ArchiveChat {
+                    log.Debug().Str("channel", dbItems.Channel.Name).Msgf("starting chat download for %s", dbItems.Video.ExtID)
+                    client := river.ClientFromContext[pgx.Tx](ctx)
+                    _, err = client.Insert(ctx, &DownloadLiveChatArgs{
+                        Continue: true,
+                        Input:    job.Args.Input,
+                    }, nil)
+                    if err != nil {
+                        log.Error().Err(err).Msg("failed to start chat download")
+                    }
+                }
+            case <-ctx.Done():
+                return
+            }
+        }
+    }()
+
+    // download live video
+    err = exec.DownloadTwitchLiveVideo(ctx, dbItems.Video, dbItems.Channel, startChatDownload)
+    if err != nil {
+        if errors.Is(err, context.Canceled) {
+            // create new context to finish the task
+            ctx = context.Background()
+        } else {
+            return err
+        }
+    }
+
+    // cancel chat download when video download is done
+    // get chat download job id
+    params := river.NewJobListParams().States(rivertype.JobStateRunning, rivertype.JobStateRetryable).First(10000)
+    chatDownloadJobId, err := getTaskId(ctx, client, GetTaskFilter{
+        Kind:    string(utils.TaskDownloadLiveChat),
+        QueueId: job.Args.Input.QueueId,
+        Tags:    []string{"archive"},
+    }, params)
+    if err != nil {
+        return err
+    }
+    // cancel chat download if it exists
+    if chatDownloadJobId != 0 {
+        _, err = client.JobCancel(ctx, chatDownloadJobId)
+        if err != nil {
+            return err
+        }
+    }
+
+    // mark channel as not live
+    if err := setWatchChannelAsNotLive(ctx, store, dbItems.Channel.ID); err != nil {
+        return err
+    }
+
+    // set queue status to completed
+    err = setQueueStatus(ctx, store.Client, QueueStatusInput{
+        Status:  utils.Success,
+        QueueId: job.Args.Input.QueueId,
+        Task:    utils.TaskDownloadVideo,
+    })
+    if err != nil {
+        return err
+    }
+
+    // continue with next job
+    if job.Args.Continue {
+        _, err = client.Insert(ctx, &PostProcessVideoArgs{
+            Continue: true,
+            Input:    job.Args.Input,
+        }, nil)
+        if err != nil {
+            return err
+        }
+
+        // insert task to update stream id with
video id + _, err := client.Insert(ctx, &UpdateStreamVideoIdArgs{ + Input: job.Args.Input, + }, &river.InsertOpts{ + // schedule task to run after 10 minutes to ensure the video is processed by the platform + ScheduledAt: time.Now().Add(10 * time.Minute), + }) + if err != nil { + return err + } + } + + // check if tasks are done + if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil { + return err + } + + return nil +} diff --git a/internal/tasks/periodic/periodic.go b/internal/tasks/periodic/periodic.go new file mode 100644 index 00000000..1ea7fa8d --- /dev/null +++ b/internal/tasks/periodic/periodic.go @@ -0,0 +1,226 @@ +package tasks_periodic + +import ( + "context" + "fmt" + "time" + + "github.com/riverqueue/river" + "github.com/rs/zerolog/log" + entTwitchCategory "github.com/zibbp/ganymede/ent/twitchcategory" + "github.com/zibbp/ganymede/internal/auth" + "github.com/zibbp/ganymede/internal/errors" + "github.com/zibbp/ganymede/internal/live" + "github.com/zibbp/ganymede/internal/tasks" + tasks_shared "github.com/zibbp/ganymede/internal/tasks/shared" + "github.com/zibbp/ganymede/internal/vod" +) + +func liveServiceFromContext(ctx context.Context) (*live.Service, error) { + liveService, exists := ctx.Value(tasks_shared.LiveServiceKey).(*live.Service) + if !exists || liveService == nil { + return nil, errors.New("live service not found in context") + } + + return liveService, nil +} + +// Check watched channels for new videos +type CheckChannelsForNewVideosArgs struct{} + +func (CheckChannelsForNewVideosArgs) Kind() string { return tasks.TaskCheckChannelForNewVideos } + +func (w CheckChannelsForNewVideosArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + } +} + +func (w CheckChannelsForNewVideosArgs) Timeout(job *river.Job[CheckChannelsForNewVideosArgs]) time.Duration { + return 10 * time.Minute +} + +type CheckChannelsForNewVideosWorker struct { + river.WorkerDefaults[CheckChannelsForNewVideosArgs] +} + +func (w CheckChannelsForNewVideosWorker) Work(ctx context.Context, job *river.Job[CheckChannelsForNewVideosArgs]) error { + logger := log.With().Str("task", job.Kind).Str("job_id", fmt.Sprintf("%d", job.ID)).Logger() + logger.Info().Msg("starting task") + + liveService, err := liveServiceFromContext(ctx) + if err != nil { + return err + } + + err = liveService.CheckVodWatchedChannels(ctx, logger) + if err != nil { + return err + } + + logger.Info().Msg("task completed") + + return nil +} + +// Prune videos +type PruneVideosArgs struct{} + +func (PruneVideosArgs) Kind() string { return tasks.TaskPruneVideos } + +func (w PruneVideosArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + } +} + +func (w PruneVideosArgs) Timeout(job *river.Job[PruneVideosArgs]) time.Duration { + return 1 * time.Minute +} + +type PruneVideosWorker struct { + river.WorkerDefaults[PruneVideosArgs] +} + +func (w PruneVideosWorker) Work(ctx context.Context, job *river.Job[PruneVideosArgs]) error { + logger := log.With().Str("task", job.Kind).Str("job_id", fmt.Sprintf("%d", job.ID)).Logger() + logger.Info().Msg("starting task") + + store, err := tasks.StoreFromContext(ctx) + if err != nil { + return err + } + + err = vod.PruneVideos(ctx, store) + if err != nil { + return err + } + + logger.Info().Msg("task completed") + + return nil +} + +// Import Twitch categories +type ImportCategoriesArgs struct{} + +func (ImportCategoriesArgs) Kind() string { return tasks.TaskImportVideos } + +func (w ImportCategoriesArgs) InsertOpts() 
river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + } +} + +func (w ImportCategoriesArgs) Timeout(job *river.Job[ImportCategoriesArgs]) time.Duration { + return 1 * time.Minute +} + +type ImportCategoriesWorker struct { + river.WorkerDefaults[ImportCategoriesArgs] +} + +func (w ImportCategoriesWorker) Work(ctx context.Context, job *river.Job[ImportCategoriesArgs]) error { + logger := log.With().Str("task", job.Kind).Str("job_id", fmt.Sprintf("%d", job.ID)).Logger() + logger.Info().Msg("starting task") + + store, err := tasks.StoreFromContext(ctx) + if err != nil { + return err + } + + platform, err := tasks.PlatformFromContext(ctx) + if err != nil { + return err + } + + categories, err := platform.GetCategories(ctx) + if err != nil { + return err + } + + logger.Info().Msgf("importing %d categories", len(categories)) + + // upsert categories + for _, category := range categories { + err = store.Client.TwitchCategory.Create().SetID(category.ID).SetName(category.Name).OnConflictColumns(entTwitchCategory.FieldID).UpdateNewValues().Exec(context.Background()) + if err != nil { + return fmt.Errorf("failed to upsert twitch category: %v", err) + } + } + + logger.Info().Msg("task completed") + + return nil +} + +// Authenticate with Platform +type AuthenticatePlatformArgs struct{} + +func (AuthenticatePlatformArgs) Kind() string { return tasks.TaskAuthenticatePlatform } + +func (w AuthenticatePlatformArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + } +} + +func (w AuthenticatePlatformArgs) Timeout(job *river.Job[AuthenticatePlatformArgs]) time.Duration { + return 1 * time.Minute +} + +type AuthenticatePlatformWorker struct { + river.WorkerDefaults[AuthenticatePlatformArgs] +} + +func (w AuthenticatePlatformWorker) Work(ctx context.Context, job *river.Job[AuthenticatePlatformArgs]) error { + logger := log.With().Str("task", job.Kind).Str("job_id", fmt.Sprintf("%d", job.ID)).Logger() + logger.Info().Msg("starting task") + + platform, err := tasks.PlatformFromContext(ctx) + if err != nil { + return err + } + + _, err = platform.Authenticate(ctx) + if err != nil { + return err + } + + logger.Info().Msg("task completed") + + return nil +} + +// Fetch Json Web Keys if using OIDC +type FetchJWKSArgs struct{} + +func (FetchJWKSArgs) Kind() string { return tasks.TaskFetchJWKS } + +func (w FetchJWKSArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + } +} + +func (w FetchJWKSArgs) Timeout(job *river.Job[FetchJWKSArgs]) time.Duration { + return 1 * time.Minute +} + +type FetchJWKSWorker struct { + river.WorkerDefaults[FetchJWKSArgs] +} + +func (w FetchJWKSWorker) Work(ctx context.Context, job *river.Job[FetchJWKSArgs]) error { + logger := log.With().Str("task", job.Kind).Str("job_id", fmt.Sprintf("%d", job.ID)).Logger() + logger.Info().Msg("starting task") + + err := auth.FetchJWKS(ctx) + if err != nil { + return err + } + + logger.Info().Msg("task completed") + + return nil +} diff --git a/internal/tasks/periodic/process.go b/internal/tasks/periodic/process.go new file mode 100644 index 00000000..0e4c8565 --- /dev/null +++ b/internal/tasks/periodic/process.go @@ -0,0 +1,125 @@ +package tasks_periodic + +import ( + "context" + "fmt" + "time" + + "github.com/riverqueue/river" + "github.com/rs/zerolog/log" + "github.com/zibbp/ganymede/ent/mutedsegment" + "github.com/zibbp/ganymede/ent/vod" + "github.com/zibbp/ganymede/internal/chapter" + "github.com/zibbp/ganymede/internal/tasks" + "github.com/zibbp/ganymede/internal/utils" +) + 
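+// The periodic job args in this package are ordinary River job args, so — assuming a
+// handle to the river client — they can also be enqueued ad hoc for a one-off run,
+// for example (illustrative sketch only):
+//
+//	_, err := riverClient.Insert(ctx, &SaveVideoChaptersArgs{}, nil)
+//
+// The actual periodic schedule is registered in internal/tasks/worker (GetPeriodicTasks).
+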
+// Save chapters for all archived videos. Going forward this is done as part of the archive task, it's here to backfill old data. +type SaveVideoChaptersArgs struct{} + +func (SaveVideoChaptersArgs) Kind() string { return tasks.TaskSaveVideoChapters } + +func (w SaveVideoChaptersArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + } +} + +func (w SaveVideoChaptersArgs) Timeout(job *river.Job[SaveVideoChaptersArgs]) time.Duration { + return 10 * time.Minute +} + +type SaveVideoChaptersWorker struct { + river.WorkerDefaults[SaveVideoChaptersArgs] +} + +func (w SaveVideoChaptersWorker) Work(ctx context.Context, job *river.Job[SaveVideoChaptersArgs]) error { + logger := log.With().Str("task", job.Kind).Str("job_id", fmt.Sprintf("%d", job.ID)).Logger() + logger.Info().Msg("starting task") + + store, err := tasks.StoreFromContext(ctx) + if err != nil { + return err + } + + platform, err := tasks.PlatformFromContext(ctx) + if err != nil { + return err + } + + // get all videos + videos, err := store.Client.Vod.Query().All(ctx) + if err != nil { + return err + } + + for _, video := range videos { + if video.Type == utils.Live { + continue + } + if video.ExtID == "" { + continue + } + + log.Info().Msgf("saving chapters for video %s", video.ExtID) + platformVideo, err := platform.GetVideo(ctx, video.ExtID, true, true) + if err != nil { + return err + } + + if len(platformVideo.Chapters) > 0 { + chapterService := chapter.NewService(store) + + existingVideoChapters, err := chapterService.GetVideoChapters(video.ID) + if err != nil { + return err + } + + if len(existingVideoChapters) == 0 { + + // save chapters to database + for _, c := range platformVideo.Chapters { + _, err := chapterService.CreateChapter(c, video.ID) + if err != nil { + return err + } + } + + log.Info().Str("video_id", fmt.Sprintf("%d", video.ID)).Str("chapters", fmt.Sprintf("%d", len(platformVideo.Chapters))).Msgf("saved chapters for video") + } + } + + if len(platformVideo.MutedSegments) > 0 { + existingMutedSegments, err := store.Client.MutedSegment.Query().Where(mutedsegment.HasVodWith(vod.ID(video.ID))).All(ctx) + if err != nil { + return err + } + + if len(existingMutedSegments) == 0 { + + // save muted segments to database + for _, segment := range platformVideo.MutedSegments { + // parse twitch duration + segmentEnd := segment.Offset + segment.Duration + if segmentEnd > int(platformVideo.Duration.Seconds()) { + segmentEnd = int(platformVideo.Duration.Seconds()) + } + // insert into database + _, err := store.Client.MutedSegment.Create().SetStart(segment.Offset).SetEnd(segmentEnd).SetVod(video).Save(ctx) + if err != nil { + return err + } + } + + log.Info().Str("video_id", fmt.Sprintf("%d", video.ID)).Str("muted_segments", fmt.Sprintf("%d", len(platformVideo.MutedSegments))).Msgf("saved muted segments for video") + } + } + + // avoid rate limiting + time.Sleep(250 * time.Millisecond) + } + + logger.Info().Msg("task completed") + + return nil +} diff --git a/internal/tasks/shared.go b/internal/tasks/shared.go new file mode 100644 index 00000000..953a8cb7 --- /dev/null +++ b/internal/tasks/shared.go @@ -0,0 +1,373 @@ +package tasks + +import ( + "context" + "encoding/json" + "fmt" + "strings" + "time" + + "github.com/google/uuid" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" + "github.com/riverqueue/river" + "github.com/riverqueue/river/rivertype" + "github.com/rs/zerolog/log" + "github.com/zibbp/ganymede/ent" + entChannel "github.com/zibbp/ganymede/ent/channel" + entLive 
"github.com/zibbp/ganymede/ent/live" + "github.com/zibbp/ganymede/ent/queue" + "github.com/zibbp/ganymede/internal/database" + "github.com/zibbp/ganymede/internal/errors" + "github.com/zibbp/ganymede/internal/notification" + "github.com/zibbp/ganymede/internal/platform" + tasks_shared "github.com/zibbp/ganymede/internal/tasks/shared" + "github.com/zibbp/ganymede/internal/utils" +) + +var archive_tag = "archive" +var allow_fail_tag = "allow_fail" + +var ( + TaskUpdateStreamVideoId = "update_stream_video_id" + TaskGenerateStaticThumbnails = "generate_static_thumbnails" + TaskArchiveWatchdog = "archive_watchdog" + TaskCheckChannelForNewVideos = "check_channel_for_new_videos" + TaskPruneVideos = "prune_videos" + TaskImportVideos = "import_videos" + TaskAuthenticatePlatform = "authenticate_platform" + TaskFetchJWKS = "fetch_jwks" + TaskSaveVideoChapters = "save_video_chapters" +) + +var ( + QueueVideoDownload = "video-download" + QueueVideoPostProcess = "video-postprocess" + QueueChatDownload = "chat-download" + QueueChatRender = "chat-render" +) + +type ArchiveVideoInput struct { + QueueId uuid.UUID + HeartBeatTime time.Time // do not set this field +} + +type GetDatabaseItemsResponse struct { + Queue ent.Queue + Video ent.Vod + Channel ent.Channel +} + +type QueueStatusInput struct { + Status utils.TaskStatus + QueueId uuid.UUID + Task utils.TaskName +} + +func StoreFromContext(ctx context.Context) (*database.Database, error) { + store, exists := ctx.Value(tasks_shared.StoreKey).(*database.Database) + if !exists || store == nil { + return nil, errors.New("store not found in context") + } + + return store, nil +} + +func PlatformFromContext(ctx context.Context) (platform.Platform, error) { + platform, exists := ctx.Value(tasks_shared.PlatformTwitchKey).(platform.Platform) + if !exists || platform == nil { + return nil, errors.New("platform not found in context") + } + + return platform, nil +} + +// getDatabaseItems retrieves the database items associated with the provided queueId. This is used instead of passing all the structs to each job so that they can be easily updated in the database. +func getDatabaseItems(ctx context.Context, entClient *ent.Client, queueId uuid.UUID) (*GetDatabaseItemsResponse, error) { + queue, err := entClient.Queue.Query().Where(queue.ID(queueId)).WithVod().Only(ctx) + if err != nil { + return nil, err + } + + qC := queue.Edges.Vod.QueryChannel() + channel, err := qC.Only(ctx) + if err != nil { + return nil, err + } + + return &GetDatabaseItemsResponse{ + Queue: *queue, + Video: *queue.Edges.Vod, + Channel: *channel, + }, nil + +} + +// setQueueStatus updates the status of a queue item in the database based on the provided queueStatusInput. 
+func setQueueStatus(ctx context.Context, entClient *ent.Client, queueStatusInput QueueStatusInput) error { + + q := entClient.Queue.UpdateOneID(queueStatusInput.QueueId) + + switch queueStatusInput.Task { + case utils.TaskCreateFolder: + q = q.SetTaskVodCreateFolder(queueStatusInput.Status) + case utils.TaskDownloadThumbnail: + q = q.SetTaskVodDownloadThumbnail(queueStatusInput.Status) + case utils.TaskSaveInfo: + q = q.SetTaskVodSaveInfo(queueStatusInput.Status) + case utils.TaskDownloadVideo: + q = q.SetTaskVideoDownload(queueStatusInput.Status) + case utils.TaskPostProcessVideo: + q = q.SetTaskVideoConvert(queueStatusInput.Status) + case utils.TaskMoveVideo: + q = q.SetTaskVideoMove(queueStatusInput.Status) + case utils.TaskDownloadChat: + q = q.SetTaskChatDownload(queueStatusInput.Status) + case utils.TaskConvertChat: + q = q.SetTaskChatConvert(queueStatusInput.Status) + case utils.TaskRenderChat: + q = q.SetTaskChatRender(queueStatusInput.Status) + case utils.TaskMoveChat: + q = q.SetTaskChatMove(queueStatusInput.Status) + } + + _, err := q.Save(ctx) + if err != nil { + return err + } + + return nil +} + +// replaceThumbnailPlaceholders replaces the placeholders in the provided url with the provided width and height. +func replaceThumbnailPlaceholders(url, width, height string, isLive bool) string { + if isLive { + url = strings.ReplaceAll(url, "{width}", width) + url = strings.ReplaceAll(url, "{height}", height) + } else { + url = strings.ReplaceAll(url, "%{width}", width) + url = strings.ReplaceAll(url, "%{height}", height) + } + return url +} +func checkIfTasksAreDone(ctx context.Context, entClient *ent.Client, input ArchiveVideoInput) error { + dbItems, err := getDatabaseItems(ctx, entClient, input.QueueId) + if err != nil { + return err + } + + if dbItems.Queue.LiveArchive { + if dbItems.Queue.TaskVideoDownload == utils.Success && dbItems.Queue.TaskVideoConvert == utils.Success && dbItems.Queue.TaskVideoMove == utils.Success && dbItems.Queue.TaskChatDownload == utils.Success && dbItems.Queue.TaskChatConvert == utils.Success && dbItems.Queue.TaskChatRender == utils.Success && dbItems.Queue.TaskChatMove == utils.Success { + log.Debug().Msgf("all tasks for video %s are done", dbItems.Video.ID.String()) + + _, err := dbItems.Queue.Update().SetVideoProcessing(false).SetChatProcessing(false).SetProcessing(false).Save(context.Background()) + if err != nil { + return err + } + + _, err = entClient.Vod.UpdateOneID(dbItems.Video.ID).SetProcessing(false).Save(context.Background()) + if err != nil { + return err + } + + notification.SendLiveArchiveSuccessNotification(&dbItems.Channel, &dbItems.Video, &dbItems.Queue) + } + } else { + if dbItems.Queue.TaskVideoDownload == utils.Success && dbItems.Queue.TaskVideoConvert == utils.Success && dbItems.Queue.TaskVideoMove == utils.Success && dbItems.Queue.TaskChatDownload == utils.Success && dbItems.Queue.TaskChatRender == utils.Success && dbItems.Queue.TaskChatMove == utils.Success { + log.Debug().Msgf("all tasks for video %s are done", dbItems.Video.ID.String()) + + _, err := dbItems.Queue.Update().SetVideoProcessing(false).SetChatProcessing(false).SetProcessing(false).Save(context.Background()) + if err != nil { + return err + } + + _, err = entClient.Vod.UpdateOneID(dbItems.Video.ID).SetProcessing(false).Save(context.Background()) + if err != nil { + return err + } + + notification.SendVideoArchiveSuccessNotification(&dbItems.Channel, &dbItems.Video, &dbItems.Queue) + } + } + + return nil +} + +// forceJobRetry forces a job to be retried. 
River's retry function does not touch running jobs, so we have to do it ourselves. +func forceJobRetry(ctx context.Context, conn *pgxpool.Pool, id int64) error { + query := ` + UPDATE river_job + SET state = $1 + WHERE id = $2 + ` + + r, err := conn.Exec(ctx, query, rivertype.JobStateRetryable, id) + if err != nil { + return err + } + if r.RowsAffected() == 0 { + return fmt.Errorf("job not found") + } + + return nil +} + +// forceDeleteJob forces a job to be deleted. River's delete function does not touch running jobs, so we have to do it ourselves. +func forceDeleteJob(ctx context.Context, conn *pgxpool.Pool, id int64) error { + query := ` + DELETE FROM river_job + WHERE id = $1 + RETURNING id + ` + + r, err := conn.Exec(ctx, query, id) + if err != nil { + return err + } + if r.RowsAffected() == 0 { + return fmt.Errorf("job not found") + } + + return nil +} + +type GetTaskFilter struct { + Kind string + QueueId uuid.UUID + Tags []string +} + +func getTaskId(ctx context.Context, client *river.Client[pgx.Tx], filter GetTaskFilter, params *river.JobListParams) (int64, error) { + jobs, err := client.JobList(ctx, params) + if err != nil { + return 0, err + } + + for _, job := range jobs.Jobs { + var args RiverJobArgs + if err := json.Unmarshal(job.EncodedArgs, &args); err != nil { + return 0, err + } + + // Apply filters + if filter.Kind != "" && job.Kind != filter.Kind { + continue + } + if filter.QueueId != uuid.Nil && args.Input.QueueId != filter.QueueId { + continue + } + if len(filter.Tags) > 0 && !containsAllTags(job.Tags, filter.Tags) { + continue + } + + // If all filters pass, return the job ID + return job.ID, nil + } + return 0, nil +} + +// Helper function to check if job tags contain all filter tags +func containsAllTags(jobTags, filterTags []string) bool { + tagSet := make(map[string]struct{}) + for _, tag := range jobTags { + tagSet[tag] = struct{}{} + } + + for _, tag := range filterTags { + if _, exists := tagSet[tag]; !exists { + return false + } + } + return true +} + +type CustomErrorHandler struct{} + +func (*CustomErrorHandler) HandleError(ctx context.Context, job *rivertype.JobRow, err error) *river.ErrorHandlerResult { + log.Error().Str("job_id", fmt.Sprintf("%d", job.ID)).Str("attempt", fmt.Sprintf("%d", job.Attempt)).Str("attempted_by", job.AttemptedBy[job.Attempt-1]).Str("args", string(job.EncodedArgs)).Err(err).Msg("task error") + + // if the job is an archive job, mark it as failed in the queue and send an error notification + if utils.Contains(job.Tags, archive_tag) && !utils.Contains(job.Tags, allow_fail_tag) { + // unmarshal custom arguments + var args RiverJobArgs + if err := json.Unmarshal(job.EncodedArgs, &args); err != nil { + return nil + } + // get store + store, err := StoreFromContext(ctx) + if err != nil { + return nil + } + // set queue status to failed + if err := setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Failed, + QueueId: args.Input.QueueId, + Task: utils.GetTaskName(job.Kind), + }); err != nil { + return nil + } + + dbItems, err := getDatabaseItems(ctx, store.Client, args.Input.QueueId) + if err != nil { + return nil + } + // send error notification + notification.SendErrorNotification(&dbItems.Channel, &dbItems.Video, &dbItems.Queue, job.Kind) + } + return nil +} + +func (*CustomErrorHandler) HandlePanic(ctx context.Context, job *rivertype.JobRow, panicVal any, trace string) *river.ErrorHandlerResult { + log.Error().Str("job_id", fmt.Sprintf("%d", job.ID)).Str("attempt", fmt.Sprintf("%d", job.Attempt)).Str("attempted_by", 
job.AttemptedBy[job.Attempt-1]).Str("args", string(job.EncodedArgs)).Str("panic_val", fmt.Sprintf("%v", panicVal)).Str("trace", trace).Msg("task error") + + // if the job is an archive job, mark it as failed in the queue and send an error notification + if utils.Contains(job.Tags, archive_tag) && !utils.Contains(job.Tags, allow_fail_tag) { + // unmarshal custom arguments + var args RiverJobArgs + if err := json.Unmarshal(job.EncodedArgs, &args); err != nil { + return nil + } + store, err := StoreFromContext(ctx) + if err != nil { + return nil + } + // set queue status to failed + if err := setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Failed, + QueueId: args.Input.QueueId, + Task: utils.GetTaskName(job.Kind), + }); err != nil { + return nil + } + + dbItems, err := getDatabaseItems(ctx, store.Client, args.Input.QueueId) + if err != nil { + return nil + } + // send error notification + notification.SendErrorNotification(&dbItems.Channel, &dbItems.Video, &dbItems.Queue, job.Kind) + } + + return nil +} + +// setWatchChannelAsNotLive marks the watched channel as not live +func setWatchChannelAsNotLive(ctx context.Context, store *database.Database, channelId uuid.UUID) error { + watchedChannel, err := store.Client.Live.Query().Where(entLive.HasChannelWith(entChannel.ID(channelId))).Only(ctx) + if err != nil { + if _, ok := err.(*ent.NotFoundError); ok { + log.Debug().Str("channel_id", channelId.String()).Msg("watched channel not found") + } else { + return err + } + } + // mark channel as not live if it exists + if watchedChannel != nil { + err = store.Client.Live.UpdateOneID(watchedChannel.ID).SetIsLive(false).Exec(ctx) + if err != nil { + return err + } + } + + return nil +} diff --git a/internal/tasks/shared/shared.go b/internal/tasks/shared/shared.go new file mode 100644 index 00000000..b63b07e7 --- /dev/null +++ b/internal/tasks/shared/shared.go @@ -0,0 +1,7 @@ +package tasks_shared + +type contextKey string + +const StoreKey contextKey = "store" +const PlatformTwitchKey contextKey = "platform_twitch" +const LiveServiceKey contextKey = "live_service" diff --git a/internal/tasks/tasks.go b/internal/tasks/tasks.go new file mode 100644 index 00000000..9b29ce4d --- /dev/null +++ b/internal/tasks/tasks.go @@ -0,0 +1 @@ +package tasks diff --git a/internal/tasks/thumbnail.go b/internal/tasks/thumbnail.go new file mode 100644 index 00000000..d9293d35 --- /dev/null +++ b/internal/tasks/thumbnail.go @@ -0,0 +1,66 @@ +package tasks + +import ( + "context" + "math/rand" + "time" + + "github.com/google/uuid" + "github.com/riverqueue/river" + "github.com/zibbp/ganymede/internal/exec" +) + +type GenerateStaticThumbnailArgs struct { + VideoId string `json:"video_id"` +} + +func (GenerateStaticThumbnailArgs) Kind() string { return TaskGenerateStaticThumbnails } + +func (args GenerateStaticThumbnailArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 1, + } +} + +func (w GenerateStaticThumbnailArgs) Timeout(job *river.Job[GenerateStaticThumbnailArgs]) time.Duration { + return 1 * time.Minute +} + +type GenerateStaticThubmnailWorker struct { + river.WorkerDefaults[GenerateStaticThumbnailArgs] +} + +func (w GenerateStaticThubmnailWorker) Work(ctx context.Context, job *river.Job[GenerateStaticThumbnailArgs]) error { + // get store from context + store, err := StoreFromContext(ctx) + if err != nil { + return err + } + + videoUUID, err := uuid.Parse(job.Args.VideoId) + if err != nil { + return err + } + + video, err := store.Client.Vod.Get(ctx, videoUUID) + if err 
!= nil { + return err + } + + // get random time + time := rand.Intn(video.Duration) + + // generate full-res thumbnail + err = exec.GenerateStaticThumbnail(ctx, video.VideoPath, time, video.ThumbnailPath, "") + if err != nil { + return err + } + + // generate webp thumbnail + err = exec.GenerateStaticThumbnail(ctx, video.VideoPath, time, video.WebThumbnailPath, "640x360") + if err != nil { + return err + } + + return nil +} diff --git a/internal/tasks/video.go b/internal/tasks/video.go new file mode 100644 index 00000000..c551931c --- /dev/null +++ b/internal/tasks/video.go @@ -0,0 +1,323 @@ +package tasks + +import ( + "context" + "time" + + "github.com/jackc/pgx/v5" + "github.com/riverqueue/river" + "github.com/zibbp/ganymede/internal/config" + "github.com/zibbp/ganymede/internal/exec" + "github.com/zibbp/ganymede/internal/utils" +) + +// /////////////////////// +// Download Video (VOD) // +// /////////////////////// +type DownloadVideoArgs struct { + Continue bool `json:"continue"` + Input ArchiveVideoInput `json:"input"` +} + +func (DownloadVideoArgs) Kind() string { return string(utils.TaskDownloadVideo) } + +func (args DownloadVideoArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + Queue: QueueVideoDownload, + Tags: []string{"archive"}, + } +} + +func (w DownloadVideoArgs) Timeout(job *river.Job[DownloadVideoArgs]) time.Duration { + return 49 * time.Hour +} + +type DownloadVideoWorker struct { + river.WorkerDefaults[DownloadVideoArgs] +} + +func (w DownloadVideoWorker) Work(ctx context.Context, job *river.Job[DownloadVideoArgs]) error { + // get store from context + store, err := StoreFromContext(ctx) + if err != nil { + return err + } + + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Running, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskDownloadVideo, + }) + if err != nil { + return err + } + + // start task heartbeat + go startHeartBeatForTask(ctx, HeartBeatInput{ + TaskId: job.ID, + conn: store.ConnPool, + }) + + dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId) + if err != nil { + return err + } + + // download video + err = exec.DownloadTwitchVideo(ctx, dbItems.Video) + if err != nil { + return err + } + + // set queue status to completed + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Success, + QueueId: job.Args.Input.QueueId, + Task: utils.TaskDownloadVideo, + }) + if err != nil { + return err + } + + // continue with next job + if job.Args.Continue { + client := river.ClientFromContext[pgx.Tx](ctx) + _, err = client.Insert(ctx, &PostProcessVideoArgs{ + Continue: true, + Input: job.Args.Input, + }, nil) + if err != nil { + return err + } + } + + // check if tasks are done + if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil { + return err + } + + return nil +} + +// //////////////////// +// Postprocess Video // +// //////////////////// +type PostProcessVideoArgs struct { + Continue bool `json:"continue"` + Input ArchiveVideoInput `json:"input"` +} + +func (PostProcessVideoArgs) Kind() string { return string(utils.TaskPostProcessVideo) } + +func (args PostProcessVideoArgs) InsertOpts() river.InsertOpts { + return river.InsertOpts{ + MaxAttempts: 5, + Queue: QueueVideoPostProcess, + Tags: []string{"archive"}, + } +} + +func (w *PostProcessVideoArgs) Timeout(job *river.Job[PostProcessVideoArgs]) time.Duration { + return 24 * time.Hour +} + +type PostProcessVideoWorker struct { + river.WorkerDefaults[PostProcessVideoArgs] +} + 
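+// For a standard VOD archive the video tasks chain download -> post-process -> move;
+// when job.Args.Continue is set, each worker enqueues the next one, roughly:
+//
+//	client := river.ClientFromContext[pgx.Tx](ctx)
+//	_, err := client.Insert(ctx, &MoveVideoArgs{Continue: true, Input: job.Args.Input}, nil)
+//
+// so a failed step can be retried by River without re-running the steps before it.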
+func (w PostProcessVideoWorker) Work(ctx context.Context, job *river.Job[PostProcessVideoArgs]) error {
+    // get store from context
+    store, err := StoreFromContext(ctx)
+    if err != nil {
+        return err
+    }
+
+    // set queue status to running
+    err = setQueueStatus(ctx, store.Client, QueueStatusInput{
+        Status:  utils.Running,
+        QueueId: job.Args.Input.QueueId,
+        Task:    utils.TaskPostProcessVideo,
+    })
+    if err != nil {
+        return err
+    }
+
+    // start task heartbeat
+    go startHeartBeatForTask(ctx, HeartBeatInput{
+        TaskId: job.ID,
+        conn:   store.ConnPool,
+    })
+
+    dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId)
+    if err != nil {
+        return err
+    }
+
+    // post-process video
+    err = exec.PostProcessVideo(ctx, dbItems.Video)
+    if err != nil {
+        return err
+    }
+
+    // update video duration for live archive
+    if dbItems.Queue.LiveArchive {
+        duration, err := exec.GetVideoDuration(ctx, dbItems.Video.TmpVideoConvertPath)
+        if err != nil {
+            return err
+        }
+        _, err = dbItems.Video.Update().SetDuration(duration).Save(ctx)
+        if err != nil {
+            return err
+        }
+    }
+
+    // convert to HLS if needed
+    if config.Get().Archive.SaveAsHls {
+        // create temp hls directory
+        if err := utils.CreateDirectory(dbItems.Video.TmpVideoHlsPath); err != nil {
+            return err
+        }
+
+        // convert to hls
+        err = exec.ConvertVideoToHLS(ctx, dbItems.Video)
+        if err != nil {
+            return err
+        }
+    }
+
+    // delete source video
+    if utils.FileExists(dbItems.Video.TmpVideoDownloadPath) {
+        err = utils.DeleteFile(dbItems.Video.TmpVideoDownloadPath)
+        if err != nil {
+            return err
+        }
+    }
+
+    // set queue status to completed
+    err = setQueueStatus(ctx, store.Client, QueueStatusInput{
+        Status:  utils.Success,
+        QueueId: job.Args.Input.QueueId,
+        Task:    utils.TaskPostProcessVideo,
+    })
+    if err != nil {
+        return err
+    }
+
+    // continue with next job
+    if job.Args.Continue {
+        client := river.ClientFromContext[pgx.Tx](ctx)
+        _, err = client.Insert(ctx, &MoveVideoArgs{
+            Continue: true,
+            Input:    job.Args.Input,
+        }, nil)
+        if err != nil {
+            return err
+        }
+    }
+
+    // check if tasks are done
+    if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil {
+        return err
+    }
+
+    return nil
+}
+
+// /////////////
+// Move Video //
+// /////////////
+type MoveVideoArgs struct {
+    Continue bool              `json:"continue"`
+    Input    ArchiveVideoInput `json:"input"`
+}
+
+func (MoveVideoArgs) Kind() string { return string(utils.TaskMoveVideo) }
+
+func (args MoveVideoArgs) InsertOpts() river.InsertOpts {
+    return river.InsertOpts{
+        MaxAttempts: 5,
+        Queue:       "default",
+        Tags:        []string{"archive"},
+    }
+}
+
+func (w *MoveVideoArgs) Timeout(job *river.Job[MoveVideoArgs]) time.Duration {
+    return 24 * time.Hour
+}
+
+type MoveVideoWorker struct {
+    river.WorkerDefaults[MoveVideoArgs]
+}
+
+func (w MoveVideoWorker) Work(ctx context.Context, job *river.Job[MoveVideoArgs]) error {
+    // get store from context
+    store, err := StoreFromContext(ctx)
+    if err != nil {
+        return err
+    }
+
+    // set queue status to running
+    err = setQueueStatus(ctx, store.Client, QueueStatusInput{
+        Status:  utils.Running,
+        QueueId: job.Args.Input.QueueId,
+        Task:    utils.TaskMoveVideo,
+    })
+    if err != nil {
+        return err
+    }
+
+    // start task heartbeat
+    go startHeartBeatForTask(ctx, HeartBeatInput{
+        TaskId: job.ID,
+        conn:   store.ConnPool,
+    })
+
+    dbItems, err := getDatabaseItems(ctx, store.Client, job.Args.Input.QueueId)
+    if err != nil {
+        return err
+    }
+
+    // move standard video
+    if dbItems.Video.VideoHlsPath == "" {
+        err := utils.MoveFile(ctx, dbItems.Video.TmpVideoConvertPath, dbItems.Video.VideoPath)
+        if err != nil {
+            return err
+        }
+    } else {
+        // move hls video
+        err := utils.MoveDirectory(ctx, dbItems.Video.TmpVideoHlsPath, dbItems.Video.VideoHlsPath)
+        if err != nil {
+            return err
+        }
+
+        // clean up temp hls directory
+        if err := utils.DeleteDirectory(dbItems.Video.TmpVideoHlsPath); err != nil {
+            return err
+        }
+        // delete temp converted video
+        if utils.FileExists(dbItems.Video.TmpVideoConvertPath) {
+            err = utils.DeleteFile(dbItems.Video.TmpVideoConvertPath)
+            if err != nil {
+                return err
+            }
+        }
+    }
+
+    // set queue status to completed
+    err = setQueueStatus(ctx, store.Client, QueueStatusInput{
+        Status:  utils.Success,
+        QueueId: job.Args.Input.QueueId,
+        Task:    utils.TaskMoveVideo,
+    })
+    if err != nil {
+        return err
+    }
+
+    // check if tasks are done
+    if err := checkIfTasksAreDone(ctx, store.Client, job.Args.Input); err != nil {
+        return err
+    }
+
+    return nil
+}
diff --git a/internal/tasks/watchdog.go b/internal/tasks/watchdog.go
new file mode 100644
index 00000000..3934ce99
--- /dev/null
+++ b/internal/tasks/watchdog.go
@@ -0,0 +1,176 @@
+package tasks
+
+import (
+    "context"
+    "encoding/json"
+    "fmt"
+    "time"
+
+    "github.com/jackc/pgx/v5"
+    "github.com/riverqueue/river"
+    "github.com/riverqueue/river/rivertype"
+    "github.com/rs/zerolog/log"
+    "github.com/zibbp/ganymede/internal/utils"
+)
+
+// ///////////
+// Watchdog //
+// //////////
+type WatchdogArgs struct{}
+
+func (WatchdogArgs) Kind() string { return TaskArchiveWatchdog }
+
+func (w WatchdogArgs) InsertOpts() river.InsertOpts {
+    return river.InsertOpts{
+        MaxAttempts: 1,
+        Queue:       "default",
+    }
+}
+
+func (w WatchdogArgs) Timeout(job *river.Job[WatchdogArgs]) time.Duration {
+    return 1 * time.Minute
+}
+
+type WatchdogWorker struct {
+    river.WorkerDefaults[WatchdogArgs]
+}
+
+func (w WatchdogWorker) Work(ctx context.Context, job *river.Job[WatchdogArgs]) error {
+
+    client := river.ClientFromContext[pgx.Tx](ctx)
+
+    if err := runWatchdog(ctx, client); err != nil {
+        return err
+    }
+
+    return nil
+}
+
+// Watchdog task that checks the status of running archive jobs every minute and recovers any job whose heartbeat has timed out.
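+// The timeout check relies on the heartbeat each archive task writes back into its own
+// job args (see heartbeat.go); the core of the check is roughly (illustrative only):
+//
+//	var args RiverJobArgs
+//	_ = json.Unmarshal(job.EncodedArgs, &args)
+//	timedOut := !args.Input.HeartBeatTime.IsZero() &&
+//		time.Since(args.Input.HeartBeatTime) > 90*time.Second
+//
+// Jobs with attempts remaining are flipped back to retryable; jobs at their attempt
+// limit are cancelled and deleted, and for live video/chat downloads the follow-up
+// tasks are queued so the archive can still complete.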
+func runWatchdog(ctx context.Context, riverClient *river.Client[pgx.Tx]) error { + logger := log.With().Str("task", "watchdog").Logger() + store, err := StoreFromContext(ctx) + if err != nil { + return err + } + // get jobs + params := river.NewJobListParams().States(rivertype.JobStateRunning).First(10000) + jobs, err := riverClient.JobList(ctx, params) + if err != nil { + return err + } + + logger.Debug().Str("jobs", fmt.Sprintf("%d", len(jobs.Jobs))).Msg("jobs found") + + // check jobs + for _, job := range jobs.Jobs { + // only check archive jobs + if utils.Contains(job.Tags, "archive") { + // unmarshal args + var args RiverJobArgs + + if err := json.Unmarshal(job.EncodedArgs, &args); err != nil { + return err + } + + // check if job has timed out + if !args.Input.HeartBeatTime.IsZero() && time.Since(args.Input.HeartBeatTime) > 90*time.Second { + // job heartbeat timed out + logger.Info().Str("job_id", fmt.Sprintf("%d", job.ID)).Msg("job heartbeat timed out") + + if job.Attempt < job.MaxAttempts { + // set job to retryable + err := forceJobRetry(ctx, store.ConnPool, job.ID) + if err != nil { + return err + } + logger.Info().Str("job_id", fmt.Sprintf("%d", job.ID)).Msg("job set to retryable") + } else { + // set job to failed + _, err := riverClient.JobCancel(ctx, job.ID) + if err != nil { + return err + } + err = forceDeleteJob(ctx, store.ConnPool, job.ID) + if err != nil { + return err + } + logger.Info().Str("job_id", fmt.Sprintf("%d", job.ID)).Msg("job set to failed and deleted") + + // attempt to finish archiving live video + // if job was live video download then proceed with next jobs + if job.Kind == string(utils.TaskDownloadLiveVideo) { + logger.Info().Str("job_id", fmt.Sprintf("%d", job.ID)).Msg("detected job was live video download; proceeding with next jobs") + // get db items + dbItems, err := getDatabaseItems(ctx, store.Client, args.Input.QueueId) + if err != nil { + return err + } + + // mark channel as not live + if err := setWatchChannelAsNotLive(ctx, store, dbItems.Channel.ID); err != nil { + return err + } + + // set queue status to completed + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Success, + QueueId: dbItems.Queue.ID, + Task: utils.TaskDownloadVideo, + }) + if err != nil { + return err + } + // queue video postprocess + _, err = riverClient.Insert(ctx, &PostProcessVideoArgs{ + Continue: true, + Input: ArchiveVideoInput{ + QueueId: args.Input.QueueId, + }, + }, nil) + if err != nil { + return err + } + } + + // if job was chat download then proceed with next jobs + if job.Kind == string(utils.TaskDownloadLiveChat) { + logger.Info().Str("job_id", fmt.Sprintf("%d", job.ID)).Msg("detected job was live chat download; proceeding with next jobs") + // get db items + dbItems, err := getDatabaseItems(ctx, store.Client, args.Input.QueueId) + if err != nil { + return err + } + + // mark channel as not live + if err := setWatchChannelAsNotLive(ctx, store, dbItems.Channel.ID); err != nil { + return err + } + + // set queue status to completed + err = setQueueStatus(ctx, store.Client, QueueStatusInput{ + Status: utils.Success, + QueueId: dbItems.Queue.ID, + Task: utils.TaskDownloadChat, + }) + if err != nil { + return err + } + // queue chat convert + _, err = riverClient.Insert(ctx, &ConvertLiveChatArgs{ + Continue: true, + Input: ArchiveVideoInput{ + QueueId: args.Input.QueueId, + }, + }, nil) + if err != nil { + return err + } + } + } + } + } + } + + return nil +} diff --git a/internal/tasks/worker/worker.go b/internal/tasks/worker/worker.go 
new file mode 100644 index 00000000..b2a4ca70 --- /dev/null +++ b/internal/tasks/worker/worker.go @@ -0,0 +1,246 @@ +package tasks_worker + +import ( + "context" + "fmt" + "strconv" + "time" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" + "github.com/riverqueue/river" + "github.com/riverqueue/river/riverdriver/riverpgxv5" + "github.com/robfig/cron/v3" + "github.com/rs/zerolog/log" + "github.com/zibbp/ganymede/internal/config" + "github.com/zibbp/ganymede/internal/database" + "github.com/zibbp/ganymede/internal/live" + "github.com/zibbp/ganymede/internal/platform" + "github.com/zibbp/ganymede/internal/tasks" + tasks_periodic "github.com/zibbp/ganymede/internal/tasks/periodic" + tasks_shared "github.com/zibbp/ganymede/internal/tasks/shared" +) + +type RiverWorkerInput struct { + DB_URL string + DB *database.Database + PlatformTwitch platform.Platform + VideoDownloadWorkers int + VideoPostProcessWorkers int + ChatDownloadWorkers int + ChatRenderWorkers int +} + +type RiverWorkerClient struct { + Ctx context.Context + PgxPool *pgxpool.Pool + RiverPgxDriver *riverpgxv5.Driver + Client *river.Client[pgx.Tx] +} + +func NewRiverWorker(input RiverWorkerInput) (*RiverWorkerClient, error) { + rc := &RiverWorkerClient{} + + workers := river.NewWorkers() + if err := river.AddWorkerSafely(workers, &tasks.WatchdogWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.CreateDirectoryWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.SaveVideoInfoWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.DownloadTumbnailsWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.DownloadVideoWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.PostProcessVideoWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.MoveVideoWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.DownloadChatWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.RenderChatWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.MoveChatWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.DownloadLiveVideoWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.DownloadLiveChatWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.ConvertLiveChatWorker{}); err != nil { + return rc, err + } + // periodic tasks + if err := river.AddWorkerSafely(workers, &tasks_periodic.CheckChannelsForNewVideosWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks_periodic.PruneVideosWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks_periodic.ImportCategoriesWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks_periodic.AuthenticatePlatformWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks_periodic.FetchJWKSWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks_periodic.SaveVideoChaptersWorker{}); err != nil { + return rc, err + } + if err := river.AddWorkerSafely(workers, &tasks.UpdateStreamVideoIdWorker{}); err != nil { + return rc, err + } + if err := 
river.AddWorkerSafely(workers, &tasks.GenerateStaticThubmnailWorker{}); err != nil { + return rc, err + } + + rc.Ctx = context.Background() + + // create postgres pool connection + pool, err := pgxpool.New(rc.Ctx, input.DB_URL) + if err != nil { + return rc, fmt.Errorf("error connecting to postgres: %v", err) + } + rc.PgxPool = pool + + // create river pgx driver + rc.RiverPgxDriver = riverpgxv5.New(rc.PgxPool) + + // create river client + riverClient, err := river.NewClient(rc.RiverPgxDriver, &river.Config{ + Queues: map[string]river.QueueConfig{ + river.QueueDefault: {MaxWorkers: 100}, // non-resource intensive tasks or time sensitive tasks (live videos and chat) + tasks.QueueVideoDownload: {MaxWorkers: input.VideoDownloadWorkers}, + tasks.QueueVideoPostProcess: {MaxWorkers: input.VideoPostProcessWorkers}, + tasks.QueueChatDownload: {MaxWorkers: input.ChatDownloadWorkers}, + tasks.QueueChatRender: {MaxWorkers: input.ChatRenderWorkers}, + }, + Workers: workers, + JobTimeout: -1, + RescueStuckJobsAfter: 49 * time.Hour, + ErrorHandler: &tasks.CustomErrorHandler{}, + }) + if err != nil { + return rc, fmt.Errorf("error creating river client: %v", err) + } + + log.Info().Str("default_workers", "100").Str("download_workers", strconv.Itoa(input.VideoDownloadWorkers)).Str("post_process_workers", strconv.Itoa(input.VideoPostProcessWorkers)).Str("chat_download_workers", strconv.Itoa(input.ChatDownloadWorkers)).Str("chat_render_workers", strconv.Itoa(input.ChatRenderWorkers)).Msg("created river client") + + rc.Client = riverClient + + // put store in context for workers + rc.Ctx = context.WithValue(rc.Ctx, tasks_shared.StoreKey, input.DB) + + // put platform in context for workers + rc.Ctx = context.WithValue(rc.Ctx, tasks_shared.PlatformTwitchKey, input.PlatformTwitch) + + return rc, nil +} + +func (rc *RiverWorkerClient) Start() error { + log.Info().Str("name", rc.Client.ID()).Msg("starting worker") + if err := rc.Client.Start(rc.Ctx); err != nil { + return err + } + return nil +} + +func (rc *RiverWorkerClient) Stop() error { + if err := rc.Client.Stop(rc.Ctx); err != nil { + return err + } + return nil +} + +func (rc *RiverWorkerClient) GetPeriodicTasks(liveService *live.Service) ([]*river.PeriodicJob, error) { + env := config.GetEnvConfig() + midnightCron, err := cron.ParseStandard("0 0 * * *") + if err != nil { + return nil, err + } + + // put services in ctx for workers + rc.Ctx = context.WithValue(rc.Ctx, tasks_shared.LiveServiceKey, liveService) + + // check videos interval + configCheckVideoInterval := config.Get().VideoCheckInterval + + periodicJobs := []*river.PeriodicJob{ + // archive watchdog + // runs every 5 minutes + river.NewPeriodicJob( + river.PeriodicInterval(5*time.Minute), + func() (river.JobArgs, *river.InsertOpts) { + return tasks.WatchdogArgs{}, nil + }, + &river.PeriodicJobOpts{RunOnStart: true}, + ), + + // check watched channels for new videos + // run at specified interval + river.NewPeriodicJob( + river.PeriodicInterval(time.Duration(configCheckVideoInterval)*time.Minute), + func() (river.JobArgs, *river.InsertOpts) { + return tasks_periodic.CheckChannelsForNewVideosArgs{}, nil + }, + &river.PeriodicJobOpts{RunOnStart: false}, + ), + + // prune videos + // runs once a day at midnight + river.NewPeriodicJob( + midnightCron, + func() (river.JobArgs, *river.InsertOpts) { + return tasks_periodic.PruneVideosArgs{}, nil + }, + &river.PeriodicJobOpts{RunOnStart: false}, + ), + + // import categories + // runs once a day at midnight + river.NewPeriodicJob( + midnightCron, + 
func() (river.JobArgs, *river.InsertOpts) { + return tasks_periodic.ImportCategoriesArgs{}, nil + }, + &river.PeriodicJobOpts{RunOnStart: true}, + ), + + // authenticate to platform + // runs once a day at midnight + river.NewPeriodicJob( + midnightCron, + func() (river.JobArgs, *river.InsertOpts) { + return tasks_periodic.AuthenticatePlatformArgs{}, nil + }, + &river.PeriodicJobOpts{RunOnStart: false}, + ), + } + + // check jwks + if env.OAuthEnabled { + // runs once a day at midnight + periodicJobs = append(periodicJobs, river.NewPeriodicJob( + midnightCron, + func() (river.JobArgs, *river.InsertOpts) { + return tasks_periodic.FetchJWKSArgs{}, nil + }, + &river.PeriodicJobOpts{RunOnStart: true}, + )) + } + + return periodicJobs, nil +} diff --git a/internal/temporal/client.go b/internal/temporal/client.go deleted file mode 100644 index 86292524..00000000 --- a/internal/temporal/client.go +++ /dev/null @@ -1,64 +0,0 @@ -package temporal - -import ( - "context" - "os" - "time" - - "github.com/rs/zerolog/log" - "google.golang.org/protobuf/types/known/durationpb" - - "go.temporal.io/api/namespace/v1" - "go.temporal.io/api/workflowservice/v1" - "go.temporal.io/sdk/client" -) - -var temporalClient *Temporal - -type Temporal struct { - Client client.Client -} - -func InitializeTemporalClient() { - // TODO: config env parsed - temporalUrl := os.Getenv("TEMPORAL_URL") - clientOptions := client.Options{ - HostPort: temporalUrl, - } - - c, err := client.Dial(clientOptions) - if err != nil { - log.Panic().Msgf("Unable to create client: %v", err) - } - - // update temporal default namespace retention - namespaceClient, err := client.NewNamespaceClient(clientOptions) - if err != nil { - log.Error().Msgf("Unable to create namespace client: %v", err) - } - - // 30 day ttl - retentionTtlTime := 30 * 24 * time.Hour - - retentionTtl := durationpb.Duration{ - Seconds: int64(retentionTtlTime.Seconds()), - } - - err = namespaceClient.Update(context.Background(), &workflowservice.UpdateNamespaceRequest{ - Namespace: "default", - Config: &namespace.NamespaceConfig{ - WorkflowExecutionRetentionTtl: &retentionTtl, - }, - }) - if err != nil { - log.Error().Msgf("Unable to update default namespace: %v", err) - } - - log.Info().Msgf("Connected to temporal at %s", clientOptions.HostPort) - - temporalClient = &Temporal{Client: c} -} - -func GetTemporalClient() *Temporal { - return temporalClient -} diff --git a/internal/temporal/workflows.go b/internal/temporal/workflows.go deleted file mode 100644 index 516c6875..00000000 --- a/internal/temporal/workflows.go +++ /dev/null @@ -1,193 +0,0 @@ -package temporal - -import ( - "context" - "encoding/base64" - "encoding/json" - "fmt" - - "github.com/google/uuid" - "github.com/rs/zerolog/log" - "github.com/zibbp/ganymede/ent" - entVod "github.com/zibbp/ganymede/ent/vod" - "github.com/zibbp/ganymede/internal/database" - "github.com/zibbp/ganymede/internal/dto" - "go.temporal.io/api/enums/v1" - "go.temporal.io/api/history/v1" - "go.temporal.io/api/workflow/v1" - "go.temporal.io/api/workflowservice/v1" - "go.temporal.io/sdk/client" -) - -type WorkflowHistory struct { - *history.HistoryEvent -} - -type WorkflowVideoIdResult struct { - VideoId string `json:"video_id"` - ExternalVideoId string `json:"external_video_id"` -} - -type WorkflowExecutionResponse struct { - Executions []*workflow.WorkflowExecutionInfo `json:"executions"` - NextPageToken string `json:"next_page_token"` -} - -func GetActiveWorkflows(ctx context.Context, inputPageToken []byte) (*WorkflowExecutionResponse, 
error) { - listRequest := &workflowservice.ListOpenWorkflowExecutionsRequest{ - MaximumPageSize: 30, - } - - if inputPageToken != nil { - listRequest.NextPageToken = inputPageToken - } - - w, err := temporalClient.Client.ListOpenWorkflow(ctx, listRequest) - if err != nil { - log.Error().Err(err).Msg("failed to list closed workflows") - return nil, nil - } - - var nextPageToken string - if w.NextPageToken != nil { - token := string(w.NextPageToken) - // base64 encode - nextPageToken = base64.StdEncoding.EncodeToString([]byte(token)) - } - - return &WorkflowExecutionResponse{ - Executions: w.Executions, - NextPageToken: nextPageToken, - }, nil -} - -func GetClosedWorkflows(ctx context.Context, inputPageToken []byte) (*WorkflowExecutionResponse, error) { - listRequest := &workflowservice.ListClosedWorkflowExecutionsRequest{ - MaximumPageSize: 30, - } - - if inputPageToken != nil { - listRequest.NextPageToken = inputPageToken - } - - w, err := temporalClient.Client.ListClosedWorkflow(ctx, listRequest) - if err != nil { - log.Error().Err(err).Msg("failed to list closed workflows") - return nil, nil - } - - var nextPageToken string - if w.NextPageToken != nil { - token := string(w.NextPageToken) - // base64 encode - nextPageToken = base64.StdEncoding.EncodeToString([]byte(token)) - } - - return &WorkflowExecutionResponse{ - Executions: w.Executions, - NextPageToken: nextPageToken, - }, nil -} - -func GetWorkflowById(ctx context.Context, workflowId string, runId string) (*workflow.WorkflowExecutionInfo, error) { - w, err := temporalClient.Client.DescribeWorkflowExecution(ctx, workflowId, runId) - if err != nil { - log.Error().Err(err).Msg("failed to describe workflow") - return nil, nil - } - - return w.WorkflowExecutionInfo, nil -} - -func GetWorkflowHistory(ctx context.Context, workflowId string, runId string) ([]*history.HistoryEvent, error) { - iterator := temporalClient.Client.GetWorkflowHistory(ctx, workflowId, runId, false, 1) - - var history []*history.HistoryEvent - for iterator.HasNext() { - event, err := iterator.Next() - if err != nil { - log.Error().Err(err).Msg("failed to get workflow history") - return nil, nil - } - - history = append(history, event) - } - - return history, nil -} - -func RestartArchiveWorkflow(ctx context.Context, videoId uuid.UUID, workflowName string) (string, error) { - // fetch items to create a dto.ArchiveVideoInput - var input dto.ArchiveVideoInput - - vod, err := database.DB().Client.Vod.Query().Where(entVod.ID(videoId)).WithChannel().WithQueue().Only(context.Background()) - if err != nil { - log.Error().Err(err).Msg("failed to fetch vod") - return "", nil - } - - // check if a live watch exists - liveWatch, err := vod.Edges.Channel.QueryLive().Only(context.Background()) - if err != nil { - if _, ok := err.(*ent.NotFoundError); ok { - log.Debug().Msg("no live watch found") - } else { - log.Error().Err(err).Msg("failed to fetch live watch") - return "", nil - } - } - - input.Vod = vod - input.Channel = vod.Edges.Channel - input.Queue = vod.Edges.Queue - input.VideoID = vod.ExtID - input.Type = string(vod.Type) - input.Platform = string(vod.Platform) - input.Resolution = vod.Resolution - input.RenderChat = input.Queue.RenderChat - input.DownloadChat = true - input.LiveWatchChannel = liveWatch - - workflowOptions := client.StartWorkflowOptions{ - TaskQueue: "archive", - } - - workflowRun, err := temporalClient.Client.ExecuteWorkflow(ctx, workflowOptions, workflowName, input) - if err != nil { - log.Error().Err(err).Msg("failed to start workflow") - return "", 
nil - } - - log.Info().Msgf("Started workflow %s", workflowRun.GetID()) - - return workflowRun.GetID(), nil -} - -func GetVideoIdFromWorkflow(ctx context.Context, workflowId string, runId string) (WorkflowVideoIdResult, error) { - var result WorkflowVideoIdResult - history, err := GetWorkflowHistory(ctx, workflowId, runId) - if err != nil { - return WorkflowVideoIdResult{}, err - } - - for _, event := range history { - if event.GetEventType() == enums.EVENT_TYPE_WORKFLOW_EXECUTION_STARTED { - attributes := event.GetWorkflowExecutionStartedEventAttributes() - if attributes != nil { - input := attributes.Input - if input != nil { - data := input.Payloads[0].GetData() - var input dto.ArchiveVideoInput - err := json.Unmarshal(data, &input) - if err != nil { - return WorkflowVideoIdResult{}, fmt.Errorf("failed to unmarshal input: %w", err) - } - result.VideoId = input.Vod.ID.String() - result.ExternalVideoId = input.Vod.ExtID - } - } - } - } - - return result, nil -} diff --git a/internal/transport/http/admin.go b/internal/transport/http/admin.go index 1549152c..aa1ea95f 100644 --- a/internal/transport/http/admin.go +++ b/internal/transport/http/admin.go @@ -1,6 +1,7 @@ package http import ( + "context" "net/http" "github.com/labstack/echo/v4" @@ -8,8 +9,8 @@ import ( ) type AdminService interface { - GetStats(c echo.Context) (admin.GetStatsResp, error) - GetInfo(c echo.Context) (admin.InfoResp, error) + GetStats(ctx context.Context) (admin.GetStatsResp, error) + GetInfo(ctx context.Context) (admin.InfoResp, error) } // GetStats godoc @@ -24,7 +25,7 @@ type AdminService interface { // @Router /admin/stats [get] // @Security ApiKeyCookieAuth func (h *Handler) GetStats(c echo.Context) error { - resp, err := h.Service.AdminService.GetStats(c) + resp, err := h.Service.AdminService.GetStats(c.Request().Context()) if err != nil { return err } @@ -43,7 +44,7 @@ func (h *Handler) GetStats(c echo.Context) error { // @Router /admin/info [get] // @Security ApiKeyCookieAuth func (h *Handler) GetInfo(c echo.Context) error { - resp, err := h.Service.AdminService.GetInfo(c) + resp, err := h.Service.AdminService.GetInfo(c.Request().Context()) if err != nil { return c.JSON(http.StatusInternalServerError, err.Error()) } diff --git a/internal/transport/http/admin_test.go b/internal/transport/http/admin_test.go new file mode 100644 index 00000000..d1eec33e --- /dev/null +++ b/internal/transport/http/admin_test.go @@ -0,0 +1,110 @@ +package http_test + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/labstack/echo/v4" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/zibbp/ganymede/internal/admin" + httpHandler "github.com/zibbp/ganymede/internal/transport/http" +) + +type MockAdminService struct { + mock.Mock +} + +func (m *MockAdminService) GetStats(ctx context.Context) (admin.GetStatsResp, error) { + args := m.Called(ctx) + return args.Get(0).(admin.GetStatsResp), args.Error(1) +} + +func (m *MockAdminService) GetInfo(ctx context.Context) (admin.InfoResp, error) { + args := m.Called(ctx) + return args.Get(0).(admin.InfoResp), args.Error(1) +} + +func setupAdminHandler() *httpHandler.Handler { + e := setupEcho() + mockAdminService := new(MockAdminService) + + services := httpHandler.Services{ + AdminService: mockAdminService, + } + + handler := &httpHandler.Handler{ + Server: e, + Service: services, + } + + return handler +} + +// TestGetStats is a test function for getting the ganymede stats. 
+func TestGetStats(t *testing.T) { + handler := setupAdminHandler() + e := handler.Server + mockService := handler.Service.AdminService.(*MockAdminService) + + expected := admin.GetStatsResp{ + VodCount: 0, + ChannelCount: 0, + } + + mockService.On("GetStats", mock.Anything).Return(expected, nil) + + req := httptest.NewRequest(http.MethodGet, "/admin/stats", nil) + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + rec := httptest.NewRecorder() + c := e.NewContext(req, rec) + + if assert.NoError(t, handler.GetStats(c)) { + assert.Equal(t, http.StatusOK, rec.Code) + var response admin.GetStatsResp + err := json.Unmarshal(rec.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, expected, response) + } + + mockService.AssertExpectations(t) +} + +// TestGetInfo is a test function for getting the ganymede info. +func TestGetInfo(t *testing.T) { + handler := setupAdminHandler() + e := handler.Server + mockService := handler.Service.AdminService.(*MockAdminService) + + expected := admin.InfoResp{ + CommitHash: "test", + BuildTime: "test", + Uptime: "test", + ProgramVersions: admin.ProgramVersions{ + FFmpeg: "test", + TwitchDownloader: "test", + ChatDownloader: "test", + Streamlink: "test", + }, + } + + mockService.On("GetInfo", mock.Anything).Return(expected, nil) + + req := httptest.NewRequest(http.MethodGet, "/admin/info", nil) + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + rec := httptest.NewRecorder() + c := e.NewContext(req, rec) + + if assert.NoError(t, handler.GetInfo(c)) { + assert.Equal(t, http.StatusOK, rec.Code) + var response admin.InfoResp + err := json.Unmarshal(rec.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, expected, response) + } + + mockService.AssertExpectations(t) +} diff --git a/internal/transport/http/archive.go b/internal/transport/http/archive.go index ef616a07..1475b541 100644 --- a/internal/transport/http/archive.go +++ b/internal/transport/http/archive.go @@ -1,32 +1,38 @@ package http import ( + "context" + "fmt" "net/http" "strconv" "time" + "github.com/google/uuid" "github.com/labstack/echo/v4" "github.com/zibbp/ganymede/ent" "github.com/zibbp/ganymede/internal/archive" + "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/utils" ) type ArchiveService interface { - ArchiveTwitchChannel(cName string) (*ent.Channel, error) - ArchiveTwitchVod(vID string, quality string, chat bool, renderChat bool) (*archive.TwitchVodResponse, error) + ArchiveChannel(ctx context.Context, channelName string) (*ent.Channel, error) + ArchiveVideo(ctx context.Context, input archive.ArchiveVideoInput) error + ArchiveLivestream(ctx context.Context, input archive.ArchiveVideoInput) error } type ArchiveChannelRequest struct { ChannelName string `json:"channel_name" validate:"required"` } -type ArchiveVodRequest struct { - VodID string `json:"vod_id" validate:"required"` - Quality utils.VodQuality `json:"quality" validate:"required,oneof=best source 720p60 480p30 360p30 160p30 480p 360p 160p audio"` - Chat bool `json:"chat"` - RenderChat bool `json:"render_chat"` +type ArchiveVideoRequest struct { + VideoId string `json:"video_id"` + ChannelId string `json:"channel_id"` + Quality utils.VodQuality `json:"quality" validate:"required,oneof=best source 720p60 480p30 360p30 160p30 480p 360p 160p audio"` + ArchiveChat bool `json:"archive_chat"` + RenderChat bool `json:"render_chat"` } -// ArchiveTwitchChannel godoc +// ArchiveChannel godoc // // @Summary Archive a twitch channel // @Description
Archive a twitch channel (creates channel in database and downloads the profile image) @@ -39,22 +45,22 @@ type ArchiveVodRequest struct { // @Failure 500 {object} utils.ErrorResponse // @Router /archive/channel [post] // @Security ApiKeyCookieAuth -func (h *Handler) ArchiveTwitchChannel(c echo.Context) error { - acr := new(ArchiveChannelRequest) - if err := c.Bind(acr); err != nil { +func (h *Handler) ArchiveChannel(c echo.Context) error { + body := new(ArchiveChannelRequest) + if err := c.Bind(body); err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - if err := c.Validate(acr); err != nil { + if err := c.Validate(body); err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - channel, err := h.Service.ArchiveService.ArchiveTwitchChannel(acr.ChannelName) + channel, err := h.Service.ArchiveService.ArchiveChannel(c.Request().Context(), body.ChannelName) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } return c.JSON(http.StatusOK, channel) } -// ArchiveTwitchVod godoc +// ArchiveVideo godoc // // @Summary Archive a twitch vod // @Description Archive a twitch vod @@ -67,19 +73,52 @@ func (h *Handler) ArchiveTwitchChannel(c echo.Context) error { // @Failure 500 {object} utils.ErrorResponse // @Router /archive/vod [post] // @Security ApiKeyCookieAuth -func (h *Handler) ArchiveTwitchVod(c echo.Context) error { - avr := new(ArchiveVodRequest) - if err := c.Bind(avr); err != nil { +func (h *Handler) ArchiveVideo(c echo.Context) error { + body := new(ArchiveVideoRequest) + if err := c.Bind(body); err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - if err := c.Validate(avr); err != nil { + if err := c.Validate(body); err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - vod, err := h.Service.ArchiveService.ArchiveTwitchVod(avr.VodID, string(avr.Quality), avr.Chat, avr.RenderChat) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + + if body.VideoId == "" && body.ChannelId == "" { + return echo.NewHTTPError(http.StatusBadRequest, "either channel_id or video_id must be set") + } + + if body.VideoId != "" && body.ChannelId != "" { + return echo.NewHTTPError(http.StatusBadRequest, "only one of channel_id or video_id may be set, not both") + } + + if body.ChannelId != "" { + // validate channel id + parsedChannelId, err := uuid.Parse(body.ChannelId) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + + err = h.Service.ArchiveService.ArchiveLivestream(c.Request().Context(), archive.ArchiveVideoInput{ + ChannelId: parsedChannelId, + Quality: body.Quality, + ArchiveChat: body.ArchiveChat, + RenderChat: body.RenderChat, + }) + if err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + } else if body.VideoId != "" { + err := h.Service.ArchiveService.ArchiveVideo(c.Request().Context(), archive.ArchiveVideoInput{ + VideoId: body.VideoId, + Quality: body.Quality, + ArchiveChat: body.ArchiveChat, + RenderChat: body.RenderChat, + }) + if err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } } - return c.JSON(http.StatusOK, vod) + + return c.JSON(http.StatusOK, nil) } // debug route to test converting chat files @@ -108,7 +147,10 @@ func (h *Handler) ConvertTwitchChat(c echo.Context) error { t := time.Unix(seconds, nanoseconds) - err = utils.ConvertTwitchLiveChatToTDLChat(body.LiveChatPath, body.ChannelName, body.VideoID, body.VideoExternalID,
body.ChannelID, t, body.PreviousVideoID) + envConfig := config.GetEnvConfig() + outPath := fmt.Sprintf("%s/%s-chat-convert.json", envConfig.TempDir, body.VideoID) + + err = utils.ConvertTwitchLiveChatToTDLChat(body.LiveChatPath, outPath, body.ChannelName, body.VideoID, body.VideoExternalID, body.ChannelID, t, body.PreviousVideoID) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } diff --git a/internal/transport/http/archive_test.go b/internal/transport/http/archive_test.go index 473e64ef..a2b22bac 100644 --- a/internal/transport/http/archive_test.go +++ b/internal/transport/http/archive_test.go @@ -1,95 +1,164 @@ package http_test import ( + "bytes" + "context" "encoding/json" "net/http" "net/http/httptest" - "os" - "strings" "testing" "github.com/go-playground/validator/v10" + "github.com/google/uuid" "github.com/labstack/echo/v4" - _ "github.com/mattn/go-sqlite3" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" "github.com/zibbp/ganymede/ent" - "github.com/zibbp/ganymede/ent/enttest" "github.com/zibbp/ganymede/internal/archive" - "github.com/zibbp/ganymede/internal/channel" - "github.com/zibbp/ganymede/internal/database" - "github.com/zibbp/ganymede/internal/queue" httpHandler "github.com/zibbp/ganymede/internal/transport/http" - "github.com/zibbp/ganymede/internal/twitch" + "github.com/zibbp/ganymede/internal/utils" - "github.com/zibbp/ganymede/internal/vod" ) -var ( - // The following are used for testing. - testArchiveChannelJson = `{ - "channel_name": "test" - }` -) +type MockArchiveService struct { + mock.Mock +} + +func (m *MockArchiveService) ArchiveChannel(ctx context.Context, channelName string) (*ent.Channel, error) { + args := m.Called(ctx, channelName) + return args.Get(0).(*ent.Channel), args.Error(1) +} -type ServiceFuncMock struct{} +func (m *MockArchiveService) ArchiveVideo(ctx context.Context, input archive.ArchiveVideoInput) error { + args := m.Called(ctx, input) + return args.Error(0) +} + +func (m *MockArchiveService) ArchiveLivestream(ctx context.Context, input archive.ArchiveVideoInput) error { + args := m.Called(ctx, input) + return args.Error(0) +} -func (m ServiceFuncMock) GetUserByLogin(login string) (twitch.Channel, error) { - return twitch.Channel{ - ID: "123", - Login: "test", - DisplayName: "test", - ProfileImageURL: "https://raw.githubusercontent.com/Zibbp/ganymede/main/.github/ganymede-logo.png", - }, nil +func setupEcho() *echo.Echo { + e := echo.New() + e.Validator = &utils.CustomValidator{Validator: validator.New()} + return e } -// * TestArchiveChannel tests the archiving of a twitch channel functionality. -// Test fetches a mock channel, creates a db entry, and downloads the channel image. -func TestArchiveTwitchChannel(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), +func setupArchiveHandler() *httpHandler.Handler { + e := setupEcho() + mockArchiveService := new(MockArchiveService) + + services := httpHandler.Services{ + ArchiveService: mockArchiveService, + } + + handler := &httpHandler.Handler{ + Server: e, + Service: services, } - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() + return handler +} + +// TestArchiveChannel is a test function for archiving a channel. +// +// It tests the functionality of archiving a channel by sending a POST request with the channel name and verifying the response. 
+func TestArchiveChannel(t *testing.T) { + handler := setupArchiveHandler() + e := handler.Server + mockService := handler.Service.ArchiveService.(*MockArchiveService) - twitch.API = ServiceFuncMock{} + channelName := "test_channel" + mockChannel := &ent.Channel{Name: channelName} - twitchService := twitch.NewService() - vodService := vod.NewService(&database.Database{Client: client}) - channelService := channel.NewService(&database.Database{Client: client}) - queueService := queue.NewService(&database.Database{Client: client}, vodService, channelService) + mockService.On("ArchiveChannel", mock.Anything, channelName).Return(mockChannel, nil) - archiveService := archive.NewService(&database.Database{Client: client}, twitchService, channelService, vodService, queueService) + reqBody, _ := json.Marshal(httpHandler.ArchiveChannelRequest{ChannelName: channelName}) + req := httptest.NewRequest(http.MethodPost, "/archive/channel", bytes.NewBuffer(reqBody)) + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + rec := httptest.NewRecorder() + c := e.NewContext(req, rec) - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - ArchiveService: archiveService, - }, + if assert.NoError(t, handler.ArchiveChannel(c)) { + assert.Equal(t, http.StatusOK, rec.Code) + var responseChannel ent.Channel + err := json.Unmarshal(rec.Body.Bytes(), &responseChannel) + assert.NoError(t, err) + assert.Equal(t, mockChannel.Name, responseChannel.Name) } - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + mockService.AssertExpectations(t) +} + +func TestArchiveVideo(t *testing.T) { + handler := setupArchiveHandler() + e := handler.Server + mockService := handler.Service.ArchiveService.(*MockArchiveService) + + // test archive video + archiveVideoBody := httpHandler.ArchiveVideoRequest{ + VideoId: "123456789", + Quality: "best", + ArchiveChat: true, + RenderChat: false, + } - req := httptest.NewRequest(http.MethodPost, "/api/v1/archive/channel", strings.NewReader(testArchiveChannelJson)) + expectedInput := archive.ArchiveVideoInput{ + VideoId: "123456789", + Quality: "best", + ArchiveChat: true, + RenderChat: false, + } + + mockService.On("ArchiveVideo", mock.Anything, expectedInput).Return(nil) + + reqBody, _ := json.Marshal(archiveVideoBody) + req := httptest.NewRequest(http.MethodPost, "/archive/video", bytes.NewBuffer(reqBody)) req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) + c := e.NewContext(req, rec) - if assert.NoError(t, h.ArchiveTwitchChannel(c)) { + if assert.NoError(t, handler.ArchiveVideo(c)) { assert.Equal(t, http.StatusOK, rec.Code) + } - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "test", response["name"]) + mockService.AssertExpectations(t) +} - // Check channel folder was created - _, err = os.Stat("/vods/test") - assert.NoError(t, err) +func TestArchiveLivestream(t *testing.T) { + handler := setupArchiveHandler() + e := handler.Server + mockService := handler.Service.ArchiveService.(*MockArchiveService) - // Check channel image was downloaded - _, err = os.Stat("/vods/test/profile.png") - assert.NoError(t, err) + channelId := uuid.New() + + // test archive livestream + archiveLivestreamBody := httpHandler.ArchiveVideoRequest{ + ChannelId: channelId.String(), + Quality: "best", + ArchiveChat: true, + RenderChat: false, } + + expectedInput := 
archive.ArchiveVideoInput{ + ChannelId: channelId, + Quality: "best", + ArchiveChat: true, + RenderChat: false, + } + + mockService.On("ArchiveLivestream", mock.Anything, expectedInput).Return(nil) + + reqBody, _ := json.Marshal(archiveLivestreamBody) + req := httptest.NewRequest(http.MethodPost, "/archive/livestream", bytes.NewBuffer(reqBody)) + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + rec := httptest.NewRecorder() + c := e.NewContext(req, rec) + + if assert.NoError(t, handler.ArchiveVideo(c)) { + assert.Equal(t, http.StatusOK, rec.Code) + } + + mockService.AssertExpectations(t) } diff --git a/internal/transport/http/auth.go b/internal/transport/http/auth.go index a958bb01..e71dcd2d 100644 --- a/internal/transport/http/auth.go +++ b/internal/transport/http/auth.go @@ -1,18 +1,18 @@ package http import ( + "context" "net/http" - "os" "github.com/labstack/echo/v4" - "github.com/spf13/viper" "github.com/zibbp/ganymede/ent" "github.com/zibbp/ganymede/internal/auth" + "github.com/zibbp/ganymede/internal/config" "github.com/zibbp/ganymede/internal/user" ) type AuthService interface { - Register(c echo.Context, userDto user.User) (*ent.User, error) + Register(ctx context.Context, userDto user.User) (*ent.User, error) Login(c echo.Context, userDto user.User) (*ent.User, error) Refresh(c echo.Context, refreshToken string) error Me(c *auth.CustomContext) (*ent.User, error) @@ -53,10 +53,6 @@ type ChangePasswordRequest struct { // @Failure 500 {object} utils.ErrorResponse // @Router /auth/register [post] func (h *Handler) Register(c echo.Context) error { - // Check if registration is enabled - if !viper.Get("registration_enabled").(bool) { - return echo.NewHTTPError(http.StatusForbidden, "registration is disabled") - } rr := new(RegisterRequest) if err := c.Bind(rr); err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) @@ -70,7 +66,7 @@ func (h *Handler) Register(c echo.Context) error { Password: rr.Password, } - u, err := h.Service.AuthService.Register(c, userDto) + u, err := h.Service.AuthService.Register(c.Request().Context(), userDto) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } @@ -124,8 +120,8 @@ func (h *Handler) Login(c echo.Context) error { // @Failure 500 {object} utils.ErrorResponse // @Router /auth/oauth/login [get] func (h *Handler) OAuthLogin(c echo.Context) error { - oAuthEnabled := viper.GetBool("oauth_enabled") - if !oAuthEnabled { + env := config.GetEnvConfig() + if !env.OAuthEnabled { return echo.NewHTTPError(http.StatusForbidden, "OAuth is disabled") } // Redirect to OAuth provider @@ -240,11 +236,12 @@ func (h *Handler) ChangePassword(c echo.Context) error { // @Failure 500 {object} utils.ErrorResponse // @Router /auth/oauth/callback [get] func (h *Handler) OAuthCallback(c echo.Context) error { + env := config.GetEnvApplicationConfig() err := h.Service.AuthService.OAuthCallback(c) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } - return c.Redirect(http.StatusFound, os.Getenv("FRONTEND_HOST")) + return c.Redirect(http.StatusFound, env.FrontendHost) } // OAuthTokenRefresh godoc @@ -287,10 +284,10 @@ func (h *Handler) OAuthTokenRefresh(c echo.Context) error { // @Failure 500 {object} utils.ErrorResponse // @Router /auth/oauth/logout [get] func (h *Handler) OAuthLogout(c echo.Context) error { - + env := config.GetEnvApplicationConfig() err := h.Service.AuthService.OAuthLogout(c) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, 
err.Error()) } - return c.Redirect(http.StatusFound, os.Getenv("FRONTEND_HOST")) + return c.Redirect(http.StatusFound, env.FrontendHost) } diff --git a/internal/transport/http/auth_test.go b/internal/transport/http/auth_test.go index 60e9305f..bf6ac96d 100644 --- a/internal/transport/http/auth_test.go +++ b/internal/transport/http/auth_test.go @@ -1,180 +1,164 @@ package http_test import ( + "bytes" + "context" "encoding/json" "net/http" "net/http/httptest" - "os" - "strings" "testing" - "github.com/go-playground/validator/v10" "github.com/labstack/echo/v4" - _ "github.com/mattn/go-sqlite3" - "github.com/spf13/viper" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" "github.com/zibbp/ganymede/ent" - "github.com/zibbp/ganymede/ent/enttest" "github.com/zibbp/ganymede/internal/auth" - "github.com/zibbp/ganymede/internal/database" - httpTransport "github.com/zibbp/ganymede/internal/transport/http" - "github.com/zibbp/ganymede/internal/utils" + httpHandler "github.com/zibbp/ganymede/internal/transport/http" + "github.com/zibbp/ganymede/internal/user" ) -var ( - // The following are used for testing. - testUserJson = `{ - "username": "test", - "password": "test1234" - }` -) +type MockAuthService struct { + mock.Mock +} -// * TestRegister tests the Register function. -// Test registers a new user. -func TestRegister(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } +func (m *MockAuthService) Register(ctx context.Context, userDto user.User) (*ent.User, error) { + args := m.Called(ctx, userDto) + return args.Get(0).(*ent.User), args.Error(1) +} - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() +func (m *MockAuthService) Login(c echo.Context, userDto user.User) (*ent.User, error) { + args := m.Called(c, userDto) + return args.Get(0).(*ent.User), args.Error(1) +} - viper.Set("registration_enabled", true) +func (m *MockAuthService) Refresh(c echo.Context, refreshToken string) error { + args := m.Called(c, refreshToken) + return args.Error(0) +} - h := &httpTransport.Handler{ - Server: echo.New(), - Service: httpTransport.Services{ - AuthService: auth.NewService(&database.Database{Client: client}), - }, - } +func (m *MockAuthService) Me(c *auth.CustomContext) (*ent.User, error) { + args := m.Called(c) + return args.Get(0).(*ent.User), args.Error(1) +} - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} +func (m *MockAuthService) ChangePassword(c *auth.CustomContext, passwordDto auth.ChangePassword) error { + args := m.Called(c, passwordDto) + return args.Error(0) +} - req := httptest.NewRequest(http.MethodPost, "/api/v1/auth/register", strings.NewReader(testUserJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) +func (m *MockAuthService) OAuthRedirect(c echo.Context) error { + args := m.Called(c) + return args.Error(0) +} - if assert.NoError(t, h.Register(c)) { - assert.Equal(t, http.StatusOK, rec.Code) +func (m *MockAuthService) OAuthCallback(c echo.Context) error { + args := m.Called(c) + return args.Error(0) +} - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "test", response["username"]) - } +func (m *MockAuthService) OAuthTokenRefresh(c echo.Context, refreshToken string) error { + args := m.Called(c, refreshToken) + return args.Error(0) } -// * TestLogin 
tests the Login function. -// Test logs in a user. -func TestLogin(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), +func (m *MockAuthService) OAuthLogout(c echo.Context) error { + args := m.Called(c) + return args.Error(0) +} + +func setupAuthHandler() *httpHandler.Handler { + e := setupEcho() + mockAuthService := new(MockAuthService) + + services := httpHandler.Services{ + AuthService: mockAuthService, } - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() + handler := &httpHandler.Handler{ + Server: e, + Service: services, + } - viper.Set("registration_enabled", true) - os.Setenv("JWT_SECRET", "test") - os.Setenv("JWT_REFRESH_SECRET", "test") + return handler +} - h := &httpTransport.Handler{ - Server: echo.New(), - Service: httpTransport.Services{ - AuthService: auth.NewService(&database.Database{Client: client}), - }, +func TestRegister(t *testing.T) { + handler := setupAuthHandler() + e := handler.Server + mockService := handler.Service.AuthService.(*MockAuthService) + + // test register + registerBody := httpHandler.RegisterRequest{ + Username: "username", + Password: "password", } - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + expectedInput := user.User{ + Username: "username", + Password: "password", + } - // Register a new user - req := httptest.NewRequest(http.MethodPost, "/api/v1/auth/register", strings.NewReader(testUserJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - err := h.Register(c) - assert.NoError(t, err) + expectedOutput := &ent.User{ + Username: "username", + } + + mockService.On("Register", mock.Anything, expectedInput).Return(expectedOutput, nil) - // Login the user - req = httptest.NewRequest(http.MethodPost, "/api/v1/auth/login", strings.NewReader(testUserJson)) + b, err := json.Marshal(registerBody) + if err != nil { + t.Fatal(err) + } + req := httptest.NewRequest(http.MethodPost, "/auth/register", bytes.NewBuffer(b)) req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec = httptest.NewRecorder() - c = h.Server.NewContext(req, rec) + rec := httptest.NewRecorder() + c := e.NewContext(req, rec) - if assert.NoError(t, h.Login(c)) { + if assert.NoError(t, handler.Register(c)) { assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} + var response *ent.User err := json.Unmarshal(rec.Body.Bytes(), &response) assert.NoError(t, err) - assert.Equal(t, "test", response["username"]) + assert.Equal(t, expectedOutput, response) } } -// * TestRefresh tests the Refresh function. -// Test refreshes a user's access token. -func TestRefresh(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), +// TestLogin is a test function for login. +func TestLogin(t *testing.T) { + handler := setupAuthHandler() + e := handler.Server + mockService := handler.Service.AuthService.(*MockAuthService) + + // test login + loginBody := httpHandler.LoginRequest{ + Username: "username", + Password: "password", } - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - viper.Set("registration_enabled", true) - os.Setenv("JWT_SECRET", "test") - os.Setenv("JWT_REFRESH", "test") + expectedInput := user.User{ + Username: "username", + Password: "password", + } - h := &httpTransport.Handler{ - Server: echo.New(), - Service: httpTransport.Services{ - AuthService: auth.NewService(&database.Database{Client: client}), - }, + expectedOutput := &ent.User{ + Username: "username", } - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + mockService.On("Login", mock.Anything, expectedInput).Return(expectedOutput, nil) - // Register a new user - req := httptest.NewRequest(http.MethodPost, "/api/v1/auth/register", strings.NewReader(testUserJson)) + b, err := json.Marshal(loginBody) + if err != nil { + t.Fatal(err) + } + req := httptest.NewRequest(http.MethodPost, "/auth/login", bytes.NewBuffer(b)) req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - err := h.Register(c) - assert.NoError(t, err) - - // Login the user - req = httptest.NewRequest(http.MethodPost, "/api/v1/auth/login", strings.NewReader(testUserJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec = httptest.NewRecorder() - c = h.Server.NewContext(req, rec) - err = h.Login(c) - assert.NoError(t, err) - - // Refresh the user's access token - - // Get the refresh token from the response cookie - cookies := rec.Result().Cookies() - var refreshToken string - for _, cookie := range cookies { - if cookie.Name == "refresh-token" { - refreshToken = cookie.Value - } - } + c := e.NewContext(req, rec) - // Create a new request with the refresh token - req = httptest.NewRequest(http.MethodPost, "/api/v1/auth/refresh", nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - req.AddCookie(&http.Cookie{ - Name: "refresh-token", - Value: refreshToken, - }) - rec = httptest.NewRecorder() - c = h.Server.NewContext(req, rec) - - if assert.NoError(t, h.Refresh(c)) { + if assert.NoError(t, handler.Login(c)) { assert.Equal(t, http.StatusOK, rec.Code) + var response *ent.User + err := json.Unmarshal(rec.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, expectedOutput, response) } } diff --git a/internal/transport/http/blocked.go b/internal/transport/http/blocked.go new file mode 100644 index 00000000..866fc82c --- /dev/null +++ b/internal/transport/http/blocked.go @@ -0,0 +1,73 @@ +package http + +import ( + "context" + "net/http" + + "github.com/labstack/echo/v4" + "github.com/zibbp/ganymede/ent" +) + +type BlockedVideoService interface { + IsVideoBlocked(ctx context.Context, id string) (bool, error) + CreateBlockedVideo(ctx context.Context, id string) error + DeleteBlockedVideo(ctx context.Context, id string) error + GetBlockedVideos(ctx context.Context) ([]*ent.BlockedVideos, error) +} + +type ID struct { + ID string `json:"id" validate:"required,alphanum"` +} + +func (h *Handler) IsVideoBlocked(c echo.Context) error { + id := c.Param("id") + + err := h.Server.Validator.Validate(ID{ID: id}) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + + blocked, err := h.Service.BlockedVideoService.IsVideoBlocked(c.Request().Context(), id) + if err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, blocked) +} + +func (h *Handler) CreateBlockedVideo(c echo.Context) error { + id := c.Param("id") + + err := h.Server.Validator.Validate(ID{ID: id}) + if 
err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + + err = h.Service.BlockedVideoService.CreateBlockedVideo(c.Request().Context(), id) + if err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, nil) +} + +func (h *Handler) DeleteBlockedVideo(c echo.Context) error { + id := c.Param("id") + + err := h.Server.Validator.Validate(ID{ID: id}) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + + err = h.Service.BlockedVideoService.DeleteBlockedVideo(c.Request().Context(), id) + if err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, nil) +} + +func (h *Handler) GetBlockedVideos(c echo.Context) error { + videos, err := h.Service.BlockedVideoService.GetBlockedVideos(c.Request().Context()) + if err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, videos) +} diff --git a/internal/transport/http/blocked_test.go b/internal/transport/http/blocked_test.go new file mode 100644 index 00000000..6e0a8da8 --- /dev/null +++ b/internal/transport/http/blocked_test.go @@ -0,0 +1,133 @@ +package http_test + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + + "github.com/labstack/echo/v4" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/zibbp/ganymede/ent" + httpHandler "github.com/zibbp/ganymede/internal/transport/http" +) + +type MockBlockedVideoService struct { + mock.Mock +} + +func (m *MockBlockedVideoService) IsVideoBlocked(ctx context.Context, id string) (bool, error) { + args := m.Called(ctx, id) + return args.Get(0).(bool), args.Error(1) +} + +func (m *MockBlockedVideoService) CreateBlockedVideo(ctx context.Context, id string) error { + args := m.Called(ctx, id) + return args.Error(0) +} + +func (m *MockBlockedVideoService) DeleteBlockedVideo(ctx context.Context, id string) error { + args := m.Called(ctx, id) + return args.Error(0) +} + +func (m *MockBlockedVideoService) GetBlockedVideos(ctx context.Context) ([]*ent.BlockedVideos, error) { + args := m.Called(ctx) + return args.Get(0).([]*ent.BlockedVideos), args.Error(1) +} + +func setupBlockedVideoHandler() *httpHandler.Handler { + e := setupEcho() + + MockBlockedVideoService := new(MockBlockedVideoService) + + services := httpHandler.Services{ + BlockedVideoService: MockBlockedVideoService, + } + + handler := &httpHandler.Handler{ + Server: e, + Service: services, + } + + return handler +} + +func TestIsVideoBlocked(t *testing.T) { + handler := setupBlockedVideoHandler() + e := handler.Server + mockService := handler.Service.BlockedVideoService.(*MockBlockedVideoService) + + mockService.On("IsVideoBlocked", mock.Anything, mock.Anything).Return(true, nil) + + req := httptest.NewRequest(http.MethodGet, "/blocked-video/123", nil) + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + rec := httptest.NewRecorder() + c := e.NewContext(req, rec) + c.SetParamNames("id") + c.SetParamValues("123") + + if assert.NoError(t, handler.IsVideoBlocked(c)) { + assert.Equal(t, http.StatusOK, rec.Code) + mockService.AssertExpectations(t) + } +} + +func TestCreateBlockedVideo(t *testing.T) { + handler := setupBlockedVideoHandler() + e := handler.Server + mockService := handler.Service.BlockedVideoService.(*MockBlockedVideoService) + + mockService.On("CreateBlockedVideo", mock.Anything, mock.Anything).Return(nil) + + req := 
httptest.NewRequest(http.MethodPost, "/blocked-video/123", nil) + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + rec := httptest.NewRecorder() + c := e.NewContext(req, rec) + c.SetParamNames("id") + c.SetParamValues("123") + + if assert.NoError(t, handler.CreateBlockedVideo(c)) { + assert.Equal(t, http.StatusOK, rec.Code) + mockService.AssertExpectations(t) + } +} + +func TestDeleteBlockedVideo(t *testing.T) { + handler := setupBlockedVideoHandler() + e := handler.Server + mockService := handler.Service.BlockedVideoService.(*MockBlockedVideoService) + + mockService.On("DeleteBlockedVideo", mock.Anything, mock.Anything).Return(nil) + + req := httptest.NewRequest(http.MethodDelete, "/blocked-video/123", nil) + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + rec := httptest.NewRecorder() + c := e.NewContext(req, rec) + c.SetParamNames("id") + c.SetParamValues("123") + + if assert.NoError(t, handler.DeleteBlockedVideo(c)) { + assert.Equal(t, http.StatusOK, rec.Code) + mockService.AssertExpectations(t) + } +} + +func TestGetBlockedVideos(t *testing.T) { + handler := setupBlockedVideoHandler() + e := handler.Server + mockService := handler.Service.BlockedVideoService.(*MockBlockedVideoService) + + mockService.On("GetBlockedVideos", mock.Anything).Return([]*ent.BlockedVideos{}, nil) + + req := httptest.NewRequest(http.MethodGet, "/blocked", nil) + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + rec := httptest.NewRecorder() + c := e.NewContext(req, rec) + + if assert.NoError(t, handler.GetBlockedVideos(c)) { + assert.Equal(t, http.StatusOK, rec.Code) + mockService.AssertExpectations(t) + } +} diff --git a/internal/transport/http/category.go b/internal/transport/http/category.go new file mode 100644 index 00000000..c1f17625 --- /dev/null +++ b/internal/transport/http/category.go @@ -0,0 +1,21 @@ +package http + +import ( + "context" + "net/http" + + "github.com/labstack/echo/v4" + "github.com/zibbp/ganymede/ent" +) + +type CategoryService interface { + GetCategories(ctx context.Context) ([]*ent.TwitchCategory, error) +} + +func (h *Handler) GetCategories(c echo.Context) error { + categories, err := h.Service.CategoryService.GetCategories(c.Request().Context()) + if err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, categories) +} diff --git a/internal/transport/http/channel.go b/internal/transport/http/channel.go index b5b070da..4b15fcc4 100644 --- a/internal/transport/http/channel.go +++ b/internal/transport/http/channel.go @@ -1,6 +1,7 @@ package http import ( + "context" "math/rand" "net/http" "strconv" @@ -18,7 +19,7 @@ type ChannelService interface { GetChannelByName(channelName string) (*ent.Channel, error) DeleteChannel(channelID uuid.UUID) error UpdateChannel(channelID uuid.UUID, channelDto channel.Channel) (*ent.Channel, error) - UpdateChannelImage(c echo.Context, channelID uuid.UUID) error + UpdateChannelImage(ctx context.Context, channelID uuid.UUID) error } type CreateChannelRequest struct { @@ -230,7 +231,7 @@ func (h *Handler) UpdateChannelImage(c echo.Context) error { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - err = h.Service.ChannelService.UpdateChannelImage(c, cUUID) + err = h.Service.ChannelService.UpdateChannelImage(c.Request().Context(), cUUID) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } diff --git a/internal/transport/http/channel_test.go b/internal/transport/http/channel_test.go index 
f385c386..38833f45 100644 --- a/internal/transport/http/channel_test.go +++ b/internal/transport/http/channel_test.go @@ -1,315 +1,315 @@ package http_test -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "strings" - "testing" - - "github.com/go-playground/validator/v10" - "github.com/labstack/echo/v4" - _ "github.com/mattn/go-sqlite3" - "github.com/stretchr/testify/assert" - "github.com/zibbp/ganymede/ent" - entChannel "github.com/zibbp/ganymede/ent/channel" - "github.com/zibbp/ganymede/ent/enttest" - "github.com/zibbp/ganymede/internal/channel" - "github.com/zibbp/ganymede/internal/database" - httpHandler "github.com/zibbp/ganymede/internal/transport/http" - "github.com/zibbp/ganymede/internal/utils" -) - -var ( - channelJSON = `{ - "name": "test_channel", - "display_name": "Test Channel", - "image_path": "/vods/test_channel/test_channel.jpg" - }` - invalidChannelJSON = `{ - "name": "t", - "display_name": "t", - "image_path": "t" - }` -) - -// * TestCreateChannel tests the CreateChannel function -// Test creates a new channel and checks if the response is correct -func TestCreateChannel(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - ChannelService: channel.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - req := httptest.NewRequest(http.MethodPost, "/api/v1/channels", strings.NewReader(channelJSON)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - if assert.NoError(t, h.CreateChannel(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "test_channel", response["name"]) - } -} - -// * TestCreateChannelInvalid tests the CreateChannel function -// Test creates a new channel with invalid data and checks if the response is correct -func TestCreateInvalidChannel(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - ChannelService: channel.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - req := httptest.NewRequest(http.MethodPost, "/api/v1/channels", strings.NewReader(invalidChannelJSON)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - // Response should be 400, pass the test if it is - if assert.Error(t, h.CreateChannel(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - } -} - -// * TestGetChannels tests the GetChannel function -// Test creates a new channel and checks if the response contains 1 channel -func TestGetChannels(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - ChannelService: channel.NewService(&database.Database{Client: client}), - }, - } - - // Create a channel - client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - req := httptest.NewRequest(http.MethodGet, "/api/v1/channel", nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - if assert.NoError(t, h.GetChannels(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response []map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, 1, len(response)) - } -} - -// * TestGetChannel tests the GetChannel function -// Test creates a new channel and checks if the response contains the correct channel -func TestGetChannel(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - ChannelService: channel.NewService(&database.Database{Client: client}), - }, - } - - // Create a channel - testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/channel/%s", testChannel.ID.String()), nil) - - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - // Set path parameters - c.SetParamNames("id") - c.SetParamValues(testChannel.ID.String()) - - if assert.NoError(t, h.GetChannel(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "test_channel", response["name"]) - } -} - -// * TestDeleteChannel tests the DeleteChannel function -// Test creates a new channel and deletes it and checks if the response is correct -func TestDeleteChannel(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - ChannelService: channel.NewService(&database.Database{Client: client}), - }, - } - - // Create a channel - testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/channel/%s", testChannel.ID.String()), nil) - - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - // Set path parameters - c.SetParamNames("id") - c.SetParamValues(testChannel.ID.String()) - - if assert.NoError(t, h.DeleteChannel(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - } - - // Check if channel is deleted - channel, err := client.Channel.Query().Where(entChannel.ID(testChannel.ID)).Only(context.Background()) - assert.Error(t, err) - assert.Nil(t, channel) -} - -// * TestUpdateChannel tests the UpdateChannel function -// Test creates a new channel and updates it and checks if the response is correct -func TestUpdateChannel(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - ChannelService: channel.NewService(&database.Database{Client: client}), - }, - } - - // Create a channel - testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) - - // Updated channel - updatedJson := `{ - "name": "updated", - "display_name": "updated", - "image_path": "/vods/updated/updated.jpg" - }` - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/channel/%s", testChannel.ID.String()), strings.NewReader(updatedJson)) - - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - // Set path parameters - c.SetParamNames("id") - c.SetParamValues(testChannel.ID.String()) - - if assert.NoError(t, h.UpdateChannel(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "updated", response["name"]) - assert.Equal(t, "updated", response["display_name"]) - assert.Equal(t, "/vods/updated/updated.jpg", response["image_path"]) - } -} - -// * TestGetChannelByName tests the GetChannelByName function -// Test creates a new channel and checks if the response contains the correct channel -func TestGetChannelByName(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - ChannelService: channel.NewService(&database.Database{Client: client}), - }, - } - - // Create a channel - testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/channel/name/%s", testChannel.Name), nil) - - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - // Set path parameters - c.SetParamNames("name") - c.SetParamValues(testChannel.Name) - - if assert.NoError(t, h.GetChannelByName(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "test_channel", response["name"]) - } -} +// import ( +// "context" +// "encoding/json" +// "fmt" +// "net/http" +// "net/http/httptest" +// "strings" +// "testing" + +// "github.com/go-playground/validator/v10" +// "github.com/labstack/echo/v4" +// _ "github.com/mattn/go-sqlite3" +// "github.com/stretchr/testify/assert" +// "github.com/zibbp/ganymede/ent" +// entChannel "github.com/zibbp/ganymede/ent/channel" +// "github.com/zibbp/ganymede/ent/enttest" +// "github.com/zibbp/ganymede/internal/channel" +// "github.com/zibbp/ganymede/internal/database" +// httpHandler "github.com/zibbp/ganymede/internal/transport/http" +// "github.com/zibbp/ganymede/internal/utils" +// ) + +// var ( +// channelJSON = `{ +// "name": "test_channel", +// "display_name": "Test Channel", +// "image_path": "/vods/test_channel/test_channel.jpg" +// }` +// invalidChannelJSON = `{ +// "name": "t", +// "display_name": "t", +// "image_path": "t" +// }` +// ) + +// // * TestCreateChannel tests the CreateChannel function +// // Test creates a new channel and checks if the response is correct +// func TestCreateChannel(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// ChannelService: channel.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// req := httptest.NewRequest(http.MethodPost, "/api/v1/channels", strings.NewReader(channelJSON)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// if assert.NoError(t, h.CreateChannel(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "test_channel", response["name"]) +// } +// } + +// // * TestCreateChannelInvalid tests the CreateChannel function +// // Test creates a new channel with invalid data and checks if the response is correct +// func TestCreateInvalidChannel(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) +// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// ChannelService: channel.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// req := httptest.NewRequest(http.MethodPost, "/api/v1/channels", strings.NewReader(invalidChannelJSON)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// // Response should be 400, pass the test if it is +// if assert.Error(t, h.CreateChannel(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) +// } +// } + +// // * TestGetChannels tests the GetChannel function +// // Test creates a new channel and checks if the response contains 1 channel +// func TestGetChannels(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// ChannelService: channel.NewService(&database.Database{Client: client}), +// }, +// } + +// // Create a channel +// client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// req := httptest.NewRequest(http.MethodGet, "/api/v1/channel", nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// if assert.NoError(t, h.GetChannels(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response []map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, 1, len(response)) +// } +// } + +// // * TestGetChannel tests the GetChannel function +// // Test creates a new channel and checks if the response contains the correct channel +// func TestGetChannel(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) +// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// ChannelService: channel.NewService(&database.Database{Client: client}), +// }, +// } + +// // Create a channel +// testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} +// req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/channel/%s", testChannel.ID.String()), nil) + +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// // Set path parameters +// c.SetParamNames("id") +// c.SetParamValues(testChannel.ID.String()) + +// if assert.NoError(t, h.GetChannel(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "test_channel", response["name"]) +// } +// } + +// // * TestDeleteChannel tests the DeleteChannel function +// // Test creates a new channel and deletes it and checks if the response is correct +// func TestDeleteChannel(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// ChannelService: channel.NewService(&database.Database{Client: client}), +// }, +// } + +// // Create a channel +// testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} +// req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/channel/%s", testChannel.ID.String()), nil) + +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// // Set path parameters +// c.SetParamNames("id") +// c.SetParamValues(testChannel.ID.String()) + +// if assert.NoError(t, h.DeleteChannel(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) +// } + +// // Check if channel is deleted +// channel, err := client.Channel.Query().Where(entChannel.ID(testChannel.ID)).Only(context.Background()) +// assert.Error(t, err) +// assert.Nil(t, channel) +// } + +// // * TestUpdateChannel tests the UpdateChannel function +// // Test creates a new channel and updates it and checks if the response is correct +// func TestUpdateChannel(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) +// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// ChannelService: channel.NewService(&database.Database{Client: client}), +// }, +// } + +// // Create a channel +// testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) + +// // Updated channel +// updatedJson := `{ +// "name": "updated", +// "display_name": "updated", +// "image_path": "/vods/updated/updated.jpg" +// }` + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} +// req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/channel/%s", testChannel.ID.String()), strings.NewReader(updatedJson)) + +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// // Set path parameters +// c.SetParamNames("id") +// c.SetParamValues(testChannel.ID.String()) + +// if assert.NoError(t, h.UpdateChannel(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "updated", response["name"]) +// assert.Equal(t, "updated", response["display_name"]) +// assert.Equal(t, "/vods/updated/updated.jpg", response["image_path"]) +// } +// } + +// // * TestGetChannelByName tests the GetChannelByName function +// // Test creates a new channel and checks if the response contains the correct channel +// func TestGetChannelByName(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// ChannelService: channel.NewService(&database.Database{Client: client}), +// }, +// } + +// // Create a channel +// testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} +// req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/channel/name/%s", testChannel.Name), nil) + +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// // Set path parameters +// c.SetParamNames("name") +// c.SetParamValues(testChannel.Name) + +// if assert.NoError(t, h.GetChannelByName(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "test_channel", response["name"]) +// } +// } diff --git a/internal/transport/http/config.go b/internal/transport/http/config.go index 6360bd2e..8ce4b07c 100644 --- a/internal/transport/http/config.go +++ b/internal/transport/http/config.go @@ -2,59 +2,58 @@ package http import ( "net/http" - "strings" "github.com/labstack/echo/v4" "github.com/zibbp/ganymede/internal/config" ) -type ConfigService interface { - GetConfig(c echo.Context) (*config.Conf, error) - UpdateConfig(c echo.Context, conf *config.Conf) error - GetNotificationConfig(c echo.Context) (*config.Notification, error) - UpdateNotificationConfig(c echo.Context, conf *config.Notification) error - GetStorageTemplateConfig(c echo.Context) (*config.StorageTemplate, error) - UpdateStorageTemplateConfig(c echo.Context, conf *config.StorageTemplate) error -} - -type UpdateConfigRequest struct { - RegistrationEnabled bool `json:"registration_enabled"` - Parameters struct { - TwitchToken string `json:"twitch_token"` - VideoConvert string `json:"video_convert" validate:"required"` - ChatRender string `json:"chat_render" validate:"required"` - StreamlinkLive string `json:"streamlink_live"` - } `json:"parameters"` - Archive struct { - SaveAsHls bool `json:"save_as_hls"` - } `json:"archive"` - Livestream struct { - Proxies []config.ProxyListItem `json:"proxies"` - ProxyEnabled bool `json:"proxy_enabled"` - ProxyParameters string `json:"proxy_parameters"` - ProxyWhitelist []string `json:"proxy_whitelist"` - } `json:"livestream"` -} - -type UpdateNotificationRequest struct { - VideoSuccessWebhookUrl string `json:"video_success_webhook_url"` - VideoSuccessTemplate string `json:"video_success_template"` - VideoSuccessEnabled bool `json:"video_success_enabled"` - LiveSuccessWebhookUrl string `json:"live_success_webhook_url"` - LiveSuccessTemplate string `json:"live_success_template"` - LiveSuccessEnabled bool `json:"live_success_enabled"` - ErrorWebhookUrl string `json:"error_webhook_url"` - ErrorTemplate string `json:"error_template"` - ErrorEnabled bool `json:"error_enabled"` - IsLiveWebhookUrl string `json:"is_live_webhook_url"` - IsLiveTemplate string `json:"is_live_template"` - IsLiveEnabled bool `json:"is_live_enabled"` -} - -type UpdateStorageTemplateRequest struct { - FolderTemplate string `json:"folder_template" validate:"required"` - FileTemplate string `json:"file_template" validate:"required"` -} +// type ConfigService interface { +// GetConfig(ctx 
context.Context) (*config.Conf, error) +// UpdateConfig(c echo.Context, conf *config.Conf) error +// GetNotificationConfig(c echo.Context) (*config.Notification, error) +// UpdateNotificationConfig(c echo.Context, conf *config.Notification) error +// GetStorageTemplateConfig(c echo.Context) (*config.StorageTemplate, error) +// UpdateStorageTemplateConfig(c echo.Context, conf *config.StorageTemplate) error +// } + +// type UpdateConfigRequest struct { +// RegistrationEnabled bool `json:"registration_enabled"` +// Parameters struct { +// TwitchToken string `json:"twitch_token"` +// VideoConvert string `json:"video_convert" validate:"required"` +// ChatRender string `json:"chat_render" validate:"required"` +// StreamlinkLive string `json:"streamlink_live"` +// } `json:"parameters"` +// Archive struct { +// SaveAsHls bool `json:"save_as_hls"` +// } `json:"archive"` +// Livestream struct { +// Proxies []config.ProxyListItem `json:"proxies"` +// ProxyEnabled bool `json:"proxy_enabled"` +// ProxyParameters string `json:"proxy_parameters"` +// ProxyWhitelist []string `json:"proxy_whitelist"` +// } `json:"livestream"` +// } + +// type UpdateNotificationRequest struct { +// VideoSuccessWebhookUrl string `json:"video_success_webhook_url"` +// VideoSuccessTemplate string `json:"video_success_template"` +// VideoSuccessEnabled bool `json:"video_success_enabled"` +// LiveSuccessWebhookUrl string `json:"live_success_webhook_url"` +// LiveSuccessTemplate string `json:"live_success_template"` +// LiveSuccessEnabled bool `json:"live_success_enabled"` +// ErrorWebhookUrl string `json:"error_webhook_url"` +// ErrorTemplate string `json:"error_template"` +// ErrorEnabled bool `json:"error_enabled"` +// IsLiveWebhookUrl string `json:"is_live_webhook_url"` +// IsLiveTemplate string `json:"is_live_template"` +// IsLiveEnabled bool `json:"is_live_enabled"` +// } + +// type UpdateStorageTemplateRequest struct { +// FolderTemplate string `json:"folder_template" validate:"required"` +// FileTemplate string `json:"file_template" validate:"required"` +// } // GetConfig godoc // @@ -68,11 +67,8 @@ type UpdateStorageTemplateRequest struct { // @Router /config [get] // @Security ApiKeyCookieAuth func (h *Handler) GetConfig(c echo.Context) error { - conf, err := h.Service.ConfigService.GetConfig(c) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusOK, conf) + config := config.Get() + return c.JSON(http.StatusOK, config) } // UpdateConfig godoc @@ -89,154 +85,19 @@ func (h *Handler) GetConfig(c echo.Context) error { // @Router /config [put] // @Security ApiKeyCookieAuth func (h *Handler) UpdateConfig(c echo.Context) error { - conf := new(UpdateConfigRequest) + conf := new(config.Config) if err := c.Bind(conf); err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - if err := c.Validate(conf); err != nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - cDto := config.Conf{ - RegistrationEnabled: conf.RegistrationEnabled, - Archive: struct { - SaveAsHls bool `json:"save_as_hls"` - }(conf.Archive), - Parameters: struct { - TwitchToken string `json:"twitch_token"` - VideoConvert string `json:"video_convert"` - ChatRender string `json:"chat_render"` - StreamlinkLive string `json:"streamlink_live"` - }(conf.Parameters), - Livestream: struct { - Proxies []config.ProxyListItem `json:"proxies"` - ProxyEnabled bool `json:"proxy_enabled"` - ProxyParameters string `json:"proxy_parameters"` - ProxyWhitelist []string 
`json:"proxy_whitelist"` - }(conf.Livestream), - } - if err := h.Service.ConfigService.UpdateConfig(c, &cDto); err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusOK, conf) -} -// GetNotificationConfig godoc -// -// @Summary Get notification config -// @Description Get notification config -// @Tags config -// @Accept json -// @Produce json -// @Success 200 {object} config.Notification -// @Failure 500 {object} utils.ErrorResponse -// @Router /config/notification [get] -// @Security ApiKeyCookieAuth -func (h *Handler) GetNotificationConfig(c echo.Context) error { - conf, err := h.Service.ConfigService.GetNotificationConfig(c) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusOK, conf) -} - -// UpdateNotificationConfig godoc -// -// @Summary Update notification config -// @Description Update notification config -// @Tags config -// @Accept json -// @Produce json -// @Param body body UpdateNotificationRequest true "Config" -// @Success 200 {object} UpdateNotificationRequest -// @Failure 400 {object} utils.ErrorResponse -// @Failure 500 {object} utils.ErrorResponse -// @Router /config/notification [put] -// @Security ApiKeyCookieAuth -func (h *Handler) UpdateNotificationConfig(c echo.Context) error { - conf := new(UpdateNotificationRequest) - if err := c.Bind(conf); err != nil { + if err := c.Validate(conf); err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - cDto := config.Notification{ - VideoSuccessWebhookUrl: conf.VideoSuccessWebhookUrl, - VideoSuccessTemplate: conf.VideoSuccessTemplate, - VideoSuccessEnabled: conf.VideoSuccessEnabled, - LiveSuccessWebhookUrl: conf.LiveSuccessWebhookUrl, - LiveSuccessTemplate: conf.LiveSuccessTemplate, - LiveSuccessEnabled: conf.LiveSuccessEnabled, - ErrorWebhookUrl: conf.ErrorWebhookUrl, - ErrorTemplate: conf.ErrorTemplate, - ErrorEnabled: conf.ErrorEnabled, - IsLiveWebhookUrl: conf.IsLiveWebhookUrl, - IsLiveTemplate: conf.IsLiveTemplate, - IsLiveEnabled: conf.IsLiveEnabled, - } - - if err := h.Service.ConfigService.UpdateNotificationConfig(c, &cDto); err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusOK, conf) -} -// GetStorageTemplateConfig godoc -// -// @Summary Get storage template config -// @Description Get storage template config -// @Tags config -// @Accept json -// @Produce json -// @Success 200 {object} config.StorageTemplate -// @Failure 500 {object} utils.ErrorResponse -// @Router /config/storage [get] -// @Security ApiKeyCookieAuth -func (h *Handler) GetStorageTemplateConfig(c echo.Context) error { - conf, err := h.Service.ConfigService.GetStorageTemplateConfig(c) + err := config.UpdateConfig(conf) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } - return c.JSON(http.StatusOK, conf) -} -// UpdateStorageTemplateConfig godoc -// -// @Summary Update storage template config -// @Description Update storage template config -// @Tags config -// @Accept json -// @Produce json -// @Param body body UpdateStorageTemplateRequest true "Config" -// @Success 200 {object} UpdateStorageTemplateRequest -// @Failure 400 {object} utils.ErrorResponse -// @Failure 500 {object} utils.ErrorResponse -// @Router /config/storage [put] -// @Security ApiKeyCookieAuth -func (h *Handler) UpdateStorageTemplateConfig(c echo.Context) error { - conf := new(UpdateStorageTemplateRequest) - if err := c.Bind(conf); err 
!= nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - - if err := c.Validate(conf); err != nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - - if len(conf.FolderTemplate) == 0 || len(conf.FileTemplate) == 0 { - return echo.NewHTTPError(http.StatusBadRequest, "Folder template and file template can't be empty") - } - - // Check if folder template contains {{uuid}} - - if !strings.Contains(conf.FolderTemplate, "{{uuid}}") { - return echo.NewHTTPError(http.StatusBadRequest, "Folder template must contain {{uuid}}") - } - - cDto := config.StorageTemplate{ - FolderTemplate: conf.FolderTemplate, - FileTemplate: conf.FileTemplate, - } - - if err := h.Service.ConfigService.UpdateStorageTemplateConfig(c, &cDto); err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } return c.JSON(http.StatusOK, conf) } diff --git a/internal/transport/http/handler.go b/internal/transport/http/handler.go index e6f8f1d5..9e1dd557 100644 --- a/internal/transport/http/handler.go +++ b/internal/transport/http/handler.go @@ -3,8 +3,6 @@ package http import ( "context" "net/http" - "os" - "os/signal" "time" "github.com/go-playground/validator/v10" @@ -12,31 +10,32 @@ import ( "github.com/labstack/echo/v4/middleware" "github.com/prometheus/client_golang/prometheus/promhttp" "github.com/rs/zerolog/log" - "github.com/spf13/viper" echoSwagger "github.com/swaggo/echo-swagger" _ "github.com/zibbp/ganymede/docs" "github.com/zibbp/ganymede/internal/auth" - "github.com/zibbp/ganymede/internal/channel" + "github.com/zibbp/ganymede/internal/config" + "github.com/zibbp/ganymede/internal/platform" "github.com/zibbp/ganymede/internal/utils" ) type Services struct { - AuthService AuthService - ChannelService ChannelService - VodService VodService - QueueService QueueService - TwitchService TwitchService - ArchiveService ArchiveService - AdminService AdminService - UserService UserService - ConfigService ConfigService - LiveService LiveService - SchedulerService SchedulerService - PlaybackService PlaybackService - MetricsService MetricsService - PlaylistService PlaylistService - TaskService TaskService - ChapterService ChapterService + AuthService AuthService + ChannelService ChannelService + VodService VodService + QueueService QueueService + ArchiveService ArchiveService + AdminService AdminService + UserService UserService + LiveService LiveService + SchedulerService SchedulerService + PlaybackService PlaybackService + MetricsService MetricsService + PlaylistService PlaylistService + TaskService TaskService + ChapterService ChapterService + CategoryService CategoryService + BlockedVideoService BlockedVideoService + PlatformTwitch platform.Platform } type Handler struct { @@ -44,36 +43,40 @@ type Handler struct { Service Services } -func NewHandler(authService AuthService, channelService ChannelService, vodService VodService, queueService QueueService, twitchService TwitchService, archiveService ArchiveService, adminService AdminService, userService UserService, configService ConfigService, liveService LiveService, schedulerService SchedulerService, playbackService PlaybackService, metricsService MetricsService, playlistService PlaylistService, taskService TaskService, chapterService ChapterService) *Handler { +func NewHandler(authService AuthService, channelService ChannelService, vodService VodService, queueService QueueService, archiveService ArchiveService, adminService AdminService, userService UserService, liveService LiveService, schedulerService 
SchedulerService, playbackService PlaybackService, metricsService MetricsService, playlistService PlaylistService, taskService TaskService, chapterService ChapterService, categoryService CategoryService, blockedVideoService BlockedVideoService, platformTwitch platform.Platform) *Handler { log.Debug().Msg("creating new handler") + env := config.GetEnvApplicationConfig() h := &Handler{ Server: echo.New(), Service: Services{ - AuthService: authService, - ChannelService: channelService, - VodService: vodService, - QueueService: queueService, - TwitchService: twitchService, - ArchiveService: archiveService, - AdminService: adminService, - UserService: userService, - ConfigService: configService, - LiveService: liveService, - SchedulerService: schedulerService, - PlaybackService: playbackService, - MetricsService: metricsService, - PlaylistService: playlistService, - TaskService: taskService, - ChapterService: chapterService, + AuthService: authService, + ChannelService: channelService, + VodService: vodService, + QueueService: queueService, + ArchiveService: archiveService, + AdminService: adminService, + UserService: userService, + LiveService: liveService, + SchedulerService: schedulerService, + PlaybackService: playbackService, + MetricsService: metricsService, + PlaylistService: playlistService, + TaskService: taskService, + ChapterService: chapterService, + CategoryService: categoryService, + BlockedVideoService: blockedVideoService, + PlatformTwitch: platformTwitch, }, } // Middleware h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + h.Server.HideBanner = true + h.Server.Use(middleware.CORSWithConfig(middleware.CORSConfig{ - AllowOrigins: []string{os.Getenv("FRONTEND_HOST")}, + AllowOrigins: []string{env.FrontendHost}, AllowMethods: []string{http.MethodGet, http.MethodHead, http.MethodPut, http.MethodPatch, http.MethodPost, http.MethodDelete}, AllowCredentials: true, })) @@ -81,23 +84,7 @@ func NewHandler(authService AuthService, channelService ChannelService, vodServi h.mapRoutes() // Start scheduler - h.Service.SchedulerService.StartAppScheduler() - // Start schedules as a goroutine - // to avoid blocking application start - // and to wait for twitch api auth go h.Service.SchedulerService.StartLiveScheduler() - if viper.GetBool("oauth_enabled") { - go h.Service.SchedulerService.StartJwksScheduler() - } - go h.Service.SchedulerService.StartWatchVideoScheduler() - go h.Service.SchedulerService.StartTwitchCategoriesScheduler() - go h.Service.SchedulerService.StartPruneVideoScheduler() - - // Populate channel external ids - go func() { - time.Sleep(5 * time.Second) - channel.PopulateExternalChannelID() - }() return h } @@ -114,7 +101,10 @@ func (h *Handler) mapRoutes() { }) h.Server.GET("/metrics", func(c echo.Context) error { - r := h.GatherMetrics() + r, err := h.GatherMetrics() + if err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } handler := promhttp.HandlerFor(r, promhttp.HandlerOpts{}) handler.ServeHTTP(c.Response(), c.Request()) @@ -122,7 +112,8 @@ func (h *Handler) mapRoutes() { }) // Static files - h.Server.Static("/static/vods", "/vods") + envConfig := config.GetEnvConfig() + h.Server.Static(envConfig.VideosDir, envConfig.VideosDir) // Swagger h.Server.GET("/swagger/*", echoSwagger.WrapHandler) @@ -180,9 +171,10 @@ func groupV1Routes(e *echo.Group, h *Handler) { vodGroup.GET("/:id/chat", h.GetVodChatComments) vodGroup.GET("/:id/chat/seek", h.GetNumberOfVodChatCommentsFromTime) vodGroup.GET("/:id/chat/userid", 
h.GetUserIdFromChat) - vodGroup.GET("/:id/chat/emotes", h.GetVodChatEmotes) - vodGroup.GET("/:id/chat/badges", h.GetVodChatBadges) + vodGroup.GET("/:id/chat/emotes", h.GetChatEmotes) + vodGroup.GET("/:id/chat/badges", h.GetChatBadges) vodGroup.POST("/:id/lock", h.LockVod, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.EditorRole)) + vodGroup.POST("/:id/generate-static-thumbnail", h.GenerateStaticThumbnail, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.EditorRole)) // Queue queueGroup := e.Group("/queue") @@ -193,18 +185,19 @@ func groupV1Routes(e *echo.Group, h *Handler) { queueGroup.DELETE("/:id", h.DeleteQueueItem, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.AdminRole)) queueGroup.GET("/:id/tail", h.ReadQueueLogFile, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) queueGroup.POST("/:id/stop", h.StopQueueItem, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.AdminRole)) + queueGroup.POST("/task/start", h.StartQueueTask, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) // Twitch twitchGroup := e.Group("/twitch") - twitchGroup.GET("/channel", h.GetTwitchUser) - twitchGroup.GET("/vod", h.GetTwitchVod) - twitchGroup.GET("/gql/video", h.GQLGetTwitchVideo) - twitchGroup.GET("/categories", h.GetTwitchCategories) + twitchGroup.GET("/channel", h.GetTwitchChannel) + twitchGroup.GET("/video", h.GetTwitchVideo) + // twitchGroup.GET("/gql/video", h.GQLGetTwitchVideo) + // twitchGroup.GET("/categories", h.GetTwitchCategories) // Archive archiveGroup := e.Group("/archive") - archiveGroup.POST("/channel", h.ArchiveTwitchChannel, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) - archiveGroup.POST("/vod", h.ArchiveTwitchVod, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) + archiveGroup.POST("/channel", h.ArchiveChannel, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) + archiveGroup.POST("/video", h.ArchiveVideo, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) archiveGroup.POST("/convert-twitch-live-chat", h.ConvertTwitchChat, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.AdminRole)) // Admin @@ -223,10 +216,6 @@ func groupV1Routes(e *echo.Group, h *Handler) { configGroup := e.Group("/config") configGroup.GET("", h.GetConfig, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.AdminRole)) configGroup.PUT("", h.UpdateConfig, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.AdminRole)) - configGroup.GET("/notification", h.GetNotificationConfig, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.AdminRole)) - configGroup.PUT("/notification", h.UpdateNotificationConfig, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.AdminRole)) - configGroup.GET("/storage", h.GetStorageTemplateConfig, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.AdminRole)) - configGroup.PUT("/storage", h.UpdateStorageTemplateConfig, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.AdminRole)) // Live liveGroup := e.Group("/live") @@ -236,8 +225,8 @@ func groupV1Routes(e *echo.Group, h *Handler) { liveGroup.PUT("/:id", h.UpdateLiveWatchedChannel, auth.GuardMiddleware, 
auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.EditorRole)) liveGroup.DELETE("/:id", h.DeleteLiveWatchedChannel, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.EditorRole)) liveGroup.GET("/check", h.Check, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.EditorRole)) - liveGroup.GET("/vod", h.CheckVodWatchedChannels, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.EditorRole)) - liveGroup.POST("/archive", h.ArchiveLiveChannel, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) + // liveGroup.GET("/vod", h.CheckVodWatchedChannels, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.EditorRole)) + // liveGroup.POST("/archive", h.ArchiveLiveChannel, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) // Playback playbackGroup := e.Group("/playback") @@ -271,36 +260,47 @@ func groupV1Routes(e *echo.Group, h *Handler) { notificationGroup := e.Group("/notification") notificationGroup.POST("/test", h.TestNotification, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.AdminRole)) - // Workflows - workflowGroup := e.Group("/workflows") - workflowGroup.GET("/active", h.GetActiveWorkflows, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) - workflowGroup.GET("/closed", h.GetClosedWorkflows, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) - workflowGroup.GET("/:workflowId/:runId", h.GetWorkflowById, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) - workflowGroup.GET("/:workflowId/:runId/history", h.GetWorkflowHistory, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) - workflowGroup.GET("/:workflowId/:runId/video_id", h.GetVideoIdFromWorkflow, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) - workflowGroup.POST("/start", h.StartWorkflow, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) - workflowGroup.POST("/restart", h.RestartArchiveWorkflow, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.ArchiverRole)) - // Chapter chapterGroup := e.Group("/chapter") chapterGroup.GET("/video/:videoId", h.GetVideoChapters) chapterGroup.GET("/video/:videoId/webvtt", h.GetWebVTTChapters) + + // Category + categoryGroup := e.Group("/category") + categoryGroup.GET("", h.GetCategories) + + // Blocked + blockedGroup := e.Group("/blocked-video") + blockedGroup.GET("", h.GetBlockedVideos) + blockedGroup.POST("/:id", h.CreateBlockedVideo, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.EditorRole)) + blockedGroup.DELETE("/:id", h.DeleteBlockedVideo, auth.GuardMiddleware, auth.GetUserMiddleware, auth.UserRoleMiddleware(utils.EditorRole)) + blockedGroup.GET("/:id", h.IsVideoBlocked) } -func (h *Handler) Serve() error { +func (h *Handler) Serve(ctx context.Context) error { + // Run the server in a goroutine + serverErrCh := make(chan error, 1) go func() { if err := h.Server.Start(":4000"); err != nil && err != http.ErrServerClosed { - log.Fatal().Err(err).Msg("failed to start server") + serverErrCh <- err } + close(serverErrCh) }() - // Wait for interrupt signal to gracefully shutdown the server with a timeout of 10 seconds. 
- // Use a buffered channel to avoid missing signals as recommended for signal.Notify - quit := make(chan os.Signal, 1) - signal.Notify(quit, os.Interrupt) - <-quit - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + + // Listen for the context to be canceled or an error to occur in the server + select { + case <-ctx.Done(): + log.Info().Msg("Context canceled, shutting down the server") + case err := <-serverErrCh: + if err != nil { + log.Fatal().Err(err).Msg("failed to start server") + } + } + + // Shutdown the server with a timeout of 10 seconds + shutdownCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() - if err := h.Server.Shutdown(ctx); err != nil { + if err := h.Server.Shutdown(shutdownCtx); err != nil { log.Fatal().Err(err).Msg("failed to shutdown server") } diff --git a/internal/transport/http/live.go b/internal/transport/http/live.go index 118c07f4..92790331 100644 --- a/internal/transport/http/live.go +++ b/internal/transport/http/live.go @@ -1,6 +1,7 @@ package http import ( + "context" "net/http" "github.com/google/uuid" @@ -14,25 +15,25 @@ type LiveService interface { AddLiveWatchedChannel(c echo.Context, liveDto live.Live) (*ent.Live, error) DeleteLiveWatchedChannel(c echo.Context, lID uuid.UUID) error UpdateLiveWatchedChannel(c echo.Context, liveDto live.Live) (*ent.Live, error) - Check() error - CheckVodWatchedChannels() - ArchiveLiveChannel(c echo.Context, archiveDto live.ArchiveLive) error + Check(ctx context.Context) error + // ArchiveLiveChannel(c echo.Context, archiveDto live.ArchiveLive) error } type AddWatchedChannelRequest struct { - WatchLive bool `json:"watch_live" validate:"boolean"` - WatchVod bool `json:"watch_vod" validate:"boolean"` - DownloadArchives bool `json:"download_archives" validate:"boolean"` - DownloadHighlights bool `json:"download_highlights" validate:"boolean"` - DownloadUploads bool `json:"download_uploads" validate:"boolean"` - ChannelID string `json:"channel_id" validate:"required"` - Resolution string `json:"resolution" validate:"required,oneof=best source 720p60 480p 360p 160p 480p30 360p30 160p30 audio"` - ArchiveChat bool `json:"archive_chat" validate:"boolean"` - RenderChat bool `json:"render_chat" validate:"boolean"` - DownloadSubOnly bool `json:"download_sub_only" validate:"boolean"` - Categories []string `json:"categories"` - MaxAge int64 `json:"max_age"` - Regex []AddLiveTitleRegex `json:"regex"` + WatchLive bool `json:"watch_live" validate:"boolean"` + WatchVod bool `json:"watch_vod" validate:"boolean"` + DownloadArchives bool `json:"download_archives" validate:"boolean"` + DownloadHighlights bool `json:"download_highlights" validate:"boolean"` + DownloadUploads bool `json:"download_uploads" validate:"boolean"` + ChannelID string `json:"channel_id" validate:"required"` + Resolution string `json:"resolution" validate:"required,oneof=best source 720p60 480p 360p 160p 480p30 360p30 160p30 audio"` + ArchiveChat bool `json:"archive_chat" validate:"boolean"` + RenderChat bool `json:"render_chat" validate:"boolean"` + DownloadSubOnly bool `json:"download_sub_only" validate:"boolean"` + Categories []string `json:"categories"` + ApplyCategoriesToLive bool `json:"apply_categories_to_live" validate:"boolean"` + MaxAge int64 `json:"max_age"` + Regex []AddLiveTitleRegex `json:"regex"` } type AddLiveTitleRegex struct { @@ -42,33 +43,35 @@ type AddLiveTitleRegex struct { } type AddMultipleWatchedChannelRequest struct { - WatchLive bool `json:"watch_live" ` - WatchVod bool 
`json:"watch_vod" ` - DownloadArchives bool `json:"download_archives" ` - DownloadHighlights bool `json:"download_highlights" ` - DownloadUploads bool `json:"download_uploads"` - ChannelID []string `json:"channel_id" validate:"required"` - Resolution string `json:"resolution" validate:"required,oneof=best source 720p60 480p 360p 160p 480p30 360p30 160p30 audio"` - ArchiveChat bool `json:"archive_chat"` - RenderChat bool `json:"render_chat"` - DownloadSubOnly bool `json:"download_sub_only"` - Categories []string `json:"categories"` - MaxAge int64 `json:"max_age"` + WatchLive bool `json:"watch_live" ` + WatchVod bool `json:"watch_vod" ` + DownloadArchives bool `json:"download_archives" ` + DownloadHighlights bool `json:"download_highlights" ` + DownloadUploads bool `json:"download_uploads"` + ChannelID []string `json:"channel_id" validate:"required"` + Resolution string `json:"resolution" validate:"required,oneof=best source 720p60 480p 360p 160p 480p30 360p30 160p30 audio"` + ArchiveChat bool `json:"archive_chat"` + RenderChat bool `json:"render_chat"` + DownloadSubOnly bool `json:"download_sub_only"` + Categories []string `json:"categories"` + ApplyCategoriesToLive bool `json:"apply_categories_to_live"` + MaxAge int64 `json:"max_age"` } type UpdateWatchedChannelRequest struct { - WatchLive bool `json:"watch_live" validate:"boolean"` - WatchVod bool `json:"watch_vod" validate:"boolean"` - DownloadArchives bool `json:"download_archives" validate:"boolean"` - DownloadHighlights bool `json:"download_highlights" validate:"boolean"` - DownloadUploads bool `json:"download_uploads" validate:"boolean"` - Resolution string `json:"resolution" validate:"required,oneof=best source 720p60 480p 360p 160p 480p30 360p30 160p30 audio"` - ArchiveChat bool `json:"archive_chat" validate:"boolean"` - RenderChat bool `json:"render_chat" validate:"boolean"` - DownloadSubOnly bool `json:"download_sub_only" validate:"boolean"` - Categories []string `json:"categories"` - MaxAge int64 `json:"max_age"` - Regex []AddLiveTitleRegex `json:"regex"` + WatchLive bool `json:"watch_live" validate:"boolean"` + WatchVod bool `json:"watch_vod" validate:"boolean"` + DownloadArchives bool `json:"download_archives" validate:"boolean"` + DownloadHighlights bool `json:"download_highlights" validate:"boolean"` + DownloadUploads bool `json:"download_uploads" validate:"boolean"` + Resolution string `json:"resolution" validate:"required,oneof=best source 720p60 480p 360p 160p 480p30 360p30 160p30 audio"` + ArchiveChat bool `json:"archive_chat" validate:"boolean"` + RenderChat bool `json:"render_chat" validate:"boolean"` + DownloadSubOnly bool `json:"download_sub_only" validate:"boolean"` + Categories []string `json:"categories"` + ApplyCategoriesToLive bool `json:"apply_categories_to_live" validate:"boolean"` + MaxAge int64 `json:"max_age"` + Regex []AddLiveTitleRegex `json:"regex"` } type ConvertChatRequest struct { @@ -132,20 +135,26 @@ func (h *Handler) AddLiveWatchedChannel(c echo.Context) error { if err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } + + if len(ccr.Categories) == 0 && ccr.ApplyCategoriesToLive { + return echo.NewHTTPError(http.StatusBadRequest, "categories cannot be empty if apply_categories_to_live is true") + } + liveDto := live.Live{ - ID: cUUID, - WatchLive: ccr.WatchLive, - WatchVod: ccr.WatchVod, - DownloadArchives: ccr.DownloadArchives, - DownloadHighlights: ccr.DownloadHighlights, - DownloadUploads: ccr.DownloadUploads, - IsLive: false, - ArchiveChat: ccr.ArchiveChat, - Resolution: 
ccr.Resolution, - RenderChat: ccr.RenderChat, - DownloadSubOnly: ccr.DownloadSubOnly, - Categories: ccr.Categories, - MaxAge: ccr.MaxAge, + ID: cUUID, + WatchLive: ccr.WatchLive, + WatchVod: ccr.WatchVod, + DownloadArchives: ccr.DownloadArchives, + DownloadHighlights: ccr.DownloadHighlights, + DownloadUploads: ccr.DownloadUploads, + IsLive: false, + ArchiveChat: ccr.ArchiveChat, + Resolution: ccr.Resolution, + RenderChat: ccr.RenderChat, + DownloadSubOnly: ccr.DownloadSubOnly, + Categories: ccr.Categories, + ApplyCategoriesToLive: ccr.ApplyCategoriesToLive, + MaxAge: ccr.MaxAge, } for _, regex := range ccr.Regex { @@ -195,20 +204,26 @@ func (h *Handler) AddMultipleLiveWatchedChannel(c echo.Context) error { if err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } + + if len(ccr.Categories) == 0 && ccr.ApplyCategoriesToLive { + return echo.NewHTTPError(http.StatusBadRequest, "categories cannot be empty if apply_categories_to_live is true") + } + liveDto := live.Live{ - ID: cUUID, - WatchLive: ccr.WatchLive, - WatchVod: ccr.WatchVod, - DownloadArchives: ccr.DownloadArchives, - DownloadHighlights: ccr.DownloadHighlights, - DownloadUploads: ccr.DownloadUploads, - IsLive: false, - ArchiveChat: ccr.ArchiveChat, - Resolution: ccr.Resolution, - RenderChat: ccr.RenderChat, - DownloadSubOnly: ccr.DownloadSubOnly, - Categories: ccr.Categories, - MaxAge: ccr.MaxAge, + ID: cUUID, + WatchLive: ccr.WatchLive, + WatchVod: ccr.WatchVod, + DownloadArchives: ccr.DownloadArchives, + DownloadHighlights: ccr.DownloadHighlights, + DownloadUploads: ccr.DownloadUploads, + IsLive: false, + ArchiveChat: ccr.ArchiveChat, + Resolution: ccr.Resolution, + RenderChat: ccr.RenderChat, + DownloadSubOnly: ccr.DownloadSubOnly, + Categories: ccr.Categories, + ApplyCategoriesToLive: ccr.ApplyCategoriesToLive, + MaxAge: ccr.MaxAge, } l, err := h.Service.LiveService.AddLiveWatchedChannel(c, liveDto) if err != nil { @@ -247,19 +262,25 @@ func (h *Handler) UpdateLiveWatchedChannel(c echo.Context) error { if err := c.Validate(ccr); err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } + + if len(ccr.Categories) == 0 && ccr.ApplyCategoriesToLive { + return echo.NewHTTPError(http.StatusBadRequest, "categories cannot be empty if apply_categories_to_live is true") + } + liveDto := live.Live{ - ID: lID, - WatchLive: ccr.WatchLive, - WatchVod: ccr.WatchVod, - DownloadArchives: ccr.DownloadArchives, - DownloadHighlights: ccr.DownloadHighlights, - DownloadUploads: ccr.DownloadUploads, - ArchiveChat: ccr.ArchiveChat, - Resolution: ccr.Resolution, - RenderChat: ccr.RenderChat, - DownloadSubOnly: ccr.DownloadSubOnly, - Categories: ccr.Categories, - MaxAge: ccr.MaxAge, + ID: lID, + WatchLive: ccr.WatchLive, + WatchVod: ccr.WatchVod, + DownloadArchives: ccr.DownloadArchives, + DownloadHighlights: ccr.DownloadHighlights, + DownloadUploads: ccr.DownloadUploads, + ArchiveChat: ccr.ArchiveChat, + Resolution: ccr.Resolution, + RenderChat: ccr.RenderChat, + DownloadSubOnly: ccr.DownloadSubOnly, + Categories: ccr.Categories, + ApplyCategoriesToLive: ccr.ApplyCategoriesToLive, + MaxAge: ccr.MaxAge, } for _, regex := range ccr.Regex { @@ -310,7 +331,7 @@ func (h *Handler) DeleteLiveWatchedChannel(c echo.Context) error { } func (h *Handler) Check(c echo.Context) error { - err := h.Service.LiveService.Check() + err := h.Service.LiveService.Check(c.Request().Context()) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } @@ -329,11 +350,11 @@ func (h *Handler) Check(c 
echo.Context) error { // @Failure 500 {object} utils.ErrorResponse // @Router /live/check [get] // @Security ApiKeyCookieAuth -func (h *Handler) CheckVodWatchedChannels(c echo.Context) error { - go h.Service.LiveService.CheckVodWatchedChannels() +// func (h *Handler) CheckVodWatchedChannels(c echo.Context) error { +// go h.Service.LiveService.CheckVodWatchedChannels() - return c.JSON(http.StatusOK, "ok") -} +// return c.JSON(http.StatusOK, "ok") +// } // ArchiveLiveChannel godoc // @@ -347,31 +368,31 @@ func (h *Handler) CheckVodWatchedChannels(c echo.Context) error { // @Failure 500 {object} utils.ErrorResponse // @Router /live/archive [post] // @Security ApiKeyCookieAuth -func (h *Handler) ArchiveLiveChannel(c echo.Context) error { - alcr := new(ArchiveLiveChannelRequest) - if err := c.Bind(alcr); err != nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - if err := c.Validate(alcr); err != nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - // validate channel uuid - cID, err := uuid.Parse(alcr.ChannelID) - if err != nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - - archiveLiveDto := live.ArchiveLive{ - ChannelID: cID, - Resolution: alcr.Resolution, - ArchiveChat: alcr.ArchiveChat, - RenderChat: alcr.RenderChat, - } - - err = h.Service.LiveService.ArchiveLiveChannel(c, archiveLiveDto) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - - return c.JSON(http.StatusOK, "ok") -} +// func (h *Handler) ArchiveLiveChannel(c echo.Context) error { +// alcr := new(ArchiveLiveChannelRequest) +// if err := c.Bind(alcr); err != nil { +// return echo.NewHTTPError(http.StatusBadRequest, err.Error()) +// } +// if err := c.Validate(alcr); err != nil { +// return echo.NewHTTPError(http.StatusBadRequest, err.Error()) +// } +// // validate channel uuid +// cID, err := uuid.Parse(alcr.ChannelID) +// if err != nil { +// return echo.NewHTTPError(http.StatusBadRequest, err.Error()) +// } + +// archiveLiveDto := live.ArchiveLive{ +// ChannelID: cID, +// Resolution: alcr.Resolution, +// ArchiveChat: alcr.ArchiveChat, +// RenderChat: alcr.RenderChat, +// } + +// err = h.Service.LiveService.ArchiveLiveChannel(c, archiveLiveDto) +// if err != nil { +// return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) +// } + +// return c.JSON(http.StatusOK, "ok") +// } diff --git a/internal/transport/http/live_test.go b/internal/transport/http/live_test.go index 4a3f1921..0a522c14 100644 --- a/internal/transport/http/live_test.go +++ b/internal/transport/http/live_test.go @@ -1,233 +1,233 @@ package http_test -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "strings" - "testing" - - "github.com/go-playground/validator/v10" - "github.com/labstack/echo/v4" - "github.com/stretchr/testify/assert" - "github.com/zibbp/ganymede/ent" - entChannel "github.com/zibbp/ganymede/ent/channel" - "github.com/zibbp/ganymede/ent/enttest" - entLive "github.com/zibbp/ganymede/ent/live" - "github.com/zibbp/ganymede/internal/archive" - "github.com/zibbp/ganymede/internal/channel" - "github.com/zibbp/ganymede/internal/database" - "github.com/zibbp/ganymede/internal/live" - "github.com/zibbp/ganymede/internal/queue" - httpHandler "github.com/zibbp/ganymede/internal/transport/http" - "github.com/zibbp/ganymede/internal/twitch" - "github.com/zibbp/ganymede/internal/utils" - "github.com/zibbp/ganymede/internal/vod" -) - -var () - -// * TestAddLiveWatchedChannel tests the create watched 
channel -// Test creates a live watched channel -func TestAddLiveWatchedChannel(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - twitchService := twitch.NewService() - vodService := vod.NewService(&database.Database{Client: client}) - channelService := channel.NewService(&database.Database{Client: client}) - queueService := queue.NewService(&database.Database{Client: client}, vodService, channelService) - archiveService := archive.NewService(&database.Database{Client: client}, twitchService, channelService, vodService, queueService) - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - LiveService: live.NewService(&database.Database{Client: client}, twitchService, archiveService), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a test channel - testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) - - // Watched channel json - liveWatchedChannelJson := `{"channel_id": "` + testChannel.ID.String() + `", "watch_live": true, "watch_vod": true, "download_archives": true, "download_highlights": true, "download_uploads": true, "resolution": "best", "archive_chat": true}` - - req := httptest.NewRequest(http.MethodPost, "/api/v1/live", strings.NewReader(liveWatchedChannelJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - if assert.NoError(t, h.AddLiveWatchedChannel(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - - // Check database to ensure the live watched channel was created - liveWatchedChannels := client.Live.Query().Where(entLive.HasChannelWith(entChannel.IDEQ(testChannel.ID))).AllX(context.Background()) - assert.Equal(t, 1, len(liveWatchedChannels)) - } -} - -// * TestGetLiveWatchedChannels tests the get watched channels -// Test gets watched channels -func TestGetLiveWatchedChannels(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - twitchService := twitch.NewService() - vodService := vod.NewService(&database.Database{Client: client}) - channelService := channel.NewService(&database.Database{Client: client}) - queueService := queue.NewService(&database.Database{Client: client}, vodService, channelService) - archiveService := archive.NewService(&database.Database{Client: client}, twitchService, channelService, vodService, queueService) - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - LiveService: live.NewService(&database.Database{Client: client}, twitchService, archiveService), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a test channel - testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) - - // Create a live watched channel - client.Live.Create().SetChannel(testChannel).SetWatchLive(true).SetWatchVod(true).SetDownloadArchives(true).SetDownloadHighlights(true).SetDownloadUploads(true).SetResolution("best").SetArchiveChat(true).SaveX(context.Background()) - - req := httptest.NewRequest(http.MethodGet, "/api/v1/live", nil) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - if assert.NoError(t, h.GetLiveWatchedChannels(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response []map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - - // Check database to ensure the live watched channel was created - liveWatchedChannels := client.Live.Query().Where(entLive.HasChannelWith(entChannel.IDEQ(testChannel.ID))).AllX(context.Background()) - assert.Equal(t, 1, len(liveWatchedChannels)) - } -} - -// * TestUpdateLiveWatchedChannel tests the update live watched channel -// Test updating a live watched channel -func TestUpdateLiveWatchedChannel(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - twitchService := twitch.NewService() - vodService := vod.NewService(&database.Database{Client: client}) - channelService := channel.NewService(&database.Database{Client: client}) - queueService := queue.NewService(&database.Database{Client: client}, vodService, channelService) - archiveService := archive.NewService(&database.Database{Client: client}, twitchService, channelService, vodService, queueService) - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - LiveService: live.NewService(&database.Database{Client: client}, twitchService, archiveService), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a test channel - testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) - - // Create a live watched channel - liveWatchedChannel := client.Live.Create().SetChannel(testChannel).SetWatchLive(true).SetWatchVod(true).SetDownloadArchives(true).SetDownloadHighlights(true).SetDownloadUploads(true).SetResolution("best").SetArchiveChat(true).SaveX(context.Background()) - - // Live watched channel json - liveWatchedChannelJson := `{"channel_id": "` + testChannel.ID.String() + `", "watch_live": false, "watch_vod": false, "download_archives": false, "download_highlights": false, "download_uploads": false, "resolution": "720p60", "archive_chat": false}` - - req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/live/%s", liveWatchedChannel.ID.String()), strings.NewReader(liveWatchedChannelJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - // Set params - c.SetParamNames("id") - c.SetParamValues(liveWatchedChannel.ID.String()) - - if assert.NoError(t, h.UpdateLiveWatchedChannel(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - // Check if the live watched channel was updated, the fields set to false will not be returned - assert.Equal(t, "720p60", response["resolution"]) - } -} - -// * TestDeleteLiveWatchedChannel tests the delete watched channel -// Test deletes a live watched channel -func TestDeleteLiveWatchedChannel(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - twitchService := twitch.NewService() - vodService := vod.NewService(&database.Database{Client: client}) - channelService := channel.NewService(&database.Database{Client: client}) - queueService := queue.NewService(&database.Database{Client: client}, vodService, channelService) - archiveService := archive.NewService(&database.Database{Client: client}, twitchService, channelService, vodService, queueService) - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - LiveService: live.NewService(&database.Database{Client: client}, twitchService, archiveService), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a test channel - testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) - - // Create a live watched channel - liveWatchedChannel := client.Live.Create().SetChannel(testChannel).SetWatchLive(true).SetWatchVod(true).SetDownloadArchives(true).SetDownloadHighlights(true).SetDownloadUploads(true).SetResolution("best").SetArchiveChat(true).SaveX(context.Background()) - - req := httptest.NewRequest(http.MethodDelete, "/api/v1/live/"+liveWatchedChannel.ID.String(), nil) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - // Set params - c.SetParamNames("id") - c.SetParamValues(liveWatchedChannel.ID.String()) - - if assert.NoError(t, h.DeleteLiveWatchedChannel(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check if watched channel is deleted from database - liveWatchedChannels := client.Live.Query().Where(entLive.HasChannelWith(entChannel.IDEQ(testChannel.ID))).AllX(context.Background()) - assert.Equal(t, 0, len(liveWatchedChannels)) - - } -} +// import ( +// "context" +// "encoding/json" +// "fmt" +// "net/http" +// "net/http/httptest" +// "strings" +// "testing" + +// "github.com/go-playground/validator/v10" +// "github.com/labstack/echo/v4" +// "github.com/stretchr/testify/assert" +// "github.com/zibbp/ganymede/ent" +// entChannel "github.com/zibbp/ganymede/ent/channel" +// "github.com/zibbp/ganymede/ent/enttest" +// entLive "github.com/zibbp/ganymede/ent/live" +// "github.com/zibbp/ganymede/internal/archive" +// "github.com/zibbp/ganymede/internal/channel" +// "github.com/zibbp/ganymede/internal/database" +// "github.com/zibbp/ganymede/internal/live" +// "github.com/zibbp/ganymede/internal/queue" +// httpHandler "github.com/zibbp/ganymede/internal/transport/http" +// "github.com/zibbp/ganymede/internal/twitch" +// "github.com/zibbp/ganymede/internal/utils" +// "github.com/zibbp/ganymede/internal/vod" +// ) + +// var () + +// // * TestAddLiveWatchedChannel tests the create watched channel +// // Test creates a live watched channel +// func TestAddLiveWatchedChannel(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// twitchService := twitch.NewService() +// vodService := vod.NewService(&database.Database{Client: client}) +// channelService := channel.NewService(&database.Database{Client: client}) +// queueService := queue.NewService(&database.Database{Client: client}, vodService, channelService) +// archiveService := archive.NewService(&database.Database{Client: client}, twitchService, channelService, vodService, queueService) + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// LiveService: live.NewService(&database.Database{Client: client}, twitchService, archiveService), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a test channel +// testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) + +// // Watched channel json +// liveWatchedChannelJson := `{"channel_id": "` + testChannel.ID.String() + `", "watch_live": true, "watch_vod": true, "download_archives": true, "download_highlights": true, "download_uploads": true, "resolution": "best", "archive_chat": true}` + +// req := httptest.NewRequest(http.MethodPost, "/api/v1/live", strings.NewReader(liveWatchedChannelJson)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// if assert.NoError(t, h.AddLiveWatchedChannel(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) + +// // Check database to ensure the live watched channel was created +// liveWatchedChannels := client.Live.Query().Where(entLive.HasChannelWith(entChannel.IDEQ(testChannel.ID))).AllX(context.Background()) +// assert.Equal(t, 1, len(liveWatchedChannels)) +// } +// } + +// // * TestGetLiveWatchedChannels tests the get watched channels +// // Test gets watched channels +// func TestGetLiveWatchedChannels(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// twitchService := twitch.NewService() +// vodService := vod.NewService(&database.Database{Client: client}) +// channelService := channel.NewService(&database.Database{Client: client}) +// queueService := queue.NewService(&database.Database{Client: client}, vodService, channelService) +// archiveService := archive.NewService(&database.Database{Client: client}, twitchService, channelService, vodService, queueService) + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// LiveService: live.NewService(&database.Database{Client: client}, twitchService, archiveService), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a test channel +// testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) + +// // Create a live watched channel +// client.Live.Create().SetChannel(testChannel).SetWatchLive(true).SetWatchVod(true).SetDownloadArchives(true).SetDownloadHighlights(true).SetDownloadUploads(true).SetResolution("best").SetArchiveChat(true).SaveX(context.Background()) + +// req := httptest.NewRequest(http.MethodGet, "/api/v1/live", nil) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// if assert.NoError(t, h.GetLiveWatchedChannels(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response []map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) + +// // Check database to ensure the live watched channel was created +// liveWatchedChannels := client.Live.Query().Where(entLive.HasChannelWith(entChannel.IDEQ(testChannel.ID))).AllX(context.Background()) +// assert.Equal(t, 1, len(liveWatchedChannels)) +// } +// } + +// // * TestUpdateLiveWatchedChannel tests the update live watched channel +// // Test updating a live watched channel +// func TestUpdateLiveWatchedChannel(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// twitchService := twitch.NewService() +// vodService := vod.NewService(&database.Database{Client: client}) +// channelService := channel.NewService(&database.Database{Client: client}) +// queueService := queue.NewService(&database.Database{Client: client}, vodService, channelService) +// archiveService := archive.NewService(&database.Database{Client: client}, twitchService, channelService, vodService, queueService) + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// LiveService: live.NewService(&database.Database{Client: client}, twitchService, archiveService), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a test channel +// testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) + +// // Create a live watched channel +// liveWatchedChannel := client.Live.Create().SetChannel(testChannel).SetWatchLive(true).SetWatchVod(true).SetDownloadArchives(true).SetDownloadHighlights(true).SetDownloadUploads(true).SetResolution("best").SetArchiveChat(true).SaveX(context.Background()) + +// // Live watched channel json +// liveWatchedChannelJson := `{"channel_id": "` + testChannel.ID.String() + `", "watch_live": false, "watch_vod": false, "download_archives": false, "download_highlights": false, "download_uploads": false, "resolution": "720p60", "archive_chat": false}` + +// req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/live/%s", liveWatchedChannel.ID.String()), strings.NewReader(liveWatchedChannelJson)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// // Set params +// c.SetParamNames("id") +// c.SetParamValues(liveWatchedChannel.ID.String()) + +// if assert.NoError(t, h.UpdateLiveWatchedChannel(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// // Check if the live watched channel was updated, the fields set to false will not be returned +// assert.Equal(t, "720p60", response["resolution"]) +// } +// } + +// // * TestDeleteLiveWatchedChannel tests the delete watched channel +// // Test deletes a live watched channel +// func TestDeleteLiveWatchedChannel(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// twitchService := twitch.NewService() +// vodService := vod.NewService(&database.Database{Client: client}) +// channelService := channel.NewService(&database.Database{Client: client}) +// queueService := queue.NewService(&database.Database{Client: client}, vodService, channelService) +// archiveService := archive.NewService(&database.Database{Client: client}, twitchService, channelService, vodService, queueService) + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// LiveService: live.NewService(&database.Database{Client: client}, twitchService, archiveService), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a test channel +// testChannel := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").SaveX(context.Background()) + +// // Create a live watched channel +// liveWatchedChannel := client.Live.Create().SetChannel(testChannel).SetWatchLive(true).SetWatchVod(true).SetDownloadArchives(true).SetDownloadHighlights(true).SetDownloadUploads(true).SetResolution("best").SetArchiveChat(true).SaveX(context.Background()) + +// req := httptest.NewRequest(http.MethodDelete, "/api/v1/live/"+liveWatchedChannel.ID.String(), nil) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// // Set params +// c.SetParamNames("id") +// c.SetParamValues(liveWatchedChannel.ID.String()) + +// if assert.NoError(t, h.DeleteLiveWatchedChannel(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check if watched channel is deleted from database +// liveWatchedChannels := client.Live.Query().Where(entLive.HasChannelWith(entChannel.IDEQ(testChannel.ID))).AllX(context.Background()) +// assert.Equal(t, 0, len(liveWatchedChannels)) + +// } +// } diff --git a/internal/transport/http/metrics.go b/internal/transport/http/metrics.go index 2878b4ef..2eaba4fd 100644 --- a/internal/transport/http/metrics.go +++ b/internal/transport/http/metrics.go @@ -1,11 +1,19 @@ package http -import "github.com/prometheus/client_golang/prometheus" +import ( + "github.com/prometheus/client_golang/prometheus" + "github.com/rs/zerolog/log" +) type MetricsService interface { - GatherMetrics() *prometheus.Registry + GatherMetrics() (*prometheus.Registry, error) } -func (h *Handler) GatherMetrics() *prometheus.Registry { - return h.Service.MetricsService.GatherMetrics() +func (h *Handler) GatherMetrics() (*prometheus.Registry, error) { + r, err := h.Service.MetricsService.GatherMetrics() + if err != nil { + log.Error().Err(err).Msg("error gathering metrics") + return nil, err + } + return r, nil } diff --git a/internal/transport/http/playback.go b/internal/transport/http/playback.go index db61fd8a..2ff6d847 100644 --- a/internal/transport/http/playback.go +++ b/internal/transport/http/playback.go @@ -1,6 +1,7 @@ package http import ( + "errors" "fmt" "net/http" "strconv" @@ -86,9 +87,13 @@ func (h *Handler) GetProgress(c echo.Context) error { } playbackEntry, err := h.Service.PlaybackService.GetProgress(cc, vID) if err != nil { + if errors.Is(err, playback.ErrorPlaybackNotFound) { + return ErrorResponse(c, http.StatusOK, "playback not found") + } return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } - return c.JSON(http.StatusOK, playbackEntry) + + return SuccessResponse(c, playbackEntry, fmt.Sprintf("playback data for %s", vID)) } // GetAllProgress godoc diff --git 
a/internal/transport/http/playlist_test.go b/internal/transport/http/playlist_test.go index 8da31c50..9da6c330 100644 --- a/internal/transport/http/playlist_test.go +++ b/internal/transport/http/playlist_test.go @@ -1,369 +1,369 @@ package http_test -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "strings" - "testing" - - "github.com/go-playground/validator/v10" - "github.com/labstack/echo/v4" - "github.com/stretchr/testify/assert" - "github.com/zibbp/ganymede/ent" - "github.com/zibbp/ganymede/ent/enttest" - entPlaylist "github.com/zibbp/ganymede/ent/playlist" - "github.com/zibbp/ganymede/internal/database" - "github.com/zibbp/ganymede/internal/playlist" - httpHandler "github.com/zibbp/ganymede/internal/transport/http" - "github.com/zibbp/ganymede/internal/utils" -) - -var ( - createPlaylistTestJson = `{ - "name": "test_playlist", - "description": "test_description" - }` -) - -// * TestCreatePlaylist tests the CreatePlaylist function -// Creates a new playlist -func TestCreatePlaylist(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - PlaylistService: playlist.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - req := httptest.NewRequest(http.MethodPost, "/api/v1/playlist", strings.NewReader(createPlaylistTestJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - if assert.NoError(t, h.CreatePlaylist(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "test_playlist", response["name"]) - } -} - -// * TestAddVodToPlaylist tests the AddVodToPlaylist function -// Adds a vod to a playlist -func TestAddVodToPlaylist(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - // Create a playlist - dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - PlaylistService: playlist.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - addVodToPlaylistJson := `{ - "vod_id": "` + dbVod.ID.String() + `" - }` - - req := httptest.NewRequest(http.MethodPost, fmt.Sprintf("/api/v1/playlist/%s", dbVod.ID.String()), strings.NewReader(addVodToPlaylistJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbPlaylist.ID.String()) - - if assert.NoError(t, h.AddVodToPlaylist(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - // response will be a string - var response string - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "ok", response) - } -} - -// * TestGetPlaylists tests the GetPlaylists function -// Gets all playlists -func TestGetPlaylists(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - PlaylistService: playlist.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a playlist - _, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodGet, "/api/v1/playlist", nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - if assert.NoError(t, h.GetPlaylists(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response []map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "test_playlist", response[0]["name"]) - } -} - -// * TestGetPlaylist tests the GetPlaylist function -// Gets a playlist -func TestGetPlaylist(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - PlaylistService: playlist.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a playlist - dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/playlist/%s", dbPlaylist.ID.String()), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbPlaylist.ID.String()) - - if assert.NoError(t, h.GetPlaylist(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "test_playlist", response["name"]) - } -} - -// * TestUpdatePlaylist tests the UpdatePlaylist function -// Update a playlist -func TestUpdatePlaylist(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - PlaylistService: playlist.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a playlist - dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - updatePlaylistJson := `{ - "name": "test_playlist_updated", - "description": "test_description_updated" - }` - - req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/playlist/%s", dbPlaylist.ID.String()), strings.NewReader(updatePlaylistJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbPlaylist.ID.String()) - - if assert.NoError(t, h.UpdatePlaylist(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "test_playlist_updated", response["name"]) - } -} - -// * TestDeletePlaylist tests the DeletePlaylist function -// Delete a playlist -func TestDeletePlaylist(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - PlaylistService: playlist.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a playlist - dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/playlist/%s", dbPlaylist.ID.String()), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbPlaylist.ID.String()) - - if assert.NoError(t, h.DeletePlaylist(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check if playlist is deleted - dbPlaylists, err := client.Playlist.Query().All(context.Background()) - assert.NoError(t, err) - assert.Equal(t, 0, len(dbPlaylists)) - } -} - -// * TestDeleteVodFromPlaylist tests the DeleteVodFromPlaylist function -// Delete a vod from a playlist -func TestDeleteVodFromPlaylist(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - // Create a playlist - dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Add vod to playlist - _, err = client.Playlist.UpdateOne(dbPlaylist).AddVods(dbVod).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - PlaylistService: playlist.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - deletVodFromPlaylistJson := `{ - "vod_id": "` + dbVod.ID.String() + `" - }` - - req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/playlist/%s", dbPlaylist.ID.String()), strings.NewReader(deletVodFromPlaylistJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbPlaylist.ID.String()) - - if assert.NoError(t, h.DeleteVodFromPlaylist(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - // response will be a string - var response string - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "ok", response) - - // Check if vod is deleted from playlist - dbPlaylist, err := client.Playlist.Query().Where(entPlaylist.ID(dbPlaylist.ID)).Only(context.Background()) - assert.NoError(t, err) - assert.Equal(t, 0, len(dbPlaylist.Edges.Vods)) - } -} +// import ( +// "context" +// "encoding/json" +// "fmt" +// "net/http" +// "net/http/httptest" +// "strings" +// "testing" + +// 
"github.com/go-playground/validator/v10" +// "github.com/labstack/echo/v4" +// "github.com/stretchr/testify/assert" +// "github.com/zibbp/ganymede/ent" +// "github.com/zibbp/ganymede/ent/enttest" +// entPlaylist "github.com/zibbp/ganymede/ent/playlist" +// "github.com/zibbp/ganymede/internal/database" +// "github.com/zibbp/ganymede/internal/playlist" +// httpHandler "github.com/zibbp/ganymede/internal/transport/http" +// "github.com/zibbp/ganymede/internal/utils" +// ) + +// var ( +// createPlaylistTestJson = `{ +// "name": "test_playlist", +// "description": "test_description" +// }` +// ) + +// // * TestCreatePlaylist tests the CreatePlaylist function +// // Creates a new playlist +// func TestCreatePlaylist(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) +// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// PlaylistService: playlist.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// req := httptest.NewRequest(http.MethodPost, "/api/v1/playlist", strings.NewReader(createPlaylistTestJson)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// if assert.NoError(t, h.CreatePlaylist(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "test_playlist", response["name"]) +// } +// } + +// // * TestAddVodToPlaylist tests the AddVodToPlaylist function +// // Adds a vod to a playlist +// func TestAddVodToPlaylist(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// // Create a playlist +// dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// PlaylistService: playlist.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// addVodToPlaylistJson := `{ +// "vod_id": "` + dbVod.ID.String() + `" +// }` + +// req := httptest.NewRequest(http.MethodPost, fmt.Sprintf("/api/v1/playlist/%s", dbVod.ID.String()), strings.NewReader(addVodToPlaylistJson)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbPlaylist.ID.String()) + +// if assert.NoError(t, h.AddVodToPlaylist(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// // response will be a string +// var response string +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "ok", response) +// } +// } + +// // * TestGetPlaylists tests the GetPlaylists function +// // Gets all playlists +// func TestGetPlaylists(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) +// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// PlaylistService: playlist.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a playlist +// _, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodGet, "/api/v1/playlist", nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// if assert.NoError(t, h.GetPlaylists(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response []map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "test_playlist", response[0]["name"]) +// } +// } + +// // * TestGetPlaylist tests the GetPlaylist function +// // Gets a playlist +// func TestGetPlaylist(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// PlaylistService: playlist.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a playlist +// dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/playlist/%s", dbPlaylist.ID.String()), nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbPlaylist.ID.String()) + +// if assert.NoError(t, h.GetPlaylist(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "test_playlist", response["name"]) +// } +// } + +// // * TestUpdatePlaylist tests the UpdatePlaylist function +// // Update a playlist +// func TestUpdatePlaylist(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) +// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// PlaylistService: playlist.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a playlist +// dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// updatePlaylistJson := `{ +// "name": "test_playlist_updated", +// "description": "test_description_updated" +// }` + +// req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/playlist/%s", dbPlaylist.ID.String()), strings.NewReader(updatePlaylistJson)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbPlaylist.ID.String()) + +// if assert.NoError(t, h.UpdatePlaylist(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "test_playlist_updated", response["name"]) +// } +// } + +// // * TestDeletePlaylist tests the DeletePlaylist function +// // Delete a playlist +// func TestDeletePlaylist(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// PlaylistService: playlist.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a playlist +// dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/playlist/%s", dbPlaylist.ID.String()), nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbPlaylist.ID.String()) + +// if assert.NoError(t, h.DeletePlaylist(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check if playlist is deleted +// dbPlaylists, err := client.Playlist.Query().All(context.Background()) +// assert.NoError(t, err) +// assert.Equal(t, 0, len(dbPlaylists)) +// } +// } + +// // * TestDeleteVodFromPlaylist tests the DeleteVodFromPlaylist function +// // Delete a vod from a playlist +// func TestDeleteVodFromPlaylist(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) +// defer client.Close() + +// // Create a playlist +// dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("test_description").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Add vod to playlist +// _, err = client.Playlist.UpdateOne(dbPlaylist).AddVods(dbVod).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// PlaylistService: playlist.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// deletVodFromPlaylistJson := `{ +// "vod_id": "` + dbVod.ID.String() + `" +// }` + +// req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/playlist/%s", dbPlaylist.ID.String()), strings.NewReader(deletVodFromPlaylistJson)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbPlaylist.ID.String()) + +// if assert.NoError(t, h.DeleteVodFromPlaylist(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// // response will be a string +// var response string +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "ok", response) + +// // Check if vod is deleted from playlist +// dbPlaylist, err := client.Playlist.Query().Where(entPlaylist.ID(dbPlaylist.ID)).Only(context.Background()) +// assert.NoError(t, err) +// assert.Equal(t, 0, 
len(dbPlaylist.Edges.Vods)) +// } +// } diff --git a/internal/transport/http/queue.go b/internal/transport/http/queue.go index 01e7095c..cca5aa09 100644 --- a/internal/transport/http/queue.go +++ b/internal/transport/http/queue.go @@ -1,10 +1,12 @@ package http import ( + "context" "net/http" "github.com/google/uuid" "github.com/labstack/echo/v4" + "github.com/riverqueue/river/rivertype" "github.com/zibbp/ganymede/ent" "github.com/zibbp/ganymede/internal/queue" "github.com/zibbp/ganymede/internal/utils" @@ -18,13 +20,20 @@ type QueueService interface { UpdateQueueItem(queueDto queue.Queue, id uuid.UUID) (*ent.Queue, error) DeleteQueueItem(c echo.Context, id uuid.UUID) error ReadLogFile(c echo.Context, id uuid.UUID, logType string) ([]byte, error) - StopQueueItem(c echo.Context, id uuid.UUID) error + StopQueueItem(ctx context.Context, id uuid.UUID) error + StartQueueTask(ctx context.Context, input queue.StartQueueTaskInput) (*rivertype.JobRow, error) } type CreateQueueRequest struct { VodID string `json:"vod_id" validate:"required"` } +type StartQueueTaskRequest struct { + QueueId uuid.UUID `json:"queue_id" validate:"required,uuid4"` + TaskName string `json:"task_name" validate:"required,oneof=task_vod_create_folder task_vod_download_thumbnail task_vod_save_info task_video_download task_video_convert task_video_move task_chat_download task_chat_convert task_chat_render task_chat_move task_live_chat_download task_live_video_download"` + Continue bool `json:"continue"` +} + type UpdateQueueRequest struct { ID uuid.UUID `json:"id"` LiveArchive bool `json:"live_archive"` @@ -255,6 +264,19 @@ func (h *Handler) ReadQueueLogFile(c echo.Context) error { return c.JSON(http.StatusOK, string(log)) } +// StopQueueItem godoc +// +// @Summary Stop a queue item +// @Description Stop processing the video and chat downloads of an active queue item +// @Tags queue +// @Accept json +// @Produce json +// @Param id path string true "Queue item id" +// @Success 200 {object} string +// @Failure 400 {object} utils.ErrorResponse +// @Failure 500 {object} utils.ErrorResponse +// @Router /queue/{id}/stop [post] +// @Security ApiKeyCookieAuth func (h *Handler) StopQueueItem(c echo.Context) error { id := c.Param("id") @@ -263,9 +285,43 @@ func (h *Handler) StopQueueItem(c echo.Context) error { return echo.NewHTTPError(http.StatusBadRequest, "invalid id") } - err = h.Service.QueueService.StopQueueItem(c, uuid) + err = h.Service.QueueService.StopQueueItem(c.Request().Context(), uuid) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } return c.NoContent(http.StatusNoContent) } + +// StartQueueTask godoc +// +// @Summary Start a queue task for a queue +// @Description Start a specific queue task +// @Tags queue +// @Accept json +// @Produce json +// @Success 200 {object} string +// @Failure 400 {object} utils.ErrorResponse +// @Failure 500 {object} utils.ErrorResponse +// @Router /queue/task/start [post] +// @Security ApiKeyCookieAuth +func (h *Handler) StartQueueTask(c echo.Context) error { + body := new(StartQueueTaskRequest) + if err := c.Bind(body); err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + if err := c.Validate(body); err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + + _, err := h.Service.QueueService.StartQueueTask(c.Request().Context(), queue.StartQueueTaskInput{ + QueueId: body.QueueId, + TaskName: body.TaskName, + Continue: body.Continue, + }) + + if err != nil { + return 
echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + + return c.NoContent(http.StatusOK) +} diff --git a/internal/transport/http/queue_test.go b/internal/transport/http/queue_test.go index 52613556..e0e3a783 100644 --- a/internal/transport/http/queue_test.go +++ b/internal/transport/http/queue_test.go @@ -1,410 +1,410 @@ package http_test -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "os" - "strings" - "testing" - - "github.com/go-playground/validator/v10" - "github.com/labstack/echo/v4" - "github.com/stretchr/testify/assert" - "github.com/zibbp/ganymede/ent" - "github.com/zibbp/ganymede/ent/enttest" - entQueue "github.com/zibbp/ganymede/ent/queue" - entVod "github.com/zibbp/ganymede/ent/vod" - "github.com/zibbp/ganymede/internal/channel" - "github.com/zibbp/ganymede/internal/database" - "github.com/zibbp/ganymede/internal/queue" - httpHandler "github.com/zibbp/ganymede/internal/transport/http" - "github.com/zibbp/ganymede/internal/utils" - "github.com/zibbp/ganymede/internal/vod" -) - -// * TestCreateQueueItem tests the CreateQueueItem function -// Creates a new queue item -func TestCreateQueueItem(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - vodService := vod.NewService(&database.Database{Client: client}) - channelService := channel.NewService(&database.Database{Client: client}) - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - createQueueItemJson := `{ - "vod_id": "` + dbVod.ID.String() + `" - }` - - req := httptest.NewRequest(http.MethodPost, "/api/v1/queue", strings.NewReader(createQueueItemJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - if assert.NoError(t, h.CreateQueueItem(c)) { - assert.Equal(t, http.StatusCreated, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - - /// Check if the queue item was created - queueItem, err := client.Queue.Query().Where(entQueue.HasVodWith(entVod.ID(dbVod.ID))).WithVod().Only(context.Background()) - assert.NoError(t, err) - assert.Equal(t, dbVod.ID, queueItem.Edges.Vod.ID) - - } -} - -// * TestGetQueueItems tests the GetQueueItems function -// Gets all queue items -func TestGetQueueItems(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - vodService := vod.NewService(&database.Database{Client: client}) - channelService := channel.NewService(&database.Database{Client: client}) - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a queue item - dbQueue, err := client.Queue.Create().SetVod(dbVod).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodGet, "/api/v1/queue", nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - if assert.NoError(t, h.GetQueueItems(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response []map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, 1, len(response)) - assert.Equal(t, dbQueue.ID.String(), response[0]["id"]) - - } -} - -// * TestGetQueueItem tests the GetQueueItem function -// Gets all queue items -func TestGetQueueItem(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - vodService := vod.NewService(&database.Database{Client: client}) - channelService := channel.NewService(&database.Database{Client: client}) - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a queue item - dbQueue, err := client.Queue.Create().SetVod(dbVod).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/queue/%s", dbQueue.ID.String()), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbQueue.ID.String()) - - if assert.NoError(t, h.GetQueueItem(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, dbQueue.ID.String(), response["id"]) - - } -} - -// * TestUpdateQueueItem tests the UpdateQueueItem function -// Updates a queue item -func TestUpdateQueueItem(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - vodService := vod.NewService(&database.Database{Client: client}) - channelService := channel.NewService(&database.Database{Client: client}) - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a queue item - dbQueue, err := client.Queue.Create().SetVod(dbVod).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - updateQueueItemJson := `{ - "processing": false, - "task_vod_create_folder": "success", - "task_vod_download_thumbnail": "success", - "task_vod_save_info": "success", - "task_video_download": "success", - "task_video_move": "success", - "task_chat_download": "success", - "task_chat_render": "success", - "task_chat_move": "success", - "task_video_convert": "success", - "task_chat_convert": "success" - }` - - req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/queue/%s", dbQueue.ID.String()), strings.NewReader(updateQueueItemJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbQueue.ID.String()) - - if assert.NoError(t, h.UpdateQueueItem(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "success", response["task_vod_create_folder"]) - - } -} - -// * TestDeleteQueueItem tests the DeleteQueueItem function -// Deletes a queue item -func TestDeleteQueueItem(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - vodService := vod.NewService(&database.Database{Client: client}) - channelService := channel.NewService(&database.Database{Client: client}) - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a queue item - dbQueue, err := client.Queue.Create().SetVod(dbVod).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/queue/%s", dbQueue.ID.String()), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbQueue.ID.String()) - - if assert.NoError(t, h.DeleteQueueItem(c)) { - assert.Equal(t, http.StatusNoContent, rec.Code) - - // Check if queue item was deleted - queueItem, err := client.Queue.Get(context.Background(), dbQueue.ID) - assert.Error(t, err) - assert.Nil(t, queueItem) - - } -} - -// * TestReadQueueLogFile tests the ReadQueueLogFile function -// Deletes a queue item -func TestReadQueueLogFile(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - vodService := vod.NewService(&database.Database{Client: client}) - channelService := channel.NewService(&database.Database{Client: client}) - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a queue item - dbQueue, err := client.Queue.Create().SetVod(dbVod).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create log folder - err = os.MkdirAll("/logs", 0755) - if err != nil { - t.Fatal(err) - } - - // Create log file - logFile, err := os.Create(fmt.Sprintf("/logs/%s-%s.log", dbVod.ID.String(), "video")) - if err != nil { - t.Fatal(err) - } - - // Write to log file - _, err = logFile.WriteString("test log") - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/queue/%s/tail?type=%s", dbQueue.ID.String(), "video"), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id", "type") - c.SetParamValues(dbQueue.ID.String(), "video") - - if assert.NoError(t, h.ReadQueueLogFile(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response string - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "test log", response) - - } -} +// import ( +// "context" +// "encoding/json" +// "fmt" +// "net/http" +// "net/http/httptest" +// "os" +// "strings" +// "testing" + +// "github.com/go-playground/validator/v10" +// "github.com/labstack/echo/v4" +// "github.com/stretchr/testify/assert" +// "github.com/zibbp/ganymede/ent" +// "github.com/zibbp/ganymede/ent/enttest" +// entQueue "github.com/zibbp/ganymede/ent/queue" +// entVod "github.com/zibbp/ganymede/ent/vod" +// "github.com/zibbp/ganymede/internal/channel" +// "github.com/zibbp/ganymede/internal/database" +// "github.com/zibbp/ganymede/internal/queue" +// httpHandler "github.com/zibbp/ganymede/internal/transport/http" +// "github.com/zibbp/ganymede/internal/utils" +// "github.com/zibbp/ganymede/internal/vod" +// ) + +// // * TestCreateQueueItem tests the CreateQueueItem function +// // Creates a new queue item +// func TestCreateQueueItem(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// vodService := vod.NewService(&database.Database{Client: client}) +// channelService := channel.NewService(&database.Database{Client: client}) + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// createQueueItemJson := `{ +// "vod_id": "` + dbVod.ID.String() + `" +// }` + +// req := httptest.NewRequest(http.MethodPost, "/api/v1/queue", strings.NewReader(createQueueItemJson)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// if assert.NoError(t, h.CreateQueueItem(c)) { +// assert.Equal(t, http.StatusCreated, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) + +// /// Check if the queue item was created +// queueItem, err := client.Queue.Query().Where(entQueue.HasVodWith(entVod.ID(dbVod.ID))).WithVod().Only(context.Background()) +// assert.NoError(t, err) +// assert.Equal(t, dbVod.ID, queueItem.Edges.Vod.ID) + +// } +// } + +// // * TestGetQueueItems tests the GetQueueItems function +// // Gets all queue items +// func TestGetQueueItems(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// vodService := vod.NewService(&database.Database{Client: client}) +// channelService := channel.NewService(&database.Database{Client: client}) + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a queue item +// dbQueue, err := client.Queue.Create().SetVod(dbVod).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodGet, "/api/v1/queue", nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// if assert.NoError(t, h.GetQueueItems(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response []map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, 1, len(response)) +// assert.Equal(t, dbQueue.ID.String(), response[0]["id"]) + +// } +// } + +// // * TestGetQueueItem tests the GetQueueItem function +// // Gets all queue items +// func TestGetQueueItem(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// vodService := vod.NewService(&database.Database{Client: client}) +// channelService := channel.NewService(&database.Database{Client: client}) + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a queue item +// dbQueue, err := client.Queue.Create().SetVod(dbVod).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/queue/%s", dbQueue.ID.String()), nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbQueue.ID.String()) + +// if assert.NoError(t, h.GetQueueItem(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, dbQueue.ID.String(), response["id"]) + +// } +// } + +// // * TestUpdateQueueItem tests the UpdateQueueItem function +// // Updates a queue item +// func TestUpdateQueueItem(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// vodService := vod.NewService(&database.Database{Client: client}) +// channelService := channel.NewService(&database.Database{Client: client}) + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a queue item +// dbQueue, err := client.Queue.Create().SetVod(dbVod).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// updateQueueItemJson := `{ +// "processing": false, +// "task_vod_create_folder": "success", +// "task_vod_download_thumbnail": "success", +// "task_vod_save_info": "success", +// "task_video_download": "success", +// "task_video_move": "success", +// "task_chat_download": "success", +// "task_chat_render": "success", +// "task_chat_move": "success", +// "task_video_convert": "success", +// "task_chat_convert": "success" +// }` + +// req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/queue/%s", dbQueue.ID.String()), strings.NewReader(updateQueueItemJson)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbQueue.ID.String()) + +// if assert.NoError(t, h.UpdateQueueItem(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "success", response["task_vod_create_folder"]) + +// } +// } + +// // * TestDeleteQueueItem tests the DeleteQueueItem function +// // Deletes a queue item +// func TestDeleteQueueItem(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// vodService := vod.NewService(&database.Database{Client: client}) +// channelService := channel.NewService(&database.Database{Client: client}) + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a queue item +// dbQueue, err := client.Queue.Create().SetVod(dbVod).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/queue/%s", dbQueue.ID.String()), nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbQueue.ID.String()) + +// if assert.NoError(t, h.DeleteQueueItem(c)) { +// assert.Equal(t, http.StatusNoContent, rec.Code) + +// // Check if queue item was deleted +// queueItem, err := client.Queue.Get(context.Background(), dbQueue.ID) +// assert.Error(t, err) +// assert.Nil(t, queueItem) + +// } +// } + +// // * TestReadQueueLogFile tests the ReadQueueLogFile function +// // Deletes a queue item +// func TestReadQueueLogFile(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close()
+
+// vodService := vod.NewService(&database.Database{Client: client})
+// channelService := channel.NewService(&database.Database{Client: client})
+
+// h := &httpHandler.Handler{
+// Server: echo.New(),
+// Service: httpHandler.Services{
+// QueueService: queue.NewService(&database.Database{Client: client}, vodService, channelService),
+// },
+// }
+
+// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()}
+
+// // Create a channel
+// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background())
+// if err != nil {
+// t.Fatal(err)
+// }
+
+// // Create a vod
+// dbVod, err := client.Vod.Create().SetTitle("test vod").SetExtID("123").SetWebThumbnailPath("").SetVideoPath("").SetChannel(dbChannel).Save(context.Background())
+// if err != nil {
+// t.Fatal(err)
+// }
+
+// // Create a queue item
+// dbQueue, err := client.Queue.Create().SetVod(dbVod).Save(context.Background())
+// if err != nil {
+// t.Fatal(err)
+// }
+
+// // Create log folder
+// err = os.MkdirAll("/logs", 0755)
+// if err != nil {
+// t.Fatal(err)
+// }
+
+// // Create log file
+// logFile, err := os.Create(fmt.Sprintf("/logs/%s-%s.log", dbVod.ID.String(), "video"))
+// if err != nil {
+// t.Fatal(err)
+// }
+
+// // Write to log file
+// _, err = logFile.WriteString("test log")
+// if err != nil {
+// t.Fatal(err)
+// }
+
+// req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/queue/%s/tail?type=%s", dbQueue.ID.String(), "video"), nil)
+// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON)
+// rec := httptest.NewRecorder()
+// c := h.Server.NewContext(req, rec)
+// c.SetParamNames("id", "type")
+// c.SetParamValues(dbQueue.ID.String(), "video")
+
+// if assert.NoError(t, h.ReadQueueLogFile(c)) {
+// assert.Equal(t, http.StatusOK, rec.Code)
+
+// // Check response body
+// var response string
+// err := json.Unmarshal(rec.Body.Bytes(), &response)
+// assert.NoError(t, err)
+// assert.Equal(t, "test log", response)
+
+// }
+// }
diff --git a/internal/transport/http/response.go b/internal/transport/http/response.go
new file mode 100644
index 00000000..0f556de9
--- /dev/null
+++ b/internal/transport/http/response.go
@@ -0,0 +1,29 @@
+package http
+
+import (
+	"net/http"
+
+	"github.com/labstack/echo/v4"
+)
+
+type Response struct {
+	Success bool        `json:"success"`
+	Data    interface{} `json:"data"`
+	Message string      `json:"message"`
+}
+
+func SuccessResponse(c echo.Context, data interface{}, message string) error {
+	return c.JSON(http.StatusOK, Response{
+		Success: true,
+		Data:    data,
+		Message: message,
+	})
+}
+
+func ErrorResponse(c echo.Context, statusCode int, message string) error {
+	return c.JSON(statusCode, Response{
+		Success: false,
+		Data:    nil,
+		Message: message,
+	})
+}
diff --git a/internal/transport/http/scheduler.go b/internal/transport/http/scheduler.go
index 2e1f03f6..1b0fd20f 100644
--- a/internal/transport/http/scheduler.go
+++ b/internal/transport/http/scheduler.go
@@ -1,10 +1,5 @@
 package http
 
 type SchedulerService interface {
-	StartAppScheduler()
 	StartLiveScheduler()
-	StartJwksScheduler()
-	StartWatchVideoScheduler()
-	StartTwitchCategoriesScheduler()
-	StartPruneVideoScheduler()
 }
diff --git a/internal/transport/http/task.go b/internal/transport/http/task.go
index 51bc2f5b..f5b74039 100644
--- a/internal/transport/http/task.go
+++ b/internal/transport/http/task.go
@@ -1,17 +1,18 @@
 package http
 
 import (
+	"context"
 	"net/http"
 
 	"github.com/labstack/echo/v4"
 )
 
 type TaskService interface {
-	StartTask(c echo.Context, task string) error
+	StartTask(ctx context.Context, task string) error
 }
 
 type StartTaskRequest struct {
-	Task string `json:"task" validate:"required,oneof=check_live check_vod get_jwks twitch_auth storage_migration prune_videos"`
+	Task string `json:"task" validate:"required,oneof=check_live check_vod get_jwks storage_migration prune_videos save_chapters update_stream_vod_ids"`
 }
 
 // StartTask godoc
@@ -34,7 +35,7 @@ func (h *Handler) StartTask(c echo.Context) error {
 	if err := c.Validate(str); err != nil {
 		return echo.NewHTTPError(http.StatusBadRequest, err.Error())
 	}
-	if err := h.Service.TaskService.StartTask(c, str.Task); err != nil {
+	if err := h.Service.TaskService.StartTask(c.Request().Context(), str.Task); err != nil {
 		return echo.NewHTTPError(http.StatusInternalServerError, err.Error())
 	}
 	return c.NoContent(http.StatusOK)
diff --git a/internal/transport/http/twitch.go b/internal/transport/http/twitch.go
index 1fb5d809..63fce664 100644
--- a/internal/transport/http/twitch.go
+++ b/internal/transport/http/twitch.go
@@ -4,16 +4,15 @@ import (
 	"net/http"
 
 	"github.com/labstack/echo/v4"
-	"github.com/zibbp/ganymede/ent"
-	"github.com/zibbp/ganymede/internal/twitch"
+	"github.com/zibbp/ganymede/internal/platform"
 )
 
 type TwitchService interface {
-	GetVodByID(id string) (twitch.Vod, error)
-	GetCategories() ([]*ent.TwitchCategory, error)
+	GetTwitchVideo(id string) (platform.VideoInfo, error)
+	GetTwitchChannel(name string) (platform.ChannelInfo, error)
 }
 
-// GetTwitchUser godoc
+// GetTwitchChannel godoc
 //
 // @Summary Get a twitch channel
 // @Description Get a twitch user/channel by name (uses twitch api)
@@ -25,19 +24,19 @@ type TwitchService interface {
 // @Failure 400 {object} utils.ErrorResponse
 // @Failure 500 {object} utils.ErrorResponse
 // @Router /twitch/channel [get]
-func (h *Handler) GetTwitchUser(c echo.Context) error {
+func (h *Handler) GetTwitchChannel(c echo.Context) error {
 	name := c.QueryParam("name")
 	if name == "" {
 		return echo.NewHTTPError(http.StatusBadRequest, "channel name query param is required")
 	}
-	channel, err := twitch.API.GetUserByLogin(name)
+	channel, err := h.Service.PlatformTwitch.GetChannel(c.Request().Context(), name)
 	if err != nil {
 		return echo.NewHTTPError(http.StatusInternalServerError, err.Error())
 	}
 	return c.JSON(http.StatusOK, channel)
 }
 
-// GetTwitchVod godoc
+// GetTwitchVideo godoc
 //
 // @Summary Get a twitch vod
 // @Description Get a twitch vod by id (uses twitch api)
@@ -48,13 +47,13 @@ func (h *Handler) GetTwitchUser(c echo.Context) error {
 // @Success 200 {object} twitch.Vod
 // @Failure 400 {object} utils.ErrorResponse
 // @Failure 500 {object} utils.ErrorResponse
-// @Router /twitch/vod [get]
-func (h *Handler) GetTwitchVod(c echo.Context) error {
+// @Router /twitch/video [get]
+func (h *Handler) GetTwitchVideo(c echo.Context) error {
 	vodID := c.QueryParam("id")
 	if vodID == "" {
 		return echo.NewHTTPError(http.StatusBadRequest, "id query param is required")
 	}
-	vod, err := h.Service.TwitchService.GetVodByID(vodID)
+	vod, err := h.Service.PlatformTwitch.GetVideo(c.Request().Context(), vodID, true, true)
 	if err != nil {
 		if err.Error() == "vod not found" {
 			return echo.NewHTTPError(http.StatusNotFound, err.Error())
@@ -63,46 +62,3 @@ func (h *Handler) GetTwitchVod(c echo.Context) error {
 	return c.JSON(http.StatusOK, vod)
 }
-
-// GQLGetTwitchVideo godoc
-//
-// @Summary Get a twitch video
-// @Description Get a twitch video by id (uses twitch 
graphql api) -// @Tags twitch -// @Accept json -// @Produce json -// @Param id query string true "Twitch video id" -// @Success 200 {object} twitch.Video -// @Failure 400 {object} utils.ErrorResponse -// @Failure 500 {object} utils.ErrorResponse -// @Router /twitch/gql/video [get] -func (h *Handler) GQLGetTwitchVideo(c echo.Context) error { - videoID := c.QueryParam("id") - if videoID == "" { - return echo.NewHTTPError(http.StatusBadRequest, "id query param is required") - } - video, err := twitch.GQLGetVideo(videoID) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - - return c.JSON(http.StatusOK, video) -} - -// GetTwitchCategories godoc -// -// @Summary Get a list of twitch categories -// @Description Get a list of twitch categories -// @Tags twitch -// @Accept json -// @Produce json -// @Success 200 {object} twitch.Category -// @Failure 500 {object} utils.ErrorResponse -// @Router /twitch/categories [get] -func (h *Handler) GetTwitchCategories(c echo.Context) error { - categories, err := h.Service.TwitchService.GetCategories() - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusOK, categories) -} diff --git a/internal/transport/http/user_test.go b/internal/transport/http/user_test.go index f859327f..f3fc865c 100644 --- a/internal/transport/http/user_test.go +++ b/internal/transport/http/user_test.go @@ -1,192 +1,192 @@ package http_test -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "strings" - "testing" - - "github.com/go-playground/validator/v10" - "github.com/labstack/echo/v4" - "github.com/stretchr/testify/assert" - "github.com/zibbp/ganymede/ent" - "github.com/zibbp/ganymede/ent/enttest" - "github.com/zibbp/ganymede/internal/database" - httpHandler "github.com/zibbp/ganymede/internal/transport/http" - "github.com/zibbp/ganymede/internal/user" - "github.com/zibbp/ganymede/internal/utils" -) - -// * TestGetUsers tests the GetUsers function -// Gets all users -func TestGetUsers(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - UserService: user.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a user - dbUser, err := client.User.Create().SetUsername("test").SetPassword("test").Save(context.Background()) - assert.NoError(t, err) - - req := httptest.NewRequest(http.MethodGet, "/api/v1/user", nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - if assert.NoError(t, h.GetUsers(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response []map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, 1, len(response)) - assert.Equal(t, dbUser.ID.String(), response[0]["id"]) - - } -} - -// * TestGetUser tests the GetUser function -// Gets a user by id -func TestGetUser(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - UserService: user.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a user - dbUser, err := client.User.Create().SetUsername("test").SetPassword("test").Save(context.Background()) - assert.NoError(t, err) - - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/user/%s", dbUser.ID.String()), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbUser.ID.String()) - - if assert.NoError(t, h.GetUser(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, dbUser.ID.String(), response["id"]) - - } -} - -// * TestUpdateUser tests the UpdateUser function -// Update a user -func TestUpdateUser(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - UserService: user.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a user - dbUser, err := client.User.Create().SetUsername("test").SetPassword("test").Save(context.Background()) - assert.NoError(t, err) - - updateUserJson := `{ - "username": "test2", - "role": "admin" - }` - - req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/user/%s", dbUser.ID.String()), strings.NewReader(updateUserJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbUser.ID.String()) - - if assert.NoError(t, h.UpdateUser(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "test2", response["username"]) - - } -} - -// * TestDeleteUser tests the DeleteUser function -// Delete a user -func TestDeleteUser(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - UserService: user.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a user - dbUser, err := client.User.Create().SetUsername("test").SetPassword("test").Save(context.Background()) - assert.NoError(t, err) - - req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/user/%s", dbUser.ID.String()), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbUser.ID.String()) - - if assert.NoError(t, h.DeleteUser(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check if user is deleted - user, err := client.User.Get(context.Background(), dbUser.ID) - assert.Error(t, err) - assert.Nil(t, user) - - } -} +// import ( +// "context" +// "encoding/json" +// "fmt" +// "net/http" +// "net/http/httptest" +// "strings" +// "testing" + +// "github.com/go-playground/validator/v10" +// "github.com/labstack/echo/v4" +// "github.com/stretchr/testify/assert" +// "github.com/zibbp/ganymede/ent" +// "github.com/zibbp/ganymede/ent/enttest" +// "github.com/zibbp/ganymede/internal/database" +// httpHandler "github.com/zibbp/ganymede/internal/transport/http" +// "github.com/zibbp/ganymede/internal/user" +// "github.com/zibbp/ganymede/internal/utils" +// ) + +// // * TestGetUsers tests the GetUsers function +// // Gets all users +// func TestGetUsers(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) +// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// UserService: user.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a user +// dbUser, err := client.User.Create().SetUsername("test").SetPassword("test").Save(context.Background()) +// assert.NoError(t, err) + +// req := httptest.NewRequest(http.MethodGet, "/api/v1/user", nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// if assert.NoError(t, h.GetUsers(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response []map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, 1, len(response)) +// assert.Equal(t, dbUser.ID.String(), response[0]["id"]) + +// } +// } + +// // * TestGetUser tests the GetUser function +// // Gets a user by id +// func TestGetUser(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// UserService: user.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a user +// dbUser, err := client.User.Create().SetUsername("test").SetPassword("test").Save(context.Background()) +// assert.NoError(t, err) + +// req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/user/%s", dbUser.ID.String()), nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbUser.ID.String()) + +// if assert.NoError(t, h.GetUser(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, dbUser.ID.String(), response["id"]) + +// } +// } + +// // * TestUpdateUser tests the UpdateUser function +// // Update a user +// func TestUpdateUser(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) +// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// UserService: user.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a user +// dbUser, err := client.User.Create().SetUsername("test").SetPassword("test").Save(context.Background()) +// assert.NoError(t, err) + +// updateUserJson := `{ +// "username": "test2", +// "role": "admin" +// }` + +// req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/user/%s", dbUser.ID.String()), strings.NewReader(updateUserJson)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbUser.ID.String()) + +// if assert.NoError(t, h.UpdateUser(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "test2", response["username"]) + +// } +// } + +// // * TestDeleteUser tests the DeleteUser function +// // Delete a user +// func TestDeleteUser(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// UserService: user.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a user +// dbUser, err := client.User.Create().SetUsername("test").SetPassword("test").Save(context.Background()) +// assert.NoError(t, err) + +// req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/user/%s", dbUser.ID.String()), nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbUser.ID.String()) + +// if assert.NoError(t, h.DeleteUser(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check if user is deleted +// user, err := client.User.Get(context.Background(), dbUser.ID) +// assert.Error(t, err) +// assert.Nil(t, user) + +// } +// } diff --git a/internal/transport/http/vod.go b/internal/transport/http/vod.go index 0a951a49..a12e2f15 100644 --- a/internal/transport/http/vod.go +++ b/internal/transport/http/vod.go @@ -1,6 +1,7 @@ package http import ( + "context" "fmt" "net/http" "strconv" @@ -9,8 +10,10 @@ import ( "github.com/google/uuid" "github.com/labstack/echo/v4" + "github.com/riverqueue/river/rivertype" "github.com/zibbp/ganymede/ent" "github.com/zibbp/ganymede/internal/chat" + "github.com/zibbp/ganymede/internal/platform" "github.com/zibbp/ganymede/internal/utils" "github.com/zibbp/ganymede/internal/vod" ) @@ -27,32 +30,33 @@ type VodService interface { GetVodsPagination(c echo.Context, limit int, offset int, channelId uuid.UUID, types []utils.VodType) (vod.Pagination, error) GetVodChatComments(c echo.Context, vodID uuid.UUID, start float64, end float64) (*[]chat.Comment, error) GetUserIdFromChat(c echo.Context, vodID uuid.UUID) (*int64, error) - GetVodChatEmotes(c echo.Context, vodID uuid.UUID) (*chat.GanymedeEmotes, error) - GetVodChatBadges(c echo.Context, vodID uuid.UUID) (*chat.GanymedeBadges, error) + GetChatEmotes(ctx context.Context, vodID uuid.UUID) (*platform.Emotes, error) + GetChatBadges(ctx context.Context, vodID uuid.UUID) (*platform.Badges, error) GetNumberOfVodChatCommentsFromTime(c echo.Context, vodID uuid.UUID, start float64, commentCount int64) (*[]chat.Comment, error) LockVod(c echo.Context, vID uuid.UUID, status bool) error + GenerateStaticThumbnail(ctx context.Context, videoID uuid.UUID) (*rivertype.JobInsertResult, error) } type CreateVodRequest struct { - ID string `json:"id"` - ChannelID string `json:"channel_id" validate:"required"` - ExtID string `json:"ext_id" validate:"min=1"` - Platform utils.VodPlatform `json:"platform" validate:"required,oneof=twitch youtube"` - Type utils.VodType `json:"type" validate:"required,oneof=archive live highlight upload clip"` - Title string `json:"title" validate:"required,min=1"` - Duration int `json:"duration" validate:"required"` - Views int `json:"views" validate:"required"` - Resolution string `json:"resolution"` - Processing bool `json:"processing"` - ThumbnailPath string `json:"thumbnail_path"` - WebThumbnailPath string `json:"web_thumbnail_path" validate:"required,min=1"` - VideoPath string `json:"video_path" validate:"required,min=1"` - ChatPath string `json:"chat_path"` - ChatVideoPath string `json:"chat_video_path"` - InfoPath string `json:"info_path"` - CaptionPath string `json:"caption_path"` - StreamedAt string `json:"streamed_at" 
validate:"required"` - Locked bool `json:"locked"` + ID string `json:"id"` + ChannelID string `json:"channel_id" validate:"required"` + ExtID string `json:"ext_id" validate:"min=1"` + Platform utils.VideoPlatform `json:"platform" validate:"required,oneof=twitch youtube"` + Type utils.VodType `json:"type" validate:"required,oneof=archive live highlight upload clip"` + Title string `json:"title" validate:"required,min=1"` + Duration int `json:"duration" validate:"required"` + Views int `json:"views" validate:"required"` + Resolution string `json:"resolution"` + Processing bool `json:"processing"` + ThumbnailPath string `json:"thumbnail_path"` + WebThumbnailPath string `json:"web_thumbnail_path" validate:"required,min=1"` + VideoPath string `json:"video_path" validate:"required,min=1"` + ChatPath string `json:"chat_path"` + ChatVideoPath string `json:"chat_video_path"` + InfoPath string `json:"info_path"` + CaptionPath string `json:"caption_path"` + StreamedAt string `json:"streamed_at" validate:"required"` + Locked bool `json:"locked"` } // CreateVod godoc @@ -485,7 +489,7 @@ func (h *Handler) GetVodChatComments(c echo.Context) error { return c.JSON(http.StatusOK, v) } -// GetVodChatEmotes godoc +// GetChatEmotes godoc // // @Summary Get vod chat emotes // @Description Get vod chat emotes @@ -498,13 +502,13 @@ func (h *Handler) GetVodChatComments(c echo.Context) error { // @Failure 404 {object} utils.ErrorResponse // @Failure 500 {object} utils.ErrorResponse // @Router /vod/{id}/chat/emotes [get] -func (h *Handler) GetVodChatEmotes(c echo.Context) error { +func (h *Handler) GetChatEmotes(c echo.Context) error { vID, err := uuid.Parse(c.Param("id")) if err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - emotes, err := h.Service.VodService.GetVodChatEmotes(c, vID) + emotes, err := h.Service.VodService.GetChatEmotes(c.Request().Context(), vID) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } @@ -512,7 +516,7 @@ func (h *Handler) GetVodChatEmotes(c echo.Context) error { return c.JSON(http.StatusOK, emotes) } -// GetVodChatBadges godoc +// GetChatBadges godoc // // @Summary Get vod chat badges // @Description Get vod chat badges @@ -525,13 +529,13 @@ func (h *Handler) GetVodChatEmotes(c echo.Context) error { // @Failure 404 {object} utils.ErrorResponse // @Failure 500 {object} utils.ErrorResponse // @Router /vod/{id}/chat/badges [get] -func (h *Handler) GetVodChatBadges(c echo.Context) error { +func (h *Handler) GetChatBadges(c echo.Context) error { vID, err := uuid.Parse(c.Param("id")) if err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - badges, err := h.Service.VodService.GetVodChatBadges(c, vID) + badges, err := h.Service.VodService.GetChatBadges(c.Request().Context(), vID) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } @@ -597,3 +601,15 @@ func (h *Handler) LockVod(c echo.Context) error { } return c.JSON(http.StatusOK, nil) } + +func (h *Handler) GenerateStaticThumbnail(c echo.Context) error { + vID, err := uuid.Parse(c.Param("id")) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + job, err := h.Service.VodService.GenerateStaticThumbnail(c.Request().Context(), vID) + if err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + return SuccessResponse(c, nil, fmt.Sprintf("job created: %d", job.Job.ID)) +} diff --git a/internal/transport/http/vod_test.go b/internal/transport/http/vod_test.go 
index 587e7e09..ef1077c0 100644 --- a/internal/transport/http/vod_test.go +++ b/internal/transport/http/vod_test.go @@ -1,464 +1,464 @@ package http_test -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "strings" - "testing" - "time" - - "github.com/go-playground/validator/v10" - "github.com/labstack/echo/v4" - "github.com/stretchr/testify/assert" - "github.com/zibbp/ganymede/ent" - "github.com/zibbp/ganymede/ent/enttest" - "github.com/zibbp/ganymede/internal/database" - httpHandler "github.com/zibbp/ganymede/internal/transport/http" - "github.com/zibbp/ganymede/internal/utils" - "github.com/zibbp/ganymede/internal/vod" -) - -// * TestCreateVod tests the CreateVod function -// Creates a vod -func TestCreateVod(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) - defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - VodService: vod.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - createVodJson := `{ - "channel_id": "` + dbChannel.ID.String() + `", - "ext_id": "123456789", - "platform": "twitch", - "type": "archive", - "title": "Test Vod", - "duration": 6520, - "views": 520, - "resolution": "source", - "thumbnail_path": "/vods/test/123456789/123456789-thumbnail.jpg", - "web_thumbnail_path": "/vods/test/123456789/123456789-web_thumbnail.jpg", - "video_path": "/vods/test/123456789/123456789-video.mp4", - "chat_path": "/vods/test/123456789/123456789-chat.json", - "chat_video_path": "/vods/test/123456789/123456789-chat.mp4", - "info_path": "/vods/test/123456789/123456789-info.json", - "streamed_at": "2023-02-02T20:07:51.594Z" - }` - - req := httptest.NewRequest(http.MethodPost, "/api/v1/vod", strings.NewReader(createVodJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - if assert.NoError(t, h.CreateVod(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "123456789", response["ext_id"]) - - } -} - -// * TestGetVods tests the GetVods function -// Gets all vods -func TestGetVods(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - VodService: vod.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodGet, "/api/v1/vod", nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - - if assert.NoError(t, h.GetVods(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response []map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, dbVod.ID.String(), response[0]["id"]) - - } -} - -// * TestGetVod tests the GetVod function -// Gets a vod -func TestGetVod(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - VodService: vod.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/vod/%s", dbVod.ID.String()), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbVod.ID.String()) - - if assert.NoError(t, h.GetVod(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, dbVod.ID.String(), response["id"]) - - } -} - -// * TestDeleteVod tests the DeleteVod function -// Deletes a vod -func TestDeleteVod(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - VodService: vod.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/vod/%s", dbVod.ID.String()), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbVod.ID.String()) - - if assert.NoError(t, h.DeleteVod(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check if vod is deleted - vods, err := client.Vod.Query().All(context.Background()) - assert.NoError(t, err) - assert.Equal(t, 0, len(vods)) - } -} - -// * TestUpdateVod tests the UpdateVod function -// Updates a vod -func TestUpdateVod(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - VodService: vod.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - updateVodJson := `{ - "channel_id": "` + dbChannel.ID.String() + `", - "ext_id": "123456789", - "platform": "twitch", - "type": "archive", - "title": "Updated Test Vod", - "duration": 6520, - "views": 520, - "resolution": "source", - "thumbnail_path": "/vods/test/123456789/123456789-thumbnail.jpg", - "web_thumbnail_path": "/vods/test/123456789/123456789-web_thumbnail.jpg", - "video_path": "/vods/test/123456789/123456789-video.mp4", - "chat_path": "/vods/test/123456789/123456789-chat.json", - "chat_video_path": "/vods/test/123456789/123456789-chat.mp4", - "info_path": "/vods/test/123456789/123456789-info.json", - "streamed_at": "2023-02-02T20:07:51.594Z" - }` - - req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/vod/%s", dbVod.ID.String()), strings.NewReader(updateVodJson)) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbVod.ID.String()) - - if assert.NoError(t, h.UpdateVod(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, "Updated Test Vod", response["title"]) - - } -} - -// * TestSearchVods tests the SearchVods function -// Searches for vods -func TestSearchVods(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - VodService: vod.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - _, err = client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/vod/search/?q=%s&limit=%s&offset=%s", "test", "20", "1"), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("q", "limit", "offset") - c.SetParamValues("test", "20", "1") - - if assert.NoError(t, h.SearchVods(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, float64(1), response["total_count"]) - } -} - -// * TestGetVodPlaylists tests the GetVodPlaylists function -// Gets a vod's playlists -func TestGetVodPlaylists(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - VodService: vod.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a playlist - dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("Test Playlist").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Add vod to playlist - _, err = client.Playlist.UpdateOne(dbPlaylist).AddVods(dbVod).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/vod/%s/playlist", dbVod.ID.String()), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("id") - c.SetParamValues(dbVod.ID.String()) - - if assert.NoError(t, h.GetVodPlaylists(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response []map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, dbPlaylist.ID.String(), response[0]["id"]) - } -} - -// * TestGetVodsPagination tests the GetVodsPagination function -// Gets a paginated list of vods -func TestGetVodsPagination(t *testing.T) { - opts := []enttest.Option{ - enttest.WithOptions(ent.Log(t.Log)), - } - - client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
- defer client.Close() - - h := &httpHandler.Handler{ - Server: echo.New(), - Service: httpHandler.Services{ - VodService: vod.NewService(&database.Database{Client: client}), - }, - } - - h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} - - // Create a channel - dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - // Create a vod - _, err = client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("987654321").SetPlatform("twitch").SetType("highlight").SetTitle("Test Vod 2").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) - if err != nil { - t.Fatal(err) - } - - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/vod/paginate?limit=%s&offset=%s&channel_id=%s", "20", "0", dbChannel.ID.String()), nil) - req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) - rec := httptest.NewRecorder() - c := h.Server.NewContext(req, rec) - c.SetParamNames("limit", "offset", "channel_id") - c.SetParamValues("20", "0", dbChannel.ID.String()) - - if assert.NoError(t, h.GetVodsPagination(c)) { - assert.Equal(t, http.StatusOK, rec.Code) - - // Check response body - var response map[string]interface{} - err := json.Unmarshal(rec.Body.Bytes(), &response) - assert.NoError(t, err) - assert.Equal(t, float64(0), response["offset"]) - assert.Equal(t, float64(20), response["limit"]) - assert.Equal(t, float64(2), response["total_count"]) - assert.Equal(t, float64(1), response["pages"]) - assert.Equal(t, dbVod.ID.String(), response["data"].([]interface{})[0].(map[string]interface{})["id"]) - - } -} +// import ( +// "context" +// "encoding/json" +// "fmt" +// "net/http" +// "net/http/httptest" +// "strings" +// "testing" +// "time" + +// "github.com/go-playground/validator/v10" +// "github.com/labstack/echo/v4" +// "github.com/stretchr/testify/assert" +// "github.com/zibbp/ganymede/ent" +// "github.com/zibbp/ganymede/ent/enttest" +// "github.com/zibbp/ganymede/internal/database" +// httpHandler "github.com/zibbp/ganymede/internal/transport/http" +// "github.com/zibbp/ganymede/internal/utils" +// "github.com/zibbp/ganymede/internal/vod" +// ) + +// // * TestCreateVod tests the CreateVod function +// // Creates a vod +// func TestCreateVod(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, 
"sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) +// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// VodService: vod.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// createVodJson := `{ +// "channel_id": "` + dbChannel.ID.String() + `", +// "ext_id": "123456789", +// "platform": "twitch", +// "type": "archive", +// "title": "Test Vod", +// "duration": 6520, +// "views": 520, +// "resolution": "source", +// "thumbnail_path": "/vods/test/123456789/123456789-thumbnail.jpg", +// "web_thumbnail_path": "/vods/test/123456789/123456789-web_thumbnail.jpg", +// "video_path": "/vods/test/123456789/123456789-video.mp4", +// "chat_path": "/vods/test/123456789/123456789-chat.json", +// "chat_video_path": "/vods/test/123456789/123456789-chat.mp4", +// "info_path": "/vods/test/123456789/123456789-info.json", +// "streamed_at": "2023-02-02T20:07:51.594Z" +// }` + +// req := httptest.NewRequest(http.MethodPost, "/api/v1/vod", strings.NewReader(createVodJson)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// if assert.NoError(t, h.CreateVod(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "123456789", response["ext_id"]) + +// } +// } + +// // * TestGetVods tests the GetVods function +// // Gets all vods +// func TestGetVods(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// VodService: vod.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodGet, "/api/v1/vod", nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) + +// if assert.NoError(t, h.GetVods(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response []map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, dbVod.ID.String(), response[0]["id"]) + +// } +// } + +// // * TestGetVod tests the GetVod function +// // Gets a vod +// func TestGetVod(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// VodService: vod.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/vod/%s", dbVod.ID.String()), nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbVod.ID.String()) + +// if assert.NoError(t, h.GetVod(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, dbVod.ID.String(), response["id"]) + +// } +// } + +// // * TestDeleteVod tests the DeleteVod function +// // Deletes a vod +// func TestDeleteVod(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// VodService: vod.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/v1/vod/%s", dbVod.ID.String()), nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbVod.ID.String()) + +// if assert.NoError(t, h.DeleteVod(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check if vod is deleted +// vods, err := client.Vod.Query().All(context.Background()) +// assert.NoError(t, err) +// assert.Equal(t, 0, len(vods)) +// } +// } + +// // * TestUpdateVod tests the UpdateVod function +// // Updates a vod +// func TestUpdateVod(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// VodService: vod.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// updateVodJson := `{ +// "channel_id": "` + dbChannel.ID.String() + `", +// "ext_id": "123456789", +// "platform": "twitch", +// "type": "archive", +// "title": "Updated Test Vod", +// "duration": 6520, +// "views": 520, +// "resolution": "source", +// "thumbnail_path": "/vods/test/123456789/123456789-thumbnail.jpg", +// "web_thumbnail_path": "/vods/test/123456789/123456789-web_thumbnail.jpg", +// "video_path": "/vods/test/123456789/123456789-video.mp4", +// "chat_path": "/vods/test/123456789/123456789-chat.json", +// "chat_video_path": "/vods/test/123456789/123456789-chat.mp4", +// "info_path": "/vods/test/123456789/123456789-info.json", +// "streamed_at": "2023-02-02T20:07:51.594Z" +// }` + +// req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/v1/vod/%s", dbVod.ID.String()), strings.NewReader(updateVodJson)) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbVod.ID.String()) + +// if assert.NoError(t, h.UpdateVod(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, "Updated Test Vod", response["title"]) + +// } +// } + +// // * TestSearchVods tests the SearchVods function +// // Searches for vods +// func TestSearchVods(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// VodService: vod.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// _, err = client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/vod/search/?q=%s&limit=%s&offset=%s", "test", "20", "1"), nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("q", "limit", "offset") +// c.SetParamValues("test", "20", "1") + +// if assert.NoError(t, h.SearchVods(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, float64(1), response["total_count"]) +// } +// } + +// // * TestGetVodPlaylists tests the GetVodPlaylists function +// // Gets a vod's playlists +// func TestGetVodPlaylists(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// VodService: vod.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a playlist +// dbPlaylist, err := client.Playlist.Create().SetName("test_playlist").SetDescription("Test Playlist").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Add vod to playlist +// _, err = client.Playlist.UpdateOne(dbPlaylist).AddVods(dbVod).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/vod/%s/playlist", dbVod.ID.String()), nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("id") +// c.SetParamValues(dbVod.ID.String()) + +// if assert.NoError(t, h.GetVodPlaylists(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response []map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, dbPlaylist.ID.String(), response[0]["id"]) +// } +// } + +// // * TestGetVodsPagination tests the GetVodsPagination function +// // Gets a paginated list of vods +// func TestGetVodsPagination(t *testing.T) { +// opts := []enttest.Option{ +// enttest.WithOptions(ent.Log(t.Log)), +// } + +// client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", opts...) 
+// defer client.Close() + +// h := &httpHandler.Handler{ +// Server: echo.New(), +// Service: httpHandler.Services{ +// VodService: vod.NewService(&database.Database{Client: client}), +// }, +// } + +// h.Server.Validator = &utils.CustomValidator{Validator: validator.New()} + +// // Create a channel +// dbChannel, err := client.Channel.Create().SetName("test_channel").SetDisplayName("Test Channel").SetImagePath("/vods/test_channel/test_channel.jpg").Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// // Create a vod +// _, err = client.Vod.Create().SetChannel(dbChannel).SetExtID("123456789").SetPlatform("twitch").SetType("archive").SetTitle("Test Vod").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } +// dbVod, err := client.Vod.Create().SetChannel(dbChannel).SetExtID("987654321").SetPlatform("twitch").SetType("highlight").SetTitle("Test Vod 2").SetDuration(6520).SetViews(520).SetResolution("source").SetThumbnailPath("/vods/test/123456789/123456789-thumbnail.jpg").SetWebThumbnailPath("/vods/test/123456789/123456789-web_thumbnail.jpg").SetVideoPath("/vods/test/123456789/123456789-video.mp4").SetChatPath("/vods/test/123456789/123456789-chat.json").SetChatVideoPath("/vods/test/123456789/123456789-chat.mp4").SetInfoPath("/vods/test/123456789/123456789-info.json").SetStreamedAt(time.Now()).Save(context.Background()) +// if err != nil { +// t.Fatal(err) +// } + +// req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/v1/vod/paginate?limit=%s&offset=%s&channel_id=%s", "20", "0", dbChannel.ID.String()), nil) +// req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) +// rec := httptest.NewRecorder() +// c := h.Server.NewContext(req, rec) +// c.SetParamNames("limit", "offset", "channel_id") +// c.SetParamValues("20", "0", dbChannel.ID.String()) + +// if assert.NoError(t, h.GetVodsPagination(c)) { +// assert.Equal(t, http.StatusOK, rec.Code) + +// // Check response body +// var response map[string]interface{} +// err := json.Unmarshal(rec.Body.Bytes(), &response) +// assert.NoError(t, err) +// assert.Equal(t, float64(0), response["offset"]) +// assert.Equal(t, float64(20), response["limit"]) +// assert.Equal(t, float64(2), response["total_count"]) +// assert.Equal(t, float64(1), response["pages"]) +// assert.Equal(t, dbVod.ID.String(), response["data"].([]interface{})[0].(map[string]interface{})["id"]) + +// } +// } diff --git a/internal/transport/http/workflow.go b/internal/transport/http/workflow.go deleted file mode 100644 index b1d34b2c..00000000 --- a/internal/transport/http/workflow.go +++ /dev/null @@ -1,143 +0,0 @@ -package http - -import ( - "encoding/base64" - "net/http" - - "github.com/google/uuid" - "github.com/labstack/echo/v4" - "github.com/zibbp/ganymede/internal/temporal" - "github.com/zibbp/ganymede/internal/workflows" -) - -type StartWorkflowRequest struct { - WorkflowName string `json:"workflow_name" validate:"required"` -} -type RestartArchiveWorkflowRequest struct { - WorkflowName string `json:"workflow_name" validate:"required"` - VideoID string `json:"video_id" 
validate:"required"` -} - -func (h *Handler) GetActiveWorkflows(c echo.Context) error { - nextPageToken := c.QueryParam("next_page_token") - - // base64 decode the next page token - decoded, err := base64.StdEncoding.DecodeString(nextPageToken) - if err != nil { - return err - } - - executions, err := temporal.GetActiveWorkflows(c.Request().Context(), []byte(decoded)) - if err != nil { - return err - } - - return c.JSON(200, executions) - -} - -func (h *Handler) GetClosedWorkflows(c echo.Context) error { - nextPageToken := c.QueryParam("next_page_token") - - // base64 decode the next page token - decoded, err := base64.StdEncoding.DecodeString(nextPageToken) - if err != nil { - return err - } - - executions, err := temporal.GetClosedWorkflows(c.Request().Context(), []byte(decoded)) - if err != nil { - return err - } - - return c.JSON(200, executions) -} - -func (h *Handler) GetWorkflowById(c echo.Context) error { - workflowId := c.Param("workflowId") - runId := c.Param("runId") - - execution, err := temporal.GetWorkflowById(c.Request().Context(), workflowId, runId) - if err != nil { - return err - } - - return c.JSON(200, execution) -} - -func (h *Handler) GetWorkflowHistory(c echo.Context) error { - workflowId := c.Param("workflowId") - runId := c.Param("runId") - - history, err := temporal.GetWorkflowHistory(c.Request().Context(), workflowId, runId) - if err != nil { - return err - } - - return c.JSON(200, history) -} - -func (h *Handler) StartWorkflow(c echo.Context) error { - var request StartWorkflowRequest - err := c.Bind(&request) - if err != nil { - return err - } - - // validate request - if err := c.Validate(request); err != nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - - startWorkflowResponse, err := workflows.StartWorkflow(c.Request().Context(), request.WorkflowName) - if err != nil { - return err - } - - return c.JSON(200, startWorkflowResponse) -} - -func (h *Handler) RestartArchiveWorkflow(c echo.Context) error { - var request RestartArchiveWorkflowRequest - err := c.Bind(&request) - if err != nil { - return err - } - - // validate request - if err := c.Validate(request); err != nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - - // create uuid - videoId, err := uuid.Parse(request.VideoID) - if err != nil { - return err - } - - // some workflows should not be restarted such as live video and chat downloads - if request.WorkflowName == "ArchiveTwitchLiveVideoWorkflow" || request.WorkflowName == "ArchiveTwitchLiveChatWorkflow" || request.WorkflowName == " DownloadTwitchLiveChatWorkflow" || request.WorkflowName == "DownloadTwitchLiveVideoWorkflow" { - return echo.NewHTTPError(http.StatusBadRequest, "cannot restart live video or chat workflows") - } - - workflowId, err := temporal.RestartArchiveWorkflow(c.Request().Context(), videoId, request.WorkflowName) - if err != nil { - return err - } - - return c.JSON(200, map[string]string{ - "workflow_id": workflowId, - }) -} - -func (h *Handler) GetVideoIdFromWorkflow(c echo.Context) error { - workflowId := c.Param("workflowId") - runId := c.Param("runId") - - id, err := temporal.GetVideoIdFromWorkflow(c.Request().Context(), workflowId, runId) - if err != nil { - return err - } - - return c.JSON(200, id) -} diff --git a/internal/twitch/category.go b/internal/twitch/category.go deleted file mode 100644 index c093772d..00000000 --- a/internal/twitch/category.go +++ /dev/null @@ -1,131 +0,0 @@ -package twitch - -import ( - "context" - "encoding/json" - "fmt" - "io" - "net/http" - "os" 
- - "github.com/rs/zerolog/log" - entTwitchCategory "github.com/zibbp/ganymede/ent/twitchcategory" - "github.com/zibbp/ganymede/internal/database" -) - -type CategoryResponse struct { - Data []TwitchCategory `json:"data"` - Pagination Pagination `json:"pagination"` -} - -type TwitchCategory struct { - ID string `json:"id"` - Name string `json:"name"` - BoxArtURL string `json:"box_art_url"` - IgdbID string `json:"igdb_id"` -} - -// SetTwitchCategories sets the twitch categories in the database -func SetTwitchCategories() error { - categories, err := GetCategories() - if err != nil { - return fmt.Errorf("failed to get twitch categories: %v", err) - } - - for _, category := range categories { - err = database.DB().Client.TwitchCategory.Create().SetID(category.ID).SetName(category.Name).SetBoxArtURL(category.BoxArtURL).SetIgdbID(category.IgdbID).OnConflictColumns(entTwitchCategory.FieldID).UpdateNewValues().Exec(context.Background()) - if err != nil { - return fmt.Errorf("failed to upsert twitch category: %v", err) - } - } - - log.Debug().Msgf("successfully set twitch categories") - - return nil -} - -// GetCategories gets the top 100 twitch categories -// It then gets the next 100 categories until there are no more using the cursor -// Returns a different number of categories each time it is called for some reason -func GetCategories() ([]TwitchCategory, error) { - client := &http.Client{} - req, err := http.NewRequest("GET", "https://api.twitch.tv/helix/games/top?first=100", nil) - if err != nil { - return nil, fmt.Errorf("failed to create request: %v", err) - } - req.Header.Set("Client-ID", os.Getenv("TWITCH_CLIENT_ID")) - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", os.Getenv("TWITCH_ACCESS_TOKEN"))) - resp, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to get twitch categories: %v", err) - } - - defer resp.Body.Close() - - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read response body: %v", err) - } - - if resp.StatusCode != http.StatusOK { - log.Error().Err(err).Msgf("failed to get twitch categories: %v", string(body)) - return nil, fmt.Errorf("failed to get twitch categories: %v", resp) - } - - var categoryResponse CategoryResponse - err = json.Unmarshal(body, &categoryResponse) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal response: %v", err) - } - - var twitchCategories []TwitchCategory - twitchCategories = append(twitchCategories, categoryResponse.Data...) - - // pagination - var cursor string - cursor = categoryResponse.Pagination.Cursor - for cursor != "" { - response, err := getCategoriesWithCursor(cursor) - if err != nil { - return nil, fmt.Errorf("failed to get twitch categories: %v", err) - } - twitchCategories = append(twitchCategories, response.Data...) 
- cursor = response.Pagination.Cursor - } - - return twitchCategories, nil -} - -func getCategoriesWithCursor(cursor string) (*CategoryResponse, error) { - client := &http.Client{} - req, err := http.NewRequest("GET", fmt.Sprintf("https://api.twitch.tv/helix/games/top?first=100&after=%s", cursor), nil) - if err != nil { - return nil, fmt.Errorf("failed to create request: %v", err) - } - req.Header.Set("Client-ID", os.Getenv("TWITCH_CLIENT_ID")) - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", os.Getenv("TWITCH_ACCESS_TOKEN"))) - resp, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to get twitch categories: %v", err) - } - - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("failed to get twitch categories: %v", resp) - } - - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read response body: %v", err) - } - - var categoryResponse CategoryResponse - err = json.Unmarshal(body, &categoryResponse) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal response: %v", err) - } - - return &categoryResponse, nil - -} diff --git a/internal/twitch/gql.go b/internal/twitch/gql.go deleted file mode 100644 index 51432fbc..00000000 --- a/internal/twitch/gql.go +++ /dev/null @@ -1,277 +0,0 @@ -package twitch - -import ( - "encoding/json" - "fmt" - "io" - "net/http" - "strings" -) - -type GQLResponse struct { - Data Data `json:"data"` - Extensions Extensions `json:"extensions"` -} - -type Data struct { - Video GQLVideo `json:"video"` -} - -type GQLVideo struct { - BroadcastType string `json:"broadcastType"` - ResourceRestriction ResourceRestriction `json:"resourceRestriction"` - Game GQLGame `json:"game"` - Title string `json:"title"` - CreatedAt string `json:"createdAt"` -} - -type GQLGame struct { - ID string `json:"id"` - Name string `json:"name"` -} - -type ResourceRestriction struct { - ID string `json:"id"` - Type string `json:"type"` -} - -type Extensions struct { - DurationMilliseconds int64 `json:"durationMilliseconds"` - RequestID string `json:"requestID"` -} - -type GQLMutedSegmentResponse struct { - Data MutedSegmentData `json:"data"` - Extensions Extensions `json:"extensions"` -} - -type MutedSegmentData struct { - Video MutedSegmentVideo `json:"video"` -} - -type MutedSegmentVideo struct { - ID string `json:"id"` - MuteInfo MuteInfo `json:"muteInfo"` -} - -type MuteInfo struct { - MutedSegmentConnection MutedSegmentConnection `json:"mutedSegmentConnection"` - TypeName string `json:"__typename"` -} - -type MutedSegmentConnection struct { - Nodes []MutedSegmentNode `json:"nodes"` -} - -type MutedSegmentNode struct { - Duration int `json:"duration"` - Offset int `json:"offset"` - TypeName string `json:"__typename"` -} - -type GQLChapterResponse struct { - Data GQLChapterData `json:"data"` - Extensions Extensions `json:"extensions"` -} - -type GQLChapterData struct { - Video GQLChapterDataVideo `json:"video"` -} - -type GQLChapterDataVideo struct { - ID string `json:"id"` - Moments Moments `json:"moments"` - Typename string `json:"__typename"` -} - -type Node struct { - Moments Moments `json:"moments"` - ID string `json:"id"` - DurationMilliseconds int64 `json:"durationMilliseconds"` - PositionMilliseconds int64 `json:"positionMilliseconds"` - Type Type `json:"type"` - Description string `json:"description"` - SubDescription string `json:"subDescription"` - ThumbnailURL string `json:"thumbnailURL"` - Details Details `json:"details"` - Video NodeVideo `json:"video"` - 
Typename string `json:"__typename"` -} - -type Edge struct { - Node Node `json:"node"` - Typename string `json:"__typename"` -} - -type Moments struct { - Edges []Edge `json:"edges"` - Typename string `json:"__typename"` -} - -type Details struct { - Game GameClass `json:"game"` - Typename DetailsTypename `json:"__typename"` -} - -type GameClass struct { - ID string `json:"id"` - DisplayName string `json:"displayName"` - BoxArtURL string `json:"boxArtURL"` - Typename GameTypename `json:"__typename"` -} - -type NodeVideo struct { - ID string `json:"id"` - LengthSeconds int64 `json:"lengthSeconds"` - Typename string `json:"__typename"` -} - -type GameTypename string - -const ( - Game GameTypename = "Game" -) - -type DetailsTypename string - -const ( - GameChangeMomentDetails DetailsTypename = "GameChangeMomentDetails" -) - -func gqlRequest(body string) (GQLResponse, error) { - var response GQLResponse - - client := &http.Client{} - req, err := http.NewRequest("POST", "https://gql.twitch.tv/gql", strings.NewReader(body)) - if err != nil { - return response, err - } - req.Header.Set("Client-ID", "kimne78kx3ncx6brgo4mv6wki5h1ko") - req.Header.Set("Content-Type", "text/plain;charset=UTF-8") - req.Header.Set("Origin", "https://www.twitch.tv") - req.Header.Set("Referer", "https://www.twitch.tv/") - req.Header.Set("Sec-Fetch-Mode", "cors") - req.Header.Set("Sec-Fetch-Site", "same-site") - req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36") - - resp, err := client.Do(req) - if err != nil { - return response, fmt.Errorf("error sending request: %w", err) - } - defer resp.Body.Close() - - bodyBytes, err := io.ReadAll(resp.Body) - if err != nil { - return response, fmt.Errorf("error reading response body: %w", err) - } - - err = json.Unmarshal(bodyBytes, &response) - if err != nil { - return response, fmt.Errorf("error unmarshalling response: %w", err) - } - - return response, nil - -} - -func GQLGetVideo(id string) (GQLResponse, error) { - body := fmt.Sprintf(`{"query": "query{video(id:%s){broadcastType,resourceRestriction{id,type},game{id,name},title,createdAt}}"}`, id) - resp, err := gqlRequest(body) - if err != nil { - return resp, fmt.Errorf("error getting video: %w", err) - } - - return resp, nil -} - -func gqlGetMutedSegmentsRequest(body string) (GQLMutedSegmentResponse, error) { - var response GQLMutedSegmentResponse - - client := &http.Client{} - req, err := http.NewRequest("POST", "https://gql.twitch.tv/gql", strings.NewReader(body)) - if err != nil { - return response, err - } - req.Header.Set("Client-ID", "kimne78kx3ncx6brgo4mv6wki5h1ko") - req.Header.Set("Content-Type", "text/plain;charset=UTF-8") - req.Header.Set("Origin", "https://www.twitch.tv") - req.Header.Set("Referer", "https://www.twitch.tv/") - req.Header.Set("Sec-Fetch-Mode", "cors") - req.Header.Set("Sec-Fetch-Site", "same-site") - req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36") - - resp, err := client.Do(req) - if err != nil { - return response, fmt.Errorf("error sending request: %w", err) - } - defer resp.Body.Close() - - bodyBytes, err := io.ReadAll(resp.Body) - if err != nil { - return response, fmt.Errorf("error reading response body: %w", err) - } - - err = json.Unmarshal(bodyBytes, &response) - if err != nil { - return response, fmt.Errorf("error unmarshalling response: %w", err) - } - - return response, nil - -} - -func 
GQLGetMutedSegments(id string) (GQLMutedSegmentResponse, error) { - body := fmt.Sprintf(`{"operationName":"VideoPlayer_MutedSegmentsAlertOverlay","variables":{"vodID":"%s","includePrivate":false},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"c36e7400657815f4704e6063d265dff766ed8fc1590361c6d71e4368805e0b49"}}}`, id) - resp, err := gqlGetMutedSegmentsRequest(body) - if err != nil { - return resp, fmt.Errorf("error getting video muted segments: %w", err) - } - - return resp, nil -} - -func gqlChapterRequest(body string) (GQLChapterResponse, error) { - var response GQLChapterResponse - - client := &http.Client{} - req, err := http.NewRequest("POST", "https://gql.twitch.tv/gql", strings.NewReader(body)) - if err != nil { - return response, err - } - req.Header.Set("Client-ID", "kimne78kx3ncx6brgo4mv6wki5h1ko") - req.Header.Set("Content-Type", "text/plain;charset=UTF-8") - req.Header.Set("Origin", "https://www.twitch.tv") - req.Header.Set("Referer", "https://www.twitch.tv/") - req.Header.Set("Sec-Fetch-Mode", "cors") - req.Header.Set("Sec-Fetch-Site", "same-site") - req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36") - - resp, err := client.Do(req) - if err != nil { - return response, fmt.Errorf("error sending request: %w", err) - } - defer resp.Body.Close() - - bodyBytes, err := io.ReadAll(resp.Body) - if err != nil { - return response, fmt.Errorf("error reading response body: %w", err) - } - - err = json.Unmarshal(bodyBytes, &response) - if err != nil { - return response, fmt.Errorf("error unmarshalling response: %w", err) - } - - return response, nil - -} - -func GQLGetChapters(id string) (GQLChapterResponse, error) { - body := fmt.Sprintf(`{"operationName":"VideoPlayer_ChapterSelectButtonVideo","variables":{"videoID":"%s","includePrivate":false},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"8d2793384aac3773beab5e59bd5d6f585aedb923d292800119e03d40cd0f9b41"}}}`, id) - resp, err := gqlChapterRequest(body) - if err != nil { - return resp, fmt.Errorf("error getting video chapters: %w", err) - } - - return resp, nil -} diff --git a/internal/twitch/graphql.go b/internal/twitch/graphql.go new file mode 100644 index 00000000..b5cddc41 --- /dev/null +++ b/internal/twitch/graphql.go @@ -0,0 +1,206 @@ +package twitch + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strings" +) + +type GQLVideoResponse struct { + Data GQLVideoData `json:"data"` + Extensions Extensions `json:"extensions"` +} + +type GQLVideoData struct { + Video GQLVideo `json:"video"` +} + +type GQLVideo struct { + BroadcastType string `json:"broadcastType"` + ResourceRestriction ResourceRestriction `json:"resourceRestriction"` + Game GQLGame `json:"game"` + Title string `json:"title"` + CreatedAt string `json:"createdAt"` +} + +type GQLGame struct { + ID string `json:"id"` + Name string `json:"name"` +} + +type ResourceRestriction struct { + ID string `json:"id"` + Type string `json:"type"` +} + +type Extensions struct { + DurationMilliseconds int64 `json:"durationMilliseconds"` + RequestID string `json:"requestID"` +} + +type GQLMutedSegmentsResponse struct { + Data GQLMutedSegmentsData `json:"data"` + Extensions Extensions `json:"extensions"` +} + +type GQLMutedSegmentsData struct { + Video GQLMutedSegmentsVideo `json:"video"` +} + +type GQLMutedSegmentsVideo struct { + ID string `json:"id"` + MuteInfo MuteInfo `json:"muteInfo"` +} + +type MuteInfo struct { + MutedSegmentConnection 
GQLMutedSegmentConnection `json:"mutedSegmentConnection"` + TypeName string `json:"__typename"` +} + +type GQLMutedSegmentConnection struct { + Nodes []GQLMutedSegment `json:"nodes"` +} + +type GQLMutedSegment struct { + Duration int `json:"duration"` + Offset int `json:"offset"` + TypeName string `json:"__typename"` +} + +type GQLChaptersResponse struct { + Data GQLChaptersData `json:"data"` + Extensions Extensions `json:"extensions"` +} + +type GQLChaptersData struct { + Video GQLChaptersVideo `json:"video"` +} + +type GQLChaptersVideo struct { + ID string `json:"id"` + Moments GQLMoments `json:"moments"` + Typename string `json:"__typename"` +} + +type GQLChapter struct { + Moments GQLMoments `json:"moments"` + ID string `json:"id"` + DurationMilliseconds int64 `json:"durationMilliseconds"` + PositionMilliseconds int64 `json:"positionMilliseconds"` + Type string `json:"type"` + Description string `json:"description"` + SubDescription string `json:"subDescription"` + ThumbnailURL string `json:"thumbnailURL"` + Details GQLDetails `json:"details"` + Video GQLNodeVideo `json:"video"` + Typename string `json:"__typename"` +} + +type GQLChapterEdge struct { + Node GQLChapter `json:"node"` + Typename string `json:"__typename"` +} + +type GQLMoments struct { + Edges []GQLChapterEdge `json:"edges"` + Typename string `json:"__typename"` +} + +type GQLDetails struct { + Game GQLGameInfo `json:"game"` + Typename string `json:"__typename"` +} + +type GQLGameInfo struct { + ID string `json:"id"` + DisplayName string `json:"displayName"` + BoxArtURL string `json:"boxArtURL"` + Typename string `json:"__typename"` +} + +type GQLNodeVideo struct { + ID string `json:"id"` + LengthSeconds int64 `json:"lengthSeconds"` + Typename string `json:"__typename"` +} + +// GQLRequest sends a generic GQL request and returns the response. +func gqlRequest(body string) ([]byte, error) { + client := &http.Client{} + req, err := http.NewRequest("POST", "https://gql.twitch.tv/gql", strings.NewReader(body)) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + + req.Header.Set("Client-ID", "kimne78kx3ncx6brgo4mv6wki5h1ko") + req.Header.Set("Content-Type", "text/plain;charset=UTF-8") + req.Header.Set("Origin", "https://www.twitch.tv") + req.Header.Set("Referer", "https://www.twitch.tv/") + req.Header.Set("Sec-Fetch-Mode", "cors") + req.Header.Set("Sec-Fetch-Site", "same-site") + // req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36") + + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + defer resp.Body.Close() + + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + + return bodyBytes, nil +} + +// GQLGetVideo returns the GraphQL version of the video. This often contains data not available in the public API. 
+func GQLGetVideo(id string) (GQLVideo, error) { + body := fmt.Sprintf(`{"query": "query{video(id:%s){broadcastType,resourceRestriction{id,type},game{id,name},title,createdAt}}"}`, id) + respBytes, err := gqlRequest(body) + if err != nil { + return GQLVideo{}, fmt.Errorf("error getting video: %w", err) + } + + var resp GQLVideoResponse + err = json.Unmarshal(respBytes, &resp) + if err != nil { + return GQLVideo{}, fmt.Errorf("error unmarshalling response: %w", err) + } + + return resp.Data.Video, nil +} + +func GQLGetMutedSegments(id string) ([]GQLMutedSegment, error) { + body := fmt.Sprintf(`{"operationName":"VideoPlayer_MutedSegmentsAlertOverlay","variables":{"vodID":"%s","includePrivate":false},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"c36e7400657815f4704e6063d265dff766ed8fc1590361c6d71e4368805e0b49"}}}`, id) + respBytes, err := gqlRequest(body) + if err != nil { + return nil, fmt.Errorf("error getting video muted segments: %w", err) + } + + var resp GQLMutedSegmentsResponse + err = json.Unmarshal(respBytes, &resp) + if err != nil { + return nil, fmt.Errorf("error unmarshalling response: %w", err) + } + + return resp.Data.Video.MuteInfo.MutedSegmentConnection.Nodes, nil +} + +func GQLGetChapters(id string) ([]GQLChapterEdge, error) { + body := fmt.Sprintf(`{"operationName":"VideoPlayer_ChapterSelectButtonVideo","variables":{"videoID":"%s","includePrivate":false},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"8d2793384aac3773beab5e59bd5d6f585aedb923d292800119e03d40cd0f9b41"}}}`, id) + respBytes, err := gqlRequest(body) + if err != nil { + return nil, fmt.Errorf("error getting video chapters: %w", err) + } + + var resp GQLChaptersResponse + err = json.Unmarshal(respBytes, &resp) + if err != nil { + return nil, fmt.Errorf("error unmarshalling response: %w", err) + } + + return resp.Data.Video.Moments.Edges, nil +} diff --git a/internal/twitch/twitch.go b/internal/twitch/twitch.go index beb5702f..f9f86d25 100644 --- a/internal/twitch/twitch.go +++ b/internal/twitch/twitch.go @@ -2,325 +2,14 @@ package twitch import ( "context" - "encoding/json" "fmt" - "io" "net/http" - "net/url" - "os" - - "github.com/rs/zerolog/log" - "github.com/zibbp/ganymede/ent" - "github.com/zibbp/ganymede/internal/chapter" - "github.com/zibbp/ganymede/internal/database" -) - -type Service struct { -} - -type TwitchVideoResponse struct { - Data []Video `json:"data"` - Pagination Pagination `json:"pagination"` -} - -type Video struct { - ID string `json:"id"` - StreamID string `json:"stream_id"` - UserID string `json:"user_id"` - UserLogin UserLogin `json:"user_login"` - UserName UserName `json:"user_name"` - Title string `json:"title"` - Description string `json:"description"` - CreatedAt string `json:"created_at"` - PublishedAt string `json:"published_at"` - URL string `json:"url"` - ThumbnailURL string `json:"thumbnail_url"` - Viewable Viewable `json:"viewable"` - ViewCount int64 `json:"view_count"` - Language Language `json:"language"` - Type Type `json:"type"` - Duration string `json:"duration"` - MutedSegments interface{} `json:"muted_segments"` -} - -type Pagination struct { - Cursor string `json:"cursor"` -} - -type Language string - -type Type string - -type UserLogin string - -type UserName string - -type Viewable string - -type AuthTokenResponse struct { - AccessToken string `json:"access_token"` - ExpiresIn int `json:"expires_in"` - TokenType string `json:"token_type"` -} - -type ChannelResponse struct { - Data []Channel `json:"data"` -} - -type Channel struct { - ID string 
`json:"id"` - Login string `json:"login"` - DisplayName string `json:"display_name"` - Type string `json:"type"` - BroadcasterType string `json:"broadcaster_type"` - Description string `json:"description"` - ProfileImageURL string `json:"profile_image_url"` - OfflineImageURL string `json:"offline_image_url"` - ViewCount int64 `json:"view_count"` - CreatedAt string `json:"created_at"` -} - -type VodResponse struct { - Data []Vod `json:"data"` - Pagination Pagination `json:"pagination"` -} - -type Vod struct { - ID string `json:"id"` - StreamID string `json:"stream_id"` - UserID string `json:"user_id"` - UserLogin string `json:"user_login"` - UserName string `json:"user_name"` - Title string `json:"title"` - Description string `json:"description"` - CreatedAt string `json:"created_at"` - PublishedAt string `json:"published_at"` - URL string `json:"url"` - ThumbnailURL string `json:"thumbnail_url"` - Viewable string `json:"viewable"` - ViewCount int64 `json:"view_count"` - Language string `json:"language"` - Type string `json:"type"` - Duration string `json:"duration"` - MutedSegments interface{} `json:"muted_segments"` - Chapters []chapter.Chapter `json:"chapters"` -} - -type Stream struct { - Data []Live `json:"data"` - Pagination Pagination `json:"pagination"` -} - -type Live struct { - ID string `json:"id"` - UserID string `json:"user_id"` - UserLogin string `json:"user_login"` - UserName string `json:"user_name"` - GameID string `json:"game_id"` - GameName string `json:"game_name"` - Type string `json:"type"` - Title string `json:"title"` - ViewerCount int64 `json:"viewer_count"` - StartedAt string `json:"started_at"` - Language string `json:"language"` - ThumbnailURL string `json:"thumbnail_url"` - TagIDS []string `json:"tag_ids"` - IsMature bool `json:"is_mature"` -} - -type Category struct { - ID string `json:"id"` - Name string `json:"name"` -} - -type twitchAPI struct{} -type TwitchAPI interface { - GetUserByLogin(login string) (Channel, error) -} - -var ( - API TwitchAPI = &twitchAPI{} ) -func NewService() *Service { - return &Service{} -} - -func Authenticate() error { - twitchClientID := os.Getenv("TWITCH_CLIENT_ID") - twitchClientSecret := os.Getenv("TWITCH_CLIENT_SECRET") - if twitchClientID == "" || twitchClientSecret == "" { - return fmt.Errorf("twitch client id or secret not set") - } - log.Debug().Msg("authenticating with twitch") - - client := &http.Client{} - - req, err := http.NewRequest("POST", "https://id.twitch.tv/oauth2/token", nil) - if err != nil { - return fmt.Errorf("failed to create request: %v", err) - } - req.Header.Set("Content-Type", "application/x-www-form-urlencoded") - - q := url.Values{} - q.Set("client_id", twitchClientID) - q.Set("client_secret", twitchClientSecret) - q.Set("grant_type", "client_credentials") - req.URL.RawQuery = q.Encode() - - resp, err := client.Do(req) - if err != nil { - return fmt.Errorf("failed to authenticate: %v", err) - } - - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - return fmt.Errorf("failed to authenticate: %v", resp) - } - - body, err := io.ReadAll(resp.Body) - if err != nil { - return fmt.Errorf("failed to read response body: %v", err) - } - - var authTokenResponse AuthTokenResponse - err = json.Unmarshal(body, &authTokenResponse) - if err != nil { - return fmt.Errorf("failed to unmarshal response: %v", err) - } - - // Set access token as env var - err = os.Setenv("TWITCH_ACCESS_TOKEN", authTokenResponse.AccessToken) - if err != nil { - return fmt.Errorf("failed to set env var: %v", err) - } - - 
log.Info().Msg("authenticated with twitch") - - return nil -} -func (t *twitchAPI) GetUserByLogin(cName string) (Channel, error) { - log.Debug().Msgf("getting user by login: %s", cName) - client := &http.Client{} - req, err := http.NewRequest("GET", fmt.Sprintf("https://api.twitch.tv/helix/users?login=%s", cName), nil) - if err != nil { - return Channel{}, fmt.Errorf("failed to create request: %v", err) - } - req.Header.Set("Client-ID", os.Getenv("TWITCH_CLIENT_ID")) - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", os.Getenv("TWITCH_ACCESS_TOKEN"))) - - resp, err := client.Do(req) - if err != nil { - return Channel{}, fmt.Errorf("failed to get user: %v", err) - } - - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return Channel{}, fmt.Errorf("failed to get user: %v", resp) - } - - body, err := io.ReadAll(resp.Body) - if err != nil { - return Channel{}, fmt.Errorf("failed to read response body: %v", err) - } - - var channelResponse ChannelResponse - err = json.Unmarshal(body, &channelResponse) - if err != nil { - return Channel{}, fmt.Errorf("failed to unmarshal response: %v", err) - } - - // Check if channel is populated - if len(channelResponse.Data) == 0 { - return Channel{}, fmt.Errorf("channel not found") - } - - return channelResponse.Data[0], nil -} - -func (s *Service) GetVodByID(vID string) (Vod, error) { - log.Debug().Msgf("getting twitch vod by id: %s", vID) - client := &http.Client{} - req, err := http.NewRequest("GET", "https://api.twitch.tv/helix/videos", nil) - if err != nil { - return Vod{}, fmt.Errorf("failed to create request: %v", err) - } - req.Header.Set("Client-ID", os.Getenv("TWITCH_CLIENT_ID")) - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", os.Getenv("TWITCH_ACCESS_TOKEN"))) - - q := req.URL.Query() - q.Add("id", vID) - req.URL.RawQuery = q.Encode() - - resp, err := client.Do(req) - if err != nil { - return Vod{}, fmt.Errorf("failed to get vod: %v", err) - } - - defer resp.Body.Close() - - body, err := io.ReadAll(resp.Body) - if err != nil { - return Vod{}, fmt.Errorf("failed to read response body: %v", err) - } - - if resp.StatusCode != http.StatusOK { - return Vod{}, fmt.Errorf("%s", body) - } - - var vodResponse VodResponse - err = json.Unmarshal(body, &vodResponse) - if err != nil { - return Vod{}, fmt.Errorf("failed to unmarshal response: %v", err) - } - - // Check if vod is populated - if len(vodResponse.Data) == 0 { - return Vod{}, fmt.Errorf("vod not found") - } - - return vodResponse.Data[0], nil -} - -func (s *Service) GetStreams(queryParams string) (Stream, error) { - log.Debug().Msgf("getting live streams using the following query param: %s", queryParams) - client := &http.Client{} - req, err := http.NewRequest("GET", fmt.Sprintf("https://api.twitch.tv/helix/streams%s", queryParams), nil) - if err != nil { - return Stream{}, fmt.Errorf("failed to create request: %v", err) - } - req.Header.Set("Client-ID", os.Getenv("TWITCH_CLIENT_ID")) - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", os.Getenv("TWITCH_ACCESS_TOKEN"))) - - resp, err := client.Do(req) - if err != nil { - return Stream{}, fmt.Errorf("failed to get twitch streams: %v", err) - } - - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return Stream{}, fmt.Errorf("failed to get twitch streams: %v", resp) - } - - body, err := io.ReadAll(resp.Body) - if err != nil { - return Stream{}, fmt.Errorf("failed to read response body: %v", err) - } - - var streamResponse Stream - err = json.Unmarshal(body, &streamResponse) - if err != nil 
{ - return Stream{}, fmt.Errorf("failed to unmarshal response: %v", err) - } - - return streamResponse, nil -} - -func CheckUserAccessToken(accessToken string) error { +// CheckUserAccessToken checks if the access token is valid by sending a GET request to the Twitch API +func CheckUserAccessToken(ctx context.Context, accessToken string) error { client := &http.Client{} - req, err := http.NewRequest("GET", "https://id.twitch.tv/oauth2/validate", nil) + req, err := http.NewRequestWithContext(ctx, "GET", "https://id.twitch.tv/oauth2/validate", nil) if err != nil { return fmt.Errorf("failed to create request: %v", err) } @@ -339,96 +28,3 @@ func CheckUserAccessToken(accessToken string) error { return nil } - -func GetVideosByUser(userID string, videoType string) ([]Video, error) { - client := &http.Client{} - req, err := http.NewRequest("GET", fmt.Sprintf("https://api.twitch.tv/helix/videos?user_id=%s&type=%s&first=100", userID, videoType), nil) - if err != nil { - return nil, fmt.Errorf("failed to create request: %v", err) - } - req.Header.Set("Client-ID", os.Getenv("TWITCH_CLIENT_ID")) - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", os.Getenv("TWITCH_ACCESS_TOKEN"))) - resp, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to get twitch videos: %v", err) - } - - defer resp.Body.Close() - - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read response body: %v", err) - } - - if resp.StatusCode != http.StatusOK { - log.Error().Err(err).Msgf("failed to get twitch videos: %v", string(body)) - return nil, fmt.Errorf("failed to get twitch videos: %v", resp) - } - - var videoResponse TwitchVideoResponse - err = json.Unmarshal(body, &videoResponse) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal response: %v", err) - } - - var videos []Video - videos = append(videos, videoResponse.Data...) - - // pagination - var cursor string - cursor = videoResponse.Pagination.Cursor - for cursor != "" { - response, err := getVideosByUserWithCursor(userID, videoType, cursor) - if err != nil { - return nil, fmt.Errorf("failed to get twitch videos: %v", err) - } - videos = append(videos, response.Data...) 
- cursor = response.Pagination.Cursor - } - - return videos, nil -} - -func getVideosByUserWithCursor(userID string, videoType string, cursor string) (*TwitchVideoResponse, error) { - log.Debug().Msgf("getting twitch videos for user: %s with type %s and cursor %s", userID, videoType, cursor) - client := &http.Client{} - req, err := http.NewRequest("GET", fmt.Sprintf("https://api.twitch.tv/helix/videos?user_id=%s&type=%s&first=100&after=%s", userID, videoType, cursor), nil) - if err != nil { - return nil, fmt.Errorf("failed to create request: %v", err) - } - req.Header.Set("Client-ID", os.Getenv("TWITCH_CLIENT_ID")) - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", os.Getenv("TWITCH_ACCESS_TOKEN"))) - resp, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to get twitch videos: %v", err) - } - - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("failed to get twitch videos: %v", resp) - } - - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read response body: %v", err) - } - - var videoResponse TwitchVideoResponse - err = json.Unmarshal(body, &videoResponse) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal response: %v", err) - } - - return &videoResponse, nil - -} - -func (s *Service) GetCategories() ([]*ent.TwitchCategory, error) { - categories, err := database.DB().Client.TwitchCategory.Query().All(context.Background()) - if err != nil { - return nil, fmt.Errorf("failed to get categories: %v", err) - } - - return categories, nil -} diff --git a/internal/utils/build.go b/internal/utils/build.go new file mode 100644 index 00000000..1d6e13e2 --- /dev/null +++ b/internal/utils/build.go @@ -0,0 +1,9 @@ +package utils + +import "time" + +var ( + Commit = "undefined" + BuildTime = "undefined" + StartTime = time.Now() +) diff --git a/internal/utils/enum.go b/internal/utils/enum.go index ea9165cb..15ec8ed2 100644 --- a/internal/utils/enum.go +++ b/internal/utils/enum.go @@ -16,15 +16,15 @@ func (Role) Values() (kinds []string) { return } -type VodPlatform string +type VideoPlatform string const ( - PlatformTwitch VodPlatform = "twitch" - PlatformYoutube VodPlatform = "youtube" + PlatformTwitch VideoPlatform = "twitch" + PlatformYoutube VideoPlatform = "youtube" ) -func (VodPlatform) Values() (kinds []string) { - for _, s := range []VodPlatform{PlatformTwitch, PlatformYoutube} { +func (VideoPlatform) Values() (kinds []string) { + for _, s := range []VideoPlatform{PlatformTwitch, PlatformYoutube} { kinds = append(kinds, string(s)) } return @@ -81,6 +81,10 @@ func (VodQuality) Values() (kinds []string) { return } +func (q VodQuality) String() string { + return string(q) +} + type PlaybackStatus string const ( @@ -94,3 +98,54 @@ func (PlaybackStatus) Values() (kinds []string) { } return } + +type TaskName string + +const ( + TaskCreateFolder TaskName = "task_vod_create_folder" + TaskDownloadThumbnail TaskName = "task_vod_download_thumbnail" + TaskSaveInfo TaskName = "task_vod_save_info" + TaskDownloadVideo TaskName = "task_video_download" + TaskDownloadLiveVideo TaskName = "task_live_video_download" // not used queue + TaskPostProcessVideo TaskName = "task_video_convert" + TaskMoveVideo TaskName = "task_video_move" + TaskDownloadChat TaskName = "task_chat_download" + TaskDownloadLiveChat TaskName = "task_live_chat_download" // not used queue + TaskConvertChat TaskName = "task_chat_convert" + TaskRenderChat TaskName = "task_chat_render" + TaskMoveChat TaskName = 
"task_chat_move" +) + +func (TaskName) Values() (kinds []string) { + for _, s := range []TaskName{TaskCreateFolder, TaskDownloadThumbnail, TaskSaveInfo, TaskDownloadVideo, TaskPostProcessVideo, TaskMoveVideo, TaskDownloadChat, TaskConvertChat, TaskRenderChat, TaskMoveChat} { + kinds = append(kinds, string(s)) + } + return +} + +func GetTaskName(s string) TaskName { + switch s { + case string(TaskCreateFolder): + return TaskCreateFolder + case string(TaskDownloadThumbnail): + return TaskDownloadThumbnail + case string(TaskSaveInfo): + return TaskSaveInfo + case string(TaskDownloadVideo): + return TaskDownloadVideo + case string(TaskPostProcessVideo): + return TaskPostProcessVideo + case string(TaskMoveVideo): + return TaskMoveVideo + case string(TaskDownloadChat): + return TaskDownloadChat + case string(TaskConvertChat): + return TaskConvertChat + case string(TaskRenderChat): + return TaskRenderChat + case string(TaskMoveChat): + return TaskMoveChat + default: + return "" + } +} diff --git a/internal/utils/file.go b/internal/utils/file.go index 083159d7..19626bfa 100644 --- a/internal/utils/file.go +++ b/internal/utils/file.go @@ -1,6 +1,7 @@ package utils import ( + "context" "encoding/json" "fmt" "io" @@ -12,21 +13,58 @@ import ( "github.com/rs/zerolog/log" ) -// CreateFolder - creates folder if it doesn't exist -// Adds base directory to path - supply with everything after /vods/ -func CreateFolder(path string) error { - log.Debug().Msgf("creating folder: %s", path) - err := os.MkdirAll(fmt.Sprintf("/vods/%s", path), os.ModePerm) +// Create a directory given the path +func CreateDirectory(path string) error { + err := os.MkdirAll(path, os.ModePerm) if err != nil { return err } return nil } -// DownloadFile - downloads file from url to destination -// Adds base directory to path - supply with everything after /vods/ -// DownloadFile("http://img", "channel", "profile.png") -func DownloadFile(url, path, filename string) error { +// Delete a directory given the path +func DeleteDirectory(path string) error { + err := os.RemoveAll(path) + if err != nil { + return err + } + return nil +} + +// DownloadAndSaveFile - downloads file from url to destination +func DownloadAndSaveFile(url, path string) error { + client := &http.Client{} + + // Send GET request to the URL + resp, err := client.Get(url) + if err != nil { + return fmt.Errorf("error making GET request: %v", err) + } + defer resp.Body.Close() + + // Check if the response status code is OK (200) + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("bad status: %s", resp.Status) + } + + // Create the local file + out, err := os.Create(path) + if err != nil { + return fmt.Errorf("error creating file: %v", err) + } + defer out.Close() + + // Write the response body to the local file + _, err = io.Copy(out, resp.Body) + if err != nil { + return fmt.Errorf("error writing to file: %v", err) + } + + return nil +} + +// DownloadFile downloads file from url to the path provided +func DownloadFile(url, path string) error { log.Debug().Msgf("downloading file: %s", url) // Get response bytes from URL resp, err := http.Get(url) @@ -39,7 +77,7 @@ func DownloadFile(url, path, filename string) error { } // Create file - file, err := os.Create(fmt.Sprintf("/vods/%s/%s", path, filename)) + file, err := os.Create(path) if err != nil { return fmt.Errorf("error creating file: %v", err) } @@ -53,43 +91,76 @@ func DownloadFile(url, path, filename string) error { return nil } -func WriteJson(j interface{}, path string, filename string) error { +func 
WriteJsonFile(j interface{}, path string) error { data, err := json.Marshal(j) if err != nil { - log.Error().Msgf("error marshalling json: %v", err) + return err } - err = os.WriteFile(fmt.Sprintf("/vods/%s/%s", path, filename), data, 0644) + + err = os.WriteFile(path, data, 0644) if err != nil { - log.Error().Msgf("error writing json: %v", err) + return err } return nil } -func MoveFile(sourcePath, destPath string) error { - log.Debug().Msgf("moving file: %s to %s", sourcePath, destPath) - inputFile, err := os.Open(sourcePath) +// MoveFile - moves file from source to destination. +// +// os.Rename is used if possible, and falls back to copy and delete if it fails (e.g. cross-device link) +func MoveFile(ctx context.Context, source, dest string) error { + // Try to rename the file first + err := os.Rename(source, dest) + if err == nil { + return nil + } + + // If rename fails (e.g. cross-device link), fall back to copy and delete + srcFile, err := os.Open(source) if err != nil { - return fmt.Errorf("error opening file: %v", err) + return fmt.Errorf("failed to open source file: %w", err) } - outputFile, err := os.Create(destPath) + defer srcFile.Close() + + destFile, err := os.Create(dest) if err != nil { - inputFile.Close() - return fmt.Errorf("error creating file: %v", err) + return fmt.Errorf("failed to create destination file: %w", err) } - defer outputFile.Close() - _, err = io.Copy(outputFile, inputFile) - inputFile.Close() + defer destFile.Close() + + // Use io.Copy with context to respect cancellation + _, err = io.Copy(destFile, &contextReader{ctx: ctx, r: srcFile}) if err != nil { - return fmt.Errorf("writing to output file failed: %v", err) + destFile.Close() + os.Remove(dest) // Clean up the partially written file + return fmt.Errorf("failed to copy file: %w", err) } - // Copy was successful - delete source file - err = os.Remove(sourcePath) + + // Close files before attempting to remove the source + srcFile.Close() + destFile.Close() + + // Remove the source file + err = os.Remove(source) if err != nil { - log.Info().Msgf("error deleting source file: %v", err) + return fmt.Errorf("failed to remove source file: %w", err) } + return nil } +// contextReader wraps an io.Reader with a context +type contextReader struct { + ctx context.Context + r io.Reader +} + +func (cr *contextReader) Read(p []byte) (n int, err error) { + if err := cr.ctx.Err(); err != nil { + return 0, err + } + return cr.r.Read(p) +} + func CopyFile(sourcePath, destPath string) error { log.Debug().Msgf("moving file: %s to %s", sourcePath, destPath) inputFile, err := os.Open(sourcePath) @@ -110,6 +181,46 @@ func CopyFile(sourcePath, destPath string) error { return nil } +// MoveDirectory - moves directory from source to destination. 
+func MoveDirectory(ctx context.Context, source, dest string) error { + // Create the destination directory + if err := os.MkdirAll(dest, os.ModePerm); err != nil { + return fmt.Errorf("failed to create destination directory: %w", err) + } + + // Walk through the source directory + return filepath.Walk(source, func(path string, info os.FileInfo, err error) error { + // Check if the context has been canceled + if err := ctx.Err(); err != nil { + return err + } + + if err != nil { + return fmt.Errorf("error accessing path %q: %w", path, err) + } + + // Compute the relative path + relPath, err := filepath.Rel(source, path) + if err != nil { + return fmt.Errorf("failed to get relative path for %q: %w", path, err) + } + + destPath := filepath.Join(dest, relPath) + + if info.IsDir() { + // Create the directory in the destination + return os.MkdirAll(destPath, info.Mode()) + } + + // Move the file + if err := MoveFile(ctx, path, destPath); err != nil { + return fmt.Errorf("failed to move file %q: %w", path, err) + } + + return nil + }) +} + func MoveFolder(src string, dst string) error { // Check if the source path exists if _, err := os.Stat(src); os.IsNotExist(err) { @@ -170,10 +281,9 @@ func MoveFolder(src string, dst string) error { } func DeleteFile(path string) error { - log.Debug().Msgf("deleting file: %s", path) err := os.Remove(path) if err != nil { - return fmt.Errorf("error deleting file: %v", err) + return err } return nil } @@ -189,37 +299,18 @@ func ReadLastLines(path string, lines int) ([]byte, error) { return out, nil } -func FileExists(path string) bool { - if _, err := os.Stat(path); err != nil { - if os.IsNotExist(err) { - return false - } - } - return true +func FileExists(filename string) bool { + _, err := os.Stat(filename) + return !os.IsNotExist(err) } func ReadChatFile(path string) ([]byte, error) { - // Check if file is cached - //cached, found := cache.Cache().Get(path) - //if found { - // log.Debug().Msgf("using cached file: %s", path) - // return cached.([]byte), nil - //} - data, err := os.ReadFile(path) if err != nil { return nil, fmt.Errorf("error reading chat file: %v", err) } - // Cache file - //err = cache.Cache().Set(path, data, 5*time.Minute) - //if err != nil { - // - // return nil, err - //} - //log.Debug().Msgf("set cache for file: %s", path) - return data, nil } diff --git a/internal/utils/tdl.go b/internal/utils/tdl.go index c0a09777..52ca9635 100644 --- a/internal/utils/tdl.go +++ b/internal/utils/tdl.go @@ -80,7 +80,7 @@ type LiveChat struct { Comments []LiveComment `json:"comments"` } -func ConvertTwitchLiveChatToTDLChat(path string, channelName string, videoID string, videoExternalID string, channelID int, chatStartTime time.Time, previousVideoID string) error { +func ConvertTwitchLiveChatToTDLChat(path string, outPath string, channelName string, videoID string, videoExternalID string, channelID int, chatStartTime time.Time, previousVideoID string) error { log.Debug().Str("chat_file", path).Msg("Converting live Twitch chat to TDL chat for rendering") @@ -168,7 +168,7 @@ func ConvertTwitchLiveChatToTDLChat(path string, channelName string, videoID str }, } - if (liveComment.MessageType == "highlighted_message") { + if liveComment.MessageType == "highlighted_message" { var highlightString = "highlighted-message" tdlComment.Message.UserNoticeParams.MsgID = &highlightString } @@ -179,6 +179,9 @@ func ConvertTwitchLiveChatToTDLChat(path string, channelName string, videoID str Emoticon: nil, }) + // set default offset value for this live comment + 
message_is_offset := false + // parse emotes, creating fragments with positions emoteFragments := []Fragment{} if liveComment.Emotes != nil { @@ -198,8 +201,9 @@ func ConvertTwitchLiveChatToTDLChat(path string, channelName string, videoID str // ensure that the sliced string equals the emote // sometimes the output of chat-downloader will not include a unicode character when calculating positions causing an offset in positions - if slicedEmote != liveCommentEmote.Name { + if slicedEmote != liveCommentEmote.Name || message_is_offset { log.Debug().Str("message_id", liveComment.MessageID).Msg("emote position mismatch detected while converting chat") + message_is_offset = true // attempt to get emote position in comment message pos1, pos2, found := findSubstringPositions(liveComment.Message, liveCommentEmote.Name, i+1) @@ -310,7 +314,7 @@ func ConvertTwitchLiveChatToTDLChat(path string, channelName string, videoID str tdlChat.Video.End = int64(lastComment.ContentOffsetSeconds) // write chat - err = writeTDLChat(tdlChat, videoID, videoExternalID) + err = writeTDLChat(tdlChat, outPath) if err != nil { return err } @@ -319,12 +323,12 @@ func ConvertTwitchLiveChatToTDLChat(path string, channelName string, videoID str } -func writeTDLChat(parsedChat TDLChat, vID string, vExtID string) error { +func writeTDLChat(parsedChat TDLChat, outPath string) error { data, err := json.Marshal(parsedChat) if err != nil { return fmt.Errorf("failed to marshal parsed comments: %v", err) } - err = os.WriteFile(fmt.Sprintf("/tmp/%s_%s-chat-convert.json", vExtID, vID), data, 0644) + err = os.WriteFile(outPath, data, 0644) if err != nil { return fmt.Errorf("failed to write parsed comments: %v", err) } diff --git a/internal/utils/utils.go b/internal/utils/utils.go index fc956955..d7d77569 100644 --- a/internal/utils/utils.go +++ b/internal/utils/utils.go @@ -2,6 +2,7 @@ package utils import ( "fmt" + "path/filepath" "runtime" "strings" "time" @@ -39,6 +40,7 @@ func SanitizeFileName(fileName string) string { return fileName } +// Contains returns true if the slice contains the string func Contains(s []string, e string) bool { for _, a := range s { if strings.EqualFold(a, e) { @@ -57,3 +59,21 @@ func SecondsToHHMMSS(seconds int) string { return fmt.Sprintf("%02d:%02d:%02d", hours, minutes, seconds) } + +// GetPathBefore returns the path before the delimiter +func GetPathBefore(path, delimiter string) string { + index := strings.Index(path, delimiter) + if index == -1 { + return path + } + return path[:index] +} + +// GetPathBeforePartial returns the path before the partialMatch +func GetPathBeforePartial(fullPath, partialMatch string) string { + index := strings.Index(strings.ToLower(fullPath), strings.ToLower(partialMatch)) + if index == -1 { + return fullPath + } + return filepath.Dir(fullPath[:index]) +} diff --git a/internal/vod/tasks.go b/internal/vod/tasks.go new file mode 100644 index 00000000..f5adef23 --- /dev/null +++ b/internal/vod/tasks.go @@ -0,0 +1,62 @@ +package vod + +import ( + "context" + "net/http" + "time" + + "github.com/labstack/echo/v4" + "github.com/rs/zerolog/log" + "github.com/zibbp/ganymede/ent/channel" + entChannel "github.com/zibbp/ganymede/ent/channel" + entVod "github.com/zibbp/ganymede/ent/vod" + "github.com/zibbp/ganymede/internal/database" +) + +func PruneVideos(ctx context.Context, store *database.Database) error { + vodService := &Service{Store: database.DB()} + req := &http.Request{} + echoCtx := echo.New().NewContext(req, nil) + echoCtx.SetRequest(req.WithContext(ctx)) + + // fetch 
all channels that have retention enable + channels, err := store.Client.Channel.Query().Where(channel.Retention(true)).All(context.Background()) + if err != nil { + log.Error().Err(err).Msg("error fetching channels") + return err + } + log.Debug().Msgf("found %d channels with retention enabled", len(channels)) + + // loop over channels + for _, channel := range channels { + log.Debug().Msgf("Processing channel %s", channel.ID) + // fetch all videos for channel + videos, err := store.Client.Vod.Query().Where(entVod.HasChannelWith(entChannel.ID(channel.ID))).All(context.Background()) + if err != nil { + log.Error().Err(err).Msgf("Error fetching videos for channel %s", channel.ID) + continue + } + + // loop over videos + for _, video := range videos { + // check if video is locked + if video.Locked { + log.Debug().Str("video_id", video.ID.String()).Msg("skipping locked video") + continue + } + // check if video is older than retention + if video.CreatedAt.Add(time.Duration(channel.RetentionDays) * 24 * time.Hour).Before(time.Now()) { + // delete video + log.Info().Str("video_id", video.ID.String()).Msg("deleting video as it is older than retention") + err := vodService.DeleteVod(echoCtx, video.ID, true) + if err != nil { + log.Error().Err(err).Msgf("Error deleting video %s", video.ID) + continue + } + } + } + + } + + return nil +} diff --git a/internal/vod/vod.go b/internal/vod/vod.go index 4ea9cd2e..a33c71e9 100644 --- a/internal/vod/vod.go +++ b/internal/vod/vod.go @@ -3,9 +3,11 @@ package vod import ( "context" "encoding/json" + "errors" "fmt" "math" "os" + "path/filepath" "runtime" "sort" "strconv" @@ -13,6 +15,7 @@ import ( "github.com/google/uuid" "github.com/labstack/echo/v4" + "github.com/riverqueue/river/rivertype" "github.com/rs/zerolog/log" "github.com/zibbp/ganymede/ent" "github.com/zibbp/ganymede/ent/channel" @@ -22,50 +25,56 @@ import ( "github.com/zibbp/ganymede/internal/cache" "github.com/zibbp/ganymede/internal/chat" "github.com/zibbp/ganymede/internal/database" + "github.com/zibbp/ganymede/internal/platform" + "github.com/zibbp/ganymede/internal/tasks" + tasks_client "github.com/zibbp/ganymede/internal/tasks/client" "github.com/zibbp/ganymede/internal/utils" ) type Service struct { - Store *database.Database + Store *database.Database + RiverClient *tasks_client.RiverClient + Platform platform.Platform } -func NewService(store *database.Database) *Service { - return &Service{Store: store} +func NewService(store *database.Database, riverClient *tasks_client.RiverClient, platform platform.Platform) *Service { + return &Service{Store: store, RiverClient: riverClient, Platform: platform} } type Vod struct { - ID uuid.UUID `json:"id"` - ExtID string `json:"ext_id"` - Platform utils.VodPlatform `json:"platform"` - Type utils.VodType `json:"type"` - Title string `json:"title"` - Duration int `json:"duration"` - Views int `json:"views"` - Resolution string `json:"resolution"` - Processing bool `json:"processing"` - ThumbnailPath string `json:"thumbnail_path"` - WebThumbnailPath string `json:"web_thumbnail_path"` - VideoPath string `json:"video_path"` - VideoHLSPath string `json:"video_hls_path"` - ChatPath string `json:"chat_path"` - LiveChatPath string `json:"live_chat_path"` - LiveChatConvertPath string `json:"live_chat_convert_path"` - ChatVideoPath string `json:"chat_video_path"` - InfoPath string `json:"info_path"` - CaptionPath string `json:"caption_path"` - StreamedAt time.Time `json:"streamed_at"` - UpdatedAt time.Time `json:"updated_at"` - CreatedAt time.Time 
`json:"created_at"` - FolderName string `json:"folder_name"` - FileName string `json:"file_name"` - Locked bool `json:"locked"` - TmpVideoDownloadPath string `json:"tmp_video_download_path"` - TmpVideoConvertPath string `json:"tmp_video_convert_path"` - TmpChatDownloadPath string `json:"tmp_chat_download_path"` - TmpLiveChatDownloadPath string `json:"tmp_live_chat_download_path"` - TmpLiveChatConvertPath string `json:"tmp_live_chat_convert_path"` - TmpChatRenderPath string `json:"tmp_chat_render_path"` - TmpVideoHLSPath string `json:"tmp_video_hls_path"` + ID uuid.UUID `json:"id"` + ExtID string `json:"ext_id"` + ExtStreamID string `json:"ext_stream_id"` + Platform utils.VideoPlatform `json:"platform"` + Type utils.VodType `json:"type"` + Title string `json:"title"` + Duration int `json:"duration"` + Views int `json:"views"` + Resolution string `json:"resolution"` + Processing bool `json:"processing"` + ThumbnailPath string `json:"thumbnail_path"` + WebThumbnailPath string `json:"web_thumbnail_path"` + VideoPath string `json:"video_path"` + VideoHLSPath string `json:"video_hls_path"` + ChatPath string `json:"chat_path"` + LiveChatPath string `json:"live_chat_path"` + LiveChatConvertPath string `json:"live_chat_convert_path"` + ChatVideoPath string `json:"chat_video_path"` + InfoPath string `json:"info_path"` + CaptionPath string `json:"caption_path"` + StreamedAt time.Time `json:"streamed_at"` + UpdatedAt time.Time `json:"updated_at"` + CreatedAt time.Time `json:"created_at"` + FolderName string `json:"folder_name"` + FileName string `json:"file_name"` + Locked bool `json:"locked"` + TmpVideoDownloadPath string `json:"tmp_video_download_path"` + TmpVideoConvertPath string `json:"tmp_video_convert_path"` + TmpChatDownloadPath string `json:"tmp_chat_download_path"` + TmpLiveChatDownloadPath string `json:"tmp_live_chat_download_path"` + TmpLiveChatConvertPath string `json:"tmp_live_chat_convert_path"` + TmpChatRenderPath string `json:"tmp_chat_render_path"` + TmpVideoHLSPath string `json:"tmp_video_hls_path"` } type Pagination struct { @@ -83,7 +92,7 @@ type MutedSegment struct { } func (s *Service) CreateVod(vodDto Vod, cUUID uuid.UUID) (*ent.Vod, error) { - v, err := s.Store.Client.Vod.Create().SetID(vodDto.ID).SetChannelID(cUUID).SetExtID(vodDto.ExtID).SetPlatform(vodDto.Platform).SetType(vodDto.Type).SetTitle(vodDto.Title).SetDuration(vodDto.Duration).SetViews(vodDto.Views).SetResolution(vodDto.Resolution).SetProcessing(vodDto.Processing).SetThumbnailPath(vodDto.ThumbnailPath).SetWebThumbnailPath(vodDto.WebThumbnailPath).SetVideoPath(vodDto.VideoPath).SetChatPath(vodDto.ChatPath).SetChatVideoPath(vodDto.ChatVideoPath).SetInfoPath(vodDto.InfoPath).SetCaptionPath(vodDto.CaptionPath).SetStreamedAt(vodDto.StreamedAt).SetFolderName(vodDto.FolderName).SetFileName(vodDto.FileName).SetLocked(vodDto.Locked).SetTmpVideoDownloadPath(vodDto.TmpVideoDownloadPath).SetTmpVideoConvertPath(vodDto.TmpVideoConvertPath).SetTmpChatDownloadPath(vodDto.TmpChatDownloadPath).SetTmpLiveChatDownloadPath(vodDto.TmpLiveChatDownloadPath).SetTmpLiveChatConvertPath(vodDto.TmpLiveChatConvertPath).SetTmpChatRenderPath(vodDto.TmpChatRenderPath).SetLiveChatPath(vodDto.LiveChatPath).SetLiveChatConvertPath(vodDto.LiveChatConvertPath).SetVideoHlsPath(vodDto.VideoHLSPath).SetTmpVideoHlsPath(vodDto.TmpVideoHLSPath).Save(context.Background()) + v, err := 
s.Store.Client.Vod.Create().SetID(vodDto.ID).SetChannelID(cUUID).SetExtID(vodDto.ExtID).SetExtStreamID(vodDto.ExtStreamID).SetPlatform(vodDto.Platform).SetType(vodDto.Type).SetTitle(vodDto.Title).SetDuration(vodDto.Duration).SetViews(vodDto.Views).SetResolution(vodDto.Resolution).SetProcessing(vodDto.Processing).SetThumbnailPath(vodDto.ThumbnailPath).SetWebThumbnailPath(vodDto.WebThumbnailPath).SetVideoPath(vodDto.VideoPath).SetChatPath(vodDto.ChatPath).SetChatVideoPath(vodDto.ChatVideoPath).SetInfoPath(vodDto.InfoPath).SetCaptionPath(vodDto.CaptionPath).SetStreamedAt(vodDto.StreamedAt).SetFolderName(vodDto.FolderName).SetFileName(vodDto.FileName).SetLocked(vodDto.Locked).SetTmpVideoDownloadPath(vodDto.TmpVideoDownloadPath).SetTmpVideoConvertPath(vodDto.TmpVideoConvertPath).SetTmpChatDownloadPath(vodDto.TmpChatDownloadPath).SetTmpLiveChatDownloadPath(vodDto.TmpLiveChatDownloadPath).SetTmpLiveChatConvertPath(vodDto.TmpLiveChatConvertPath).SetTmpChatRenderPath(vodDto.TmpChatRenderPath).SetLiveChatPath(vodDto.LiveChatPath).SetLiveChatConvertPath(vodDto.LiveChatConvertPath).SetVideoHlsPath(vodDto.VideoHLSPath).SetTmpVideoHlsPath(vodDto.TmpVideoHLSPath).Save(context.Background()) if err != nil { log.Debug().Err(err).Msg("error creating vod") if _, ok := err.(*ent.ConstraintError); ok { @@ -174,12 +183,65 @@ func (s *Service) DeleteVod(c echo.Context, vodID uuid.UUID, deleteFiles bool) e // delete files if deleteFiles { log.Debug().Msgf("deleting files for vod %s", v.ID) - path := fmt.Sprintf("/vods/%s/%s", v.Edges.Channel.Name, v.FolderName) - err := utils.DeleteFolder(path) - if err != nil { - log.Debug().Err(err).Msg("error deleting files") - return err + + path := filepath.Dir(filepath.Clean(v.VideoPath)) + + if err := utils.DeleteDirectory(path); err != nil { + log.Error().Err(err).Msg("error deleting directory") + return fmt.Errorf("error deleting directory: %v", err) + } + + // attempt to delete temp files + if err := utils.DeleteFile(v.TmpVideoDownloadPath); err != nil { + if errors.Is(err, os.ErrNotExist) { + log.Debug().Msgf("temp file %s does not exist", v.TmpVideoDownloadPath) + } else { + return err + } } + if err := utils.DeleteFile(v.TmpVideoConvertPath); err != nil { + if errors.Is(err, os.ErrNotExist) { + log.Debug().Msgf("temp file %s does not exist", v.TmpVideoConvertPath) + } else { + return err + } + } + if err := utils.DeleteDirectory(v.TmpVideoHlsPath); err != nil { + if errors.Is(err, os.ErrNotExist) { + log.Debug().Msgf("temp file %s does not exist", v.TmpVideoHlsPath) + } else { + return err + } + } + if err := utils.DeleteFile(v.TmpChatDownloadPath); err != nil { + if errors.Is(err, os.ErrNotExist) { + log.Debug().Msgf("temp file %s does not exist", v.TmpChatDownloadPath) + } else { + return err + } + } + if err := utils.DeleteFile(v.TmpChatRenderPath); err != nil { + if errors.Is(err, os.ErrNotExist) { + log.Debug().Msgf("temp file %s does not exist", v.TmpChatRenderPath) + } else { + return err + } + } + if err := utils.DeleteFile(v.TmpLiveChatConvertPath); err != nil { + if errors.Is(err, os.ErrNotExist) { + log.Debug().Msgf("temp file %s does not exist", v.TmpLiveChatConvertPath) + } else { + return err + } + } + if err := utils.DeleteFile(v.TmpLiveChatDownloadPath); err != nil { + if errors.Is(err, os.ErrNotExist) { + log.Debug().Msgf("temp file %s does not exist", v.TmpLiveChatDownloadPath) + } else { + return err + } + } + } err = s.Store.Client.Vod.DeleteOneID(vodID).Exec(c.Request().Context()) @@ -191,7 +253,7 @@ func (s *Service) DeleteVod(c echo.Context, 
vodID uuid.UUID, deleteFiles bool) e } func (s *Service) UpdateVod(c echo.Context, vodID uuid.UUID, vodDto Vod, cUUID uuid.UUID) (*ent.Vod, error) { - v, err := s.Store.Client.Vod.UpdateOneID(vodID).SetChannelID(cUUID).SetExtID(vodDto.ExtID).SetPlatform(vodDto.Platform).SetType(vodDto.Type).SetTitle(vodDto.Title).SetDuration(vodDto.Duration).SetViews(vodDto.Views).SetResolution(vodDto.Resolution).SetProcessing(vodDto.Processing).SetThumbnailPath(vodDto.ThumbnailPath).SetWebThumbnailPath(vodDto.WebThumbnailPath).SetVideoPath(vodDto.VideoPath).SetChatPath(vodDto.ChatPath).SetChatVideoPath(vodDto.ChatVideoPath).SetInfoPath(vodDto.InfoPath).SetCaptionPath(vodDto.CaptionPath).SetStreamedAt(vodDto.StreamedAt).SetLocked(vodDto.Locked).Save(c.Request().Context()) + v, err := s.Store.Client.Vod.UpdateOneID(vodID).SetChannelID(cUUID).SetExtID(vodDto.ExtID).SetExtID(vodDto.ExtID).SetPlatform(vodDto.Platform).SetType(vodDto.Type).SetTitle(vodDto.Title).SetDuration(vodDto.Duration).SetViews(vodDto.Views).SetResolution(vodDto.Resolution).SetProcessing(vodDto.Processing).SetThumbnailPath(vodDto.ThumbnailPath).SetWebThumbnailPath(vodDto.WebThumbnailPath).SetVideoPath(vodDto.VideoPath).SetChatPath(vodDto.ChatPath).SetChatVideoPath(vodDto.ChatVideoPath).SetInfoPath(vodDto.InfoPath).SetCaptionPath(vodDto.CaptionPath).SetStreamedAt(vodDto.StreamedAt).SetLocked(vodDto.Locked).Save(c.Request().Context()) if err != nil { log.Debug().Err(err).Msg("error updating vod") @@ -306,6 +368,12 @@ func (s *Service) GetVodsPagination(c echo.Context, limit int, offset int, chann return pagination, nil } +func (s *Service) GenerateStaticThumbnail(ctx context.Context, videoID uuid.UUID) (*rivertype.JobInsertResult, error) { + return s.RiverClient.Client.Insert(ctx, tasks.GenerateStaticThumbnailArgs{ + VideoId: videoID.String(), + }, nil) +} + func (s *Service) GetUserIdFromChat(c echo.Context, vodID uuid.UUID) (*int64, error) { v, err := s.Store.Client.Vod.Query().Where(vod.ID(vodID)).Only(c.Request().Context()) if err != nil { @@ -344,12 +412,6 @@ func (s *Service) GetUserIdFromChat(c echo.Context, vodID uuid.UUID) (*int64, er } func (s *Service) GetVodChatComments(c echo.Context, vodID uuid.UUID, start float64, end float64) (*[]chat.Comment, error) { - envDeployment := os.Getenv("ENV") - - if envDeployment == "development" { - utils.PrintMemUsage() - } - v, err := s.Store.Client.Vod.Query().Where(vod.ID(vodID)).Only(c.Request().Context()) if err != nil { log.Debug().Err(err).Msg("error getting vod chat") @@ -419,20 +481,10 @@ func (s *Service) GetVodChatComments(c echo.Context, vodID uuid.UUID, start floa defer runtime.GC() - if envDeployment == "development" { - utils.PrintMemUsage() - } - return &filteredComments, nil } func (s *Service) GetNumberOfVodChatCommentsFromTime(c echo.Context, vodID uuid.UUID, start float64, commentCount int64) (*[]chat.Comment, error) { - envDeployment := os.Getenv("ENV") - - if envDeployment == "development" { - utils.PrintMemUsage() - } - v, err := s.Store.Client.Vod.Query().Where(vod.ID(vodID)).Only(c.Request().Context()) if err != nil { log.Debug().Err(err).Msg("error getting vod chat") @@ -509,198 +561,157 @@ func (s *Service) GetNumberOfVodChatCommentsFromTime(c echo.Context, vodID uuid. 
comments = nil defer runtime.GC() - if envDeployment == "development" { - utils.PrintMemUsage() - } - return &filteredComments, nil } -func (s *Service) GetVodChatEmotes(c echo.Context, vodID uuid.UUID) (*chat.GanymedeEmotes, error) { - v, err := s.Store.Client.Vod.Query().Where(vod.ID(vodID)).Only(c.Request().Context()) +func (s *Service) GetChatEmotes(ctx context.Context, vodID uuid.UUID) (*platform.Emotes, error) { + v, err := s.Store.Client.Vod.Query().Where(vod.ID(vodID)).Only(ctx) if err != nil { - log.Debug().Err(err).Msg("error getting vod chat emotes") - return nil, fmt.Errorf("error getting vod chat emotes: %v", err) + return nil, err } data, err := utils.ReadChatFile(v.ChatPath) if err != nil { - log.Debug().Err(err).Msg("error getting vod chat emotes") - return nil, fmt.Errorf("error getting vod chat emotes: %v", err) + return nil, fmt.Errorf("error reading chat file: %v", err) } var chatData *chat.ChatOnlyEmotes err = json.Unmarshal(data, &chatData) if err != nil { - log.Debug().Err(err).Msg("error getting vod chat emotes") - return nil, fmt.Errorf("error getting vod chat emotes: %v", err) + return nil, fmt.Errorf("error unmarshalling chat data: %v", err) } defer runtime.GC() - var ganymedeEmotes chat.GanymedeEmotes + var emotes platform.Emotes + + // get streamer id from chat + streamerId, err := getStreamerIdFromInterface(chatData.Streamer.ID) + if err != nil { + return nil, err + } switch { + // check if emotes are embedded in the 'emotes' struct case len(chatData.Emotes.FirstParty) > 0 && len(chatData.Emotes.ThirdParty) > 0: - log.Debug().Msgf("VOD %s chat playback embedded emotes found in 'emotes'", vodID) + log.Debug().Str("video_id", v.ID.String()).Msg("chat emotes are embedded in 'emotes' struct") + // Loop through first party emotes and add them to the emotes slice for _, emote := range chatData.Emotes.FirstParty { - var ganymedeEmote chat.GanymedeEmote - ganymedeEmote.Name = fmt.Sprint(emote.Name) - ganymedeEmote.ID = emote.ID - ganymedeEmote.URL = emote.Data - ganymedeEmote.Type = "embed" - ganymedeEmote.Width = emote.Width - ganymedeEmote.Height = emote.Height - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, ganymedeEmote) - } - // Loop through third party emotes + emotes.Emotes = append(emotes.Emotes, platform.Emote{ + ID: emote.ID, + Name: fmt.Sprint(emote.Name), + URL: emote.Data, + Width: emote.Width, + Height: emote.Height, + Type: "embed", + }) + } + // Loop through third party emotes and add them to the emotes slice for _, emote := range chatData.Emotes.ThirdParty { - var ganymedeEmote chat.GanymedeEmote - ganymedeEmote.Name = fmt.Sprint(emote.Name) - ganymedeEmote.ID = emote.ID - ganymedeEmote.URL = emote.Data - ganymedeEmote.Type = "embed" - ganymedeEmote.Width = emote.Width - ganymedeEmote.Height = emote.Height - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, ganymedeEmote) + emotes.Emotes = append(emotes.Emotes, platform.Emote{ + ID: emote.ID, + Name: fmt.Sprint(emote.Name), + URL: emote.Data, + Width: emote.Width, + Height: emote.Height, + Type: "embed", + }) } case len(chatData.EmbeddedData.FirstParty) > 0 && len(chatData.EmbeddedData.ThirdParty) > 0: - log.Debug().Msgf("VOD %s chat playback embedded emotes found in 'emebeddedData'", vodID) + log.Debug().Str("video_id", v.ID.String()).Msg("chat emotes are embedded in 'embeddedData' struct") + // Loop through first party emotes and add them to the emotes slice for _, emote := range chatData.EmbeddedData.FirstParty { - var ganymedeEmote chat.GanymedeEmote - ganymedeEmote.Name = 
fmt.Sprint(emote.Name) - ganymedeEmote.ID = emote.ID - ganymedeEmote.URL = emote.Data - ganymedeEmote.Type = "embed" - ganymedeEmote.Width = emote.Width - ganymedeEmote.Height = emote.Height - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, ganymedeEmote) - } - // Loop through third party emotes + emotes.Emotes = append(emotes.Emotes, platform.Emote{ + ID: emote.ID, + Name: fmt.Sprint(emote.Name), + URL: emote.Data, + Width: emote.Width, + Height: emote.Height, + Type: "embed", + }) + } + // Loop through third party emotes and add them to the emotes slice for _, emote := range chatData.EmbeddedData.ThirdParty { - var ganymedeEmote chat.GanymedeEmote - ganymedeEmote.Name = fmt.Sprint(emote.Name) - ganymedeEmote.ID = emote.ID - ganymedeEmote.URL = emote.Data - ganymedeEmote.Type = "embed" - ganymedeEmote.Width = emote.Width - ganymedeEmote.Height = emote.Height - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, ganymedeEmote) + emotes.Emotes = append(emotes.Emotes, platform.Emote{ + ID: emote.ID, + Name: fmt.Sprint(emote.Name), + URL: emote.Data, + Width: emote.Width, + Height: emote.Height, + Type: "embed", + }) } + // no embedded emotes; fetch emotes from remote providers default: - log.Debug().Msgf("VOD %s chat playback embedded emotes not found, fetching emotes from providers", vodID) + log.Debug().Str("video_id", v.ID.String()).Msg("chat emotes are not embedded; fetching emotes from remote providers") - twitchGlobalEmotes, err := chat.GetTwitchGlobalEmotes() + // get platform global emotes + globalEmotes, err := s.Platform.GetGlobalEmotes(ctx) if err != nil { - log.Debug().Err(err).Msg("error getting twitch global emotes") - return nil, fmt.Errorf("error getting twitch global emotes: %v", err) - } - - // Older chat files have the streamer ID stored as a string, need to convert to an int64 - var sID int64 - switch streamerChatId := chatData.Streamer.ID.(type) { - case string: - sID, err = strconv.ParseInt(streamerChatId, 10, 64) - if err != nil { - log.Debug().Err(err).Msg("error parsing streamer chat id") - return nil, fmt.Errorf("error parsing streamer chat id: %v", err) - } - case float64: - sID = int64(streamerChatId) + return nil, fmt.Errorf("error getting global emotes: %v", err) } + emotes.Emotes = append(emotes.Emotes, globalEmotes...) - twitchChannelEmotes, err := chat.GetTwitchChannelEmotes(sID) + // get platform channel emotes + channelEmotes, err := s.Platform.GetChannelEmotes(ctx, streamerId) if err != nil { - log.Debug().Err(err).Msg("error getting twitch channel emotes") - return nil, fmt.Errorf("error getting twitch channel emotes: %v", err) + return nil, fmt.Errorf("error getting channel emotes: %v", err) } - sevenTVGlobalEmotes, err := chat.Get7TVGlobalEmotes() + emotes.Emotes = append(emotes.Emotes, channelEmotes...) + + // get 7tv emotes + sevenTVGlobalEmotes, err := chat.Get7TVGlobalEmotes(ctx) if err != nil { - log.Debug().Err(err).Msg("error getting 7tv global emotes") return nil, fmt.Errorf("error getting 7tv global emotes: %v", err) } - sevenTVChannelEmotes, err := chat.Get7TVChannelEmotes(sID) + emotes.Emotes = append(emotes.Emotes, sevenTVGlobalEmotes...) + + sevenTVChannelEmotes, err := chat.Get7TVChannelEmotes(ctx, streamerId) if err != nil { - log.Debug().Err(err).Msg("error getting 7tv channel emotes") return nil, fmt.Errorf("error getting 7tv channel emotes: %v", err) } - bttvGlobalEmotes, err := chat.GetBTTVGlobalEmotes() + emotes.Emotes = append(emotes.Emotes, sevenTVChannelEmotes...) 
+ + // get bttv emotes + bttvGlobalEmotes, err := chat.GetBTTVGlobalEmotes(ctx) if err != nil { - log.Debug().Err(err).Msg("error getting bttv global emotes") return nil, fmt.Errorf("error getting bttv global emotes: %v", err) } - bttvChannelEmotes, err := chat.GetBTTVChannelEmotes(sID) + emotes.Emotes = append(emotes.Emotes, bttvGlobalEmotes...) + + bttvChannelEmotes, err := chat.GetBTTVChannelEmotes(ctx, streamerId) if err != nil { - log.Debug().Err(err).Msg("error getting bttv channel emotes") return nil, fmt.Errorf("error getting bttv channel emotes: %v", err) } - ffzGlobalEmotes, err := chat.GetFFZGlobalEmotes() + emotes.Emotes = append(emotes.Emotes, bttvChannelEmotes...) + + // get ffz emotes + ffzGlobalEmotes, err := chat.GetFFZGlobalEmotes(ctx) if err != nil { - log.Debug().Err(err).Msg("error getting ffz global emotes") return nil, fmt.Errorf("error getting ffz global emotes: %v", err) } - ffzChannelEmotes, err := chat.GetFFZChannelEmotes(sID) + emotes.Emotes = append(emotes.Emotes, ffzGlobalEmotes...) + + ffzChannelEmotes, err := chat.GetFFZChannelEmotes(ctx, streamerId) if err != nil { - log.Debug().Err(err).Msg("error getting ffz channel emotes") return nil, fmt.Errorf("error getting ffz channel emotes: %v", err) } - - // Loop through twitch global emotes - for _, emote := range twitchGlobalEmotes { - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, *emote) - } - // Loop through twitch channel emotes - for _, emote := range twitchChannelEmotes { - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, *emote) - } - // Loop through 7tv global emotes - for _, emote := range sevenTVGlobalEmotes { - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, *emote) - } - // Loop through 7tv channel emotes - for _, emote := range sevenTVChannelEmotes { - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, *emote) - } - // Loop through bttv global emotes - for _, emote := range bttvGlobalEmotes { - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, *emote) - } - // Loop through bttv channel emotes - for _, emote := range bttvChannelEmotes { - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, *emote) - } - // Loop through ffz global emotes - for _, emote := range ffzGlobalEmotes { - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, *emote) - } - // Loop through ffz channel emotes - for _, emote := range ffzChannelEmotes { - ganymedeEmotes.Emotes = append(ganymedeEmotes.Emotes, *emote) - } - + emotes.Emotes = append(emotes.Emotes, ffzChannelEmotes...) 
} chatData = nil defer runtime.GC() - return &ganymedeEmotes, nil + return &emotes, nil } -func (s *Service) GetVodChatBadges(c echo.Context, vodID uuid.UUID) (*chat.GanymedeBadges, error) { - envDeployment := os.Getenv("ENV") - - if envDeployment == "development" { - utils.PrintMemUsage() - } - - v, err := s.Store.Client.Vod.Query().Where(vod.ID(vodID)).Only(c.Request().Context()) +func (s *Service) GetChatBadges(ctx context.Context, vodID uuid.UUID) (*platform.Badges, error) { + v, err := s.Store.Client.Vod.Query().Where(vod.ID(vodID)).Only(ctx) if err != nil { - log.Debug().Err(err).Msg("error getting vod chat emotes") return nil, fmt.Errorf("error getting vod chat emotes: %v", err) } data, err := utils.ReadChatFile(v.ChatPath) if err != nil { - log.Debug().Err(err).Msg("error getting vod chat emotes") return nil, fmt.Errorf("error getting vod chat emotes: %v", err) } @@ -711,7 +722,6 @@ func (s *Service) GetVodChatBadges(c echo.Context, vodID uuid.UUID) (*chat.Ganym // attempt to unmarshal old format err = json.Unmarshal(data, &chatDataOld) if err != nil { - log.Debug().Err(err).Msg("error getting vod chat emotes") return nil, fmt.Errorf("error getting vod chat emotes: %v", err) } } @@ -735,11 +745,11 @@ func (s *Service) GetVodChatBadges(c echo.Context, vodID uuid.UUID) (*chat.Ganym } } - var badgeResp chat.GanymedeBadges + var badgeResp platform.Badges // If emebedded badges if len(chatData.EmbeddedData.TwitchBadges) != 0 { - log.Debug().Msgf("VOD %s chat playback embedded badges found", vodID) + log.Debug().Str("vod_id", vodID.String()).Msg("Found embedded badges") // Emebedded badges have duplicate arrays for each of the below // So we need to check if we have already added the badge to the response // To ensure we use the channel's badge and not the global one @@ -756,7 +766,7 @@ func (s *Service) GetVodChatBadges(c echo.Context, vodID uuid.UUID) (*chat.Ganym if imgData.Title == "" { empty = true } else { - badgeResp.Badges = append(badgeResp.Badges, chat.GanymedeBadge{ + badgeResp.Badges = append(badgeResp.Badges, platform.Badge{ Name: badge.Name, Version: v, Title: fmt.Sprintf("%s %s", badge.Name, v), @@ -778,7 +788,7 @@ func (s *Service) GetVodChatBadges(c echo.Context, vodID uuid.UUID) (*chat.Ganym if imgData.Title == "" { empty = true } else { - badgeResp.Badges = append(badgeResp.Badges, chat.GanymedeBadge{ + badgeResp.Badges = append(badgeResp.Badges, platform.Badge{ Name: badge.Name, Version: v, Title: fmt.Sprintf("%s %s", badge.Name, v), @@ -799,7 +809,7 @@ func (s *Service) GetVodChatBadges(c echo.Context, vodID uuid.UUID) (*chat.Ganym if imgData.Title == "" { empty = true } else { - badgeResp.Badges = append(badgeResp.Badges, chat.GanymedeBadge{ + badgeResp.Badges = append(badgeResp.Badges, platform.Badge{ Name: badge.Name, Version: v, Title: fmt.Sprintf("%s %s", badge.Name, v), @@ -819,7 +829,7 @@ func (s *Service) GetVodChatBadges(c echo.Context, vodID uuid.UUID) (*chat.Ganym for v, imgData := range badge.Versions { if imgData.Title == "" { } else { - badgeResp.Badges = append(badgeResp.Badges, chat.GanymedeBadge{ + badgeResp.Badges = append(badgeResp.Badges, platform.Badge{ Name: badge.Name, Version: v, Title: fmt.Sprintf("%s %s", badge.Name, v), @@ -833,52 +843,29 @@ func (s *Service) GetVodChatBadges(c echo.Context, vodID uuid.UUID) (*chat.Ganym } } else { - log.Debug().Msgf("VOD %s chat playback embedded badges not found, fetching badges from providers", vodID) - // Older chat files have the streamer ID stored as a string, need to convert to an int64 - var sID 
int64 - switch streamerChatId := chatData.Streamer.ID.(type) { - case string: - sID, err = strconv.ParseInt(streamerChatId, 10, 64) - if err != nil { - log.Debug().Err(err).Msg("error parsing streamer chat id") - return nil, fmt.Errorf("error parsing streamer chat id: %v", err) - } - case float64: - sID = int64(streamerChatId) + log.Debug().Str("vod_id", vodID.String()).Msg("No embedded badges found; fetching from provider") + // get streamer id from chat + streamerId, err := getStreamerIdFromInterface(chatData.Streamer.ID) + if err != nil { + return nil, err } - twitchBadges, err := chat.GetTwitchGlobalBadges() + twitchBadges, err := s.Platform.GetGlobalBadges(ctx) if err != nil { - log.Error().Err(err).Msg("error getting twitch global badges") return nil, fmt.Errorf("error getting twitch global badges: %v", err) } - channelBadges, err := chat.GetTwitchChannelBadges(sID) + badgeResp.Badges = append(badgeResp.Badges, twitchBadges...) + channelBadges, err := s.Platform.GetChannelBadges(ctx, streamerId) if err != nil { - log.Error().Err(err).Msg("error getting twitch channel badges") return nil, fmt.Errorf("error getting twitch channel badges: %v", err) } - - // Loop through twitch global badges - badgeResp.Badges = append(badgeResp.Badges, twitchBadges.Badges...) - - // Loop through twitch channel badges - - badgeResp.Badges = append(badgeResp.Badges, channelBadges.Badges...) - - twitchBadges = nil - channelBadges = nil - + badgeResp.Badges = append(badgeResp.Badges, channelBadges...) } chatData = nil defer runtime.GC() - if envDeployment == "development" { - utils.PrintMemUsage() - } - return &badgeResp, nil - } func (s *Service) LockVod(c echo.Context, vID uuid.UUID, status bool) error { @@ -895,3 +882,23 @@ func (s *Service) LockVod(c echo.Context, vID uuid.UUID, status bool) error { return nil } + +// getStreamerIdFromInterface returns the string representation of the streamer id +// +// Older chat files have the streamer ID stored as an int, need to convert to a string +func getStreamerIdFromInterface(id interface{}) (string, error) { + var streamerId string + switch i := id.(type) { + case string: + streamerId = i + case int: + streamerId = strconv.Itoa(i) + case int64: + streamerId = strconv.FormatInt(i, 10) + case float64: + streamerId = strconv.FormatFloat(i, 'f', -1, 64) + default: + return "", fmt.Errorf("unsupported streamer id type: %T", streamerId) + } + return streamerId, nil +} diff --git a/internal/workflows/video.go b/internal/workflows/video.go deleted file mode 100644 index f0d9791c..00000000 --- a/internal/workflows/video.go +++ /dev/null @@ -1,812 +0,0 @@ -package workflows - -import ( - "context" - "fmt" - "strings" - "time" - - "github.com/rs/zerolog/log" - "github.com/zibbp/ganymede/ent" - "github.com/zibbp/ganymede/ent/live" - "github.com/zibbp/ganymede/ent/queue" - "github.com/zibbp/ganymede/internal/activities" - "github.com/zibbp/ganymede/internal/database" - "github.com/zibbp/ganymede/internal/dto" - "github.com/zibbp/ganymede/internal/notification" - ganymedeTemporal "github.com/zibbp/ganymede/internal/temporal" - "github.com/zibbp/ganymede/internal/utils" - "go.temporal.io/sdk/temporal" - "go.temporal.io/sdk/workflow" -) - -func checkIfTasksAreDone(input dto.ArchiveVideoInput) error { - log.Debug().Msgf("checking if tasks are done for video %s", input.VideoID) - q, err := database.DB().Client.Queue.Query().Where(queue.ID(input.Queue.ID)).Only(context.Background()) - if err != nil { - log.Error().Err(err).Msg("error getting queue item") - return err - } - - if 
input.Queue.LiveArchive { - if q.TaskVideoDownload == utils.Success && q.TaskVideoConvert == utils.Success && q.TaskVideoMove == utils.Success && q.TaskChatDownload == utils.Success && q.TaskChatConvert == utils.Success && q.TaskChatRender == utils.Success && q.TaskChatMove == utils.Success { - log.Debug().Msgf("all tasks for video %s are done", input.VideoID) - - _, err := q.Update().SetVideoProcessing(false).SetChatProcessing(false).SetProcessing(false).Save(context.Background()) - if err != nil { - log.Error().Err(err).Msg("error updating queue item") - return err - } - - _, err = database.DB().Client.Vod.UpdateOneID(input.Vod.ID).SetProcessing(false).Save(context.Background()) - if err != nil { - log.Error().Err(err).Msg("error updating vod") - return err - } - - notification.SendLiveArchiveSuccessNotification(input.Channel, input.Vod, input.Queue) - } - } else { - if q.TaskVideoDownload == utils.Success && q.TaskVideoConvert == utils.Success && q.TaskVideoMove == utils.Success && q.TaskChatDownload == utils.Success && q.TaskChatRender == utils.Success && q.TaskChatMove == utils.Success { - log.Debug().Msgf("all tasks for video %s are done", input.VideoID) - - _, err := q.Update().SetVideoProcessing(false).SetChatProcessing(false).SetProcessing(false).Save(context.Background()) - if err != nil { - log.Error().Err(err).Msg("error updating queue item") - return err - } - - _, err = database.DB().Client.Vod.UpdateOneID(input.Vod.ID).SetProcessing(false).Save(context.Background()) - if err != nil { - log.Error().Err(err).Msg("error updating vod") - return err - } - - notification.SendVideoArchiveSuccessNotification(input.Channel, input.Vod, input.Queue) - } - } - - return nil -} - -func workflowErrorHandler(err error, input dto.ArchiveVideoInput, task string) error { - notification.SendErrorNotification(input.Channel, input.Vod, input.Queue, task) - - return err -} - -func cancelWorkflowAndCleanup(ctx context.Context, input dto.ArchiveVideoInput) error { - log.Info().Msg("no stream found for channel - cancelling workflow") - q, err := database.DB().Client.Queue.Query().Where(queue.ID(input.Queue.ID)).Only(context.Background()) - if err != nil { - log.Error().Err(err).Msg("error getting queue item") - return err - } - // cancel workflow - if q.WorkflowID != "" && q.WorkflowRunID != "" { - log.Debug().Msgf("cancelling workflow: %s run: %s", q.WorkflowID, q.WorkflowRunID) - err = ganymedeTemporal.GetTemporalClient().Client.TerminateWorkflow(ctx, q.WorkflowID, q.WorkflowRunID, "no stream found") - if err != nil { - log.Error().Err(err).Msg("error cancelling workflow") - return err - } - } - // delete directory - path := fmt.Sprintf("/vods/%s/%s", input.Channel.Name, input.Vod.FolderName) - err = utils.DeleteFolder(path) - if err != nil { - log.Error().Err(err).Msg("error deleting files") - return err - } - // delete queue item - err = database.DB().Client.Queue.DeleteOneID(input.Queue.ID).Exec(context.Background()) - if err != nil { - log.Error().Err(err).Msg("error deleting queue item") - return err - } - // delete vod - err = database.DB().Client.Vod.DeleteOneID(input.Vod.ID).Exec(context.Background()) - if err != nil { - log.Error().Err(err).Msg("error deleting vod") - return err - } - - return nil -} - -// *Top Level Workflow* -func ArchiveVideoWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{}) - - // create directory - err := workflow.ExecuteChildWorkflow(ctx, CreateDirectoryWorkflow, input).Get(ctx, 
nil) - if err != nil { - return err - } - - // download thumbnails - err = workflow.ExecuteChildWorkflow(ctx, DownloadTwitchThumbnailsWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - // save video info - err = workflow.ExecuteChildWorkflow(ctx, SaveTwitchVideoInfoWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - // archive video - videoFuture := workflow.ExecuteChildWorkflow(ctx, ArchiveTwitchVideoWorkflow, input) - - if input.Queue.ChatProcessing { - chatFuture := workflow.ExecuteChildWorkflow(ctx, ArchiveTwitchChatWorkflow, input) - if err := chatFuture.Get(ctx, nil); err != nil { - return err - } - } - - if err := videoFuture.Get(ctx, nil); err != nil { - return err - } - - return nil -} - -// *Top Level Workflow* -func ArchiveLiveVideoWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{}) - - // create directory - err := workflow.ExecuteChildWorkflow(ctx, CreateDirectoryWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - // download thumbnails - err = workflow.ExecuteChildWorkflow(ctx, DownloadTwitchLiveThumbnailsWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - // download thumbnails againt in 5 minutes - _ = workflow.ExecuteChildWorkflow(ctx, DownloadTwitchLiveThumbnailsWorkflowWait, input) - - // save video info - err = workflow.ExecuteChildWorkflow(ctx, SaveTwitchLiveVideoInfoWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - chatCtx := workflow.WithChildOptions(ctx, workflow.ChildWorkflowOptions{}) - downloadChatCtx := workflow.WithChildOptions(ctx, workflow.ChildWorkflowOptions{}) - - var chatFuture workflow.ChildWorkflowFuture - if input.Queue.ChatProcessing { - chatFuture = workflow.ExecuteChildWorkflow(chatCtx, ArchiveTwitchLiveChatWorkflow, input) - var chatWorkflowExecution workflow.Execution - _ = chatFuture.GetChildWorkflowExecution().Get(chatCtx, &chatWorkflowExecution) - - log.Debug().Msgf("Live chat archive workflow ID: %s", chatWorkflowExecution.ID) - input.LiveChatArchiveWorkflowId = chatWorkflowExecution.ID - - // execute chat download first to get a workflow ID for signals - // the actual download of chat is held until the video is about to start - liveChatFuture := workflow.ExecuteChildWorkflow(downloadChatCtx, DownloadTwitchLiveChatWorkflow, input) - var liveChatWorkflowExecution workflow.Execution - _ = liveChatFuture.GetChildWorkflowExecution().Get(downloadChatCtx, &liveChatWorkflowExecution) - - log.Debug().Msgf("Live chat workflow ID: %s", liveChatWorkflowExecution.ID) - input.LiveChatWorkflowId = liveChatWorkflowExecution.ID - } - - // archive video - videoFuture := workflow.ExecuteChildWorkflow(ctx, ArchiveTwitchLiveVideoWorkflow, input) - - if err := videoFuture.Get(ctx, nil); err != nil { - return err - } - - if input.Queue.ChatProcessing { - if err := chatFuture.Get(ctx, nil); err != nil { - return err - } - } - - return nil -} - -// *Low Level Workflow* -func CreateDirectoryWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - StartToCloseTimeout: 10 * time.Second, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 5, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.CreateDirectory, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, 
"create-directory") - } - - return nil -} - -// *Low Level Workflow* -func DownloadTwitchThumbnailsWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - StartToCloseTimeout: 10 * time.Second, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 5, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.DownloadTwitchThumbnails, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, "download-thumbnails") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func DownloadTwitchLiveThumbnailsWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - StartToCloseTimeout: 10 * time.Second, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 2, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.DownloadTwitchLiveThumbnails, input).Get(ctx, nil) - if err != nil { - if strings.Contains(err.Error(), "no stream found for channel") { - err := cancelWorkflowAndCleanup(context.Background(), input) - if err != nil { - return err - } - return err - } - return workflowErrorHandler(err, input, "download-thumbnails") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -func DownloadTwitchLiveThumbnailsWorkflowWait(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - StartToCloseTimeout: 15 * time.Minute, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 2, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.Sleep(ctx, 10*time.Minute) - if err != nil { - return err - } - - err = workflow.ExecuteActivity(ctx, activities.DownloadTwitchLiveThumbnails, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, "download-thumbnails") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func SaveTwitchVideoInfoWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - StartToCloseTimeout: 10 * time.Second, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 5, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.SaveTwitchVideoInfo, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, "save-video-info") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func SaveTwitchLiveVideoInfoWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - StartToCloseTimeout: 10 * time.Second, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 3, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.SaveTwitchLiveVideoInfo, input).Get(ctx, nil) - if err != nil { - if strings.Contains(err.Error(), "no 
stream found for channel") { - err := cancelWorkflowAndCleanup(context.Background(), input) - if err != nil { - return err - } - return err - } - return workflowErrorHandler(err, input, "save-video-info") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -// *Mid Level Workflow* -func ArchiveTwitchVideoWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - - err := workflow.ExecuteChildWorkflow(ctx, DownloadTwitchVideoWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - err = workflow.ExecuteChildWorkflow(ctx, PostprocessVideoWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - err = workflow.ExecuteChildWorkflow(ctx, MoveVideoWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - return nil -} - -// *Mid Level Workflow* -func ArchiveTwitchLiveVideoWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - - err := workflow.ExecuteChildWorkflow(ctx, DownloadTwitchLiveVideoWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - err = workflow.ExecuteChildWorkflow(ctx, PostprocessVideoWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - err = workflow.ExecuteChildWorkflow(ctx, MoveVideoWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - return nil - -} - -// *Mid Level Workflow* -func ArchiveTwitchLiveChatWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - // download happened earlier, this is post-download tasks - - var signal utils.ArchiveTwitchLiveChatStartSignal - signalChan := workflow.GetSignalChannel(ctx, "continue-chat-archive") - signalChan.Receive(ctx, &signal) - - log.Info().Msgf("Received signal: %v", signal) - - err := workflow.ExecuteChildWorkflow(ctx, ConvertTwitchLiveChatWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - if input.Queue.RenderChat { - err = workflow.ExecuteChildWorkflow(ctx, RenderTwitchChatWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - } - - err = workflow.ExecuteChildWorkflow(ctx, MoveTwitchChatWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func ConvertTwitchLiveChatWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - HeartbeatTimeout: 90 * time.Second, - StartToCloseTimeout: 168 * time.Hour, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 3, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.ConvertTwitchLiveChat, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, "convert-chat") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil - -} - -// *Mid Level Workflow* -func ArchiveTwitchChatWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - - err := workflow.ExecuteChildWorkflow(ctx, DownloadTwitchChatWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - if input.Queue.RenderChat { - err = workflow.ExecuteChildWorkflow(ctx, RenderTwitchChatWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - } - - err = workflow.ExecuteChildWorkflow(ctx, MoveTwitchChatWorkflow, input).Get(ctx, nil) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func DownloadTwitchVideoWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - cctx 
:= workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - TaskQueue: "video-download", - HeartbeatTimeout: 90 * time.Second, - StartToCloseTimeout: 168 * time.Hour, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 3, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(cctx, activities.DownloadTwitchVideo, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, "download-video") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func DownloadTwitchLiveVideoWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - HeartbeatTimeout: 90 * time.Second, - StartToCloseTimeout: 168 * time.Hour, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 1, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.DownloadTwitchLiveVideo, input).Get(ctx, nil) - if err != nil { - // cleanup archive if no stream found - if strings.Contains(err.Error(), "no playable streams found on this URL") { - log.Error().Err(err).Msg("no stream found for channel") - err := cancelWorkflowAndCleanup(context.Background(), input) - if err != nil { - return err - } - err = workflow.ExecuteActivity(ctx, activities.KillTwitchLiveChatDownload, input).Get(ctx, nil) - if err != nil { - return err - } - return err - } - - return workflowErrorHandler(err, input, "download-video") - } - - // kill live chat download if chat is being archived - if input.Queue.ChatProcessing { - err = workflow.ExecuteActivity(ctx, activities.KillTwitchLiveChatDownload, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, "kill-chat-download") - } - } - - // mark live channel as not live - live, err := database.DB().Client.Live.Query().Where(live.ID(input.LiveWatchChannel.ID)).Only(context.Background()) - if err != nil { - // allow not found error to pass - if _, ok := err.(*ent.NotFoundError); !ok { - log.Error().Err(err).Msg("error getting live channel") - return err - } - } - if live != nil { - _, err = live.Update().SetIsLive(false).Save(context.Background()) - if err != nil { - log.Error().Err(err).Msg("error updating live channel") - return err - } - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func PostprocessVideoWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - TaskQueue: "video-convert", - HeartbeatTimeout: 90 * time.Second, - StartToCloseTimeout: 168 * time.Hour, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 3, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.PostprocessVideo, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, "postprocess-video") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func MoveVideoWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - HeartbeatTimeout: 90 * time.Second, - StartToCloseTimeout: 168 * time.Hour, - RetryPolicy: 
&temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 3, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.MoveVideo, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, "move-video") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func DownloadTwitchChatWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - TaskQueue: "chat-download", - HeartbeatTimeout: 90 * time.Second, - StartToCloseTimeout: 168 * time.Hour, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 3, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.DownloadTwitchChat, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, "download-chat") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func DownloadTwitchLiveChatWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - HeartbeatTimeout: 90 * time.Second, - StartToCloseTimeout: 168 * time.Hour, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 1, - MaximumInterval: 15 * time.Minute, - }, - WaitForCancellation: false, - }) - - var signal utils.ArchiveTwitchLiveChatStartSignal - signalChan := workflow.GetSignalChannel(ctx, "start-chat-download") - signalChan.Receive(ctx, &signal) - - log.Info().Msgf("Received signal: %v", signal) - - err := workflow.ExecuteActivity(ctx, activities.DownloadTwitchLiveChat, input).Get(ctx, nil) - if err != nil { - return err - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - log.Debug().Msgf("Sending signal to continue chat archive: %s", input.LiveChatArchiveWorkflowId) - continueSignal := utils.ArchiveTwitchLiveChatStartSignal{ - Start: true, - } - err = workflow.SignalExternalWorkflow(ctx, input.LiveChatArchiveWorkflowId, "", "continue-chat-archive", continueSignal).Get(ctx, nil) - if err != nil { - log.Error().Err(err).Msgf("error sending signal to continue chat archive: %v", err) - } - - return nil -} - -// *Low Level Workflow* -func RenderTwitchChatWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - TaskQueue: "chat-render", - HeartbeatTimeout: 90 * time.Second, - StartToCloseTimeout: 168 * time.Hour, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 3, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.RenderTwitchChat, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, "render-chat") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func MoveTwitchChatWorkflow(ctx workflow.Context, input dto.ArchiveVideoInput) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - HeartbeatTimeout: 90 * time.Second, - StartToCloseTimeout: 168 * time.Hour, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 
3, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.MoveChat, input).Get(ctx, nil) - if err != nil { - return workflowErrorHandler(err, input, "move-chat") - } - - err = checkIfTasksAreDone(input) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func SaveTwitchVideoChapters(ctx workflow.Context) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - HeartbeatTimeout: 90 * time.Second, - StartToCloseTimeout: 168 * time.Hour, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 3, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.TwitchSaveVideoChapters).Get(ctx, nil) - if err != nil { - return err - } - - return nil -} - -// *Low Level Workflow* -func UpdateTwitchLiveStreamArchivesWithVodIds(ctx workflow.Context) error { - ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - HeartbeatTimeout: 90 * time.Second, - StartToCloseTimeout: 168 * time.Hour, - RetryPolicy: &temporal.RetryPolicy{ - InitialInterval: 1 * time.Minute, - BackoffCoefficient: 2, - MaximumAttempts: 3, - MaximumInterval: 15 * time.Minute, - }, - }) - - err := workflow.ExecuteActivity(ctx, activities.UpdateTwitchLiveStreamArchivesWithVodIds).Get(ctx, nil) - if err != nil { - return err - } - - return nil -} diff --git a/internal/workflows/workflows.go b/internal/workflows/workflows.go deleted file mode 100644 index 618cc116..00000000 --- a/internal/workflows/workflows.go +++ /dev/null @@ -1,35 +0,0 @@ -package workflows - -import ( - "context" - - "github.com/rs/zerolog/log" - "github.com/zibbp/ganymede/internal/temporal" - "go.temporal.io/sdk/client" -) - -type StartWorkflowResponse struct { - WorkflowId string `json:"workflow_id"` - RunId string `json:"run_id"` -} - -func StartWorkflow(ctx context.Context, workflowName string) (StartWorkflowResponse, error) { - // TODO: develop a better way to do this - - var startWorkflowResponse StartWorkflowResponse - - workflowOptions := client.StartWorkflowOptions{ - TaskQueue: "archive", - } - - we, err := temporal.GetTemporalClient().Client.ExecuteWorkflow(ctx, workflowOptions, workflowName) - if err != nil { - log.Error().Err(err).Msg("failed to start workflow") - return startWorkflowResponse, err - } - - startWorkflowResponse.WorkflowId = we.GetID() - startWorkflowResponse.RunId = we.GetRunID() - - return startWorkflowResponse, nil -} diff --git a/nginx.conf b/nginx.conf index 7b44911e..9c20082f 100644 --- a/nginx.conf +++ b/nginx.conf @@ -20,16 +20,16 @@ http { server { listen 8080; - root /mnt/vods; + root /data/videos; add_header 'Access-Control-Allow-Origin' '*' always; add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always; add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range' always; add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always; - location ^~ /vods { + location ^~ /data/videos { autoindex on; - alias /mnt/vods; + alias /data/videos; location ~* \.(ico|css|js|gif|jpeg|jpg|png|svg|webp)$ { expires 30d; diff --git a/tests/setup.go b/tests/setup.go new file mode 100644 index 00000000..e6f5f0de --- /dev/null +++ b/tests/setup.go @@ -0,0 +1,88 @@ +package tests + +import ( + "context" + "os" + "testing" + "time" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/modules/postgres" + 
"github.com/testcontainers/testcontainers-go/wait" + "github.com/zibbp/ganymede/internal/server" +) + +// Setup initializes the integration test environment. +// It setups up the entire application and returns the various services for testing. +// A Postgres Testcontainer is used to provide a real database for further tersting. +func Setup(t *testing.T) (*server.Application, error) { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + // create temporary postgres container to run the tests + postgresContainer, err := postgres.Run(ctx, + "postgres:14-alpine", + postgres.WithDatabase("test"), + postgres.WithUsername("user"), + postgres.WithPassword("password"), + testcontainers.WithWaitStrategy( + wait.ForLog("database system is ready to accept connections"). + WithOccurrence(2). + WithStartupTimeout(5*time.Second)), + ) + if err != nil { + return nil, err + } + + port, err := postgresContainer.MappedPort(ctx, "5432") + if err != nil { + return nil, err + } + + // set environment variables + os.Setenv("DB_HOST", "localhost") + os.Setenv("DB_PORT", port.Port()) + os.Setenv("DB_USER", "user") + os.Setenv("DB_PASS", "password") + os.Setenv("DB_NAME", "test") + os.Setenv("JWT_SECRET", "secret") + os.Setenv("JWT_REFRESH_SECRET", "refresh_secret") + os.Setenv("FRONTEND_HOST", "http://localhost:1234") + + // set temporary directories + videosDir, err := os.MkdirTemp("/tmp", "ganymede-tests") + if err != nil { + return nil, err + } + os.Setenv("VIDEOS_DIR", videosDir) + t.Log("VIDEOS_DIR", videosDir) + + tempDir, err := os.MkdirTemp("/tmp", "ganymede-tests") + if err != nil { + return nil, err + } + os.Setenv("TEMP_DIR", tempDir) + t.Log("TEMP_DIR", tempDir) + + configDir, err := os.MkdirTemp("/tmp", "ganymede-tests") + if err != nil { + return nil, err + } + os.Setenv("CONFIG_DIR", configDir) + t.Log("CONFIG_DIR", configDir) + + logsDir, err := os.MkdirTemp("/tmp", "ganymede-tests") + if err != nil { + return nil, err + } + os.Setenv("LOGS_DIR", logsDir) + t.Log("LOGS_DIR", logsDir) + + // create the application. this does not start the HTTP server + app, err := server.SetupApplication(ctx) + if err != nil { + return nil, err + } + + return app, nil +}