diff --git a/.argo/build.yaml b/.argo/build.yaml new file mode 100644 index 00000000..22b20705 --- /dev/null +++ b/.argo/build.yaml @@ -0,0 +1,115 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + namespace: argo +spec: + entrypoint: main + serviceAccountName: argo-server + templates: + - name: main + steps: + - - name: publish-pkg-container-1 + templateRef: + name: cwft-kaniko + template: build-push-https + clusterScope: true + arguments: + parameters: + - name: mode + value: '{{workflow.parameters.mode}}' + - name: imageName + value: '{{workflow.parameters.imageName1}}' + - name: containerFilePath + value: '{{workflow.parameters.containerFilePath1}}' + - name: appName + value: '{{workflow.parameters.appName}}' + - name: branch + value: '{{workflow.parameters.branch}}' + - name: shortSha + value: '{{workflow.parameters.shortSha}}' + - name: containerRegistryURL1 + value: '{{workflow.parameters.containerRegistryURL1}}' + - name: containerRegistryURL2 + value: '{{workflow.parameters.containerRegistryURL2}}' + - name: gitUrlNoProtocol + value: '{{workflow.parameters.gitUrlNoProtocol}}' + - name: memoryRequest + value: "5Gi" # override default: 512Mi + - name: cpuRequest + value: "3" # override default: 500m + - name: memoryLimit + value: "16Gi" # override default: 2Gi + - name: cpuLimit + value: "8" # override default: 2 + - name: storageRequest + value: "35Gi" # override default: 15Gi + - name: publish-gpu-container-2 + templateRef: + name: cwft-kaniko + template: build-push-https + clusterScope: true + arguments: + parameters: + - name: mode + value: '{{workflow.parameters.mode}}' + - name: imageName + value: '{{workflow.parameters.imageName2}}' + - name: containerFilePath + value: '{{workflow.parameters.containerFilePath2}}' + - name: appName + value: '{{workflow.parameters.appName}}' + - name: branch + value: '{{workflow.parameters.branch}}' + - name: shortSha + value: '{{workflow.parameters.shortSha}}' + - name: containerRegistryURL1 + value: '{{workflow.parameters.containerRegistryURL1}}' + - name: containerRegistryURL2 + value: '{{workflow.parameters.containerRegistryURL2}}' + - name: gitUrlNoProtocol + value: '{{workflow.parameters.gitUrlNoProtocol}}' + - name: memoryRequest + value: "7Gi" # override default: 512Mi + - name: cpuRequest + value: "2" # override default: 500m + - name: memoryLimit + value: "32Gi" # override default: 2Gi + - name: cpuLimit + value: "8" # override default: 2 + - name: storageRequest + value: "50Gi" # override default: 15Gi + # - name: publish-conda-container-3 + # templateRef: + # name: cwft-kaniko + # template: build-push-https + # clusterScope: true + # arguments: + # parameters: + # - name: mode + # value: '{{workflow.parameters.mode}}' + # - name: imageName + # value: '{{workflow.parameters.imageName3}}' + # - name: containerFilePath + # value: '{{workflow.parameters.containerFilePath3}}' + # - name: appName + # value: '{{workflow.parameters.appName}}' + # - name: branch + # value: '{{workflow.parameters.branch}}' + # - name: shortSha + # value: '{{workflow.parameters.shortSha}}' + # - name: containerRegistryURL1 + # value: '{{workflow.parameters.containerRegistryURL1}}' + # - name: containerRegistryURL2 + # value: '{{workflow.parameters.containerRegistryURL2}}' + # - name: gitUrlNoProtocol + # value: '{{workflow.parameters.gitUrlNoProtocol}}' + # - name: memoryRequest + # value: "7Gi" # override default: 512Mi + # - name: cpuRequest + # value: "2" # override default: 500m + # - name: memoryLimit + # value: "32Gi" # override default: 
2Gi + # - name: cpuLimit + # value: "8" # override default: 2 + # - name: storageRequest + # value: "50Gi" # override default: 15Gi diff --git a/.devcontainer/devcontainer.Dockerfile b/.devcontainer/devcontainer.Dockerfile new file mode 100644 index 00000000..e67721f9 --- /dev/null +++ b/.devcontainer/devcontainer.Dockerfile @@ -0,0 +1,4 @@ +FROM ghcr.io/cachix/devenv:latest + +RUN echo 'extra-substituters = https://devenv.cachix.org' >> /etc/nix/nix.conf && \ + echo 'extra-trusted-public-keys = devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw=' >> /etc/nix/nix.conf diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000..923a0493 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,35 @@ +{ + "name": "test-devenv", + "build": { + "dockerfile": "./devcontainer.Dockerfile", + "context": ".." + }, + "customizations": { + "vscode": { + "extensions": [ + // "mkhl.direnv", + // vim extension breaks + // during installation + // install via `recommends` + // "vscodevim.vim@1.26.2", + "pmndrs.pmndrs", + "jnoortheen.nix-ide", + "tamasfe.even-better-toml", + "donjayamanne.python-extension-pack", + "charliermarsh.ruff", + "redhat.vscode-yaml", + "ms-kubernetes-tools.vscode-kubernetes-tools", + "ms-vsliveshare.vsliveshare", + "eamodio.gitlens", + "GitHub.vscode-pull-request-github", + "github.vscode-github-actions", + "ms-azuretools.vscode-docker", + "ms-toolsai.jupyter", + "njzy.stats-bar", + "vscode-icons-team.vscode-icons" + ] + } + }, + "overrideCommand": false, + "forwardPorts": [53593] +} diff --git a/.envrc b/.envrc new file mode 100644 index 00000000..caeac5af --- /dev/null +++ b/.envrc @@ -0,0 +1,15 @@ +# if ! has nix_direnv_version || ! nix_direnv_version 2.2.1; then +# source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/2.2.1/direnvrc" "sha256-zelF0vLbEl5uaqrfIzbgNzJWGmLzCmYAkInj/LNxvKs=" +# fi + +# if ! use flake . --impure --accept-flake-config +# then +# echo "nix flake could not be built; update flake.nix and run direnv allow/reload" >&2 +# fi + +# printf "\nrun \`nix flake update --impure && nix flake check --impure\` to update the flake lockfile." 
+# printf "\nuse \`direnv revoke\` to unload the dev environment or \`direnv allow\` to reload it.\n\n" + +# not necessary if using nix flake +# devenv devshell with dotenv enabled +dotenv diff --git a/.example.env b/.example.env new file mode 100644 index 00000000..821b98ed --- /dev/null +++ b/.example.env @@ -0,0 +1,58 @@ +# Used as admin.clientSecretEnvVar with the unhashed version of +# configuration.auth.internal.clientSecretHash +# from the flyte-binary helm chart values.yaml file +# authType: ClientSecret +# clientId: flytepropeller +# clientSecretEnvVar: FLYTE_OAUTH_CLIENT_SECRET + +## nix +CACHIX_CACHE_NAME=dnadiffusion +CACHIX_AUTH_TOKEN=e1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111 + +## Compute +GCP_PROJECT_ID=project-111111 +GCP_STORAGE_SCOPES=https://www.googleapis.com/auth/devstorage.read_write +GCP_STORAGE_CONTAINER=project-111111-flyte-meta-cluster +GCP_ARTIFACT_REGISTRY_PATH=us.gcr.io/project/path + +## GitHub +GH_ORG=pinellolab +GH_REPO_NAME=DNA-Diffusion +GH_REPO_NAME_SLUG=dna-diffusion +GH_REPO=pinellolab/DNA-Diffusion + +## Flyte +FLYTE_CLUSTER_ENDPOINT=dns:///cluster.net +FLYTE_OAUTH_CLIENT_SECRET=O0000000000000000000000000000000 +FLYTECTL_CONFIG_TEMPLATE=.flyte/config-browser.yaml +FLYTECTL_CONFIG=.flyte/config.yaml # required CI, may override constants.py + +## Python +WORKFLOW_IMAGE="ghcr.io/pinellolab/dna-diffusion" # tag computed from git +LOG_LEVEL=INFO + +## Makefile +WORKFLOW_PROJECT=dnadiffusion +WORKFLOW_DOMAIN=development +WORKFLOW_PACKAGE_PATH=src +WORKFLOW_IMPORT_PATH=dnadiffusion.workflows.example +WORKFLOW_NAME=wf +# WORKFLOW_IMPORT_PATH=dnadiffusion.workflows.lrwine +# WORKFLOW_NAME=training_workflow +# WORKFLOW_REGISTRATION_MODE=prod +# WORKFLOW_REGISTRATION_MODE=dev +# WORKFLOW_VERSION="repo-branch-shortsha" # override auto version +# WORKFLOW_FILE=workflows/example.py +# WORKFLOW_FILE_WORKFLOW_ARGS='{"C": 0.1, "max_iter": 1000}' +GLOBAL_IMAGE_TAG=latest +WORKFLOW_OUTPUT_FORMAT=dot + +## Local development +# Toggle to use local dev cluster +# these will override the values above +# +LOCAL_CONTAINER_REGISTRY=localhost:30000 +ACTIVE_DOCKERFILE=containers/pkg.Dockerfile +# FLYTECTL_CONFIG=.flyte/config-local.yaml +# WORKFLOW_IMAGE=localhost:30000/dnadiffusion +# KUBECONFIG=/path/to/user/.flyte/sandbox/kubeconfig diff --git a/.flyte/config-browser.yaml b/.flyte/config-browser.yaml new file mode 100644 index 00000000..87b5bfff --- /dev/null +++ b/.flyte/config-browser.yaml @@ -0,0 +1,17 @@ +admin: + endpoint: ${FLYTE_CLUSTER_ENDPOINT} + # authenticate in browser via oauth2-proxy + authType: Pkce + +logger: + show-source: true + level: 0 +storage: + type: stow + stow: + kind: google + config: + json: "" + project_id: ${GCP_PROJECT_ID} + scopes: ${GCP_STORAGE_SCOPES} + container: ${GCP_STORAGE_CONTAINER} diff --git a/.flyte/config-local.yaml b/.flyte/config-local.yaml new file mode 100644 index 00000000..6f8a537d --- /dev/null +++ b/.flyte/config-local.yaml @@ -0,0 +1,8 @@ +admin: + endpoint: localhost:30080 + insecure: true +storage: + connection: + endpoint: http://localhost:30002 + access-key: minio + secret-key: miniostorage diff --git a/.flyte/config-template.yaml b/.flyte/config-template.yaml new file mode 100644 index 00000000..7372263a --- /dev/null +++ b/.flyte/config-template.yaml @@ -0,0 +1,25 @@ +admin: + endpoint: ${FLYTE_CLUSTER_ENDPOINT} + # authenticate in browser via oauth2-proxy + # authType: Pkce + + # programmatic auth for CI + # 
where EnvVar is the unhashed version of + # configuration.auth.internal.clientSecretHash + # from the helm chart values file + authType: ClientSecret + clientId: flytepropeller + clientSecretEnvVar: FLYTE_OAUTH_CLIENT_SECRET + insecure: false +logger: + show-source: true + level: 0 +storage: + type: stow + stow: + kind: google + config: + json: "" + project_id: ${GCP_PROJECT_ID} + scopes: ${GCP_STORAGE_SCOPES} + container: ${GCP_STORAGE_CONTAINER} diff --git a/.github/SECURITY.md b/.github/SECURITY.md new file mode 100644 index 00000000..7f1cded1 --- /dev/null +++ b/.github/SECURITY.md @@ -0,0 +1,3 @@ +# security + +To report a potential security vulnerability in DNA-Diffusion, please follow the [instructions to create a draft security advisory](https://github.com/pinellolab/DNA-Diffusion/security/advisories/new). diff --git a/.github/actions/setup_environment/action.yml b/.github/actions/setup_environment/action.yml new file mode 100644 index 00000000..c8253ee1 --- /dev/null +++ b/.github/actions/setup_environment/action.yml @@ -0,0 +1,59 @@ +name: 'Setup Environment' +description: 'Shared steps to set up the environment for CI jobs' + +inputs: + python_version: + description: 'Python version to set up' + required: true + debug_enabled: + description: "Run with tmate.io debugging enabled" + required: true + default: "false" + gpu_enabled: + description: "Do not downgrade to CPU libraries" + required: true + default: "false" + +runs: + using: "composite" + steps: + - name: Set up Python + id: setup-python + uses: actions/setup-python@v4 + with: + python-version: ${{ inputs.python_version }} + - name: Install system dependencies + shell: bash + run: | + echo "install system dependencies" + # sudo apt-get update + # sudo apt-get install -y tree + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: 1.7.1 + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v3 + with: + path: .venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} + - name: Install dependencies + shell: bash + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --with docs,lint,test,workflows,bioinformatics --no-interaction --no-root + - name: Install project + shell: bash + run: poetry install --with docs,lint,test,workflows,bioinformatics --no-interaction + - name: Downgrade to torch CPU + if: ${{ inputs.gpu_enabled != 'true' }} + shell: bash + run: | + source $VENV + poe torch-cpu + - name: "Setup tmate debug session" + uses: mxschmitt/action-tmate@v3 + if: ${{ inputs.debug_enabled == 'true' }} diff --git a/.github/codecov.yml b/.github/codecov.yml new file mode 100644 index 00000000..df6a6179 --- /dev/null +++ b/.github/codecov.yml @@ -0,0 +1,15 @@ +codecov: + branch: main + +comment: false +github_checks: false + +coverage: + status: + project: + default: + threshold: 10% + + patch: off + + changes: false diff --git a/.github/workflows/build.yml b/.github/deprecated/build.yml similarity index 92% rename from .github/workflows/build.yml rename to .github/deprecated/build.yml index 3870164b..1f818bae 100644 --- a/.github/workflows/build.yml +++ b/.github/deprecated/build.yml @@ -2,10 +2,10 @@ name: Build on: workflow_dispatch: - push: - branches: - - main - pull_request: + # push: + # branches: + # - main + # pull_request: jobs: test: diff --git a/.github/workflows/docker.yml 
b/.github/deprecated/docker.yml similarity index 92% rename from .github/workflows/docker.yml rename to .github/deprecated/docker.yml index bfdb087c..35e2d139 100644 --- a/.github/workflows/docker.yml +++ b/.github/deprecated/docker.yml @@ -2,10 +2,10 @@ name: Docker on: workflow_dispatch: - push: - branches: - - main - pull_request: + # push: + # branches: + # - main + # pull_request: env: REGISTRY: ghcr.io @@ -46,7 +46,7 @@ jobs: uses: docker/build-push-action@v5 with: context: . - file: ./dockerfiles/Dockerfile + file: ./containers/Dockerfile platforms: linux/amd64 push: ${{ github.event_name != 'pull_request' }} tags: ${{ steps.meta.outputs.tags }} diff --git a/.github/workflows/documentation.yml b/.github/deprecated/documentation.yml similarity index 96% rename from .github/workflows/documentation.yml rename to .github/deprecated/documentation.yml index bc8d086d..9cf30642 100644 --- a/.github/workflows/documentation.yml +++ b/.github/deprecated/documentation.yml @@ -2,9 +2,9 @@ name: Build documentation on: workflow_dispatch: - push: - branches: - - main + # push: + # branches: + # - main # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages permissions: diff --git a/.github/workflows/release.yml b/.github/deprecated/release.yml similarity index 100% rename from .github/workflows/release.yml rename to .github/deprecated/release.yml diff --git a/.github/workflows/test-release.yml b/.github/deprecated/test-release.yml similarity index 92% rename from .github/workflows/test-release.yml rename to .github/deprecated/test-release.yml index 9daf0f49..e3f90acd 100644 --- a/.github/workflows/test-release.yml +++ b/.github/deprecated/test-release.yml @@ -2,13 +2,13 @@ name: Test Release on: workflow_dispatch: - workflow_run: - workflows: - - Build - - Docker - branches: main - types: - - completed + # workflow_run: + # workflows: + # - Build + # - Docker + # branches: main + # types: + # - completed jobs: release: diff --git a/.github/labels.yml b/.github/labels.yml index c547348b..7790bfb7 100644 --- a/.github/labels.yml +++ b/.github/labels.yml @@ -28,8 +28,8 @@ - name: enhancement description: New feature or request color: a2eeef -- name: github_actions - description: Pull requests that update Github_actions code +- name: github-actions + description: Pull requests that update Github actions code color: "000000" - name: good first issue description: Good for newcomers @@ -67,3 +67,21 @@ - name: work-in-progress description: Work in progress that should not be auto-merged color: ffffff +- name: build-images + description: Control image build on PRs + color: fef2c0 +- name: execute-workflow + description: Control workflow execution on PRs + color: 74da90 +- name: skip-ci + description: Skip CI workflow + color: 35539d +- name: skip-tests + description: Skip Tests in CI workflow + color: 1dd225 +- name: actions-debug + description: Enable debugging in actions execution + color: 923411 +- name: workflow-mode-dev + description: Execute workflows in dev mode (branch name for container image tags) + color: 2085aa diff --git a/.github/workflows/CD.yml b/.github/workflows/CD.yml new file mode 100644 index 00000000..259c94f6 --- /dev/null +++ b/.github/workflows/CD.yml @@ -0,0 +1,85 @@ +name: CD + +on: + workflow_dispatch: + workflow_run: + workflows: + - CI + branches: main + types: + - completed + +jobs: + release: + name: Release + permissions: + contents: write + pull-requests: write + runs-on: ubuntu-latest + steps: + - name: Check out the repository + uses: 
actions/checkout@v4 + with: + fetch-depth: 2 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Upgrade pip + run: | + pip install --constraint=.github/workflows/constraints.txt pip + pip --version + + - name: Install Poetry + run: | + pip install --constraint=.github/workflows/constraints.txt poetry + poetry --version + + - name: Check if there is a parent commit + id: check-parent-commit + run: | + echo "SHA=$(git rev-parse --verify --quiet HEAD^)" >> $GITHUB_ENV + + - name: Detect and tag new version + id: check-version + if: env.SHA + uses: salsify/action-detect-and-tag-new-version@v2.0.3 + with: + version-command: | + bash -o pipefail -c "poetry version -s" + + - name: Bump version for developmental release + if: "! steps.check-version.outputs.tag" + run: | + poetry version patch && + version=$(poetry version | awk '{ print $2 }') && + poetry version $version.dev.$(date +%s) + + - name: Build package + run: | + poetry build --ansi + + - name: Publish package on PyPI + if: steps.check-version.outputs.tag + uses: pypa/gh-action-pypi-publish@v1.8.10 + with: + user: __token__ + password: ${{ secrets.PYPI_TOKEN }} + + - name: Publish package on TestPyPI + if: "! steps.check-version.outputs.tag" + uses: pypa/gh-action-pypi-publish@v1.8.10 + with: + user: __token__ + password: ${{ secrets.TEST_PYPI_TOKEN }} + repository-url: https://test.pypi.org/legacy/ + + - name: Publish the release notes + uses: release-drafter/release-drafter@v5.25.0 + with: + publish: ${{ steps.check-version.outputs.tag != '' }} + tag: ${{ steps.check-version.outputs.tag }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/CI.yaml b/.github/workflows/CI.yaml new file mode 100644 index 00000000..f4cd5fec --- /dev/null +++ b/.github/workflows/CI.yaml @@ -0,0 +1,252 @@ +name: CI + +on: + push: + branches: + - main + paths-ignore: ["docs/**", "scripts/**", "**.md", "*"] + pull_request: + branches: + - main + types: [opened, synchronize, labeled, reopened, ready_for_review] + paths-ignore: ["docs/**", "scripts/**", "**.md", "*"] + + workflow_dispatch: + inputs: + debug_enabled: + description: "Run with tmate.io debugging enabled" + required: true + type: boolean + default: false + run_build_images: + description: "Run build-images job" + required: false + type: boolean + default: false + run_execute_workflow: + description: "Run execute-workflow job" + required: false + type: boolean + default: false + workflow_execution_mode: + description: "Workflow execution mode" + required: false + type: string + default: "prod" + +defaults: + run: + shell: bash + +permissions: + actions: write + contents: read + +# concurrency: +# group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.event.pull_request.number || github.ref }} +# cancel-in-progress: true + +jobs: + + set-variables: + runs-on: ubuntu-latest + outputs: + debug: ${{ steps.set-variables.outputs.debug }} + skip_ci: ${{ steps.set-variables.outputs.skip_ci }} + skip_tests: ${{ steps.set-variables.outputs.skip_tests }} + mode: ${{ steps.set-variables.outputs.mode }} + + steps: + - name: Set action variables + id: set-variables + run: | + DEBUG="false" + MODE="prod" + SKIP_CI="false" + SKIP_TESTS="false" + + if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then + DEBUG="${{ inputs.debug_enabled }}" + MODE="${{ inputs.workflow_execution_mode }}" + fi + + if [[ "${{ github.event_name }}" == "pull_request" ]]; then + if ${{ 
contains(github.event.pull_request.labels.*.name, 'skip-ci') }}; then + SKIP_CI="true" + fi + if ${{ contains(github.event.pull_request.labels.*.name, 'skip-tests') }}; then + SKIP_TESTS="true" + fi + if ${{ contains(github.event.pull_request.labels.*.name, 'actions-debug') }}; then + DEBUG="true" + fi + if ${{ contains(github.event.pull_request.labels.*.name, 'workflow-mode-dev') }}; then + MODE="dev" + fi + fi + + echo "DEBUG=$DEBUG" + echo "MODE=$MODE" + echo "SKIP_CI=$SKIP_CI" + echo "SKIP_TESTS=$SKIP_TESTS" + + echo "DEBUG=$DEBUG" >> $GITHUB_OUTPUT + echo "MODE=$MODE" >> $GITHUB_OUTPUT + echo "SKIP_CI=$SKIP_CI" >> $GITHUB_OUTPUT + echo "SKIP_TESTS=$SKIP_TESTS" >> $GITHUB_OUTPUT + + config-workflows: + needs: [set-variables] + if: ${{ needs.set-variables.outputs.skip_ci != 'true' && (contains(github.event.pull_request.labels.*.name, 'execute-workflow') || (github.event_name == 'workflow_dispatch' && inputs.run_execute_workflow)) }} + runs-on: ubuntu-latest + outputs: + config-path: ${{ steps.config-output.outputs.path }} + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Create Flyte config from YAML template + id: yq-process + uses: mikefarah/yq@master + with: + cmd: "yq e \ + '.admin.endpoint = strenv(FLYTE_CLUSTER_ENDPOINT) | \ + .storage.stow.config.project_id = strenv(GCP_PROJECT_ID) | \ + .storage.stow.config.scopes = strenv(GCP_STORAGE_SCOPES) | \ + .storage.container = strenv(GCP_STORAGE_CONTAINER)' \ + .flyte/config-template.yaml > .flyte/config.yaml" + env: + FLYTE_CLUSTER_ENDPOINT: ${{ secrets.FLYTE_CLUSTER_ENDPOINT }} + GCP_PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }} + GCP_STORAGE_SCOPES: ${{ secrets.GCP_STORAGE_SCOPES }} + GCP_STORAGE_CONTAINER: ${{ secrets.GCP_STORAGE_CONTAINER }} + + - name: Upload Flyte config as an artifact + id: config-output + uses: actions/upload-artifact@v3 + with: + name: flyte-config + path: ${{ secrets.FLYTECTL_CONFIG }} + + test: + runs-on: ubuntu-latest + needs: [set-variables, config-workflows] + if: ${{ needs.set-variables.outputs.skip_ci != 'true' && needs.set-variables.outputs.skip_tests != 'true' }} + concurrency: + group: test-${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.event.pull_request.number || github.ref }}-${{ needs.set-variables.outputs.mode }} + cancel-in-progress: true + strategy: + matrix: + python_version: ['3.10'] + + steps: + - name: Check Variables + run: | + echo "SKIP_CI=${{ needs.set-variables.outputs.skip_ci }}" + echo "SKIP_TESTS=${{ needs.set-variables.outputs.skip_tests }}" + echo "DEBUG=${{ needs.set-variables.outputs.debug }}" + echo "MODE=${{ needs.set-variables.outputs.mode }}" + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup environment + uses: ./.github/actions/setup_environment + with: + python_version: ${{ matrix.python_version }} + debug_enabled: ${{ needs.set-variables.outputs.debug }} + + - name: Download Flyte config + uses: actions/download-artifact@v3 + with: + name: flyte-config + path: .flyte/ + + - name: "Setup tmate debug session" + uses: mxschmitt/action-tmate@v3 + if: ${{ inputs.debug_enabled }} + + - name: Lint and typecheck + run: | + make lint-check + + - name: Run tests + run: | + make test-cov-xml + + - name: Upload coverage + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: false + verbose: true + + build-images: + needs: [set-variables] + if: ${{ needs.set-variables.outputs.skip_ci != 'true' && (contains(github.event.pull_request.labels.*.name, 'build-images') || 
contains(github.event.pull_request.labels.*.name, 'execute-workflow') || (github.event_name == 'workflow_dispatch' && inputs.run_build_images)) }} + uses: ./.github/workflows/build-images.yaml + concurrency: + group: bi-${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.event.pull_request.number || github.ref }}-${{ needs.set-variables.outputs.mode }} + cancel-in-progress: true + secrets: + GCP_ARTIFACT_REGISTRY_PATH: ${{ secrets.GCP_ARTIFACT_REGISTRY_PATH }} + with: + debug_enabled: ${{ needs.set-variables.outputs.debug }} + mode: ${{ needs.set-variables.outputs.mode }} + + execute-workflow: + needs: [config-workflows, build-images, set-variables] + if: ${{ needs.set-variables.outputs.skip_ci != 'true' && (contains(github.event.pull_request.labels.*.name, 'execute-workflow') || (github.event_name == 'workflow_dispatch' && inputs.run_execute_workflow)) }} + runs-on: ubuntu-latest + concurrency: + group: ef-${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.event.pull_request.number || github.ref }}-${{ needs.set-variables.outputs.mode }} + cancel-in-progress: true + strategy: + matrix: + python_version: ['3.10'] + env: + FLYTECTL_CONFIG: ${{ secrets.FLYTECTL_CONFIG }} + FLYTE_OAUTH_CLIENT_SECRET: ${{ secrets.FLYTE_OAUTH_CLIENT_SECRET }} + WORKFLOW_IMAGE: ${{ vars.WORKFLOW_IMAGE }} + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Inject slug/short variables + uses: rlespinasse/github-slug-action@v4 + + - name: Setup environment + uses: ./.github/actions/setup_environment + with: + python_version: ${{ matrix.python_version }} + debug_enabled: ${{ needs.set-variables.outputs.debug }} + + - name: Download Flyte config + uses: actions/download-artifact@v3 + with: + name: flyte-config + path: .flyte/ + + - name: Setup tmate debug session + if: ${{ inputs.debug_enabled == 'true' }} + uses: mxschmitt/action-tmate@v3 + + - name: Execute workflow + id: execute + run: | + make run_${{ needs.set-variables.outputs.mode }} + + - name: Create config tarball + id: save-hydra-outputs + run: | + TAR_FILENAME="hydra_outputs_${GITHUB_SHA_SHORT}.tar.gz" + tar -czf $TAR_FILENAME ./outputs/ + tar -tzf $TAR_FILENAME + echo "HYDRA_OUTPUTS_TAR=$TAR_FILENAME" >> $GITHUB_ENV + + - name: Upload config artifact + uses: actions/upload-artifact@v3 + with: + name: hydra-outputs + path: ${{ env.HYDRA_OUTPUTS_TAR }} diff --git a/.github/workflows/build-images.yaml b/.github/workflows/build-images.yaml new file mode 100644 index 00000000..903411cc --- /dev/null +++ b/.github/workflows/build-images.yaml @@ -0,0 +1,101 @@ +name: Build Images + +on: + workflow_dispatch: + inputs: + debug_enabled: + description: "Run with tmate.io debugging enabled" + required: true + type: boolean + default: false + mode: + description: "Container build mode ('dev' uses branch name image tags and 'prod' uses short sha.)" + required: true + type: choice + default: "dev" + options: + - dev + - prod + workflow_call: + inputs: + debug_enabled: + description: "Run with tmate.io debugging enabled" + required: true + type: string + default: "false" + mode: + description: "Container build mode ('dev' uses branch name image tags and 'prod' uses short sha.)" + required: true + type: string + default: "dev" + secrets: + GCP_ARTIFACT_REGISTRY_PATH: + description: "GCP Artifact Registry Path" + required: true + +env: + ARGO_NAMESPACE: argo + ARGO_VERSION: v3.5.1 + CONTAINER_REGISTRY_URL1: ${{ secrets.GCP_ARTIFACT_REGISTRY_PATH }} + CONTAINER_REGISTRY_URL2: 'ghcr.io/pinellolab' + 
+defaults: + run: + shell: bash + +permissions: + actions: write + contents: read + +jobs: + build: + runs-on: pinellolab-runners + steps: + - name: Setup Runner for Argo + run: | + cd $HOME + sudo apt-get update && sudo apt-get install -y curl + curl -sLO https://github.com/argoproj/argo-workflows/releases/download/${ARGO_VERSION}/argo-linux-amd64.gz + gunzip argo-linux-amd64.gz + chmod +x argo-linux-amd64 + sudo mv ./argo-linux-amd64 /usr/local/bin/argo + argo version + - name: Check out repository code + uses: actions/checkout@v4 + - name: Inject slug/short variables + uses: rlespinasse/github-slug-action@v4 + with: + prefix: CI_ + - name: Setup tmate debug session + if: ${{ inputs.debug_enabled == 'true' }} + uses: mxschmitt/action-tmate@v3 + - name: build + run: | + echo "CI commit sha: $CI_GITHUB_SHA" + echo "GitHub ref: $GITHUB_REF" + echo "CI ref: $CI_GITHUB_REF" + echo "CI ref name: $CI_GITHUB_REF_NAME" + echo "CI HEAD ref: $CI_GITHUB_HEAD_REF" + MODE="${{ inputs.mode }}" + echo "Image build mode: $MODE" + argo version --short + + # disabled conda image build arguments + # -p imageName3="${CI_GITHUB_REPOSITORY_NAME_PART_SLUG}-conda" \ # + # -p containerFilePath3="containers/conda.Dockerfile" \ # + + argo submit .argo/build.yaml \ + --generate-name="${CI_GITHUB_REPOSITORY_NAME_PART_SLUG}-build-${CI_GITHUB_SHA_SHORT}-" \ + -p mode="${MODE}" \ + -p imageName1="${CI_GITHUB_REPOSITORY_NAME_PART_SLUG}" \ + -p containerFilePath1="containers/pkg.Dockerfile" \ + -p imageName2="${CI_GITHUB_REPOSITORY_NAME_PART_SLUG}-gpu" \ + -p containerFilePath2="containers/gpu.Dockerfile" \ + -p appName="${CI_GITHUB_REPOSITORY_NAME_PART_SLUG}" \ + -p branch="${CI_GITHUB_REF_NAME}" \ + -p shortSha="${CI_GITHUB_SHA_SHORT}" \ + -p containerRegistryURL1="${CONTAINER_REGISTRY_URL1}" \ + -p containerRegistryURL2="${CONTAINER_REGISTRY_URL2}" \ + -p gitUrlNoProtocol="github.com/${CI_GITHUB_REPOSITORY_OWNER_PART_SLUG}" \ + --wait --log diff --git a/.github/workflows/constraints.txt b/.github/workflows/constraints.txt new file mode 100644 index 00000000..21a8ddcb --- /dev/null +++ b/.github/workflows/constraints.txt @@ -0,0 +1,2 @@ +pip==23.3 +poetry==1.7.1 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000..9601bd55 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,71 @@ +name: Docs + +on: + workflow_dispatch: + workflow_call: + push: + branches: ["main"] + pull_request: + types: [ready_for_review] + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: Docs-${{ github.event_name == 'pull_request' && github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +defaults: + run: + shell: bash + +jobs: + + set-debug: + name: Set debug flag + runs-on: ubuntu-latest + outputs: + debug: ${{ steps.set-debug.outputs.debug }} + steps: + - id: set-debug + run: | + if [[ "${{ github.event_name }}" == "workflow_dispatch" || "${{ github.event_name }}" == "workflow_call" ]]; then + echo "debug=${{ inputs.debug_enabled }}" >> $GITHUB_OUTPUT + else + echo "debug=false" >> $GITHUB_OUTPUT + fi + + build: + name: Build documentation + runs-on: ubuntu-latest + needs: set-debug + strategy: + matrix: + python_version: ['3.10'] + steps: + - uses: actions/checkout@v4 + - name: Setup environment + uses: ./.github/actions/setup_environment + with: + python_version: ${{ matrix.python_version }} + debug_enabled: ${{ needs.set-debug.outputs.debug }} + - name: Build + run: make docs-build + - name: 
Upload artifact + uses: actions/upload-pages-artifact@v2 + with: + path: ./site + + deploy: + name: Deploy documentation + environment: + name: ${{ github.ref == 'refs/heads/main' && 'github-pages' || 'github-pages-dev' }} + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + needs: build + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v2 diff --git a/.github/workflows/inactive-issues-prs.yml b/.github/workflows/inactivity.yml similarity index 97% rename from .github/workflows/inactive-issues-prs.yml rename to .github/workflows/inactivity.yml index 332a75e9..ea894637 100644 --- a/.github/workflows/inactive-issues-prs.yml +++ b/.github/workflows/inactivity.yml @@ -1,4 +1,4 @@ -name: inactivity +name: Inactivity on: schedule: - cron: "30 1 * * *" diff --git a/.gitignore b/.gitignore index e077f25f..4152dd51 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,15 @@ # Custom +# nix +.devenv +.direnv +# this is a bespoke "manual direnv" folder +.dirman +.pre-commit-config.yaml + +# flyte config file +.flyte/config.yaml + .hatch/ .ruff_cache/ @@ -10,7 +20,8 @@ logs/* # Hydra outputs folder -outputs/* +outputs/ +multirun/ # Checkpoints folder dnadiffusion/checkpoints/* diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 00000000..0765b80b --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,6 @@ +{ + "recommendations": [ + "vscodevim.vim", + "googlecloudtools.cloudcode" + ] +} diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..e7597793 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,33 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "dnadiffusion debug", + "type": "python", + "request": "launch", + "module": "dnadiffusion", + "args": [ + // toggle to print config with overrides vs execute + "-c", "job", + // + // set execution context + "execution_context=local_shell", + // "execution_context=local_cluster_dev", + // "execution_context=remote_dev", + // + // workflow example + "entity_config=lrwine_training_workflow", + "entity_config.inputs._args_.0.logistic_regression.max_iter=1200", + // + // task example + // "entity_config=lrwine_process_data", + // "entity_config.inputs._args_=[]", + // "entity_config.inputs._args_.0.data.data=[[12.0, 0],[13.0, 1],[9.5, 2]]", + // "entity_config.inputs._args_.0.data.columns='[ash, target]'", + ], + "console": "integratedTerminal", + "cwd": "${workspaceFolder}", + "preLaunchTask": "direnv allow" + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..344930f5 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,26 @@ +{ + "cloudcode.autoDependencies": "on", + "workbench.iconTheme": "vscode-icons", + "workbench.colorTheme": "Catppuccin Mocha", + "search.exclude": { + "**/.devenv/**": true, + "**/.direnv/**": true, + "**/.venv/**": true, + "**/.hatch/**": true, + "**/.mypy_cache/**": true, + "**/.pytest_cache/**": true, + "**/.ruff_cache/**": true, + "**/.solid/**": true + }, + "files.watcherExclude": { + "**/.devenv/**": true, + "**/.direnv/**": true, + "**/.venv/**": true, + "**/.hatch/**": true, + "**/.mypy_cache/**": true, + "**/.nox/**": true, + "**/.pytest_cache/**": true, + "**/.ruff_cache/**": true, + "**/.solid/**": true + } +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 00000000..40b6b893 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,13 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "direnv 
allow", + "type": "shell", + "command": "direnv allow && env", + "options": { + "cwd": "${workspaceFolder}" + } + } + ] +} diff --git a/Makefile b/Makefile index 2c910145..7003d7a6 100644 --- a/Makefile +++ b/Makefile @@ -1,141 +1,375 @@ -.PHONY: clean requirements - -################################################################################# -# Inspired by -# Goals of the Makefile is: -# 1. Compiling and linking source code -# 2. Generating documentation -# 3. Running tests -# 4. Packaging the application for distribution -# 5. Cleaning up intermediate files and artifacts - -# To use it type: make $command -# NOTE: majority of the commands are exposed through hatch -# TODO: fix the build process and the commands that accompany the build process -################################################################################# - -################################################################################# -# GLOBALS # -################################################################################# - -PROJECT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST)))) -BUCKET = [OPTIONAL] your-bucket-for-syncing-data (do not include 's3://') -PROJECT_NAME = dna-diffusion -PYTHON_INTERPRETER = python3 - -ifeq (,$(shell which conda)) -HAS_CONDA=False -else -HAS_CONDA=True -endif +.DEFAULT_GOAL := help -################################################################################# -# COMMANDS # -################################################################################# - -## Install Python Dependencies -requirements: - conda install environments/conda/environment.yml - -## Delete all compiled Python files -clean: - find . -type f -name "*.py[co]" -delete - find . -type d -name "__pycache__" -delete - -## Set up python interpreter environment -create_environment: -ifeq (True,$(HAS_CONDA)) - @echo ">>> Detected conda, creating conda environment." -ifeq (3,$(findstring 3,$(PYTHON_INTERPRETER))) - conda create --name $(PROJECT_NAME) python=3 -else - conda create --name $(PROJECT_NAME) python=2.7 -endif - @echo ">>> New conda env created. Activate with:\nsource activate $(PROJECT_NAME)" +ENV_PREFIX ?= ./ +ENV_FILE := $(wildcard $(ENV_PREFIX)/.env) + +ifeq ($(strip $(ENV_FILE)),) +$(info $(ENV_PREFIX)/.env file not found, skipping inclusion) else - @pip install -q virtualenv virtualenvwrapper - @echo ">>> Installing virtualenvwrapper if not already intalled.\nMake sure the following lines are in shell startup file\n\ - export WORKON_HOME=$$HOME/.virtualenvs\nexport PROJECT_HOME=$$HOME/Devel\nsource /usr/local/bin/virtualenvwrapper.sh\n" - @bash -c "source `which virtualenvwrapper.sh`;mkvirtualenv $(PROJECT_NAME) --python=$(PYTHON_INTERPRETER)" - @echo ">>> New virtualenv created. 
Activate with:\nworkon $(PROJECT_NAME)" +include $(ENV_PREFIX)/.env +export endif -## Test python environment is setup correctly -test_environment: - $(PYTHON_INTERPRETER) test_environment.py - -## Run tests -test: requirements - PYTHONPATH=src/ py.test --cov=src/ --cov-report html:cov_html_doctests --doctest-modules -v tests/ - -## [TODO: add commands for packaging the project] - -################################################################################# -# PROJECT RULES # -################################################################################# - -################################################################################# -# Self Documenting Commands # -################################################################################# - -.DEFAULT_GOAL := show-help - -# Inspired by -# sed script explained: -# /^##/: -# * save line in hold space -# * purge line -# * Loop: -# * append newline + line to hold space -# * go to next line -# * if line starts with doc comment, strip comment character off and loop -# * remove target prerequisites -# * append hold space (+ newline) to line -# * replace newline plus comments by `---` -# * print line -# Separate expressions are necessary because labels cannot be delimited by -# semicolon; see -.PHONY: show-help -show-help: - @echo "$$(tput bold)Available rules:$$(tput sgr0)" +GIT_SHORT_SHA = $(shell git rev-parse --short HEAD) +GIT_BRANCH = $(shell git rev-parse --abbrev-ref HEAD) + +##@ Utility +help: ## Display this help. (Default) +# based on "https://gist.github.com/prwhite/8168133?permalink_comment_id=4260260#gistcomment-4260260" + @grep -hE '^[A-Za-z0-9_ \-]*?:.*##.*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' + +##@ Utility +help_sort: ## Display alphabetized version of help. + @grep -hE '^[A-Za-z0-9_ \-]*?:.*##.*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' + + +#-------- +# package +#-------- + +test: ## Run tests. See pyproject.toml for configuration. + poetry run pytest + +test-cov-xml: ## Run tests with coverage + poetry run pytest --cov-report=xml + +lint: ## Run linter + poetry run ruff format . + poetry run ruff --fix . + +lint-check: ## Run linter in check mode + poetry run ruff format --check . + poetry run ruff . + +typecheck: ## Run typechecker + poetry run pyright + +docs-build: ## Build documentation + poetry run mkdocs build + +docs-serve: ## Serve documentation +docs-serve: docs-build + poetry run mkdocs serve + +export_pip_requirements: ## Export requirements.txt for pip. + poetry export \ + --format=requirements.txt \ + --with=workflows,bioinformatics \ + --output=requirements.txt \ + --without-hashes + +#------------- +# CI +#------------- + +browse: ## Open github repo in browser at HEAD commit. + gh browse $(GIT_SHORT_SHA) + +GH_ACTIONS_DEBUG ?= false + +ci: ## Run CI (GH_ACTIONS_DEBUG default is false). + gh workflow run "CI" --ref $(GIT_BRANCH) -f debug_enabled=$(GH_ACTIONS_DEBUG) + +build_images: ## Run Build Images (GH_ACTIONS_DEBUG default is false). + gh workflow run "Build Images" --ref $(GIT_BRANCH) -f debug_enabled=$(GH_ACTIONS_DEBUG) + +ci_view_workflow: ## Open CI workflow summary. + gh workflow view "CI" + +build_images_view_workflow: ## Open Build Images workflow summary. 
+ gh workflow view "Build Images" + +# CPU | MEM | DISK | MACHINE_TYPE +# ----|-----|------|---------------- +# 2 | 8 | 32 | basicLinux32gb +# 4 | 16 | 32 | standardLinux32gb +# 8 | 32 | 64 | premiumLinux +# 16 | 64 | 128 | largePremiumLinux +MACHINE_TYPE ?= standardLinux32gb +codespace_create: ## Create codespace. make -n codespace_create MACHINE_TYPE=largePremiumLinux + gh codespace create -R $(GH_REPO) -b $(GIT_BRANCH) -m $(MACHINE_TYPE) + +code: ## Open codespace in browser. + gh codespace code -R $(GH_REPO) --web + +codespace_list: ## List codespace. + PAGER=cat gh codespace list + +codespace_stop: ## Stop codespace. + gh codespace stop + +codespace_delete: ## Delete codespace. + gh codespace delete + +docker_login: ## Login to ghcr docker registry. Check regcreds in $HOME/.docker/config.json. + docker login ghcr.io -u $(GH_ORG) -p $(GITHUB_TOKEN) + +EXISTING_IMAGE_TAG ?= main +NEW_IMAGE_TAG ?= $(GIT_BRANCH) + +# Default bumps main to the checked out branch for dev purposes +tag_images: ## Add tag to existing images, (default main --> branch, override with make -n tag_images NEW_IMAGE_TAG=latest). + crane tag $(WORKFLOW_IMAGE):$(EXISTING_IMAGE_TAG) $(NEW_IMAGE_TAG) + crane tag ghcr.io/$(GH_ORG)/$(GH_REPO_NAME_SLUG):$(EXISTING_IMAGE_TAG) $(NEW_IMAGE_TAG) + +list_gcr_workflow_image_tags: ## List images in gcr. + gcloud container images list --repository=$(GCP_ARTIFACT_REGISTRY_PATH) + gcloud container images list-tags $(WORKFLOW_IMAGE) + +#------------------- +# workflow execution +#------------------- + +run_help: ## Print hydra help for execute script. + poetry run dna --help + +# Capture additional arguments to pass to hydra-zen cli +# converting them to make do-nothing targets +# supports passing hydra overrides as ARGS, e.g.: +# make run HYDRA_OVERRIDES="entity_config.inputs.logistic_regression.max_iter=2000 execution_context=local_shell" +HYDRA_OVERRIDES = $(filter-out $@,$(MAKECMDGOALS)) +%: + @: + +.PHONY: run +run: ## Run registered workflow in remote dev mode. (default) + poetry run dna $(HYDRA_OVERRIDES) + +run_dev: ## Run registered workflow in remote dev mode. + poetry run dna execution_context=remote_dev $(HYDRA_OVERRIDES) + +run_prod: ## Run registered workflow in remote prod mode. (ci default) + poetry run dna execution_context=remote_prod $(HYDRA_OVERRIDES) + +run_local_cluster: ## Run registered workflow in local cluster dev mode. + poetry run dna execution_context=local_cluster_dev $(HYDRA_OVERRIDES) + +run_local: ## Run registered workflow in local shell mode. (only with all python tasks) + poetry run dna execution_context=local_shell $(HYDRA_OVERRIDES) + +#------------------ +# local image build +#------------------ + +# make -n build_local_image LOCAL_CONTAINER_REGISTRY=localhost:30000 GH_REPO_NAME_SLUG=dna-diffusion +build_local_image: ## Build local image. + @echo "building image: $(LOCAL_CONTAINER_REGISTRY)/$(GH_REPO_NAME_SLUG):$(GIT_BRANCH)" + @echo + docker images -a --digests $(LOCAL_CONTAINER_REGISTRY)/$(GH_REPO_NAME_SLUG) + @echo + docker build -t $(LOCAL_CONTAINER_REGISTRY)/$(GH_REPO_NAME_SLUG):$(GIT_BRANCH) -f $(ACTIVE_DOCKERFILE) . + @echo + docker push $(LOCAL_CONTAINER_REGISTRY)/$(GH_REPO_NAME_SLUG):$(GIT_BRANCH) + @echo + docker images -a --digests $(LOCAL_CONTAINER_REGISTRY)/$(GH_REPO_NAME_SLUG) + +# Use as: make remove_local_image +# or: make remove_local_image GIT_BRANCH=tag-other-than-current-branch +# or: make remove_local_image GIT_BRANCH=sha256: +remove_local_image: ## Remove local image. 
+ @echo "removing image: $(LOCAL_CONTAINER_REGISTRY)/$(GH_REPO_NAME_SLUG):$(GIT_BRANCH)" + @echo + docker images -a --digests $(LOCAL_CONTAINER_REGISTRY)/$(GH_REPO_NAME_SLUG) + @echo + # Check if GIT_BRANCH is a sha256 digest + if echo $(GIT_BRANCH) | grep -qE 'sha256:[0-9a-f]{64}'; then \ + docker rmi $(LOCAL_CONTAINER_REGISTRY)/$(GH_REPO_NAME_SLUG)@$(GIT_BRANCH); \ + else \ + docker rmi $(LOCAL_CONTAINER_REGISTRY)/$(GH_REPO_NAME_SLUG):$(GIT_BRANCH); \ + fi @echo - @sed -n -e "/^## / { \ - h; \ - s/.*//; \ - :doc" \ - -e "H; \ - n; \ - s/^## //; \ - t doc" \ - -e "s/:.*//; \ - G; \ - s/\\n## /---/; \ - s/\\n/ /g; \ - p; \ - }" ${MAKEFILE_LIST} \ - | LC_ALL='C' sort --ignore-case \ - | awk -F '---' \ - -v ncol=$$(tput cols) \ - -v indent=19 \ - -v col_on="$$(tput setaf 6)" \ - -v col_off="$$(tput sgr0)" \ - '{ \ - printf "%s%*s%s ", col_on, -indent, $$1, col_off; \ - n = split($$2, words, " "); \ - line_length = ncol - indent; \ - for (i = 1; i <= n; i++) { \ - line_length -= length(words[i]) + 1; \ - if (line_length <= 0) { \ - line_length = ncol - indent - length(words[i]) - 1; \ - printf "\n%*s ", -indent, " "; \ - } \ - printf "%s ", words[i]; \ - } \ - printf "\n"; \ - }' \ - | more $(shell test $(shell uname) = Darwin && echo '--no-init --raw-control-chars') - -# Custom function for fetching new/current version for release -define fetch_version - @bump2version --allow-dirty --dry-run --list $(2) | grep $(1)_version | sed -r s,"^.*=",, -endef \ No newline at end of file + docker images -a --digests $(LOCAL_CONTAINER_REGISTRY)/$(GH_REPO_NAME_SLUG) + +#---- +# nix +#---- + +meta: ## Generate nix flake metadata. + nix flake metadata --impure + nix flake show --impure + +up: ## Update nix flake lock file. + nix flake update --impure --accept-flake-config + nix flake check --impure + +dup: ## Debug update nix flake lock file. + nix flake update --impure --accept-flake-config + nix flake check --show-trace --print-build-logs --impure + +re: ## Reload direnv. + direnv reload + +al: ## Enable direnv. + direnv allow + +devshell_info: ## Print devshell info. + nix build .#devShells.$(shell nix eval --impure --expr 'builtins.currentSystem').default --impure + nix path-info --recursive ./result + du -chL ./result + rm ./result + +cache: ## Push devshell to cachix + nix build --json \ + .#devShells.$(shell nix eval --impure --expr 'builtins.currentSystem').default \ + --impure \ + --accept-flake-config | \ + jq -r '.[].outputs | to_entries[].value' | \ + cachix push $(CACHIX_CACHE_NAME) + +#------- +# system +#------- + +uninstall_nix: ## Uninstall nix. + (cat /nix/receipt.json && \ + /nix/nix-installer uninstall) || echo "nix not found, skipping uninstall" + +install_nix: ## Install nix. Check script before execution: https://install.determinate.systems/nix . +install_nix: uninstall_nix + @which nix > /dev/null || \ + curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | sh -s -- install + +install_direnv: ## Install direnv to `/usr/local/bin`. Check script before execution: https://direnv.net/ . + @which direnv > /dev/null || \ + (curl -sfL https://direnv.net/install.sh | bash && \ + sudo install -c -m 0755 direnv /usr/local/bin && \ + rm -f ./direnv) + @echo "see https://direnv.net/docs/hook.html" + +setup_dev: ## Setup nix development environment. +setup_dev: install_direnv install_nix + @. 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && \ + nix profile install nixpkgs#cachix && \ + echo "trusted-users = root $$USER" | sudo tee -a /etc/nix/nix.conf && sudo pkill nix-daemon && \ + cachix use devenv + +.PHONY: devshell +devshell: ## Enter nix devshell. See use_flake in `direnv stdlib`. + ./scripts/flake + +cdirenv: ## !!Enable direnv in zshrc.!! + @if ! grep -q 'direnv hook zsh' "${HOME}/.zshrc"; then \ + printf '\n%s\n' 'eval "$$(direnv hook zsh)"' >> "${HOME}/.zshrc"; \ + fi + +cstarship: ## !!Enable starship in zshrc.!! + @if ! grep -q 'starship init zsh' "${HOME}/.zshrc"; then \ + printf '\n%s\n' 'eval "$$(starship init zsh)"' >> "${HOME}/.zshrc"; \ + fi + +catuin: ## !!Enable atuin in zshrc.!! + @if ! grep -q 'atuin init zsh' "${HOME}/.zshrc"; then \ + printf '\n%s\n' 'eval "$$(atuin init zsh)"' >> "${HOME}/.zshrc"; \ + fi + +czsh: ## !!Enable zsh with command line info and searchable history.!! +czsh: catuin cstarship cdirenv + +install_flytectl: ## Install flytectl. Check script before execution: https://docs.flyte.org/ . + @which flytectl > /dev/null || \ + (curl -sL https://ctl.flyte.org/install | bash) + +install_poetry: ## Install poetry. Check script before execution: https://python-poetry.org/docs/#installation . + @which poetry > /dev/null || (curl -sSL https://install.python-poetry.org | python3 -) + +install_crane: ## Install crane. Check docs before execution: https://github.com/google/go-containerregistry/blob/main/cmd/crane/doc/crane.md . + @which crane > /dev/null || ( \ + set -e; \ + CRANE_VERSION="0.16.1"; \ + OS=$$(uname -s | tr '[:upper:]' '[:lower:]'); \ + ARCH=$$(uname -m); \ + case $$ARCH in \ + x86_64|amd64) ARCH="x86_64" ;; \ + aarch64|arm64) ARCH="arm64" ;; \ + *) echo "Unsupported architecture: $$ARCH" && exit 1 ;; \ + esac; \ + TMP_DIR=$$(mktemp -d); \ + trap 'rm -rf "$$TMP_DIR"' EXIT; \ + echo "Downloading crane $$CRANE_VERSION for $$OS $$ARCH to $$TMP_DIR"; \ + FILENAME="go-containerregistry_$$OS"_$$ARCH".tar.gz"; \ + URL="https://github.com/google/go-containerregistry/releases/download/v$$CRANE_VERSION/$$FILENAME"; \ + curl -sSL "$$URL" | tar xz -C $$TMP_DIR; \ + sudo mv $$TMP_DIR/crane /usr/local/bin/crane; \ + echo "Crane installed successfully to /usr/local/bin/crane" \ + ) + +env_print: ## Print a subset of environment variables defined in ".env" file. + env | grep "GITHUB\|GH_\|GCP_\|FLYTE\|WORKFLOW" | sort + +# gh secret set GOOGLE_APPLICATION_CREDENTIALS_DATA --repo="$(GH_REPO)" --body='$(shell cat $(GCP_GACD_PATH))' +ghsecrets: ## Update github secrets for GH_REPO from ".env" file. 
+ @echo "secrets before updates:" + @echo + PAGER=cat gh secret list --repo=$(GH_REPO) + @echo + gh secret set FLYTE_CLUSTER_ENDPOINT --repo="$(GH_REPO)" --body="$(FLYTE_CLUSTER_ENDPOINT)" + gh secret set FLYTE_OAUTH_CLIENT_SECRET --repo="$(GH_REPO)" --body="$(FLYTE_OAUTH_CLIENT_SECRET)" + gh secret set FLYTECTL_CONFIG --repo="$(GH_REPO)" --body="$(FLYTECTL_CONFIG)" + gh secret set FLYTECTL_CONFIG_TEMPLATE --repo="$(GH_REPO)" --body="$(FLYTECTL_CONFIG_TEMPLATE)" + gh secret set GCP_PROJECT_ID --repo="$(GH_REPO)" --body="$(GCP_PROJECT_ID)" + gh secret set GCP_STORAGE_SCOPES --repo="$(GH_REPO)" --body="$(GCP_STORAGE_SCOPES)" + gh secret set GCP_STORAGE_CONTAINER --repo="$(GH_REPO)" --body="$(GCP_STORAGE_CONTAINER)" + gh secret set GCP_ARTIFACT_REGISTRY_PATH --repo="$(GH_REPO)" --body="$(GCP_ARTIFACT_REGISTRY_PATH)" + @echo + @echo secrets after updates: + @echo + PAGER=cat gh secret list --repo=$(GH_REPO) + +ghvars: ## Update github secrets for GH_REPO from ".env" file. + @echo "variables before updates:" + @echo + PAGER=cat gh variable list --repo=$(GH_REPO) + @echo + gh variable set WORKFLOW_IMAGE --repo="$(GH_REPO)" --body="$(WORKFLOW_IMAGE)" + @echo + @echo variables after updates: + @echo + PAGER=cat gh variable list --repo=$(GH_REPO) + +update_config: ## Update flytectl config file from template. + yq e '.admin.endpoint = strenv(FLYTE_CLUSTER_ENDPOINT) | .storage.stow.config.project_id = strenv(GCP_PROJECT_ID) | .storage.stow.config.scopes = strenv(GCP_STORAGE_SCOPES) | .storage.container = strenv(GCP_STORAGE_CONTAINER)' \ + $(FLYTECTL_CONFIG_TEMPLATE) > $(FLYTECTL_CONFIG) + +tree: ## Print directory tree. + tree -a --dirsfirst -L 4 -I ".git|.direnv|.devenv|*pycache*|*ruff_cache*|*pytest_cache*|outputs|multirun|conf|scripts" + +approve_prs: ## Approve github pull requests from bots: PR_ENTRIES="2-5 10 12-18" + for entry in $(PR_ENTRIES); do \ + if [[ "$$entry" == *-* ]]; then \ + start=$${entry%-*}; \ + end=$${entry#*-}; \ + for pr in $$(seq $$start $$end); do \ + gh pr review $$pr --approve; \ + done; \ + else \ + gh pr review $$entry --approve; \ + fi; \ + done + +CURRENT_BRANCH_OR_SHA = $(shell git symbolic-ref --short HEAD 2>/dev/null || git rev-parse HEAD) + +get_pr_source_branch: ## Get source branch from detached head as in PR CI checkouts. +ifndef PR + $(error PR is not set. Usage: make get_pr_source_branch PR=) +endif + + @echo "Current Branch or SHA: $(CURRENT_BRANCH_OR_SHA)" + + # The command + # gh pr checkout --detach $(PR) + # checks out the PR source branch commit which is NOT equivalent to checking + # out the staged merge commit. 
The latter is what occurs in PR CI checkouts + # which is available at `refs/pull/$(PR)/merge` and we store in $(PR)-merge + git fetch --force origin pull/$(PR)/merge:$(PR)-merge + git checkout $(PR)-merge + + git fetch origin +refs/heads/*:refs/remotes/origin/* + PAGER=cat git log -1 + @echo "\nExtracted Source Commit SHA:" + git log -1 --pretty=%B | grep -oE 'Merge [0-9a-f]{40}' | awk '{print $$2}' + @echo "\nExtracted Source Branch Name:" + source_commit_sha=$$(git log -1 --pretty=%B | grep -oE 'Merge [0-9a-f]{40}' | awk '{print $$2}') && \ + git branch -r --contains $$source_commit_sha | grep -v HEAD | sed -n 's|origin/||p' | xargs + + @echo "\nReturning to Branch or SHA: $(CURRENT_BRANCH_OR_SHA)" + git checkout $(CURRENT_BRANCH_OR_SHA) diff --git a/README.md b/README.md index 59d4bcde..b17b0834 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,29 @@ --- - +## Contents + +- [Contents](#contents) +- [Abstract](#abstract) +- [Introduction and Prior Work](#introduction-and-prior-work) +- [Proposed framework](#proposed-framework) +- [Tasks and potential roadmap](#tasks-and-potential-roadmap) +- [Deliverables](#deliverables) +- [Datasets](#datasets) +- [Models](#models) +- [Input modality](#input-modality) +- [APIs](#apis) +- [Paper](#paper) +- [Resources Requirements](#resources-requirements) +- [Broader Impact](#broader-impact) +- [Reproducibility](#reproducibility) +- [Failure Case](#failure-case) +- [Preliminary Findings](#preliminary-findings) +- [Next Steps](#next-steps) +- [How to contribute](#how-to-contribute) +- [Development](#development) +- [Serve the documentation](#serve-the-documentation) +- [Contributors ✨](#contributors-) ## Abstract @@ -40,59 +62,61 @@ Here, we instead propose to use a large-scale data-driven approach to learn and ## Introduction and Prior Work -The goal of this project is to investigate the application and adaptation of recent diffusion models (see https://lilianweng.github.io/posts/2021-07-11-diffusion-models/ for a nice intro and references) to genomics data. Diffusion models are powerful models that have been used for image generation (e.g. stable diffusion, DALL-E), music generation (recent version of the magenta project) with outstanding results. -A particular model formulation called "guided" diffusion allows to bias the generative process toward a particular direction if during training a text or continuous/discrete labels are provided. This allows the creation of "AI artists" that, based on a text prompt, can create beautiful and complex images (a lot of examples here: https://www.reddit.com/r/StableDiffusion/). +The goal of this project is to investigate the application and adaptation of recent diffusion models (see for a nice intro and references) to genomics data. Diffusion models are powerful models that have been used for image generation (e.g. stable diffusion, DALL-E), music generation (recent version of the magenta project) with outstanding results. +A particular model formulation called "guided" diffusion allows to bias the generative process toward a particular direction if during training a text or continuous/discrete labels are provided. This allows the creation of "AI artists" that, based on a text prompt, can create beautiful and complex images (a lot of examples here: ). Some groups have reported the possibility of generating synthetic DNA regulatory elements in a context-dependent system, for example, cell-specific enhancers. 
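The `update_config` target above (and the `config-workflows` job in CI.yaml) renders `.flyte/config-template.yaml` into `.flyte/config.yaml` by assigning four fields from the environment with yq. A minimal Python sketch of the same substitution, assuming PyYAML is installed and the variables from `.example.env` are exported; this is an illustration, not code shipped in this PR:

```python
# Sketch of the env substitution that `make update_config` performs with yq.
# Assumes `pip install pyyaml` and that FLYTE_CLUSTER_ENDPOINT, GCP_PROJECT_ID,
# GCP_STORAGE_SCOPES, and GCP_STORAGE_CONTAINER are exported (e.g. via .env).
import os

import yaml

with open(".flyte/config-template.yaml") as src:
    config = yaml.safe_load(src)

config["admin"]["endpoint"] = os.environ["FLYTE_CLUSTER_ENDPOINT"]
config["storage"]["stow"]["config"]["project_id"] = os.environ["GCP_PROJECT_ID"]
config["storage"]["stow"]["config"]["scopes"] = os.environ["GCP_STORAGE_SCOPES"]
config["storage"]["container"] = os.environ["GCP_STORAGE_CONTAINER"]  # same path the yq expression writes

with open(".flyte/config.yaml", "w") as dst:
    yaml.safe_dump(config, dst, sort_keys=False)  # note: unlike yq, PyYAML drops comments
```

One detail the sketch preserves on purpose: the yq expression assigns `.storage.container`, while the template nests `container` under `storage.stow.config`, so the rendered file ends up carrying both keys.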
-(https://elifesciences.org/articles/41279 , -https://www.biorxiv.org/content/10.1101/2022.07.26.501466v1) +( , +) ### Step 1: generative model We propose to develop models that can generate cell type specific or context specific DNA-sequences with certain regulatory properties based on an input text prompt. For example: -- "A sequence that will correspond to open (or closed) chromatin in cell type X" +- "A sequence that will correspond to open (or closed) chromatin in cell type X" -- "A sequence that will activate a gene to its maximum expression level in cell type X" +- "A sequence that will activate a gene to its maximum expression level in cell type X" -- "A sequence active in cell type X that contains binding site(s) for the transcription factor Y" +- "A sequence active in cell type X that contains binding site(s) for the transcription factor Y" -- "A sequence that activates a gene in liver and heart, but not in brain" +- "A sequence that activates a gene in liver and heart, but not in brain" ### Step 2: extensions and improvements Beyond individual regulatory elements, so called "Locus Control Regions" are known to harbour multiple regulatory elements in specific configurations, working in concert to result in more complex regulatory rulesets. Having parallels with "collaging" approaches, in which multiple stable diffusion steps are combined into one final (graphical) output, we want to apply this notion to DNA sequences with the goal of designing larger regulatory loci. This is a particularly exciting and, to our knowledge, hitherto unexplored direction. -Besides synthetic DNA creations, a diffusion model can help understand and interpret regulatory sequence element components and for instance be a valuable tool for studying single nucleotide variations (https://www.biorxiv.org/content/10.1101/2022.08.22.504706v1) and evolution. -(https://genomebiology.biomedcentral.com/articles/10.1186/s13059-018-1502-5) +Besides synthetic DNA creations, a diffusion model can help understand and interpret regulatory sequence element components and for instance be a valuable tool for studying single nucleotide variations () and evolution. +() Taken together, we believe our work can accelerate our understanding of the intrinsic properties of DNA-regulatory sequence in normal development and different diseases. +[↪](#contents) + ## Proposed framework -For this work we propose to build a Bit Diffusion model based on the formulation proposed by Chen, Zhang and Hinton https://arxiv.org/abs/2208.04202. This model is a generic approach for generating discrete data with continuous diffusion models. An implementation of this approach already exists, and this is a potential code base to build upon: +For this work we propose to build a Bit Diffusion model based on the formulation proposed by Chen, Zhang and Hinton . This model is a generic approach for generating discrete data with continuous diffusion models. An implementation of this approach already exists, and this is a potential code base to build upon: -https://github.com/lucidrains/bit-diffusion + -## Tasks and potential roadmap: +## Tasks and potential roadmap -- Collecting genomic datasets -- Implementing the guided diffusion based on the code base -- Thinking about the best encoding of biological information for the guided diffusion (e.g. 
-## Tasks and potential roadmap:
+## Tasks and potential roadmap

-- Collecting genomic datasets
-- Implementing the guided diffusion based on the code base
-- Thinking about the best encoding of biological information for the guided diffusion (e.g.
-cell type: K562, very strong activating sequence for chromatin, or cell type: GM12878, very open chromatin)
-- Plans for validation based on existing datasets or how to perform new biological experiments (we need to think about potential active learning strategies).
+- Collecting genomic datasets
+- Implementing the guided diffusion based on the code base
+- Thinking about the best encoding of biological information for the guided diffusion (e.g. cell type: K562, very strong activating sequence for chromatin, or cell type: GM12878, very open chromatin)
+- Plans for validation based on existing datasets or how to perform new biological experiments (we need to think about potential active learning strategies).

## Deliverables

-- **Dataset:** compile and provide a complete database of cell-specific regulatory regions (DNAse assay) to allow scientists to train and generate different diffusion models based on the regulatory sequences.
+- **Dataset:** compile and provide a complete database of cell-specific regulatory regions (DNase assay) to allow scientists to train and generate different diffusion models based on the regulatory sequences.

-- **Models:** Provide a model that can generate regulatory sequences given a specific cell type and genomic context.
+- **Models:** Provide a model that can generate regulatory sequences given a specific cell type and genomic context.

-- **API:** Provide an API to make it possible to manipulate DNA regulatory models and a visual playground to generate synthetic contextual sequences.
+- **API:** Provide an API to make it possible to manipulate DNA regulatory models and a visual playground to generate synthetic contextual sequences.

## Datasets

-### DHS Index:
+### DHS Index

Chromatin (DNA + associated proteins) that is actively used for the regulation of genes (i.e. "regulatory elements") is typically accessible to DNA-binding proteins such as transcription factors ([review](https://www.nature.com/articles/s41576-018-0089-8), [relevant paper](https://www.nature.com/articles/nature11232)).
Through the use of a technique called [DNase-seq](https://en.wikipedia.org/wiki/DNase-Seq), we've measured which parts of the genome are accessible across 733 human biosamples encompassing 438 cell and tissue types and states, resulting in more than 3.5 million DNase Hypersensitive Sites (DHSs).

@@ -103,26 +127,28 @@ we've put together smaller subsets of these data that can be used to train model

Please find these data, along with a data dictionary, [here](https://www.meuleman.org/research/synthseqs/#material).
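As an illustration of how such sequences are typically fed to a model, below is a minimal, self-contained sketch (our own, not part of the released data tooling) that one-hot encodes a 200 bp sequence into the 4 x 200 array layout referred to later in this README:

```python
import numpy as np

BASES = "ACGT"

def one_hot(seq: str) -> np.ndarray:
    """One-hot encode a DNA sequence into a 4 x len(seq) array (rows: A, C, G, T).

    Real DHS data may contain ambiguous bases (e.g. N), which would need
    explicit handling; this sketch assumes a clean ACGT alphabet.
    """
    idx = np.array([BASES.index(b) for b in seq.upper()])
    out = np.zeros((4, len(seq)), dtype=np.float32)
    out[idx, np.arange(len(seq))] = 1.0
    return out

# A toy 200 bp sequence; real inputs would come from the DHS subsets linked above.
seq = "ACGT" * 50
x = one_hot(seq)
print(x.shape)  # (4, 200)
```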

-### Other potential datasets:
+### Other potential datasets

-- DNA-sequences data corresponding to annotated regulatory sequences such as gene promoters or distal regulatory sequences such as enhancers annotated (based on chromatin marks or accessibility) for hundreds of cells by the NHGRI funded projects like ENCODE or Roadmap Epigenomics.
+- DNA-sequences data corresponding to annotated regulatory sequences such as gene promoters or distal regulatory sequences such as enhancers annotated (based on chromatin marks or accessibility) for hundreds of cells by the NHGRI-funded projects like ENCODE or Roadmap Epigenomics.

-- Data from MPRA assays that test the regulatory potential of hundred of DNA sequences in parallel (https://elifesciences.org/articles/69479.pdf ,
-https://www.nature.com/articles/s41588-021-01009-4 , ... )
-- MIAA assays that test the ability of open chromatin within a given cell type.
+- Data from MPRA assays that test the regulatory potential of hundreds of DNA sequences in parallel (<https://elifesciences.org/articles/69479.pdf>, <https://www.nature.com/articles/s41588-021-01009-4>, ...)
+
+- MIAA assays that test the ability of open chromatin within a given cell type.
+
+[↪](#contents)

## Models

-## Input modality:
+## Input modality

-- A) Cell type + regulatory element ex: Brain tumor cell weak Enhancer
-- B) Cell type + regulatory elements + TF combination (presence or absence) Ex: Prostate cell, enhancer , AR(present), TAFP2a (present) and ER (absent),
-- C) Cell type + TF combination + TF positions Ex: Blood Stem cell GATA2(presence) and ER(absent) + GATA1 (100-108)
-- D) Sequencing having a GENETIC VARIANT -> low number diffusion steps = nucleotide importance prediction
+A) Cell type + regulatory element. Ex: Brain tumor cell, weak enhancer
+B) Cell type + regulatory elements + TF combination (presence or absence). Ex: Prostate cell, enhancer, AR (present), TFAP2A (present) and ER (absent)
+C) Cell type + TF combination + TF positions. Ex: Blood stem cell, GATA2 (present) and ER (absent) + GATA1 (100-108)
+D) Sequence containing a GENETIC VARIANT -> low number of diffusion steps = nucleotide importance prediction

-### Output:
+### Output

-- DNA-sequence
+DNA-sequence
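To make the input side concrete, here is a minimal, hypothetical sketch of how conditioning information of the kinds listed in A)-D) above could be encoded as discrete labels for guided diffusion. The vocabularies and field names are illustrative assumptions, not part of the actual codebase:

```python
from dataclasses import dataclass, field

# Illustrative label vocabularies; real ones would be derived from the dataset.
CELL_TYPES = {"K562": 0, "GM12878": 1, "prostate": 2, "blood_stem": 3}
ELEMENT_TYPES = {"weak_enhancer": 0, "strong_enhancer": 1, "promoter": 2}

@dataclass
class Condition:
    """A hypothetical conditioning record covering modalities A-C."""
    cell_type: str
    element_type: str
    tf_presence: dict[str, bool] = field(default_factory=dict)  # e.g. {"AR": True, "ER": False}
    tf_positions: dict[str, tuple[int, int]] = field(default_factory=dict)  # e.g. {"GATA1": (100, 108)}

    def to_labels(self) -> dict:
        """Map names to the integer/boolean labels a guided model would consume."""
        return {
            "cell_type": CELL_TYPES[self.cell_type],
            "element_type": ELEMENT_TYPES[self.element_type],
            "tf_presence": {tf: int(p) for tf, p in self.tf_presence.items()},
            "tf_positions": self.tf_positions,
        }

# Modality C: blood stem cell, GATA2 present, ER absent, GATA1 at positions 100-108.
cond = Condition("blood_stem", "strong_enhancer",
                 tf_presence={"GATA2": True, "ER": False},
                 tf_positions={"GATA1": (100, 108)})
print(cond.to_labels())
```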

**Model size:**
The number of enhancers and biological sequences isn’t bigger than the number of available images on the Lion dataset. The dimensionality of our generated DNA outputs should not be longer than 4 bases [A,C,T,G] X ~1kb. The final models should be bigger than ~2 GB.

@@ -130,6 +156,8 @@ The number of enhancers and biological sequences isn’t bigger than the number

**Models:** Different models can be created based on the total sequence length.

+[↪](#contents)
+
## APIs

TBD depending on interest

@@ -147,18 +175,14 @@ Our group and collaborators present a substantial reputation in the academic com

Our initial model can be trained with small datasets (~1k sequences) in about 3 hours ( ~500 epochs) on a colab PRO (24GB ram ) single GPU Tesla K80. Based on this we expect that to train this or similar models on the large dataset mentioned above ( ~3 million sequences (4x200) we will need several high-performant GPUs for about 3 months. ( Optimization suggestions are welcome!)

-## Timeline
-
-**What is a (rough) timeline for this project?**
-
-6 months to 1 year.
-
## Broader Impact

**How is the project expected to positively impact biological research at large?**

We believe this project will help to better understand genomic regulatory sequences: their composition and the potential regulators acting on them in different biological contexts and with the potential to create therapeutics based on this knowledge.

+[↪](#contents)
+
## Reproducibility

We will use best practices to make sure our code is reproducible and with versioning. We will release data processing scripts and conda environments/docker to make sure other researchers can easily run it.

@@ -178,27 +202,25 @@ Using the Bit Diffusion model we were able to reconstruct 200 bp sequences that

Expand the model length to generate complete regulatory regions (enhancers + Gene promoter pairs)
Use our synthetic enhancers on in-vivo models and check how they can regulate the transcriptional dynamics in biological scenarios (Besides the MPRA arrays).

+[↪](#contents)
+
## How to contribute

If this project sounds exciting to you, **please join us**!

-Join the OpenBioML discord: https://discord.gg/Y9CN2dUzQJ, we are discussing this project in the **dna-diffusion** channel and we will provide instructions on how to get involved.
-
-## Known contributors
-
-You can access the contributor list [here](https://docs.google.com/spreadsheets/d/1_nxDI6DIoWbyUDpIDX-tJIILejrJ0kEYrcXXdWlzPvU/edit#gid=1871728801).
+Join the OpenBioML discord: <https://discord.gg/Y9CN2dUzQJ>, we are discussing this project in the **dna-diffusion** channel and we will provide instructions on how to get involved.

## Development

### Setup environment

-We use [hatch](https://hatch.pypa.io/latest/install/) to manage the development environment and production build. It is often convenient to install hatch with [pipx](https://pypa.github.io/pipx/installation/).
+We use [poetry](https://python-poetry.org/docs/#installation) to manage the development environment and production build. It is often convenient to install it with [pipx](https://pypa.github.io/pipx/installation/).
+The commands from the [Makefile](./Makefile) will be listed with brief help on running `make` alone. If you know what to do you can enable your system's (darwin or linux only) [devShell](https://nixos.wiki/wiki/Flakes#Output_schema) declared in the [nix flake](./flake.nix) by toggling the comments in the [direnv config](./.envrc).

### Run unit tests

You can run all the tests with:

```bash
-hatch run test
+make test
```

### Format the code

@@ -206,7 +228,7 @@ hatch run test
Execute the following command to apply linting and check typing:

```bash
-hatch run lint
+make lint
```

### Publish a new version

@@ -214,17 +236,19 @@ hatch run lint
You can check the current version with:

```bash
-hatch version
+poetry version
```

-You can bump the version with commands such as `hatch version dev` or `patch`, `minor` or `major`. Or edit the `src/dnadiffusion/__about__.py` file. After changing the version, when you push to github, the Test Release workflow will automatically publish it on Test-PyPI and a github release will be created as a draft.
+You can bump the version with commands such as `poetry version patch`, `minor` or `major`. Or edit the [pyproject.toml](./pyproject.toml) file. After changing the version, when you push to GitHub, the CD workflow will automatically publish it on Test-PyPI and a GitHub release will be created as a draft.
+
+[↪](#contents)

## Serve the documentation

You can serve the mkdocs documentation with:

```bash
-hatch run docs-serve
+make docs-serve
```

This will automatically watch for changes in your code.

@@ -255,4 +279,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d

-This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
\ No newline at end of file
+This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
+You can access the original contributor list [here](https://docs.google.com/spreadsheets/d/1_nxDI6DIoWbyUDpIDX-tJIILejrJ0kEYrcXXdWlzPvU/edit#gid=1871728801).
+
+[↪](#contents)
diff --git a/dockerfiles/Dockerfile b/containers/Dockerfile
similarity index 100%
rename from dockerfiles/Dockerfile
rename to containers/Dockerfile
diff --git a/containers/conda-manual.Dockerfile b/containers/conda-manual.Dockerfile
new file mode 100644
index 00000000..6d6c9d3f
--- /dev/null
+++ b/containers/conda-manual.Dockerfile
@@ -0,0 +1,18 @@
+FROM condaforge/mambaforge:23.1.0-4
+
+ARG CONDA_OVERRIDE_CUDA=12.1
+ENV NVIDIA_VISIBLE_DEVICES=all
+ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib64
+
+WORKDIR /DNA-Diffusion
+COPY . .
+
+# NB: `mamba env update -f` expects an environment-style file, so we point it at
+# the rendered per-platform lock; the unified conda-lock.yml requires `conda-lock install` (see below).
+RUN mamba env update -n base -f environments/conda/conda-linux-64.lock.yml && \
+    pip install --no-deps -e .
+
+# RUN conda-lock install \
+#     --micromamba \
+#     --platform=linux-64 \
+#     -e workflows \
+#     -n dnadiffusion \
+#     environments/conda/conda-lock.yml
diff --git a/containers/conda.Dockerfile b/containers/conda.Dockerfile
new file mode 100644
index 00000000..fb5a8275
--- /dev/null
+++ b/containers/conda.Dockerfile
@@ -0,0 +1,48 @@
+FROM mambaorg/micromamba:1.5.3-jammy-cuda-12.3.0
+
+USER root
+
+RUN apt-get update -yq && \
+    apt-get install -yq --no-install-recommends \
+    curl \
+    git \
+    && rm -rf /var/lib/apt/lists/*
+
+USER ${MAMBA_USER}
+ENV HOME="/home/${MAMBA_USER}"
+WORKDIR ${HOME}
+
+COPY --chown=${MAMBA_USER}:${MAMBA_USER} . ${HOME}
+
+RUN micromamba install \
+    --yes \
+    --channel=conda-forge \
+    --name=base \
+    condax
+
+ARG MAMBA_DOCKERFILE_ACTIVATE=1
+ENV ENV_NAME=base
+ENV PATH="${PATH}:${HOME}/.local/bin"
+
+# /opt/conda/bin/condax
+RUN condax install \
+    --channel=conda-forge \
+    --link-conflict=overwrite \
+    conda-lock
+
+# ${HOME}/.condax/conda-lock/bin/conda-lock
+RUN conda-lock install \
+    --micromamba \
+    --name=dnadiffusion \
+    --extras=workflows \
+    environments/conda/conda-lock.yml
+
+ENV ENV_NAME=dnadiffusion
+# If the environment is not activated,
+# it is also possible to use `micromamba run`
+# RUN micromamba run -n dnadiffusion \
+#     pip install --no-deps -e .
+RUN pip install --no-deps -e .
+
+ARG tag
+ENV FLYTE_INTERNAL_IMAGE ${tag}
diff --git a/containers/gpu.Dockerfile b/containers/gpu.Dockerfile
new file mode 100644
index 00000000..b9d56cdd
--- /dev/null
+++ b/containers/gpu.Dockerfile
@@ -0,0 +1,84 @@
+# Set args for Python and CUDA versions
+ARG CUDA_VERSION=12.3.0
+# TODO: Cache can cause failure to set ENV from ARG.
+# See hardcoded ENV below.
+# ARG PYTHON_VERSION=3.10 + +# For GPU-enabled images, use nvidia/cuda as the base +FROM nvidia/cuda:${CUDA_VERSION}-runtime-ubuntu22.04 +# See Dockerfile sources at +# +# https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.3.0/ubuntu2204/base/Dockerfile +# https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.3.0/ubuntu2204/runtime/Dockerfile +# +# If GPUs are available and the image is not derived from nvidia/cuda the +# following ENVs are required: +# +# ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH} +# ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 +# ENV NVIDIA_VISIBLE_DEVICES=all +# ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility + +WORKDIR /root +ENV HOME /root +ENV LANG C.UTF-8 +ENV LC_ALL C.UTF-8 + +# Install pyenv +ENV DEBIAN_FRONTEND=noninteractive +RUN apt-get update -yq \ + && apt-get install -yq \ + build-essential \ + curl \ + git \ + tree \ + make \ + libssl-dev \ + zlib1g-dev \ + libbz2-dev \ + libreadline-dev \ + libsqlite3-dev \ + libncursesw5-dev \ + xz-utils \ + tk-dev \ + libxml2-dev \ + libxmlsec1-dev \ + libffi-dev \ + liblzma-dev \ + swig \ + && rm -rf /var/lib/apt/lists/* + +RUN curl https://pyenv.run | bash + +ENV VENV /opt/venv +ENV PYTHONPATH /root +ENV PYENV_ROOT="/root/.pyenv" +ENV PATH="${PYENV_ROOT}/shims:${PYENV_ROOT}/bin:${PATH}" +# TODO: Cache can lead to failure to set ENV from ARG +# ENV PYTHON_VERSION=${PYTHON_VERSION} +ENV PYTHON_VERSION=3.10.13 + +RUN eval "$(pyenv init -)" && \ + /root/.pyenv/bin/pyenv install --skip-existing ${PYTHON_VERSION} && \ + /root/.pyenv/bin/pyenv global ${PYTHON_VERSION} + +# Setup venv for package installation +RUN python -m venv ${VENV} +ENV PATH="${VENV}/bin:$PATH" + +COPY pyproject.toml poetry.lock requirements.txt /root/ + +# requirements.txt auto-generated by poetry export +# see `make -n export_pip_requirements` +RUN pip install --upgrade pip && \ + pip install -r requirements.txt + +COPY . /root + +# development +RUN pip install --no-deps -e . +# distribution +# RUN pip install dnadiffusion==0.1.0 + +ARG tag +ENV FLYTE_INTERNAL_IMAGE $tag diff --git a/containers/pkg.Dockerfile b/containers/pkg.Dockerfile new file mode 100644 index 00000000..a2616626 --- /dev/null +++ b/containers/pkg.Dockerfile @@ -0,0 +1,41 @@ +FROM python:3.10.13-slim + +WORKDIR /root +ENV VENV /opt/venv +ENV LANG C.UTF-8 +ENV LC_ALL C.UTF-8 +ENV PYTHONPATH /root + +ENV DEBIAN_FRONTEND=noninteractive +RUN apt-get update -q && \ + apt-get install -yq \ + build-essential \ + zlib1g-dev \ + swig \ + curl \ + git \ + make \ + tree \ + && rm -rf /var/lib/apt/lists/* + +ENV VENV /opt/venv + +RUN python3 -m venv ${VENV} +ENV PATH="${VENV}/bin:$PATH" + +COPY pyproject.toml poetry.lock requirements.txt /root/ + +# requirements.txt auto-generated by poetry export +# see `make -n export_pip_requirements` +RUN pip install --upgrade pip && \ + pip install -r requirements.txt + +COPY . /root + +# development +RUN pip install --no-deps -e . 
+
+# distribution
+# RUN pip install dnadiffusion==0.1.0
+
+ARG tag
+ENV FLYTE_INTERNAL_IMAGE $tag
diff --git a/devshell b/devshell
new file mode 100755
index 00000000..b18e2860
--- /dev/null
+++ b/devshell
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+nix shell nixpkgs#gnumake -c make czsh devshell
diff --git a/dockerfiles/Dockerfile.conda b/dockerfiles/Dockerfile.conda
deleted file mode 100644
index 4e47b990..00000000
--- a/dockerfiles/Dockerfile.conda
+++ /dev/null
@@ -1,11 +0,0 @@
-FROM condaforge/mambaforge:23.1.0-4
-
-ARG CONDA_OVERRIDE_CUDA=12.1
-ENV NVIDIA_VISIBLE_DEVICES=all
-ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib64
-
-WORKDIR /DNA-Diffusion
-COPY . .
-
-RUN mamba env update -n base -f environments/conda/environment.yml && \
-    pip install --no-deps -e .
diff --git a/docs/images/diff_static.png b/docs/images/diff_static.png
new file mode 100644
index 00000000..bfddb914
Binary files /dev/null and b/docs/images/diff_static.png differ
diff --git a/docs/specification.md b/docs/specification.md
index b0728468..2061a0b3 100644
--- a/docs/specification.md
+++ b/docs/specification.md
@@ -191,7 +191,7 @@ DNA-Diffusion
├─ .gitignore
├─ CITATION.cff
├─ CODE_OF_CONDUCT.md
-├─ dockerfiles
+├─ containers
│  └─ Dockerfile
├─ docs
│  ├─ contributors.md
diff --git a/environments/conda/conda-linux-64.lock.yml b/environments/conda/conda-linux-64.lock.yml
new file mode 100644
index 00000000..9e896036
--- /dev/null
+++ b/environments/conda/conda-linux-64.lock.yml
@@ -0,0 +1,370 @@
+# Generated by conda-lock.
+# platform: linux-64
+# input_hash: 3994540e7daf257a06a306658222c321a21711251d7e29bdde0fc13fe96f3ee5
+
+channels:
+  - pytorch
+  - nvidia
+  - conda-forge
+  - bioconda
+dependencies:
+  - _libgcc_mutex=0.1=conda_forge
+  - ca-certificates=2023.11.17=hbcca054_0
+  - cuda-cudart=12.1.105=0
+  - cuda-cupti=12.1.105=0
+  - cuda-nvrtc=12.1.105=0
+  - cuda-nvtx=12.1.105=0
+  - cuda-opencl=12.3.101=0
+  - ld_impl_linux-64=2.40=h41732ed_0
+  - libcublas=12.1.0.26=0
+  - libcufft=11.0.2.4=0
+  - libcufile=1.8.1.2=0
+  - libcurand=10.3.4.101=0
+  - libcusolver=11.4.4.55=0
+  - libcusparse=12.0.2.55=0
+  - libnpp=12.0.2.50=0
+  - libnvjitlink=12.1.105=0
+  - libnvjpeg=12.1.1.14=0
+  - libstdcxx-ng=13.2.0=h7e041cc_3
+  - python_abi=3.10=4_cp310
+  - pytorch-mutex=1.0=cuda
+  - tzdata=2023c=h71feb2d_0
+  - cuda-libraries=12.1.0=0
+  - cuda-runtime=12.1.0=0
+  - pytorch-cuda=12.1=ha16c6d3_5
+  - _openmp_mutex=4.5=2_kmp_llvm
+  - libgcc-ng=13.2.0=h807b86a_3
+  - bzip2=1.0.8=hd590300_5
+  - c-ares=1.23.0=hd590300_0
+  - gmp=6.3.0=h59595ed_0
+  - icu=73.2=h59595ed_0
+  - keyutils=1.6.1=h166bdaf_0
+  - lame=3.100=h166bdaf_1003
+  - lerc=4.0.0=h27087fc_0
+  - libbrotlicommon=1.1.0=hd590300_1
+  - libdeflate=1.18=h0b41bf4_0
+  - libev=4.33=hd590300_2
+  - libffi=3.4.2=h7f98852_5
+  - libgfortran5=13.2.0=ha4646dd_3
+  - libiconv=1.17=hd590300_1
+  - libjpeg-turbo=2.1.5.1=hd590300_1
+  - libnsl=2.0.1=hd590300_0
+  - libsodium=1.0.18=h36c2ea0_1
+  - libuuid=2.38.1=h0b41bf4_0
+  - libwebp-base=1.3.2=hd590300_0
+  - libzlib=1.2.13=hd590300_5
+  - ncurses=6.4=h59595ed_2
+  - nettle=3.6=he412f7d_0
+  - openssl=3.2.0=hd590300_1
+  - pthread-stubs=0.4=h36c2ea0_1001
+  - xorg-libxau=1.0.11=hd590300_0
+  - xorg-libxdmcp=1.1.3=h7f98852_0
+  - xz=5.2.6=h166bdaf_0
+  - yaml=0.2.5=h7f98852_2
+  - gnutls=3.6.13=h85f3911_1
+  - libbrotlidec=1.1.0=hd590300_1
+  - libbrotlienc=1.1.0=hd590300_1
+  - libedit=3.1.20191231=he28a2e2_2
+  - libgfortran-ng=13.2.0=h69a702a_3
+  - libnghttp2=1.58.0=h47da74e_1
+  - libpng=1.6.39=h753d276_0
+  - libsqlite=3.44.2=h2797004_0
+  - libssh2=1.11.0=h0841786_0
+  - 
libxcb=1.15=h0b41bf4_0 + - libxml2=2.11.6=h232c23b_0 + - llvm-openmp=15.0.7=h0cdce71_0 + - mpfr=4.2.1=h9458935_0 + - readline=8.2=h8228510_1 + - tk=8.6.13=noxft_h4845f30_101 + - zeromq=4.3.5=h59595ed_0 + - zlib=1.2.13=hd590300_5 + - zstd=1.5.5=hfc55251_0 + - brotli-bin=1.1.0=hd590300_1 + - freetype=2.12.1=h267a509_2 + - krb5=1.21.2=h659d440_0 + - libhwloc=2.9.3=default_h554bfaf_1009 + - libtiff=4.6.0=h8b53f26_0 + - mpc=1.3.1=hfe3b2da_0 + - openh264=2.1.1=h780b84a_0 + - python=3.10.13=hd12c33a_0_cpython + - attrs=23.1.0=pyh71513ae_1 + - bcrypt=4.1.1=py310hcb5633a_0 + - bitstring=3.1.9=pyhd8ed1ab_0 + - brotli=1.1.0=hd590300_1 + - brotli-python=1.1.0=py310hc6cd4ac_1 + - cached_property=1.5.2=pyha770c72_1 + - cachetools=4.2.4=pyhd8ed1ab_0 + - certifi=2023.11.17=pyhd8ed1ab_0 + - charset-normalizer=3.3.2=pyhd8ed1ab_0 + - click=8.1.7=unix_pyh707e725_0 + - cycler=0.12.1=pyhd8ed1ab_0 + - debugpy=1.8.0=py310hc6cd4ac_1 + - decorator=5.1.1=pyhd8ed1ab_0 + - defusedxml=0.7.1=pyhd8ed1ab_0 + - einops=0.7.0=pyhd8ed1ab_1 + - entrypoints=0.4=pyhd8ed1ab_0 + - exceptiongroup=1.2.0=pyhd8ed1ab_0 + - executing=2.0.1=pyhd8ed1ab_0 + - ffmpeg=4.3=hf484d3e_0 + - filelock=3.13.1=pyhd8ed1ab_0 + - fsspec=2023.9.2=pyh1a96a4e_0 + - gmpy2=2.1.2=py310h3ec546c_1 + - idna=3.6=pyhd8ed1ab_0 + - json5=0.9.14=pyhd8ed1ab_0 + - jsonpointer=2.4=py310hff52083_3 + - kiwisolver=1.4.5=py310hd41b1e2_1 + - lcms2=2.15=h7f713cb_2 + - libcurl=8.5.0=hca28451_0 + - markupsafe=2.1.3=py310h2372a71_1 + - mdurl=0.1.0=pyhd8ed1ab_0 + - mistune=3.0.2=pyhd8ed1ab_0 + - mpmath=1.3.0=pyhd8ed1ab_0 + - munkres=1.1.4=pyh9f0ad1d_0 + - nest-asyncio=1.5.8=pyhd8ed1ab_0 + - networkx=3.2.1=pyhd8ed1ab_0 + - openjpeg=2.5.0=h488ebb8_3 + - packaging=23.2=pyhd8ed1ab_0 + - pandocfilters=1.5.0=pyhd8ed1ab_0 + - parso=0.8.3=pyhd8ed1ab_0 + - pickleshare=0.7.5=py_1003 + - pkgutil-resolve-name=1.3.10=pyhd8ed1ab_1 + - platformdirs=4.1.0=pyhd8ed1ab_0 + - prometheus_client=0.19.0=pyhd8ed1ab_0 + - psutil=5.9.5=py310h2372a71_1 + - ptyprocess=0.7.0=pyhd3deb0d_0 + - pure_eval=0.2.2=pyhd8ed1ab_0 + - pycparser=2.21=pyhd8ed1ab_0 + - pygments=2.17.2=pyhd8ed1ab_0 + - pyparsing=3.1.1=pyhd8ed1ab_0 + - pyperclip=1.8.2=pyhd8ed1ab_2 + - pysocks=1.7.1=pyha2e5f31_6 + - python-dotenv=1.0.0=pyhd8ed1ab_1 + - python-fastjsonschema=2.19.0=pyhd8ed1ab_0 + - python-json-logger=2.0.7=pyhd8ed1ab_0 + - python-tzdata=2023.3=pyhd8ed1ab_0 + - pytz=2023.3.post1=pyhd8ed1ab_0 + - pywin32-on-windows=0.1.0=pyh1179c8e_3 + - pyyaml=6.0.1=py310h2372a71_1 + - pyzmq=25.1.2=py310h795f18f_0 + - rfc3986-validator=0.1.1=pyh9f0ad1d_0 + - rpds-py=0.13.2=py310hcb5633a_0 + - send2trash=1.8.2=pyh41d4057_0 + - setuptools=68.2.2=pyhd8ed1ab_0 + - six=1.16.0=pyh6c4a22f_0 + - sniffio=1.3.0=pyhd8ed1ab_0 + - soupsieve=2.5=pyhd8ed1ab_1 + - tbb=2021.11.0=h00ab1b0_0 + - threadpoolctl=3.2.0=pyha21a80b_0 + - tomli=2.0.1=pyhd8ed1ab_0 + - tornado=6.3.3=py310h2372a71_1 + - traitlets=5.14.0=pyhd8ed1ab_0 + - types-python-dateutil=2.8.19.14=pyhd8ed1ab_0 + - typing_extensions=4.9.0=pyha770c72_0 + - typing_utils=0.1.0=pyhd8ed1ab_0 + - unicodedata2=15.1.0=py310h2372a71_0 + - uri-template=1.3.0=pyhd8ed1ab_0 + - wcwidth=0.2.12=pyhd8ed1ab_0 + - webcolors=1.13=pyhd8ed1ab_0 + - webencodings=0.5.1=pyhd8ed1ab_2 + - websocket-client=1.7.0=pyhd8ed1ab_0 + - wheel=0.42.0=pyhd8ed1ab_0 + - zipp=3.17.0=pyhd8ed1ab_0 + - anyio=4.1.0=pyhd8ed1ab_0 + - asttokens=2.4.1=pyhd8ed1ab_0 + - async-lru=2.0.4=pyhd8ed1ab_0 + - babel=2.14.0=pyhd8ed1ab_0 + - beautifulsoup4=4.12.2=pyha770c72_0 + - bleach=6.1.0=pyhd8ed1ab_0 + - cached-property=1.5.2=hd8ed1ab_1 + - 
cffi=1.16.0=py310h2fee648_0 + - comm=0.1.4=pyhd8ed1ab_0 + - deprecation=2.1.0=pyh9f0ad1d_0 + - fonttools=4.46.0=py310h2372a71_0 + - importlib-metadata=7.0.0=pyha770c72_0 + - importlib_resources=6.1.1=pyhd8ed1ab_0 + - jedi=0.19.1=pyhd8ed1ab_0 + - jinja2=3.1.2=pyhd8ed1ab_1 + - joblib=1.3.2=pyhd8ed1ab_0 + - jupyter_core=5.5.0=py310hff52083_0 + - jupyterlab_pygments=0.3.0=pyhd8ed1ab_0 + - markdown-it-py=3.0.0=pyhd8ed1ab_0 + - matplotlib-inline=0.1.6=pyhd8ed1ab_0 + - mkl=2022.2.1=h84fe81f_16997 + - overrides=7.4.0=pyhd8ed1ab_0 + - pexpect=4.8.0=pyh1a96a4e_2 + - pillow=10.0.1=py310h29da1c1_1 + - pip=23.3.1=pyhd8ed1ab_0 + - prompt-toolkit=3.0.42=pyha770c72_0 + - pysam=0.22.0=py310h41dec4a_0 + - python-dateutil=2.8.2=pyhd8ed1ab_0 + - referencing=0.32.0=pyhd8ed1ab_0 + - rfc3339-validator=0.1.4=pyhd8ed1ab_0 + - screed=1.1.3=pyhd8ed1ab_0 + - sympy=1.12=pypyh9d50eac_103 + - terminado=0.18.0=pyh0d859eb_0 + - tinycss2=1.2.1=pyhd8ed1ab_0 + - typing-extensions=4.9.0=hd8ed1ab_0 + - urllib3=2.1.0=pyhd8ed1ab_0 + - argon2-cffi-bindings=21.2.0=py310h2372a71_4 + - arrow=1.3.0=pyhd8ed1ab_0 + - blas=1.0=mkl + - cryptography=41.0.7=py310hb8475ec_1 + - fqdn=1.5.1=pyhd8ed1ab_0 + - importlib_metadata=7.0.0=hd8ed1ab_0 + - jsonschema-specifications=2023.11.2=pyhd8ed1ab_0 + - jupyter_server_terminals=0.5.0=pyhd8ed1ab_0 + - libblas=3.9.0=16_linux64_mkl + - mashumaro=3.11=pyhd8ed1ab_0 + - pynacl=1.5.0=py310h2372a71_3 + - requests=2.31.0=pyhd8ed1ab_0 + - rich=13.6.0=pyhd8ed1ab_0 + - stack_data=0.6.2=pyhd8ed1ab_0 + - argon2-cffi=23.1.0=pyhd8ed1ab_0 + - ipython=8.18.1=pyh707e725_3 + - isoduration=20.11.0=pyhd8ed1ab_0 + - jsonschema=4.20.0=pyhd8ed1ab_0 + - jupyter_client=8.6.0=pyhd8ed1ab_0 + - libcblas=3.9.0=16_linux64_mkl + - liblapack=3.9.0=16_linux64_mkl + - paramiko=3.3.1=pyhd8ed1ab_0 + - pooch=1.8.0=pyhd8ed1ab_0 + - ipykernel=6.26.0=pyhf8b6a83_0 + - jsonschema-with-format-nongpl=4.20.0=pyhd8ed1ab_0 + - nbformat=5.9.2=pyhd8ed1ab_0 + - numpy=1.26.2=py310hb13e2d6_0 + - plumbum=1.8.2=pyhd8ed1ab_0 + - contourpy=1.2.0=py310hd41b1e2_0 + - jupyter_events=0.9.0=pyhd8ed1ab_0 + - nbclient=0.8.0=pyhd8ed1ab_0 + - pandas=2.1.3=py310hcc13569_0 + - scipy=1.10.1=py310ha4c1d20_3 + - matplotlib-base=3.8.1=py310h62c0568_0 + - nbconvert-core=7.12.0=pyhd8ed1ab_0 + - scikit-learn=1.3.2=py310h1fdf081_2 + - jupyter_server=2.12.1=pyhd8ed1ab_0 + - seaborn-base=0.13.0=pyhd8ed1ab_0 + - sourmash-minimal=4.8.4=py310hcb5633a_1 + - jupyter-lsp=2.2.1=pyhd8ed1ab_0 + - jupyterlab_server=2.25.2=pyhd8ed1ab_0 + - notebook-shim=0.2.3=pyhd8ed1ab_0 + - jupyterlab=4.0.9=pyhd8ed1ab_0 + - accelerate=0.24.1=pyhd8ed1ab_0 + - pytorch=2.1.0=py3.10_cuda12.1_cudnn8.9.2_0 + - torchtriton=2.1.0=py310 + - torchvision=0.16.0=py310_cu121 + - pip: + - aioitertools === 0.11.0 --hash=sha256:04b95e3dab25b449def24d7df809411c10e62aab0cbe31a50ca4e68748c43394 + - antlr4-python3-runtime === 4.9.3 --hash=sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b + - appdirs === 1.4.4 --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 + - async-timeout === 4.0.3 --hash=sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028 + - cachetools === 5.3.2 --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 + - chardet === 5.2.0 --hash=sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970 + - cloudpickle === 3.0.0 --hash=sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7 + - colorama === 0.4.6 --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + 
- configparser === 6.0.0 --hash=sha256:900ea2bb01b2540b1a644ad3d5351e9b961a4a012d4732f619375fb8f641ee19 + - diskcache === 5.6.3 --hash=sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19 + - docstring-parser === 0.15 --hash=sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9 + - frozenlist === 1.4.0 --hash=sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300 + - google-crc32c === 1.5.0 --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 + - grpcio === 1.59.3 --hash=sha256:60cddafb70f9a2c81ba251b53b4007e07cca7389e704f86266e22c4bffd8bf1d + - iteround === 1.0.4 --hash=sha256:17947dd5479177e6fb186b0a3d5d594b55eedea14dc722c6da7e84bbed45f5b2 + - jeepney === 0.8.0 --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + - jmespath === 1.0.1 --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 + - jsonpickle === 3.0.2 --hash=sha256:4a8442d97ca3f77978afa58068768dba7bff2dbabe79a9647bc3cdafd4ef019f + - llvmlite === 0.41.1 --hash=sha256:0dd0338da625346538f1173a17cabf21d1e315cf387ca21b294ff209d176e244 + - loguru === 0.7.2 --hash=sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb + - more-itertools === 10.1.0 --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 + - multidict === 6.0.4 --hash=sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93 + - mypy-extensions === 1.0.0 --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d + - nose === 1.3.7 --hash=sha256:9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac + - oauthlib === 3.2.2 --hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca + - palettable === 3.3.3 --hash=sha256:74e9e7d7fe5a9be065e02397558ed1777b2df0b793a6f4ce1a5ee74f74fb0caa + - portalocker === 2.8.2 --hash=sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e + - protobuf === 4.24.4 --hash=sha256:b493cb590960ff863743b9ff1452c413c2ee12b782f48beca77c8da3e2ffe9d9 + - pyasn1 === 0.5.1 --hash=sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58 + - pybigwig === 0.3.22 --hash=sha256:55031f67de6b117d49ba191738ea9707239bdacbd623a046e03917913257ac29 + - pytimeparse === 1.1.8 --hash=sha256:04b7be6cc8bd9f5647a6325444926c3ac34ee6bc7e69da4367ba282f076036bd + - setproctitle === 1.3.3 --hash=sha256:fc74e84fdfa96821580fb5e9c0b0777c1c4779434ce16d3d62a9c4d8c710df39 + - smmap === 5.0.1 --hash=sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da + - statsd === 3.3.0 --hash=sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa + - text-unidecode === 1.3 --hash=sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8 + - tqdm === 4.66.1 --hash=sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386 + - urllib3 === 1.26.18 --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 + - wrapt === 1.16.0 --hash=sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf + - xdg === 6.0.0 --hash=sha256:df3510755b4395157fc04fc3b02467c777f3b3ca383257397f09ab0d4c16f936 + - xxhash === 3.4.1 --hash=sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b + - aiosignal === 1.3.1 --hash=sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17 + - azure-core === 1.29.5 --hash=sha256:0fa04b7b1f7d44a4fb8468c4093deb2ea01fdf4faddbf802ed9205615f99d68c + - binaryornot === 0.4.4 
--hash=sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4 + - biopython === 1.81 --hash=sha256:6ebfbce0d91796c7aef422ee9dffe8827e07e5abaa94545e006f1f20e965c80b + - biothings-client === 0.3.1 --hash=sha256:c08437f652d9282da785e098288ef7cf3aa2a79f5d90c480eadfce96b846013e + - botocore === 1.31.17 --hash=sha256:6ac34a1d34aa3750e78b77b8596617e2bab938964694d651939dba2cbde2c12b + - croniter === 2.0.1 --hash=sha256:4cb064ce2d8f695b3b078be36ff50115cf8ac306c10a7e8653ee2a5b534673d7 + - docker === 6.1.3 --hash=sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9 + - docker-pycreds === 0.4.0 --hash=sha256:7266112468627868005106ec19cd0d722702d2b7d5912a28e19b826c3d37af49 + - gitdb === 4.0.11 --hash=sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4 + - google-resumable-media === 2.6.0 --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b + - googleapis-common-protos === 1.61.0 --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 + - htseq === 2.0.5 --hash=sha256:a0441e9f5cc89828c71d9ecb2c3e8e653b0e9ec967c43958103891dafc4d2df0 + - isodate === 0.6.1 --hash=sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 + - jaraco.classes === 3.3.0 --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb + - marshmallow === 3.20.1 --hash=sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c + - memory-efficient-attention-pytorch === 0.1.6 --hash=sha256:efbb2676f8695b21a29d96d83f84818be257a35ac4c89f94d7d93f59819d38ed + - mysql-connector-python === 8.0.23 --hash=sha256:c783e1dc8b78a1b1a9ebbf3ccb12d17e4513d91fafeb5b6c06a29f2d5619e285 + - norns === 0.1.6 --hash=sha256:1f3c6ccbe79b2cb3076f66a352cd76462593adbabe9ebb262f879a9d0a6634e4 + - numba === 0.58.1 --hash=sha256:4e79b6cc0d2bf064a955934a2e02bf676bc7995ab2db929dbbc62e4c16551be6 + - omegaconf === 2.3.0 --hash=sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b + - pandas === 1.5.3 --hash=sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23 + - patsy === 0.5.4 --hash=sha256:0486413077a527db51ddea8fa94a5234d0feb17a4f4dc01b59b6086c58a70f80 + - protoc-gen-swagger === 0.1.0 --hash=sha256:cdc043da538865f055a7f22b304a35085cef269dc33e2f3408b12d397e8d8b4b + - pyarrow === 14.0.1 --hash=sha256:06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93 + - pyasn1-modules === 0.3.0 --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d + - pybedtools @ git+https://github.com/cameronraysmith/pybedtools@9876fa25e80c7547101e662ebe1c6388579405d5 + - pyfaidx === 0.7.2.2 --hash=sha256:4e689bc09f3c5de1d2a1099d059b3b9914629c1c5c3ad08b49ff05af33392e0e + - pyjwt === 2.8.0 --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 + - python-slugify === 8.0.1 --hash=sha256:70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395 + - requests-oauthlib === 1.3.1 --hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 + - rich-click === 1.7.2 --hash=sha256:a42bcdcb8696c4ca7a3b1a39e1aba3d2cb64ad00690b4c022fdcb2cbccebc3fc + - rsa === 4.9 --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 + - secretstorage === 3.3.3 --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + - sentry-sdk === 1.38.0 --hash=sha256:0017fa73b8ae2d4e57fd2522ee3df30453715b29d2692142793ec5d5f90b94a6 + - typing-inspect === 0.9.0 
--hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f + - yarl === 1.9.3 --hash=sha256:c5f3faeb8100a43adf3e7925d556801d14b5816a0ac9e75e22948e787feec642 + - aiohttp === 3.9.1 --hash=sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f + - azure-storage-blob === 12.19.0 --hash=sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b + - biofluff === 3.0.4 --hash=sha256:ef7b0a54103a830f197f21aa3d1ade8bdcddf613b437ea38c95260bb45324d6b + - cookiecutter === 2.5.0 --hash=sha256:8aa2f12ed11bc05628651e9dc4353a10571dd9908aaaaeec959a2b9ea465a5d2 + - feather-format === 0.4.1 --hash=sha256:45f67e3745d394d4f160ca6d636bbfd4f8b68d01199dc1649b6e487d3e878903 + - flyteidl === 1.10.0 --hash=sha256:21d9fcc21217e95fd940964e704cb11e74ce707d625dd954a54e5e7c39143db1 + - gitpython === 3.1.40 --hash=sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a + - google-auth === 2.24.0 --hash=sha256:9b82d5c8d3479a5391ea0a46d81cca698d328459da31d4a459d4e901a5d927e0 + - grpcio-status === 1.59.3 --hash=sha256:2fd2eb39ca4e9afb3c874c0878ff75b258db0b7dcc25570fc521f16ae0ab942a + - gtfparse === 1.3.0 --hash=sha256:d957f18e5f70413f89a28ef83068c461b6407eb38fd30e99b8da3d69143527b1 + - hydra-core === 1.3.2 --hash=sha256:fa0238a9e31df3373b35b0bfb672c34cc92718d21f81311d8996a16de1141d8b + - keyring === 24.3.0 --hash=sha256:4446d35d636e6a10b8bce7caa66913dd9eca5fd222ca03a3d42c38608ac30836 + - logomaker === 0.8 --hash=sha256:6766a0d83de4990ea859366a661ba72c580a7b73ac3c8b526204a0be7d65a50d + - marshmallow-enum === 1.5.1 --hash=sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072 + - marshmallow-jsonschema === 0.13.0 --hash=sha256:2814f2afb94a6e01b3c0a5795b3dfb142b628763655f20378400af5c0a2307fb + - msal === 1.25.0 --hash=sha256:386df621becb506bc315a713ec3d4d5b5d6163116955c7dde23622f156b81af6 + - mygene === 3.2.2 --hash=sha256:18d85d1b28ecee2be31d844607fb0c5f7d7c58573278432df819ee2a5e88fe46 + - qnorm === 0.8.1 --hash=sha256:9d6ce4e82444155922baf06aa89f9f939b54f53844e340bf2c6d9e7ff8821c41 + - statsmodels === 0.14.0 --hash=sha256:9c64ebe9cf376cba0c31aed138e15ed179a1d128612dd241cdf299d159e5e882 + - aiobotocore === 2.5.4 --hash=sha256:4b32218728ca3d0be83835b604603a0cd6c329066e884bb78149334267f92440 + - azure-datalake-store === 0.0.53 --hash=sha256:a30c902a6e360aa47d7f69f086b426729784e71c536f330b691647a51dc42b2b + - dataclasses-json === 0.5.9 --hash=sha256:1280542631df1c375b7bc92e5b86d39e06c44760d7e3571a537b3b8acabf2f0c + - genomepy === 0.16.1 --hash=sha256:820d46bce1503f66aa82e795a9a33e53a89e4d4f3f79b5c105ae452164f47635 + - google-api-core === 2.14.0 --hash=sha256:de2fb50ed34d47ddbb2bd2dcf680ee8fead46279f4ed6b16de362aca23a18952 + - google-auth-oauthlib === 1.1.0 --hash=sha256:089c6e587d36f4803ac7e0720c045c6a8b1fd1790088b8424975b90d0ee61c12 + - hydra-joblib-launcher === 1.2.0 --hash=sha256:57bfd042b015056157297de93e8ec1c6bc75fd39bd3b300e1599db0c5d992eee + - hydra-zen === 0.11.0 --hash=sha256:a99a87ec5ae758f57d43c35ae20e172d849e67c0b916c2f91daa4f90b52dc6e5 + - kubernetes === 28.1.0 --hash=sha256:10f56f8160dcb73647f15fafda268e7f60cf7dbc9f8e46d52fcd46d3beb0c18d + - msal-extensions === 1.0.0 --hash=sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee + - wandb === 0.16.0 --hash=sha256:e103142a5ecdb158d29441c2bf9f935ae149ed562377f7cebffd2a6f7c9de949 + - azure-identity === 1.15.0 --hash=sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912 + - gimmemotifs @ 
git+https://github.com/cameronraysmith/gimmemotifs@74512747f54102500a248d56db5b469fc75961bc + - google-cloud-core === 2.3.3 --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 + - s3fs === 2023.9.2 --hash=sha256:d0e0ad0267820f4e9ff16556e004e6759010e92378aebe2ac5d71419a6ff5387 + - adlfs === 2023.10.0 --hash=sha256:dfdc8cc782bd78262435fb1bc2a8cfdbdd80342bb1b1ae9dfff968de912b0b09 + - google-cloud-storage === 2.13.0 --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d + - gcsfs === 2023.9.2 --hash=sha256:b3e61d07b0ecf3e04627b0cc0df30ee728bc49e31d42de180815601041e62c1b + - flytekit @ git+https://github.com/cameronraysmith/flytekit@9c8481ec30ad13707c1ebe8e13c9cb6962e75276 diff --git a/environments/conda/conda-lock.yml b/environments/conda/conda-lock.yml new file mode 100644 index 00000000..245a3465 --- /dev/null +++ b/environments/conda/conda-lock.yml @@ -0,0 +1,4858 @@ +# This lock file was generated by conda-lock (https://github.com/conda/conda-lock). DO NOT EDIT! +# +# A "lock file" contains a concrete list of package versions (with checksums) to be installed. Unlike +# e.g. `conda env create`, the resulting environment will not change as new package versions become +# available, unless you explicitly update the lock file. +# +# Install this environment as "YOURENV" with: +# conda-lock install -n YOURENV --file conda-lock.yml +# This lock contains optional dependency categories bioinformatics, workflows. Include them in the installed environment with: +# conda-lock install -e bioinformatics -e workflows -n YOURENV --file conda-lock.yml +# To update a single package to the latest version compatible with the version constraints in the source: +# conda-lock lock --lockfile conda-lock.yml --update PACKAGE +# To re-solve the entire environment, e.g. 
after changing a version constraint in the source file: +# conda-lock -f ../../pyproject.toml --lockfile conda-lock.yml +version: 1 +metadata: + content_hash: + linux-64: 3994540e7daf257a06a306658222c321a21711251d7e29bdde0fc13fe96f3ee5 + channels: + - url: pytorch + used_env_vars: [] + - url: nvidia + used_env_vars: [] + - url: conda-forge + used_env_vars: [] + - url: bioconda + used_env_vars: [] + platforms: + - linux-64 + sources: + - ../../pyproject.toml +package: +- name: _libgcc_mutex + version: '0.1' + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 + hash: + md5: d7c89558ba9fa0495403155b64376d81 + sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726 + category: main + optional: false +- name: _openmp_mutex + version: '4.5' + manager: conda + platform: linux-64 + dependencies: + _libgcc_mutex: '0.1' + llvm-openmp: '>=9.0.1' + url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_kmp_llvm.tar.bz2 + hash: + md5: 562b26ba2e19059551a811e72ab7f793 + sha256: 84a66275da3a66e3f3e70e9d8f10496d807d01a9e4ec16cd2274cc5e28c478fc + category: main + optional: false +- name: accelerate + version: 0.24.1 + manager: conda + platform: linux-64 + dependencies: + numpy: '>=1.17' + packaging: '>=20.0' + psutil: '' + python: '>=3.8.0' + pytorch: '>=1.10.0' + pyyaml: '' + url: https://conda.anaconda.org/conda-forge/noarch/accelerate-0.24.1-pyhd8ed1ab_0.conda + hash: + md5: f145fda3d83dd29c8cdb5966bea45c56 + sha256: a897315648a9eeafaff6ca43d928c93801abbee2c3542ec17934a21ff055c1a4 + category: main + optional: false +- name: anyio + version: 4.1.0 + manager: conda + platform: linux-64 + dependencies: + exceptiongroup: '>=1.0.2' + idna: '>=2.8' + python: '>=3.8' + sniffio: '>=1.1' + url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.1.0-pyhd8ed1ab_0.conda + hash: + md5: 76a3b574717769c4c937c2afa2f1069f + sha256: d9d64b29d8a4f58a8d5f9cb0af80b70fd4e038a6e328b039899f7cd93863a82e + category: main + optional: false +- name: argon2-cffi + version: 23.1.0 + manager: conda + platform: linux-64 + dependencies: + argon2-cffi-bindings: '' + python: '>=3.7' + typing-extensions: '' + url: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda + hash: + md5: 3afef1f55a1366b4d3b6a0d92e2235e4 + sha256: 130766446f5507bd44df957b6b5c898a8bd98f024bb426ed6cb9ff1ad67fc677 + category: main + optional: false +- name: argon2-cffi-bindings + version: 21.2.0 + manager: conda + platform: linux-64 + dependencies: + cffi: '>=1.0.1' + libgcc-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py310h2372a71_4.conda + hash: + md5: 68ee85860502d53c8cbfa0e4cef0f6cb + sha256: af94cc9b4dcaa164e1cc7e7fa0b9eb56b87ea3dc6e093c8ef6c31cfa02d9ffdf + category: main + optional: false +- name: arrow + version: 1.3.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + python-dateutil: '>=2.7.0' + types-python-dateutil: '>=2.8.10' + url: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda + hash: + md5: b77d8c2313158e6e461ca0efb1c2c508 + sha256: ff49825c7f9e29e09afa6284300810e7a8640d621740efb47c4541f4dc4969db + category: main + optional: false +- name: asttokens + version: 2.4.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.5' + six: '>=1.12.0' + url: 
https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda + hash: + md5: 5f25798dcefd8252ce5f9dc494d5f571 + sha256: 708168f026df19a0344983754d27d1f7b28bb21afc7b97a82f02c4798a3d2111 + category: main + optional: false +- name: async-lru + version: 2.0.4 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + typing_extensions: '>=4.0.0' + url: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda + hash: + md5: 3d081de3a6ea9f894bbb585e8e3a4dcb + sha256: 7ed83731979fe5b046c157730e50af0e24454468bbba1ed8fc1a3107db5d7518 + category: main + optional: false +- name: attrs + version: 23.1.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/attrs-23.1.0-pyh71513ae_1.conda + hash: + md5: 3edfead7cedd1ab4400a6c588f3e75f8 + sha256: 063639cd568f5c7a557b0fb1cc27f098598c0d8ff869088bfeb82934674f8821 + category: main + optional: false +- name: babel + version: 2.14.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + pytz: '' + setuptools: '' + url: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda + hash: + md5: 9669586875baeced8fc30c0826c3270e + sha256: 8584e3da58e92b72641c89ff9b98c51f0d5dbe76e527867804cbdf03ac91d8e6 + category: main + optional: false +- name: bcrypt + version: 4.1.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/bcrypt-4.1.1-py310hcb5633a_0.conda + hash: + md5: e6792675103087f822f9705a22e4845d + sha256: 070fdd2e887a0b898764bfd6aa603365cf79dda30b1161ae06c607d510920f7d + category: workflows + optional: true +- name: beautifulsoup4 + version: 4.12.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + soupsieve: '>=1.2' + url: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda + hash: + md5: a362ff7d976217f8fa78c0f1c4f59717 + sha256: 52d3e6bcd442537e22699cd227d8fdcfd54b708eeb8ee5b4c671a6a9b9cd74da + category: main + optional: false +- name: bitstring + version: 3.1.9 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.2' + url: https://conda.anaconda.org/conda-forge/noarch/bitstring-3.1.9-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 94a9bc42263c78cd2ee8868d0b5986a0 + sha256: 14b3b51d20047c5d264da3280f1e686164f3af646661c8db0a4f6decb6709de7 + category: bioinformatics + optional: true +- name: blas + version: '1.0' + manager: conda + platform: linux-64 + dependencies: + mkl: '' + url: https://conda.anaconda.org/conda-forge/linux-64/blas-1.0-mkl.tar.bz2 + hash: + md5: 349aef876b1d8c9dccae01de20d5b385 + sha256: a9a9125029a66905fc9e932dfd4f595be3a59a30db37fd7bf4a675a5c6151d62 + category: main + optional: false +- name: bleach + version: 6.1.0 + manager: conda + platform: linux-64 + dependencies: + packaging: '' + python: '>=3.6' + setuptools: '' + six: '>=1.9.0' + webencodings: '' + url: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda + hash: + md5: 0ed9d7c0e9afa7c025807a9a8136ea3e + sha256: 845e77ef495376c5c3c328ccfd746ca0ef1978150cae8eae61a300fe7755fb08 + category: main + optional: false +- name: brotli + version: 1.1.0 + manager: conda + platform: linux-64 + dependencies: + brotli-bin: 1.1.0 + libbrotlidec: 1.1.0 + libbrotlienc: 1.1.0 + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda + hash: + md5: 
f27a24d46e3ea7b70a1f98e50c62508f + sha256: f2d918d351edd06c55a6c2d84b488fe392f85ea018ff227daac07db22b408f6b + category: main + optional: false +- name: brotli-bin + version: 1.1.0 + manager: conda + platform: linux-64 + dependencies: + libbrotlidec: 1.1.0 + libbrotlienc: 1.1.0 + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda + hash: + md5: 39f910d205726805a958da408ca194ba + sha256: a641abfbaec54f454c8434061fffa7fdaa9c695e8a5a400ed96b4f07c0c00677 + category: main + optional: false +- name: brotli-python + version: 1.1.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hc6cd4ac_1.conda + hash: + md5: 1f95722c94f00b69af69a066c7433714 + sha256: e22268d81905338570786921b3def88e55f9ed6d0ccdd17d9fbae31a02fbef69 + category: main + optional: false +- name: bzip2 + version: 1.0.8 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda + hash: + md5: 69b8b6202a07720f448be700e300ccf4 + sha256: 242c0c324507ee172c0e0dd2045814e746bb303d1eb78870d182ceb0abc726a8 + category: main + optional: false +- name: c-ares + version: 1.23.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.23.0-hd590300_0.conda + hash: + md5: d459949bc10f64dee1595c176c2e6291 + sha256: 6b0eee827bade11c2964a05867499a50ad2a9d1b14dfe18fb867a3bc9357f56f + category: bioinformatics + optional: true +- name: ca-certificates + version: 2023.11.17 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda + hash: + md5: 01ffc8d36f9eba0ce0b3c1955fa780ee + sha256: fb4b9f4b7d885002db0b93e22f44b5b03791ef3d4efdc9d0662185a0faafd6b6 + category: main + optional: false +- name: cached-property + version: 1.5.2 + manager: conda + platform: linux-64 + dependencies: + cached_property: '>=1.5.2,<1.5.3.0a0' + url: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 + hash: + md5: 9b347a7ec10940d3f7941ff6c460b551 + sha256: 561e6660f26c35d137ee150187d89767c988413c978e1b712d53f27ddf70ea17 + category: main + optional: false +- name: cached_property + version: 1.5.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 + hash: + md5: 576d629e47797577ab0f1b351297ef4a + sha256: 6dbf7a5070cc43d90a1e4c2ec0c541c69d8e30a0e25f50ce9f6e4a432e42c5d7 + category: main + optional: false +- name: cachetools + version: 4.2.4 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.5' + url: https://conda.anaconda.org/conda-forge/noarch/cachetools-4.2.4-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 059a8732c1d81ffc93997cb8a236331f + sha256: c73709b168d12c802532706961dc8e54418b61cf99341fd6738bae060460c477 + category: bioinformatics + optional: true +- name: certifi + version: 2023.11.17 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/certifi-2023.11.17-pyhd8ed1ab_0.conda + hash: + md5: 2011bcf45376341dd1d690263fdbc789 + sha256: afa22b77128a812cb57bc707c297d926561bd225a3d9dd74205d87a3b2d14a96 + category: main + optional: false +- name: cffi 
+ version: 1.16.0 + manager: conda + platform: linux-64 + dependencies: + libffi: '>=3.4,<4.0a0' + libgcc-ng: '>=12' + pycparser: '' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py310h2fee648_0.conda + hash: + md5: 45846a970e71ac98fd327da5d40a0a2c + sha256: 007e7f69ab45553b7bf11f2c1b8d3f3a13fd42997266a0d57795f41c7d38df36 + category: main + optional: false +- name: charset-normalizer + version: 3.3.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda + hash: + md5: 7f4a9e3fcff3f6356ae99244a014da6a + sha256: 20cae47d31fdd58d99c4d2e65fbdcefa0b0de0c84e455ba9d6356a4bdbc4b5b9 + category: main + optional: false +- name: click + version: 8.1.7 + manager: conda + platform: linux-64 + dependencies: + __unix: '' + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda + hash: + md5: f3ad426304898027fc619827ff428eca + sha256: f0016cbab6ac4138a429e28dbcb904a90305b34b3fe41a9b89d697c90401caec + category: main + optional: false +- name: comm + version: 0.1.4 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + traitlets: '>=5.3' + url: https://conda.anaconda.org/conda-forge/noarch/comm-0.1.4-pyhd8ed1ab_0.conda + hash: + md5: c8eaca39e2b6abae1fc96acc929ae939 + sha256: 11057745946a95ee7cc4c98900a60c7362266a4cb28bc97d96cd88e3056eb701 + category: main + optional: false +- name: contourpy + version: 1.2.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + numpy: '>=1.20,<2' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py310hd41b1e2_0.conda + hash: + md5: 85d2aaa7af046528d339da1e813c3a9f + sha256: 73dd7868bfd98fa9e4d2cc524687b5c5c8f9d427d4e521875aacfe152eae4715 + category: main + optional: false +- name: cryptography + version: 41.0.7 + manager: conda + platform: linux-64 + dependencies: + cffi: '>=1.12' + libgcc-ng: '>=12' + openssl: '>=3.1.4,<4.0a0' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/cryptography-41.0.7-py310hb8475ec_1.conda + hash: + md5: 8a84d96d106767c08d6154ed5c8aae2c + sha256: 493feafc2492e841d361affb0bba2e29ab41d73b8db2d58c5abdfd4ccf1d29ad + category: workflows + optional: true +- name: cuda-cudart + version: 12.1.105 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/cuda-cudart-12.1.105-0.tar.bz2 + hash: + md5: 001823a01c0d49300fd9622c4578eb40 + sha256: 8ce27449a0a1d98630b8b13669590a34b04bccf11bf9a98151a40541f7edf7ba + category: main + optional: false +- name: cuda-cupti + version: 12.1.105 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/cuda-cupti-12.1.105-0.tar.bz2 + hash: + md5: c0bfa9a714fd8099e085c70ab2ebe6ec + sha256: b7d23bb1cf1f8e47c097ecbdb7fae8edc845a6b0d26a222ca1559bdae41ce8ad + category: main + optional: false +- name: cuda-libraries + version: 12.1.0 + manager: conda + platform: linux-64 + dependencies: + cuda-cudart: '>=12.1.55' + cuda-nvrtc: '>=12.1.55' + cuda-opencl: '>=12.1.56' + libcublas: '>=12.1.0.26' + libcufft: '>=11.0.2.4' + libcufile: '>=1.6.0.25' + libcurand: '>=10.3.2.56' + libcusolver: '>=11.4.4.55' + libcusparse: '>=12.0.2.55' + libnpp: '>=12.0.2.50' + libnvjitlink: '>=12.1.55' + libnvjpeg: '>=12.1.0.39' + url: 
https://conda.anaconda.org/nvidia/linux-64/cuda-libraries-12.1.0-0.tar.bz2 + hash: + md5: 8c08238819848e471a6213db526dbf15 + sha256: f282fdaf5ce1bbcb998126bcf4e551468cb2dfa8ed31a959aa6d6861e3c044ee + category: main + optional: false +- name: cuda-nvrtc + version: 12.1.105 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/cuda-nvrtc-12.1.105-0.tar.bz2 + hash: + md5: 12c15423c4eeb41fee6df3a2e5ab53ab + sha256: 33866252ee8515a30211ac8792fec628de1e9ead7f8e54deac5fe330b2e9a44c + category: main + optional: false +- name: cuda-nvtx + version: 12.1.105 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/cuda-nvtx-12.1.105-0.tar.bz2 + hash: + md5: 813ed3c0b687b8bb5daebf43889d8317 + sha256: ea28adfbbaaeb4e35a8af9f312ff37d7da6480b35134843f8c69704905ec4c81 + category: main + optional: false +- name: cuda-opencl + version: 12.3.101 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/cuda-opencl-12.3.101-0.tar.bz2 + hash: + md5: 591651406c8164f57ec2a4ed8dfe8ccf + sha256: 2ab503941e36d540bc955bd764e0554f9a472fd0f057ee652f7d90fe866238d2 + category: main + optional: false +- name: cuda-runtime + version: 12.1.0 + manager: conda + platform: linux-64 + dependencies: + cuda-libraries: '>=12.1.0' + url: https://conda.anaconda.org/nvidia/linux-64/cuda-runtime-12.1.0-0.tar.bz2 + hash: + md5: 95e8c2f09ec28cce7cdecd6200b5d26e + sha256: ddcef7597007188a71bddba2eeacb603ff89328bdcae311210abd083ade57a30 + category: main + optional: false +- name: cycler + version: 0.12.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda + hash: + md5: 5cd86562580f274031ede6aa6aa24441 + sha256: f221233f21b1d06971792d491445fd548224641af9443739b4b7b6d5d72954a8 + category: main + optional: false +- name: debugpy + version: 1.8.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.0-py310hc6cd4ac_1.conda + hash: + md5: 01388b4ec9eed3b26fa732aa39745475 + sha256: 77593f7b60d8f3b4d27a97a1b9e6c07c3f2490cfab77039d5e403166448b5de2 + category: main + optional: false +- name: decorator + version: 5.1.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.5' + url: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 43afe5ab04e35e17ba28649471dd7364 + sha256: 328a6a379f9bdfd0230e51de291ce858e6479411ea4b0545fb377c71662ef3e2 + category: main + optional: false +- name: defusedxml + version: 0.7.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 961b3a227b437d82ad7054484cfa71b2 + sha256: 9717a059677553562a8f38ff07f3b9f61727bd614f505658b0a5ecbcf8df89be + category: main + optional: false +- name: deprecation + version: 2.1.0 + manager: conda + platform: linux-64 + dependencies: + packaging: '' + python: '' + url: https://conda.anaconda.org/conda-forge/noarch/deprecation-2.1.0-pyh9f0ad1d_0.tar.bz2 + hash: + md5: 7b6747d7cc2076341029cff659669e8b + sha256: 2695a60ff355b114d0c459458461d941d2209ec9aff152853b6a3ca8700c94ec + category: bioinformatics + optional: true +- name: einops + version: 0.7.0 + manager: conda + platform: linux-64 + 
dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/einops-0.7.0-pyhd8ed1ab_1.conda + hash: + md5: 1641890c9375ddb22381f3eb9ac157df + sha256: cc08bb969a4458b7afd48e7ba8151c95b48f1c315d3567644ed4a97ee2987247 + category: main + optional: false +- name: entrypoints + version: '0.4' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 3cf04868fee0a029769bd41f4b2fbf2d + sha256: 2ec4a0900a4a9f42615fc04d0fb3286b796abe56590e8e042f6ec25e102dd5af + category: main + optional: false +- name: exceptiongroup + version: 1.2.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda + hash: + md5: f6c211fee3c98229652b60a9a42ef363 + sha256: cf83dcaf9006015c8ccab3fc6770f478464a66a8769e1763ca5d7dff09d11d08 + category: main + optional: false +- name: executing + version: 2.0.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=2.7' + url: https://conda.anaconda.org/conda-forge/noarch/executing-2.0.1-pyhd8ed1ab_0.conda + hash: + md5: e16be50e378d8a4533b989035b196ab8 + sha256: c738804ab1e6376f8ea63372229a04c8d658dc90fd5a218c6273a2eaf02f4057 + category: main + optional: false +- name: ffmpeg + version: '4.3' + manager: conda + platform: linux-64 + dependencies: + bzip2: '>=1.0.8,<2.0a0' + freetype: '>=2.10.2,<3.0a0' + gmp: '>=6.1.2' + gnutls: '>=3.6.5,<3.7.0a0' + lame: '>=3.100,<3.101.0a0' + libgcc-ng: '>=7.3.0' + libiconv: '' + libstdcxx-ng: '>=7.3.0' + openh264: '>=2.1.0,<2.2.0a0' + zlib: '>=1.2.11,<1.3.0a0' + url: https://conda.anaconda.org/pytorch/linux-64/ffmpeg-4.3-hf484d3e_0.tar.bz2 + hash: + md5: 0b0bf7c3d7e146ef91de5310bbf7a230 + sha256: 60b3e36cb36b706f5850f155bd9d3f33194a522b5ef20be46cb37dbc987a6741 + category: main + optional: false +- name: filelock + version: 3.13.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda + hash: + md5: 0c1729b74a8152fde6a38ba0a2ab9f45 + sha256: 4d742d91412d1f163e5399d2b50c5d479694ebcd309127abb549ca3977f89d2b + category: bioinformatics + optional: true +- name: fonttools + version: 4.46.0 + manager: conda + platform: linux-64 + dependencies: + brotli: '' + libgcc-ng: '>=12' + munkres: '' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + unicodedata2: '>=14.0.0' + url: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.46.0-py310h2372a71_0.conda + hash: + md5: 3c0109417cbcdabfed289360886b036d + sha256: 48682fe7bc12bae520dd67b3d6a84acdac52488d75278d91be62b266c8bb3c91 + category: main + optional: false +- name: fqdn + version: 1.5.1 + manager: conda + platform: linux-64 + dependencies: + cached-property: '>=1.3.0' + python: '>=2.7,<4' + url: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 642d35437078749ef23a5dca2c9bb1f3 + sha256: 6cfd1f9bcd2358a69fb571f4b3af049b630d52647d906822dbedac03e84e4f63 + category: main + optional: false +- name: freetype + version: 2.12.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libpng: '>=1.6.39,<1.7.0a0' + libzlib: '>=1.2.13,<1.3.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda + hash: + md5: 9ae35c3d96db2c94ce0cef86efdfa2cb + sha256: b2e3c449ec9d907dd4656cb0dc93e140f447175b125a3824b31368b06c666bb6 + category: main + optional: 
false +- name: fsspec + version: 2023.9.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.2-pyh1a96a4e_0.conda + hash: + md5: 9d15cd3a0e944594ab528da37dc72ecc + sha256: d95d11d1f501cb69528bb2b620b728f12caf872cb23837bc9bdd6ef405b4ecfb + category: main + optional: false +- name: gmp + version: 6.3.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/gmp-6.3.0-h59595ed_0.conda + hash: + md5: 0e33ef437202db431aa5a928248cf2e8 + sha256: 2a50495b6bbbacb03107ea0b752d8358d4a40b572d124a8cade068c147f344f5 + category: main + optional: false +- name: gmpy2 + version: 2.1.2 + manager: conda + platform: linux-64 + dependencies: + gmp: '>=6.2.1,<7.0a0' + libgcc-ng: '>=12' + mpc: '>=1.2.1,<2.0a0' + mpfr: '>=4.1.0,<5.0a0' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/gmpy2-2.1.2-py310h3ec546c_1.tar.bz2 + hash: + md5: 73f6fa50c32ddd985cf5fba7b890a75c + sha256: aeb52e14f33c17e11248bfe58383b935e101d86e89b5246373c590c7b127ab47 + category: main + optional: false +- name: gnutls + version: 3.6.13 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=7.5.0' + libstdcxx-ng: '>=7.5.0' + nettle: '>=3.6,<3.7.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/gnutls-3.6.13-h85f3911_1.tar.bz2 + hash: + md5: 7d1b6fff16c1431d96cb4934938799fd + sha256: 6c9307f0fedce2c4d060bba9ac888b300bc0912effab423d67b8e1b661a93305 + category: main + optional: false +- name: icu + version: '73.2' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda + hash: + md5: cc47e1facc155f91abd89b11e48e72ff + sha256: e12fd90ef6601da2875ebc432452590bc82a893041473bc1c13ef29001a73ea8 + category: main + optional: false +- name: idna + version: '3.6' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda + hash: + md5: 1a76f09108576397c41c0b0c5bd84134 + sha256: 6ee4c986d69ce61e60a20b2459b6f2027baeba153f0a64995fd3cb47c2cc7e07 + category: main + optional: false +- name: importlib-metadata + version: 7.0.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + zipp: '>=0.5' + url: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.0.0-pyha770c72_0.conda + hash: + md5: a941237cd06538837b25cd245fcd25d8 + sha256: 9731e82a00d36b182dc515e31723e711ac82890bb1ca86c6a17a4b471135564f + category: main + optional: false +- name: importlib_metadata + version: 7.0.0 + manager: conda + platform: linux-64 + dependencies: + importlib-metadata: '>=7.0.0,<7.0.1.0a0' + url: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.0-hd8ed1ab_0.conda + hash: + md5: 12aff14f84c337be5e5636bf612f4140 + sha256: b9e8ed41df6c55222e3777f422e77a22a6a19ff779b2e65aa8dfdea792c1f7de + category: main + optional: false +- name: importlib_resources + version: 6.1.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + zipp: '>=3.1.0' + url: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda + hash: + md5: 3d5fa25cf42f3f32a12b2d874ace8574 + sha256: e584f9ae08fb2d242af0ce7e19e3cd2f85f362d8523119e08f99edb962db99ed + category: main + optional: false +- name: ipykernel + version: 6.26.0 + manager: conda + 
platform: linux-64 + dependencies: + __linux: '' + comm: '>=0.1.1' + debugpy: '>=1.6.5' + ipython: '>=7.23.1' + jupyter_client: '>=6.1.12' + jupyter_core: '>=4.12,!=5.0.*' + matplotlib-inline: '>=0.1' + nest-asyncio: '' + packaging: '' + psutil: '' + python: '>=3.8' + pyzmq: '>=20' + tornado: '>=6.1' + traitlets: '>=5.4.0' + url: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.26.0-pyhf8b6a83_0.conda + hash: + md5: 2307f71f5f0896d4b91b93e6b468abff + sha256: 9e647454f7572101657a07820ebed294df9a6a527b041cd5e4dd98b8aa3db625 + category: main + optional: false +- name: ipython + version: 8.18.1 + manager: conda + platform: linux-64 + dependencies: + __unix: '' + decorator: '' + exceptiongroup: '' + jedi: '>=0.16' + matplotlib-inline: '' + pexpect: '>4.3' + pickleshare: '' + prompt-toolkit: '>=3.0.41,<3.1.0' + pygments: '>=2.4.0' + python: '>=3.9' + stack_data: '' + traitlets: '>=5' + typing_extensions: '' + url: https://conda.anaconda.org/conda-forge/noarch/ipython-8.18.1-pyh707e725_3.conda + hash: + md5: 15c6f45a45f7ac27f6d60b0b084f6761 + sha256: d98d615ac8ad71de698afbc50e8269570d4b89706821c4ff3058a4ceec69bd9b + category: main + optional: false +- name: isoduration + version: 20.11.0 + manager: conda + platform: linux-64 + dependencies: + arrow: '>=0.15.0' + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 4cb68948e0b8429534380243d063a27a + sha256: 7bb5c4d994361022f47a807b5e7d101b3dce16f7dd8a0af6ffad9f479d346493 + category: main + optional: false +- name: jedi + version: 0.19.1 + manager: conda + platform: linux-64 + dependencies: + parso: '>=0.8.3,<0.9.0' + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda + hash: + md5: 81a3be0b2023e1ea8555781f0ad904a2 + sha256: 362f0936ef37dfd1eaa860190e42a6ebf8faa094eaa3be6aa4d9ace95f40047a + category: main + optional: false +- name: jinja2 + version: 3.1.2 + manager: conda + platform: linux-64 + dependencies: + markupsafe: '>=2.0' + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2 + hash: + md5: c8490ed5c70966d232fdd389d0dbed37 + sha256: b045faba7130ab263db6a8fdc96b1a3de5fcf85c4a607c5f11a49e76851500b5 + category: main + optional: false +- name: joblib + version: 1.3.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + setuptools: '' + url: https://conda.anaconda.org/conda-forge/noarch/joblib-1.3.2-pyhd8ed1ab_0.conda + hash: + md5: 4da50d410f553db77e62ab62ffaa1abc + sha256: 31e05d47970d956206188480b038829d24ac11fe8216409d8584d93d40233878 + category: main + optional: false +- name: json5 + version: 0.9.14 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7,<4.0' + url: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.14-pyhd8ed1ab_0.conda + hash: + md5: dac1dabba2b5a9d1aee175c5fcc7b436 + sha256: 41514104208c092959bef0713cbd795e72c535f2f939b7903d8c97809f2adaa7 + category: main + optional: false +- name: jsonpointer + version: '2.4' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-2.4-py310hff52083_3.conda + hash: + md5: 08ec1463dbc5c806a32fc431874032ca + sha256: 316db08863469a56cdbfd030de5a2cc11ec7649ed7c50eff507e9caa0070ccaa + category: main + optional: false +- name: jsonschema + version: 4.20.0 + manager: conda + platform: linux-64 + dependencies: + attrs: '>=22.2.0' + importlib_resources: '>=1.4.0' + 
jsonschema-specifications: '>=2023.03.6' + pkgutil-resolve-name: '>=1.3.10' + python: '>=3.8' + referencing: '>=0.28.4' + rpds-py: '>=0.7.1' + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.20.0-pyhd8ed1ab_0.conda + hash: + md5: 1116d79def5268414fb0917520b2bbf1 + sha256: 77aae609097d06deedb8ef8407a44b23d5fef95962ba6fe1c959ac7bd6195296 + category: main + optional: false +- name: jsonschema-specifications + version: 2023.11.2 + manager: conda + platform: linux-64 + dependencies: + importlib_resources: '>=1.4.0' + python: '>=3.8' + referencing: '>=0.31.0' + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.11.2-pyhd8ed1ab_0.conda + hash: + md5: 73884ca36d6d96cbce498cde99fab40f + sha256: e26115d02dc208a05b557c8dd670923270803b9b3b8af4e22b93d659d1ec77ec + category: main + optional: false +- name: jsonschema-with-format-nongpl + version: 4.20.0 + manager: conda + platform: linux-64 + dependencies: + fqdn: '' + idna: '' + isoduration: '' + jsonpointer: '>1.13' + jsonschema: '>=4.20.0,<4.20.1.0a0' + python: '' + rfc3339-validator: '' + rfc3986-validator: '>0.1.0' + uri-template: '' + webcolors: '>=1.11' + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.20.0-pyhd8ed1ab_0.conda + hash: + md5: a168c5f84010711f6d4ae650bc22b480 + sha256: 03558b25daa57137fdf98e92731ba50ff5506f265294ac2eef5ec465c76ecf57 + category: main + optional: false +- name: jupyter-lsp + version: 2.2.1 + manager: conda + platform: linux-64 + dependencies: + importlib-metadata: '>=4.8.3' + jupyter_server: '>=1.1.2' + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.1-pyhd8ed1ab_0.conda + hash: + md5: d1a5efc65bfabc3bfebf4d3a204da897 + sha256: 0f995f60609fb50db74bed3637165ad202cf091ec0804519c11b6cffce901e88 + category: main + optional: false +- name: jupyter_client + version: 8.6.0 + manager: conda + platform: linux-64 + dependencies: + importlib_metadata: '>=4.8.3' + jupyter_core: '>=4.12,!=5.0.*' + python: '>=3.8' + python-dateutil: '>=2.8.2' + pyzmq: '>=23.0' + tornado: '>=6.2' + traitlets: '>=5.3' + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.0-pyhd8ed1ab_0.conda + hash: + md5: 6bd3f1069cdebb44c7ae9efb900e312d + sha256: 86cbb9070862cf23a245451efce539ca214e610849d0950bb8ac90c545bd158d + category: main + optional: false +- name: jupyter_core + version: 5.5.0 + manager: conda + platform: linux-64 + dependencies: + platformdirs: '>=2.5' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + traitlets: '>=5.3' + url: https://conda.anaconda.org/conda-forge/linux-64/jupyter_core-5.5.0-py310hff52083_0.conda + hash: + md5: 9ca8f0d07c512cef3fd07b121bb2b023 + sha256: 35e05ff1ad8b070b1378886c5ee4b82a000ea078494af6d0552d1c455b3f6220 + category: main + optional: false +- name: jupyter_events + version: 0.9.0 + manager: conda + platform: linux-64 + dependencies: + jsonschema-with-format-nongpl: '>=4.18.0' + python: '>=3.8' + python-json-logger: '>=2.0.4' + pyyaml: '>=5.3' + referencing: '' + rfc3339-validator: '' + rfc3986-validator: '>=0.1.1' + traitlets: '>=5.3' + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.9.0-pyhd8ed1ab_0.conda + hash: + md5: 00ba25993f0dba38cf72a7224e33289f + sha256: 713f0cc927a862862a6d35bfb29c4114f987e4f59e2a8a14f71f23fcd7edfec3 + category: main + optional: false +- name: jupyter_server + version: 2.12.1 + manager: conda + platform: linux-64 + dependencies: + anyio: '>=3.1.0' + argon2-cffi: '' + jinja2: '' + jupyter_client: '>=7.4.4' + jupyter_core: 
'>=4.12,!=5.0.*' + jupyter_events: '>=0.9.0' + jupyter_server_terminals: '' + nbconvert-core: '>=6.4.4' + nbformat: '>=5.3.0' + overrides: '' + packaging: '' + prometheus_client: '' + python: '>=3.8' + pyzmq: '>=24' + send2trash: '>=1.8.2' + terminado: '>=0.8.3' + tornado: '>=6.2.0' + traitlets: '>=5.6.0' + websocket-client: '' + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.12.1-pyhd8ed1ab_0.conda + hash: + md5: e9781be1e6c93b5df2c180a9f9242420 + sha256: c4aabe2041afb8fde1f049549c2e292265612d07dd4d1156f3e183ba6a6f007b + category: main + optional: false +- name: jupyter_server_terminals + version: 0.5.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + terminado: '>=0.8.3' + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.0-pyhd8ed1ab_0.conda + hash: + md5: 37a8b4098d428ecd40e58f8ec8a8e77d + sha256: b2c769977c258e5a81d541fd526d01083fc6b8c8dfdd4822795a898626bc81e6 + category: main + optional: false +- name: jupyterlab + version: 4.0.9 + manager: conda + platform: linux-64 + dependencies: + async-lru: '>=1.0.0' + importlib_metadata: '>=4.8.3' + importlib_resources: '>=1.4' + ipykernel: '' + jinja2: '>=3.0.3' + jupyter-lsp: '>=2.0.0' + jupyter_core: '' + jupyter_server: '>=2.4.0,<3' + jupyterlab_server: '>=2.19.0,<3' + notebook-shim: '>=0.2' + packaging: '' + python: '>=3.8' + tomli: '' + tornado: '>=6.2.0' + traitlets: '' + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.0.9-pyhd8ed1ab_0.conda + hash: + md5: 7da6e874b0904e411ec2fd8e6082841e + sha256: 1c55e63e4b84810796c8827370ebd597ad3f45bcd0c1fa9975a363bc6a895f23 + category: main + optional: false +- name: jupyterlab_pygments + version: 0.3.0 + manager: conda + platform: linux-64 + dependencies: + pygments: '>=2.4.1,<3' + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_0.conda + hash: + md5: 3f0915b1fb2252ab73686a533c5f9d3f + sha256: 6ee596138a778a841261476408435da78e3000661f3ee025fb6c3ed17d28c8b3 + category: main + optional: false +- name: jupyterlab_server + version: 2.25.2 + manager: conda + platform: linux-64 + dependencies: + babel: '>=2.10' + importlib-metadata: '>=4.8.3' + jinja2: '>=3.0.3' + json5: '>=0.9.0' + jsonschema: '>=4.18' + jupyter_server: '>=1.21,<3' + packaging: '>=21.3' + python: '>=3.8' + requests: '>=2.31' + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.25.2-pyhd8ed1ab_0.conda + hash: + md5: f45557d5551b54dc2a74133a310bc1ba + sha256: 51c13a87072a64df1a0ae14fbb470bc4e36becf4d50693ffab53174199ca4f4b + category: main + optional: false +- name: keyutils + version: 1.6.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=10.3.0' + url: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2 + hash: + md5: 30186d27e2c9fa62b45fb1476b7200e3 + sha256: 150c05a6e538610ca7c43beb3a40d65c90537497a4f6a5f4d15ec0451b6f5ebb + category: bioinformatics + optional: true +- name: kiwisolver + version: 1.4.5 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_1.conda + hash: + md5: b8d67603d43b23ce7e988a5d81a7ab79 + sha256: bb51906639bced3de1d4d7740ac284cdaa89e2f22e0b1ec796378b090b0648ba + category: main + optional: false +- name: krb5 + version: 1.21.2 + manager: conda + platform: linux-64 + dependencies: + keyutils: '>=1.6.1,<2.0a0' + libedit: 
'>=3.1.20191231,<4.0a0' + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + openssl: '>=3.1.2,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda + hash: + md5: cd95826dbd331ed1be26bdf401432844 + sha256: 259bfaae731989b252b7d2228c1330ef91b641c9d68ff87dae02cbae682cb3e4 + category: bioinformatics + optional: true +- name: lame + version: '3.100' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 + hash: + md5: a8832b479f93521a9e7b5b743803be51 + sha256: aad2a703b9d7b038c0f745b853c6bb5f122988fe1a7a096e0e606d9cbec4eaab + category: main + optional: false +- name: lcms2 + version: '2.15' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libjpeg-turbo: '>=2.1.5.1,<3.0a0' + libtiff: '>=4.6.0,<4.7.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-h7f713cb_2.conda + hash: + md5: 9ab79924a3760f85a799f21bc99bd655 + sha256: 9125833b3019bf29c4a20295665e7bc912de581086a53693f10709fae409a3b2 + category: main + optional: false +- name: ld_impl_linux-64 + version: '2.40' + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda + hash: + md5: 7aca3059a1729aa76c597603f10b0dd3 + sha256: f6cc89d887555912d6c61b295d398cff9ec982a3417d38025c45d5dd9b9e79cd + category: main + optional: false +- name: lerc + version: 4.0.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2 + hash: + md5: 76bbff344f0134279f225174e9064c8f + sha256: cb55f36dcd898203927133280ae1dc643368af041a48bcf7c026acb7c47b0c12 + category: main + optional: false +- name: libblas + version: 3.9.0 + manager: conda + platform: linux-64 + dependencies: + mkl: '>=2022.1.0,<2023.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_mkl.tar.bz2 + hash: + md5: 85f61af03fd291dae33150ffe89dc09a + sha256: 24e656f13b402b6fceb88df386768445ab9beb657d451a8e5a88d4b3380cf7a4 + category: main + optional: false +- name: libbrotlicommon + version: 1.1.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda + hash: + md5: aec6c91c7371c26392a06708a73c70e5 + sha256: 40f29d1fab92c847b083739af86ad2f36d8154008cf99b64194e4705a1725d78 + category: main + optional: false +- name: libbrotlidec + version: 1.1.0 + manager: conda + platform: linux-64 + dependencies: + libbrotlicommon: 1.1.0 + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda + hash: + md5: f07002e225d7a60a694d42a7bf5ff53f + sha256: 86fc861246fbe5ad85c1b6b3882aaffc89590a48b42d794d3d5c8e6d99e5f926 + category: main + optional: false +- name: libbrotlienc + version: 1.1.0 + manager: conda + platform: linux-64 + dependencies: + libbrotlicommon: 1.1.0 + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda + hash: + md5: 5fc11c6020d421960607d821310fcd4d + sha256: f751b8b1c4754a2a8dfdc3b4040fa7818f35bbf6b10e905a47d3a194b746b071 + category: main + optional: false +- name: libcblas + version: 3.9.0 + manager: conda + platform: linux-64 + dependencies: + libblas: 3.9.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_mkl.tar.bz2 + hash: 
+ md5: 361bf757b95488de76c4f123805742d3 + sha256: 892ba10508f22310ccfe748df1fd3b6c7f20e7b6f6b79e69ed337863551c1bd8 + category: main + optional: false +- name: libcublas + version: 12.1.0.26 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/libcublas-12.1.0.26-0.tar.bz2 + hash: + md5: 74f872929a02e01ef746a064fa46a80c + sha256: d269774ee934885d43c13464ffa8cec69440868c85a993b906bce56f8de203ae + category: main + optional: false +- name: libcufft + version: 11.0.2.4 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/libcufft-11.0.2.4-0.tar.bz2 + hash: + md5: b53f7ea28a363eb6d218bcbffb9d26aa + sha256: 56936216c0abaa8a8ef46fa0be89f0c9066b8fd439b7bcab7f6c5a3c465670fc + category: main + optional: false +- name: libcufile + version: 1.8.1.2 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/libcufile-1.8.1.2-0.tar.bz2 + hash: + md5: 9216c3ab5eb551bb7c2c4f604b4e7614 + sha256: f399e21e2d8a286d74354c0df94635f259cfb6649031a8f48a5654f97387014f + category: main + optional: false +- name: libcurand + version: 10.3.4.101 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/libcurand-10.3.4.101-0.tar.bz2 + hash: + md5: c19aee1cc95a33b3aadc62113f1e57f2 + sha256: 0c001fd158a81b5fff10cd711805f87c8d11a9f5794ea9fa5aa76aef129c0b48 + category: main + optional: false +- name: libcurl + version: 8.5.0 + manager: conda + platform: linux-64 + dependencies: + krb5: '>=1.21.2,<1.22.0a0' + libgcc-ng: '>=12' + libnghttp2: '>=1.58.0,<2.0a0' + libssh2: '>=1.11.0,<2.0a0' + libzlib: '>=1.2.13,<1.3.0a0' + openssl: '>=3.2.0,<4.0a0' + zstd: '>=1.5.5,<1.6.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.5.0-hca28451_0.conda + hash: + md5: 7144d5a828e2cae218e0e3c98d8a0aeb + sha256: 00a6bea5ff90ca58eeb15ebc98e08ffb88bddaff27396bb62640064f59d29cf0 + category: bioinformatics + optional: true +- name: libcusolver + version: 11.4.4.55 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/libcusolver-11.4.4.55-0.tar.bz2 + hash: + md5: 2d2fe4a7af91ec8a1eee7f1f0cf7b050 + sha256: bdd75eaf46b00ec5c22401b33a8bf428ecbf1d3afa3034cfb0f5a270bae588fc + category: main + optional: false +- name: libcusparse + version: 12.0.2.55 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/libcusparse-12.0.2.55-0.tar.bz2 + hash: + md5: c295ea64ea0654af0cbe833431de6daa + sha256: 0f21ec4d7d31640816b3bd76c9028dfe5e5d75265c65b6a359535f7b7fc66e1c + category: main + optional: false +- name: libdeflate + version: '1.18' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda + hash: + md5: 6aa9c9de5542ecb07fdda9ca626252d8 + sha256: 949d84ceea543802c1e085b2aa58f1d6cb5dd8cec5a9abaaf4e8ac65d6094b3a + category: main + optional: false +- name: libedit + version: 3.1.20191231 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=7.5.0' + ncurses: '>=6.2,<7.0.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2 + hash: + md5: 4d331e44109e3f0e19b4cb8f9b82f3e1 + sha256: a57d37c236d8f7c886e01656f4949d9dcca131d2a0728609c6f7fa338b65f1cf + category: bioinformatics + optional: true +- name: libev + version: '4.33' + manager: conda + platform: linux-64 + dependencies: + 
libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda + hash: + md5: 172bf1cd1ff8629f2b1179945ed45055 + sha256: 1cd6048169fa0395af74ed5d8f1716e22c19a81a8a36f934c110ca3ad4dd27b4 + category: bioinformatics + optional: true +- name: libffi + version: 3.4.2 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=9.4.0' + url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 + hash: + md5: d645c6d2ac96843a2bfaccd2d62b3ac3 + sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e + category: main + optional: false +- name: libgcc-ng + version: 13.2.0 + manager: conda + platform: linux-64 + dependencies: + _libgcc_mutex: '0.1' + _openmp_mutex: '>=4.5' + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda + hash: + md5: 23fdf1fef05baeb7eadc2aed5fb0011f + sha256: 5e88f658e07a30ab41b154b42c59f079b168acfa9551a75bdc972099453f4105 + category: main + optional: false +- name: libgfortran-ng + version: 13.2.0 + manager: conda + platform: linux-64 + dependencies: + libgfortran5: 13.2.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda + hash: + md5: 73031c79546ad06f1fe62e57fdd021bc + sha256: 5b918950b84605b6865de438757f507b1eff73c96fd562f7022c80028b088c14 + category: main + optional: false +- name: libgfortran5 + version: 13.2.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=13.2.0' + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda + hash: + md5: c714d905cdfa0e70200f68b80cc04764 + sha256: 0084a1d29a4f8ee3b8edad80eb6c42e5f0480f054f28cf713fb314bebb347a50 + category: main + optional: false +- name: libhwloc + version: 2.9.3 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + libxml2: '>=2.11.5,<2.12.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.9.3-default_h554bfaf_1009.conda + hash: + md5: f36ddc11ca46958197a45effdd286e45 + sha256: 6950fee24766d03406e0f6f965262a5d98829c71eed8d1004f313892423b559b + category: main + optional: false +- name: libiconv + version: '1.17' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_1.conda + hash: + md5: 4b06b43d0eca61db2899e4d7a289c302 + sha256: a9364735ef2542558ed59aa5f404707dab674df465cbdf312edeaf5e827b55ed + category: main + optional: false +- name: libjpeg-turbo + version: 2.1.5.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda + hash: + md5: 323e90742f0f48fc22bea908735f55e6 + sha256: 0ef7378818c6d5b407692d02556c32e2f6af31c7542bca5160d0b92a59427fb5 + category: main + optional: false +- name: liblapack + version: 3.9.0 + manager: conda + platform: linux-64 + dependencies: + libblas: 3.9.0 + url: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_mkl.tar.bz2 + hash: + md5: a2f166748917d6d6e4707841ca1f519e + sha256: d6201f860b2d76ed59027e69c2bbad6d1cb211a215ec9705cc487cde488fa1fa + category: main + optional: false +- name: libnghttp2 + version: 1.58.0 + manager: conda + platform: linux-64 + dependencies: + c-ares: '>=1.23.0,<2.0a0' + libev: '>=4.33,<5.0a0' + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + libzlib: '>=1.2.13,<1.3.0a0' + openssl: '>=3.2.0,<4.0a0' + url: 
https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda + hash: + md5: 700ac6ea6d53d5510591c4344d5c989a + sha256: 1910c5306c6aa5bcbd623c3c930c440e9c77a5a019008e1487810e3c1d3716cb + category: bioinformatics + optional: true +- name: libnpp + version: 12.0.2.50 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/libnpp-12.0.2.50-0.tar.bz2 + hash: + md5: 072e390c1e0e4909bdd7508dd6af1474 + sha256: 88f3813a1b1e4599b9e758c1db9eb3981c8e75a505b6a1a29749a1d997913277 + category: main + optional: false +- name: libnsl + version: 2.0.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda + hash: + md5: 30fd6e37fe21f86f4bd26d6ee73eeec7 + sha256: 26d77a3bb4dceeedc2a41bd688564fe71bf2d149fdcf117049970bc02ff1add6 + category: main + optional: false +- name: libnvjitlink + version: 12.1.105 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/libnvjitlink-12.1.105-0.tar.bz2 + hash: + md5: 0155acf6f5117613ba17c65d9be4f2f1 + sha256: d9c56634049b45e5e1c1fb9b1c451378c6853490112d38f3325309575807ad15 + category: main + optional: false +- name: libnvjpeg + version: 12.1.1.14 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/nvidia/linux-64/libnvjpeg-12.1.1.14-0.tar.bz2 + hash: + md5: 4dea93d43adfd03388b31f2ae9892558 + sha256: 8d06c6cf389011278fe912a20297c01f445ce68f69b3bd95199b6afe85de2981 + category: main + optional: false +- name: libpng + version: 1.6.39 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libzlib: '>=1.2.13,<1.3.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda + hash: + md5: e1c890aebdebbfbf87e2c917187b4416 + sha256: a32b36d34e4f2490b99bddbc77d01a674d304f667f0e62c89e02c961addef462 + category: main + optional: false +- name: libsodium + version: 1.0.18 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=7.5.0' + url: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.18-h36c2ea0_1.tar.bz2 + hash: + md5: c3788462a6fbddafdb413a9f9053e58d + sha256: 53da0c8b79659df7b53eebdb80783503ce72fb4b10ed6e9e05cc0e9e4207a130 + category: main + optional: false +- name: libsqlite + version: 3.44.2 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libzlib: '>=1.2.13,<1.3.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda + hash: + md5: 3b6a9f225c3dbe0d24f4fedd4625c5bf + sha256: ee2c4d724a3ed60d5b458864d66122fb84c6ce1df62f735f90d8db17b66cd88a + category: main + optional: false +- name: libssh2 + version: 1.11.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libzlib: '>=1.2.13,<1.3.0a0' + openssl: '>=3.1.1,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda + hash: + md5: 1f5a58e686b13bcfde88b93f547d23fe + sha256: 50e47fd9c4f7bf841a11647ae7486f65220cfc988ec422a4475fe8d5a823824d + category: bioinformatics + optional: true +- name: libstdcxx-ng + version: 13.2.0 + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda + hash: + md5: 937eaed008f6bf2191c5fe76f87755e9 + sha256: 6c6c49efedcc5709a66f19fb6b26b69c6a5245310fd1d9a901fd5e38aaf7f882 + category: main + optional: false +- name: libtiff + version: 4.6.0 + manager: 
conda + platform: linux-64 + dependencies: + lerc: '>=4.0.0,<5.0a0' + libdeflate: '>=1.18,<1.19.0a0' + libgcc-ng: '>=12' + libjpeg-turbo: '>=2.1.5.1,<3.0a0' + libstdcxx-ng: '>=12' + libwebp-base: '>=1.3.1,<2.0a0' + libzlib: '>=1.2.13,<1.3.0a0' + xz: '>=5.2.6,<6.0a0' + zstd: '>=1.5.5,<1.6.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h8b53f26_0.conda + hash: + md5: 097c175e54beba8b42dcc5dd49299ca1 + sha256: a14a86478fb3c284b5552e33a179a3912cacc3f469d318557d20f4a72b624235 + category: main + optional: false +- name: libuuid + version: 2.38.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda + hash: + md5: 40b61aab5c7ba9ff276c41cfffe6b80b + sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18 + category: main + optional: false +- name: libwebp-base + version: 1.3.2 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda + hash: + md5: 30de3fd9b3b602f7473f30e684eeea8c + sha256: 68764a760fa81ef35dacb067fe8ace452bbb41476536a4a147a1051df29525f0 + category: main + optional: false +- name: libxcb + version: '1.15' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + pthread-stubs: '' + xorg-libxau: '' + xorg-libxdmcp: '' + url: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda + hash: + md5: 33277193f5b92bad9fdd230eb700929c + sha256: a670902f0a3173a466c058d2ac22ca1dd0df0453d3a80e0212815c20a16b0485 + category: main + optional: false +- name: libxml2 + version: 2.11.6 + manager: conda + platform: linux-64 + dependencies: + icu: '>=73.2,<74.0a0' + libgcc-ng: '>=12' + libiconv: '>=1.17,<2.0a0' + libzlib: '>=1.2.13,<1.3.0a0' + xz: '>=5.2.6,<6.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda + hash: + md5: 427a3e59d66cb5d145020bd9c6493334 + sha256: e6183d5e57ee48cc1fc4340477c31a6bd8be4d3ba5dded82cbca0d5280591086 + category: main + optional: false +- name: libzlib + version: 1.2.13 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda + hash: + md5: f36c115f1ee199da648e0597ec2047ad + sha256: 370c7c5893b737596fd6ca0d9190c9715d89d888b8c88537ae1ef168c25e82e4 + category: main + optional: false +- name: llvm-openmp + version: 15.0.7 + manager: conda + platform: linux-64 + dependencies: + libzlib: '>=1.2.13,<1.3.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-15.0.7-h0cdce71_0.conda + hash: + md5: 589c9a3575a050b583241c3d688ad9aa + sha256: 7c67d383a8b1f3e7bf9e046e785325c481f6868194edcfb9d78d261da4ad65d4 + category: main + optional: false +- name: markdown-it-py + version: 3.0.0 + manager: conda + platform: linux-64 + dependencies: + mdurl: '>=0.1,<1' + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda + hash: + md5: 93a8e71256479c62074356ef6ebf501b + sha256: c041b0eaf7a6af3344d5dd452815cdc148d6284fec25a4fa3f4263b3a021e962 + category: workflows + optional: true +- name: markupsafe + version: 2.1.3 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_1.conda + hash: + md5: b74e07a054c479e45a83a83fc5be713c + sha256: 
ac46cc2f6d4bbeedcd2f508e43f43143a9286ced55730d8d97a3c91ceceb0d56 + category: main + optional: false +- name: mashumaro + version: '3.11' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + typing-extensions: '>=4.1.0' + url: https://conda.anaconda.org/conda-forge/noarch/mashumaro-3.11-pyhd8ed1ab_0.conda + hash: + md5: b831d76378b72084746bfcd01658989b + sha256: 07af0f5d0b721deb4c152525468109a70670fa93a92a1c1c339aed176ec1eb34 + category: main + optional: false +- name: matplotlib-base + version: 3.8.1 + manager: conda + platform: linux-64 + dependencies: + certifi: '>=2020.06.20' + contourpy: '>=1.0.1' + cycler: '>=0.10' + fonttools: '>=4.22.0' + freetype: '>=2.12.1,<3.0a0' + kiwisolver: '>=1.3.1' + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + numpy: '>=1.22.4,<2.0a0' + packaging: '>=20.0' + pillow: '>=8' + pyparsing: '>=2.3.1' + python: '>=3.10,<3.11.0a0' + python-dateutil: '>=2.7' + python_abi: 3.10.* + tk: '>=8.6.13,<8.7.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.1-py310h62c0568_0.conda + hash: + md5: e650bd952e5618050ccb088bc0c6dfb4 + sha256: 615197c8b2b816aa1f7874319bd41acb134fcb9cd55e7337563295c8ced0a30e + category: main + optional: false +- name: matplotlib-inline + version: 0.1.6 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + traitlets: '' + url: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.6-pyhd8ed1ab_0.tar.bz2 + hash: + md5: b21613793fcc81d944c76c9f2864a7de + sha256: aa091b88aec55bfa2d9207028d8cdc689b9efb090ae27b99557e93c675be2f3c + category: main + optional: false +- name: mdurl + version: 0.1.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.0-pyhd8ed1ab_0.tar.bz2 + hash: + md5: f8dab71fdc13b1bf29a01248b156d268 + sha256: c678b9194e025b1fb665bec30ee20aab93399203583875b1dcc0a3b52a8f5523 + category: workflows + optional: true +- name: mistune + version: 3.0.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda + hash: + md5: 5cbee699846772cc939bef23a0d524ed + sha256: f95cb70007e3cc2ba44e17c29a056b499e6dadf08746706d0c817c8e2f47e05c + category: main + optional: false +- name: mkl + version: 2022.2.1 + manager: conda + platform: linux-64 + dependencies: + _openmp_mutex: '>=4.5' + llvm-openmp: '>=15.0.6' + tbb: 2021.* + url: https://conda.anaconda.org/conda-forge/linux-64/mkl-2022.2.1-h84fe81f_16997.conda + hash: + md5: a7ce56d5757f5b57e7daabe703ade5bb + sha256: 5322750d5e96ff5d96b1457db5fb6b10300f2bc4030545e940e17b57c4e96d00 + category: main + optional: false +- name: mpc + version: 1.3.1 + manager: conda + platform: linux-64 + dependencies: + gmp: '>=6.2.1,<7.0a0' + libgcc-ng: '>=12' + mpfr: '>=4.1.0,<5.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/mpc-1.3.1-hfe3b2da_0.conda + hash: + md5: 289c71e83dc0daa7d4c81f04180778ca + sha256: 2f88965949ba7b4b21e7e5facd62285f7c6efdb17359d1b365c3bb4ecc968d29 + category: main + optional: false +- name: mpfr + version: 4.2.1 + manager: conda + platform: linux-64 + dependencies: + gmp: '>=6.2.1,<7.0a0' + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.1-h9458935_0.conda + hash: + md5: 4c28f3210b30250037a4a627eeee9e0f + sha256: 008230a53ff15cf61966476b44f7ba2c779826825b9ca639a0a2b44d8f7aa6cb + category: main + optional: false +- name: mpmath + version: 1.3.0 + manager: conda + platform: linux-64 + 
dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/mpmath-1.3.0-pyhd8ed1ab_0.conda + hash: + md5: dbf6e2d89137da32fa6670f3bffc024e + sha256: a4f025c712ec1502a55c471b56a640eaeebfce38dd497d5a1a33729014cac47a + category: main + optional: false +- name: munkres + version: 1.1.4 + manager: conda + platform: linux-64 + dependencies: + python: '' + url: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 + hash: + md5: 2ba8498c1018c1e9c61eb99b973dfe19 + sha256: f86fb22b58e93d04b6f25e0d811b56797689d598788b59dcb47f59045b568306 + category: main + optional: false +- name: nbclient + version: 0.8.0 + manager: conda + platform: linux-64 + dependencies: + jupyter_client: '>=6.1.12' + jupyter_core: '>=4.12,!=5.0.*' + nbformat: '>=5.1' + python: '>=3.8' + traitlets: '>=5.4' + url: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.8.0-pyhd8ed1ab_0.conda + hash: + md5: e78da91cf428faaf05701ce8cc8f2f9b + sha256: 4ebd237cdf4bfa5226f92d2ae78fab8dba27696909391884dc6594ca6f9df5ff + category: main + optional: false +- name: nbconvert-core + version: 7.12.0 + manager: conda + platform: linux-64 + dependencies: + beautifulsoup4: '' + bleach: '' + defusedxml: '' + entrypoints: '>=0.2.2' + jinja2: '>=3.0' + jupyter_core: '>=4.7' + jupyterlab_pygments: '' + markupsafe: '>=2.0' + mistune: '>=2.0.3,<4' + nbclient: '>=0.5.0' + nbformat: '>=5.1' + packaging: '' + pandocfilters: '>=1.4.1' + pygments: '>=2.4.1' + python: '>=3.8' + tinycss2: '' + traitlets: '>=5.0' + url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.12.0-pyhd8ed1ab_0.conda + hash: + md5: 4d67c68fd0d130091ada039bc2d81b33 + sha256: 04c3ac88701d98d58139569e4899c3254bf99908179a898cc3dcadd8c0ef44b4 + category: main + optional: false +- name: nbformat + version: 5.9.2 + manager: conda + platform: linux-64 + dependencies: + jsonschema: '>=2.6' + jupyter_core: '' + python: '>=3.8' + python-fastjsonschema: '' + traitlets: '>=5.1' + url: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.9.2-pyhd8ed1ab_0.conda + hash: + md5: 61ba076de6530d9301a0053b02f093d2 + sha256: fc82c5a9116820757b03ffb836b36f0f50e4cd390018024dbadb0ee0217f6992 + category: main + optional: false +- name: ncurses + version: '6.4' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda + hash: + md5: 7dbaa197d7ba6032caf7ae7f32c1efa0 + sha256: 91cc03f14caf96243cead96c76fe91ab5925a695d892e83285461fb927dece5e + category: main + optional: false +- name: nest-asyncio + version: 1.5.8 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.5' + url: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.5.8-pyhd8ed1ab_0.conda + hash: + md5: a4f0e4519bc50eee4f53f689be9607f7 + sha256: d7b795b4e754136841c6da3f9fa1a0f7ec37bc7167e7dd68c5b45e657133e008 + category: main + optional: false +- name: nettle + version: '3.6' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=7.5.0' + url: https://conda.anaconda.org/conda-forge/linux-64/nettle-3.6-he412f7d_0.tar.bz2 + hash: + md5: f050099af540c1c960c813b06bca89ad + sha256: d929f0c53f2bb74c8e3d97dc1c53cc76b7cec97837fcf87998fa3dd447f03b36 + category: main + optional: false +- name: networkx + version: 3.2.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/networkx-3.2.1-pyhd8ed1ab_0.conda + hash: + md5: 425fce3b531bed6ec3c74fab3e5f0a1c + sha256: 
7629aa4f9f8cdff45ea7a4701fe58dccce5bf2faa01c26eb44cbb27b7e15ca9d + category: main + optional: false +- name: notebook-shim + version: 0.2.3 + manager: conda + platform: linux-64 + dependencies: + jupyter_server: '>=1.8,<3' + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.3-pyhd8ed1ab_0.conda + hash: + md5: 67e0fe74c156267d9159e9133df7fd37 + sha256: f028d7ad1f2175cde307db08b60d07e371b9d6f035cfae6c81ea94b4c408c538 + category: main + optional: false +- name: numpy + version: 1.26.2 + manager: conda + platform: linux-64 + dependencies: + libblas: '>=3.9.0,<4.0a0' + libcblas: '>=3.9.0,<4.0a0' + libgcc-ng: '>=12' + liblapack: '>=3.9.0,<4.0a0' + libstdcxx-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py310hb13e2d6_0.conda + hash: + md5: d3147cfbf72d6ae7bba10562208f6def + sha256: f5ea7769beb7827f4f5858d28bbdbc814c01649cb8cb81cccbba476ebe3798cd + category: main + optional: false +- name: openh264 + version: 2.1.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=9.3.0' + libstdcxx-ng: '>=9.3.0' + zlib: '>=1.2.11,<1.3.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/openh264-2.1.1-h780b84a_0.tar.bz2 + hash: + md5: 034a6f90f1bbc7ba11d04b84ec9d74c8 + sha256: 2ce3df1edb23541595443c7697e5568ae6426fa4d365dede45b16b0310bd6a06 + category: main + optional: false +- name: openjpeg + version: 2.5.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libpng: '>=1.6.39,<1.7.0a0' + libstdcxx-ng: '>=12' + libtiff: '>=4.6.0,<4.7.0a0' + libzlib: '>=1.2.13,<1.3.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda + hash: + md5: 128c25b7fe6a25286a48f3a6a9b5b6f3 + sha256: 9fe91b67289267de68fda485975bb48f0605ac503414dc663b50d8b5f29bc82a + category: main + optional: false +- name: openssl + version: 3.2.0 + manager: conda + platform: linux-64 + dependencies: + ca-certificates: '' + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_1.conda + hash: + md5: 603827b39ea2b835268adb8c821b8570 + sha256: 80efc6f429bd8e622d999652e5cba2ca56fcdb9c16a439d2ce9b4313116e4a87 + category: main + optional: false +- name: overrides + version: 7.4.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + typing_utils: '' + url: https://conda.anaconda.org/conda-forge/noarch/overrides-7.4.0-pyhd8ed1ab_0.conda + hash: + md5: 4625b7b01d7f4ac9c96300a5515acfaa + sha256: 29db8c3b521d261bf71897ba3cfbebc81cd61e581b30fcb984b5a713f02fe1ff + category: main + optional: false +- name: packaging + version: '23.2' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda + hash: + md5: 79002079284aa895f883c6b7f3f88fd6 + sha256: 69b3ace6cca2dab9047b2c24926077d81d236bef45329d264b394001e3c3e52f + category: main + optional: false +- name: pandas + version: 2.1.3 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + numpy: '>=1.22.4,<2.0a0' + python: '>=3.10,<3.11.0a0' + python-dateutil: '>=2.8.1' + python-tzdata: '>=2022a' + python_abi: 3.10.* + pytz: '>=2020.1' + url: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py310hcc13569_0.conda + hash: + md5: 30a39c1064e5efc578d83c2a5f7cd749 + sha256: bb2b3e4a3f3d40b87ac214b88393a7f1ee5b2cac41d249c580d184f7edb30653 + category: main + optional: false +- name: pandocfilters + version: 
1.5.0 + manager: conda + platform: linux-64 + dependencies: + python: '!=3.0,!=3.1,!=3.2,!=3.3' + url: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 457c2c8c08e54905d6954e79cb5b5db9 + sha256: 2bb9ba9857f4774b85900c2562f7e711d08dd48e2add9bee4e1612fbee27e16f + category: main + optional: false +- name: paramiko + version: 3.3.1 + manager: conda + platform: linux-64 + dependencies: + bcrypt: '>=3.2' + cryptography: '>=3.3' + pynacl: '>=1.5' + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/paramiko-3.3.1-pyhd8ed1ab_0.conda + hash: + md5: 8d4563992b27cdb8e673d1ca16962c9d + sha256: 9139c13cb7ea5729af862db5fb0523daa5900b9b4fa36637cd7f9c01be665f80 + category: workflows + optional: true +- name: parso + version: 0.8.3 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.3-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 17a565a0c3899244e938cdf417e7b094 + sha256: 4e26d5daf5de0e31aa5e74ac56386a361b202433b83f024fdadbf07d4a244da4 + category: main + optional: false +- name: pexpect + version: 4.8.0 + manager: conda + platform: linux-64 + dependencies: + ptyprocess: '>=0.5' + python: '' + url: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.8.0-pyh1a96a4e_2.tar.bz2 + hash: + md5: 330448ce4403cc74990ac07c555942a1 + sha256: 07706c0417ead94f359ca7278f65452d3c396448777aba1da6a11fc351bdca9a + category: main + optional: false +- name: pickleshare + version: 0.7.5 + manager: conda + platform: linux-64 + dependencies: + python: '>=3' + url: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 + hash: + md5: 415f0ebb6198cc2801c73438a9fb5761 + sha256: a1ed1a094dd0d1b94a09ed85c283a0eb28943f2e6f22161fb45e128d35229738 + category: main + optional: false +- name: pillow + version: 10.0.1 + manager: conda + platform: linux-64 + dependencies: + freetype: '>=2.12.1,<3.0a0' + lcms2: '>=2.15,<3.0a0' + libgcc-ng: '>=12' + libjpeg-turbo: '>=2.1.5.1,<3.0a0' + libtiff: '>=4.6.0,<4.7.0a0' + libwebp-base: '>=1.3.2,<2.0a0' + libxcb: '>=1.15,<1.16.0a0' + libzlib: '>=1.2.13,<1.3.0a0' + openjpeg: '>=2.5.0,<3.0a0' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + tk: '>=8.6.12,<8.7.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py310h29da1c1_1.conda + hash: + md5: 8e93b1c69cddf89fd412178d3d418bae + sha256: 4c18593b1b90299e0f1f7a279ccce6dbe0aba694758ee039c0850e0119d3b3e8 + category: main + optional: false +- name: pip + version: 23.3.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + setuptools: '' + wheel: '' + url: https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda + hash: + md5: 2400c0b86889f43aa52067161e1fb108 + sha256: 435829a03e1c6009f013f29bb83de8b876c388820bf8cf69a7baeec25f6a3563 + category: main + optional: false +- name: pkgutil-resolve-name + version: 1.3.10 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda + hash: + md5: 405678b942f2481cecdb3e010f4925d9 + sha256: fecf95377134b0e8944762d92ecf7b0149c07d8186fb5db583125a2705c7ea0a + category: main + optional: false +- name: platformdirs + version: 4.1.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.1.0-pyhd8ed1ab_0.conda + hash: + md5: 45a5065664da0d1dfa8f8cd2eaf05ab9 + sha256: 
9e4ff17ce802159ed31344eb913eaa877688226765b77947b102b42255a53853 + category: main + optional: false +- name: plumbum + version: 1.8.2 + manager: conda + platform: linux-64 + dependencies: + paramiko: '' + python: '>=3.6' + pywin32-on-windows: '' + url: https://conda.anaconda.org/conda-forge/noarch/plumbum-1.8.2-pyhd8ed1ab_0.conda + hash: + md5: 2f43a31010edd3d165fb469cac60d146 + sha256: 2d3c92af276556de9daf5554be7c9b2be477cc5f274d64fffa22e09b075ffc56 + category: workflows + optional: true +- name: pooch + version: 1.8.0 + manager: conda + platform: linux-64 + dependencies: + packaging: '>=20.0' + platformdirs: '>=2.5.0' + python: '>=3.7' + requests: '>=2.19.0' + url: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.0-pyhd8ed1ab_0.conda + hash: + md5: 134b2b57b7865d2316a7cce1915a51ed + sha256: 51b02987370bbff28dbf782063c23e3b264aa34173b344454203cd691946e077 + category: main + optional: false +- name: prometheus_client + version: 0.19.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.19.0-pyhd8ed1ab_0.conda + hash: + md5: 7baa10fa8073c371155cf451b71b848d + sha256: 1235a3dbb033f914163e0deaf22d244cb1c1b5d8829d0089e38c34079286acbe + category: main + optional: false +- name: prompt-toolkit + version: 3.0.42 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + wcwidth: '' + url: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.42-pyha770c72_0.conda + hash: + md5: 0bf64bf10eee21f46ac83c161917fa86 + sha256: 58525b2a9305fb154b2b0d43a48b9a6495441b80e4fbea44f2a34a597d2cef16 + category: main + optional: false +- name: psutil + version: 5.9.5 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h2372a71_1.conda + hash: + md5: cb25177acf28cc35cfa6c1ac1c679e22 + sha256: db8a99bc41c1b0405c8e9daa92b9d4e7711f9717aff7fd3feeba407ca2a91aa2 + category: main + optional: false +- name: pthread-stubs + version: '0.4' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=7.5.0' + url: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2 + hash: + md5: 22dad4df6e8630e8dff2428f6f6a7036 + sha256: 67c84822f87b641d89df09758da498b2d4558d47b920fd1d3fe6d3a871e000ff + category: main + optional: false +- name: ptyprocess + version: 0.7.0 + manager: conda + platform: linux-64 + dependencies: + python: '' + url: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 + hash: + md5: 359eeb6536da0e687af562ed265ec263 + sha256: fb31e006a25eb2e18f3440eb8d17be44c8ccfae559499199f73584566d0a444a + category: main + optional: false +- name: pure_eval + version: 0.2.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.5' + url: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.2-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 6784285c7e55cb7212efabc79e4c2883 + sha256: 72792f9fc2b1820e37cc57f84a27bc819c71088c3002ca6db05a2e56404f9d44 + category: main + optional: false +- name: pycparser + version: '2.21' + manager: conda + platform: linux-64 + dependencies: + python: 2.7.*|>=3.4 + url: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 076becd9e05608f8dc72757d5f3a91ff + sha256: 74c63fd03f1f1ea2b54e8bc529fd1a600aaafb24027b738d0db87909ee3a33dc + category: main + optional: false +- name: pygments + version: 2.17.2 + manager: 
conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda + hash: + md5: 140a7f159396547e9799aa98f9f0742e + sha256: af5f8867450dc292f98ea387d4d8945fc574284677c8f60eaa9846ede7387257 + category: main + optional: false +- name: pynacl + version: 1.5.0 + manager: conda + platform: linux-64 + dependencies: + cffi: '>=1.4.1' + libgcc-ng: '>=12' + libsodium: '>=1.0.18,<1.0.19.0a0' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + six: '' + url: https://conda.anaconda.org/conda-forge/linux-64/pynacl-1.5.0-py310h2372a71_3.conda + hash: + md5: a29a0825809cd3a780097472be176618 + sha256: f47cc2039e555a03187defab05cf77cc28f56df1a820d789efad39a930994192 + category: workflows + optional: true +- name: pyparsing + version: 3.1.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda + hash: + md5: 176f7d56f0cfe9008bdf1bccd7de02fb + sha256: 4a1332d634b6c2501a973655d68f08c9c42c0bd509c349239127b10572b8354b + category: main + optional: false +- name: pyperclip + version: 1.8.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/pyperclip-1.8.2-pyhd8ed1ab_2.tar.bz2 + hash: + md5: 8d7c6507e902193f9b1e9612f0210a4b + sha256: ccd049b620584b8cff340df8ea591bc73badc1e366768358e20e7cab9cf795dc + category: workflows + optional: true +- name: pysam + version: 0.22.0 + manager: conda + platform: linux-64 + dependencies: + bzip2: '>=1.0.8,<2.0a0' + libcurl: '>=8.4.0,<9.0a0' + libdeflate: '>=1.18,<1.19.0a0' + libgcc-ng: '>=12' + libzlib: '>=1.2.13,<1.3.0a0' + openssl: '>=3.1.3,<4.0a0' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + xz: '>=5.2.6,<6.0a0' + zlib: '' + url: https://conda.anaconda.org/bioconda/linux-64/pysam-0.22.0-py310h41dec4a_0.tar.bz2 + hash: + md5: a006237db7a1f3313b0e40945727e7b2 + sha256: b42a8e28b4bc5b8a05eea102f8e368cffd1176d545e28883cb23ffc1bbe953da + category: main + optional: false +- name: pysocks + version: 1.7.1 + manager: conda + platform: linux-64 + dependencies: + __unix: '' + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 + hash: + md5: 2a7de29fb590ca14b5243c4c812c8025 + sha256: a42f826e958a8d22e65b3394f437af7332610e43ee313393d1cf143f0a2d274b + category: main + optional: false +- name: python + version: 3.10.13 + manager: conda + platform: linux-64 + dependencies: + bzip2: '>=1.0.8,<2.0a0' + ld_impl_linux-64: '>=2.36.1' + libffi: '>=3.4,<4.0a0' + libgcc-ng: '>=12' + libnsl: '>=2.0.1,<2.1.0a0' + libsqlite: '>=3.43.2,<4.0a0' + libuuid: '>=2.38.1,<3.0a0' + libzlib: '>=1.2.13,<1.3.0a0' + ncurses: '>=6.4,<7.0a0' + openssl: '>=3.1.4,<4.0a0' + readline: '>=8.2,<9.0a0' + tk: '>=8.6.13,<8.7.0a0' + tzdata: '' + xz: '>=5.2.6,<6.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.13-hd12c33a_0_cpython.conda + hash: + md5: f3a8c32aa764c3e7188b4b810fc9d6ce + sha256: a53410f459f314537b379982717b1c5911efc2f0cc26d63c4d6f831bcb31c964 + category: main + optional: false +- name: python-dateutil + version: 2.8.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + six: '>=1.5' + url: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2 + hash: + md5: dd999d1cc9f79e67dbb855c8924c7984 + sha256: 54d7785c7678166aa45adeaccfc1d2b8c3c799ca2dc05d4a82bb39b1968bd7da + category: main + optional: false +- name: 
python-dotenv + version: 1.0.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.0.0-pyhd8ed1ab_1.conda + hash: + md5: 111e7f9edd31865e2659fa9aad8ec8fd + sha256: bc5663f224ff6d8a399ec6bd8517e0c0f87a69ead438f82e5ce5c30f00077586 + category: workflows + optional: true +- name: python-fastjsonschema + version: 2.19.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.3' + url: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.19.0-pyhd8ed1ab_0.conda + hash: + md5: e4dbdb3585c0266b4710467fe7b75cf4 + sha256: fdfe3f387c5ebde803605e1e90871c424519d2bfe2eb3bf9caad1c5a07f4c462 + category: main + optional: false +- name: python-json-logger + version: 2.0.7 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda + hash: + md5: a61bf9ec79426938ff785eb69dbb1960 + sha256: 4790787fe1f4e8da616edca4acf6a4f8ed4e7c6967aa31b920208fc8f95efcca + category: main + optional: false +- name: python-tzdata + version: '2023.3' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda + hash: + md5: 2590495f608a63625e165915fb4e2e34 + sha256: 0108888507014fb24573c31e4deceb61c99e63d37776dddcadd7c89b2ecae0b6 + category: main + optional: false +- name: python_abi + version: '3.10' + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-4_cp310.conda + hash: + md5: 26322ec5d7712c3ded99dd656142b8ce + sha256: 456bec815bfc2b364763084d08b412fdc4c17eb9ccc66a36cb775fa7ac3cbaec + category: main + optional: false +- name: pytorch + version: 2.1.0 + manager: conda + platform: linux-64 + dependencies: + blas: '*' + filelock: '' + jinja2: '' + llvm-openmp: <16 + mkl: '>=2018' + networkx: '' + python: '>=3.10,<3.11.0a0' + pytorch-cuda: '>=12.1,<12.2' + pytorch-mutex: '1.0' + pyyaml: '' + sympy: '' + torchtriton: 2.1.0 + typing_extensions: '' + url: https://conda.anaconda.org/pytorch/linux-64/pytorch-2.1.0-py3.10_cuda12.1_cudnn8.9.2_0.tar.bz2 + hash: + md5: af39dbbb6ab1305e4b504a51acecffd4 + sha256: 72eb8807295c93ed944ae3a7eeda0b72b2168c7ae395f25fa64f3078059fedcf + category: main + optional: false +- name: pytorch-cuda + version: '12.1' + manager: conda + platform: linux-64 + dependencies: + cuda-cudart: '>=12.1,<12.2' + cuda-cupti: '>=12.1,<12.2' + cuda-libraries: '>=12.1,<12.2' + cuda-nvrtc: '>=12.1,<12.2' + cuda-nvtx: '>=12.1,<12.2' + cuda-runtime: '>=12.1,<12.2' + libcublas: '>=12.1.0.26,<12.1.3.1' + libcufft: '>=11.0.2.4,<11.0.2.54' + libcusolver: '>=11.4.4.55,<11.4.5.107' + libcusparse: '>=12.0.2.55,<12.1.0.106' + libnpp: '>=12.0.2.50,<12.1.0.40' + libnvjitlink: '>=12.1.105,<12.2.0' + libnvjpeg: '>=12.1.0.39,<12.2.0.2' + url: https://conda.anaconda.org/pytorch/linux-64/pytorch-cuda-12.1-ha16c6d3_5.tar.bz2 + hash: + md5: ffc0937cf6ba3ffb299b0c256accc53f + sha256: 912c544df4e7abd8510e572bc50cd11c6fa880273858a057fc451563f04d3cf6 + category: main + optional: false +- name: pytorch-mutex + version: '1.0' + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/pytorch/noarch/pytorch-mutex-1.0-cuda.tar.bz2 + hash: + md5: a948316e36fb5b11223b3fcfa93f8358 + sha256: c16316183f51b74ca5eee4dcb8631052f328c0bbf244176734a0b7d390b81ee3 + category: main + optional: false +- name: pytz + version: 
2023.3.post1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda + hash: + md5: c93346b446cd08c169d843ae5fc0da97 + sha256: 6b680e63d69aaf087cd43ca765a23838723ef59b0a328799e6363eb13f52c49e + category: main + optional: false +- name: pywin32-on-windows + version: 0.1.0 + manager: conda + platform: linux-64 + dependencies: + __unix: '' + python: '>=2.7' + url: https://conda.anaconda.org/conda-forge/noarch/pywin32-on-windows-0.1.0-pyh1179c8e_3.tar.bz2 + hash: + md5: 2807a0becd1d986fe1ef9b7f8135f215 + sha256: 6502696aaef571913b22a808b15c185bd8ea4aabb952685deb29e6a6765761cb + category: workflows + optional: true +- name: pyyaml + version: 6.0.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + yaml: '>=0.2.5,<0.3.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py310h2372a71_1.conda + hash: + md5: bb010e368de4940771368bc3dc4c63e7 + sha256: aa78ccddb0a75fa722f0f0eb3537c73ee1219c9dd46cea99d6b9eebfdd780f3d + category: main + optional: false +- name: pyzmq + version: 25.1.2 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libsodium: '>=1.0.18,<1.0.19.0a0' + libstdcxx-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + zeromq: '>=4.3.5,<4.4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-25.1.2-py310h795f18f_0.conda + hash: + md5: fa09f98f3acfd3f5de30bd2d27d5cb7f + sha256: 6ce93fd1e847ce02c2bbfa6022b639b21d4229d61b21ce0ecacb22c380e5680e + category: main + optional: false +- name: readline + version: '8.2' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + ncurses: '>=6.3,<7.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda + hash: + md5: 47d31b792659ce70f470b5c82fdfb7a4 + sha256: 5435cf39d039387fbdc977b0a762357ea909a7694d9528ab40f005e9208744d7 + category: main + optional: false +- name: referencing + version: 0.32.0 + manager: conda + platform: linux-64 + dependencies: + attrs: '>=22.2.0' + python: '>=3.8' + rpds-py: '>=0.7.0' + url: https://conda.anaconda.org/conda-forge/noarch/referencing-0.32.0-pyhd8ed1ab_0.conda + hash: + md5: a7b5a535cd614e384594530aee7e6061 + sha256: dfd40282910a45e58882ed94b502b2a09f475efb04eaaa3bd8b3b5a9b21a19c3 + category: main + optional: false +- name: requests + version: 2.31.0 + manager: conda + platform: linux-64 + dependencies: + certifi: '>=2017.4.17' + charset-normalizer: '>=2,<4' + idna: '>=2.5,<4' + python: '>=3.7' + urllib3: '>=1.21.1,<3' + url: https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda + hash: + md5: a30144e4156cdbb236f99ebb49828f8b + sha256: 9f629d6fd3c8ac5f2a198639fe7af87c4db2ac9235279164bfe0fcb49d8c4bad + category: main + optional: false +- name: rfc3339-validator + version: 0.1.4 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.5' + six: '' + url: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 + hash: + md5: fed45fc5ea0813240707998abe49f520 + sha256: 7c7052b51de0b5c558f890bb11f8b5edbb9934a653d76be086b1182b9f54185d + category: main + optional: false +- name: rfc3986-validator + version: 0.1.1 + manager: conda + platform: linux-64 + dependencies: + python: '' + url: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 + hash: + md5: 912a71cc01012ee38e6b90ddd561e36f + sha256: 
2a5b495a1de0f60f24d8a74578ebc23b24aa53279b1ad583755f223097c41c37 + category: main + optional: false +- name: rich + version: 13.6.0 + manager: conda + platform: linux-64 + dependencies: + markdown-it-py: '>=2.2.0' + pygments: '>=2.13.0,<3.0.0' + python: '>=3.7.0' + typing_extensions: '>=4.0.0,<5.0.0' + url: https://conda.anaconda.org/conda-forge/noarch/rich-13.6.0-pyhd8ed1ab_0.conda + hash: + md5: 3ca4829f40710f581ca1d76bc907e99f + sha256: a2f8838a75ab8c2c1da0a813c7569d4f6efba0d2b5dc3a7659e2cb6d96bd8e19 + category: main + optional: false +- name: rpds-py + version: 0.13.2 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.13.2-py310hcb5633a_0.conda + hash: + md5: 99b553d12fe320478b5b094cc1558f5a + sha256: af3b44e6e9ef8b18292c4854cf07edce6051efce06b75367bf0fc0599ba2cdf7 + category: main + optional: false +- name: scikit-learn + version: 1.3.2 + manager: conda + platform: linux-64 + dependencies: + _openmp_mutex: '>=4.5' + joblib: '>=1.1.1' + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + numpy: '>=1.22.4,<2.0a0' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + scipy: '' + threadpoolctl: '>=2.0.0' + url: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.3.2-py310h1fdf081_2.conda + hash: + md5: 38f32271177dbc006e3f2bb340e58f1e + sha256: 01f56f35f5f1bc837dc77f760c813f81c76cb7d8eadd3369a3570d4ad43b1acc + category: main + optional: false +- name: scipy + version: 1.10.1 + manager: conda + platform: linux-64 + dependencies: + libblas: '>=3.9.0,<4.0a0' + libcblas: '>=3.9.0,<4.0a0' + libgcc-ng: '>=12' + libgfortran-ng: '' + libgfortran5: '>=12.2.0' + liblapack: '>=3.9.0,<4.0a0' + libstdcxx-ng: '>=12' + numpy: '>=1.21.6,<2.0a0' + pooch: '' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.1-py310ha4c1d20_3.conda + hash: + md5: 0414d57832172f3cdcf56b5f053e177d + sha256: c7beb091db82a1be2fa9dafb878695b1e8bd6d7efe7764afa457cabfea2a93d3 + category: main + optional: false +- name: screed + version: 1.1.3 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + setuptools: '' + url: https://conda.anaconda.org/conda-forge/noarch/screed-1.1.3-pyhd8ed1ab_0.conda + hash: + md5: c82df785d03d9fe27b316279d333eaec + sha256: b727ded3d0a45c600177db915bf27c7d2fa814d97abb947d6a81fc2e9bce04a5 + category: bioinformatics + optional: true +- name: seaborn-base + version: 0.13.0 + manager: conda + platform: linux-64 + dependencies: + matplotlib-base: '>=3.3,!=3.6.1' + numpy: '>=1.20,!=1.24.0' + pandas: '>=1.2' + python: '>=3.8' + scipy: '>=1.3' + url: https://conda.anaconda.org/conda-forge/noarch/seaborn-base-0.13.0-pyhd8ed1ab_0.conda + hash: + md5: 082666331726b2438986cfe33ae9a8ee + sha256: e121a15200a420ceac466b08eda87c9c4b9668ed34a421f5d5c8baeefe6b6210 + category: main + optional: false +- name: send2trash + version: 1.8.2 + manager: conda + platform: linux-64 + dependencies: + __linux: '' + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.2-pyh41d4057_0.conda + hash: + md5: ada5a17adcd10be4fc7e37e4166ba0e2 + sha256: e74d3faf51a6cc429898da0209d95b209270160f3edbf2f6d8b61a99428301cd + category: main + optional: false +- name: setuptools + version: 68.2.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda + hash: + md5: 
fc2166155db840c634a1291a5c35a709 + sha256: 851901b1f8f2049edb36a675f0c3f9a98e1495ef4eb214761b048c6f696a06f7 + category: main + optional: false +- name: six + version: 1.16.0 + manager: conda + platform: linux-64 + dependencies: + python: '' + url: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 + hash: + md5: e5f25f8dbc060e9a8d912e432202afc2 + sha256: a85c38227b446f42c5b90d9b642f2c0567880c15d72492d8da074a59c8f91dd6 + category: main + optional: false +- name: sniffio + version: 1.3.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.0-pyhd8ed1ab_0.tar.bz2 + hash: + md5: dd6cbc539e74cb1f430efbd4575b9303 + sha256: a3fd30754c20ddb28b777db38345ea00d958f46701f0decd6291a81c0f4eee78 + category: main + optional: false +- name: soupsieve + version: '2.5' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda + hash: + md5: 3f144b2c34f8cb5a9abd9ed23a39c561 + sha256: 54ae221033db8fbcd4998ccb07f3c3828b4d77e73b0c72b18c1d6a507059059c + category: main + optional: false +- name: sourmash-minimal + version: 4.8.4 + manager: conda + platform: linux-64 + dependencies: + bitstring: '>=3.1.9,<4' + cachetools: '>=4.2.1,<5' + cffi: '>=1.14' + deprecation: '>=2.0.6' + libgcc-ng: '>=12' + matplotlib-base: '' + numpy: '' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + scipy: '' + screed: '>=1.1.2' + url: https://conda.anaconda.org/conda-forge/linux-64/sourmash-minimal-4.8.4-py310hcb5633a_1.conda + hash: + md5: a1615dd2afdb6b973d48b2656ee57878 + sha256: 25bd7b0fd386e547c2ba92a75916cc4fdda1bcbf0f292796f8cda33d8a82f904 + category: bioinformatics + optional: true +- name: stack_data + version: 0.6.2 + manager: conda + platform: linux-64 + dependencies: + asttokens: '' + executing: '' + pure_eval: '' + python: '>=3.5' + url: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda + hash: + md5: e7df0fdd404616638df5ece6e69ba7af + sha256: a58433e75229bec39f3be50c02efbe9b7083e53a1f31d8ee247564f370191eec + category: main + optional: false +- name: sympy + version: '1.12' + manager: conda + platform: linux-64 + dependencies: + __unix: '' + gmpy2: '>=2.0.8' + mpmath: '>=0.19' + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/sympy-1.12-pypyh9d50eac_103.conda + hash: + md5: 2f7d6347d7acf6edf1ac7f2189f44c8f + sha256: 0025dd4e6411423903bf478d1b9fbff0cbbbe546f51c9375dfd6729ef2e1a1ac + category: main + optional: false +- name: tbb + version: 2021.11.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libhwloc: '>=2.9.3,<2.9.4.0a0' + libstdcxx-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.11.0-h00ab1b0_0.conda + hash: + md5: fde515afbbe6e36eb4564965c20b1058 + sha256: 05f2282cb204eeb62dbc698e14475f0bbb7f1eb07081d7f5da37d61ad3c4acb3 + category: main + optional: false +- name: terminado + version: 0.18.0 + manager: conda + platform: linux-64 + dependencies: + __linux: '' + ptyprocess: '' + python: '>=3.8' + tornado: '>=6.1.0' + url: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.0-pyh0d859eb_0.conda + hash: + md5: e463f348b8b0eb62c9f7c6fbc780286c + sha256: e90139ef15ea9d75a69cd6b6302c29ed5b01c03ddfa717b71acb32b60af74269 + category: main + optional: false +- name: threadpoolctl + version: 3.2.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: 
https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.2.0-pyha21a80b_0.conda + hash: + md5: 978d03388b62173b8e6f79162cf52b86 + sha256: 15e2f916fbfe3cc480160aa99eb6ba3edc183fceb234f10151d63870fdc4eccd + category: workflows + optional: true +- name: tinycss2 + version: 1.2.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.5' + webencodings: '>=0.4' + url: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.2.1-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 7234c9eefff659501cd2fe0d2ede4d48 + sha256: f0db1a2298a5e10e30f4b947566c7229442834702f549dded40a73ecdea7502d + category: main + optional: false +- name: tk + version: 8.6.13 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libzlib: '>=1.2.13,<1.3.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda + hash: + md5: d453b98d9c83e71da0741bb0ff4d76bc + sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e + category: main + optional: false +- name: tomli + version: 2.0.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 5844808ffab9ebdb694585b50ba02a96 + sha256: 4cd48aba7cd026d17e86886af48d0d2ebc67ed36f87f6534f4b67138f5a5a58f + category: main + optional: false +- name: torchtriton + version: 2.1.0 + manager: conda + platform: linux-64 + dependencies: + filelock: '' + python: '>=3.10,<3.11.0a0' + pytorch: '' + url: https://conda.anaconda.org/pytorch/linux-64/torchtriton-2.1.0-py310.tar.bz2 + hash: + md5: 5ef34d1428f3ea5dbe520c67d74048a5 + sha256: 9a98f884ba4a34c9fac59127e5f7930441ecdcc8628e135fe8d0f480540d62b6 + category: main + optional: false +- name: torchvision + version: 0.16.0 + manager: conda + platform: linux-64 + dependencies: + ffmpeg: '>=4.2' + libjpeg-turbo: '' + libpng: '' + numpy: '>=1.11' + pillow: '>=5.3.0,!=8.3.*' + python: '>=3.10,<3.11.0a0' + pytorch: 2.1.0 + pytorch-cuda: 12.1.* + pytorch-mutex: '1.0' + requests: '' + url: https://conda.anaconda.org/pytorch/linux-64/torchvision-0.16.0-py310_cu121.tar.bz2 + hash: + md5: 6d87395e8d85698147a4a85aebf57b24 + sha256: e133360a49738845b4939a317ce7d248c8e318d3be6d6698f4ed5a2ac986792a + category: main + optional: false +- name: tornado + version: 6.3.3 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_1.conda + hash: + md5: b23e0147fa5f7a9380e06334c7266ad5 + sha256: 209b6788b81739d3cdc2f04ad3f6f323efd85b1a30f2edce98ab76d98079fac8 + category: main + optional: false +- name: traitlets + version: 5.14.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.0-pyhd8ed1ab_0.conda + hash: + md5: 886f4a84ddb49b943b1697ac314e85b3 + sha256: c32412029033264140926be474d327d7fd57c0d11db9b1745396b3d4db78a799 + category: main + optional: false +- name: types-python-dateutil + version: 2.8.19.14 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.8.19.14-pyhd8ed1ab_0.conda + hash: + md5: 4df15c51a543e806d439490b862be1c6 + sha256: 7b0129c72d371fa7a06ed5dd1d701844c20d03bb4641a38a88a982b347d087e2 + category: main + optional: false +- name: typing-extensions + version: 4.9.0 + manager: conda + platform: linux-64 + dependencies: + 
typing_extensions: 4.9.0 + url: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.9.0-hd8ed1ab_0.conda + hash: + md5: c16524c1b7227dc80b36b4fa6f77cc86 + sha256: d795c1eb1db4ea147f01ece74e5a504d7c2e8d5ee8c11ec987884967dd938f9c + category: main + optional: false +- name: typing_extensions + version: 4.9.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.9.0-pyha770c72_0.conda + hash: + md5: a92a6440c3fe7052d63244f3aba2a4a7 + sha256: f3c5be8673bfd905c4665efcb27fa50192f24f84fa8eff2f19cba5d09753d905 + category: main + optional: false +- name: typing_utils + version: 0.1.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6.1' + url: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 + hash: + md5: eb67e3cace64c66233e2d35949e20f92 + sha256: 9e3758b620397f56fb709f796969de436d63b7117897159619b87938e1f78739 + category: main + optional: false +- name: tzdata + version: 2023c + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda + hash: + md5: 939e3e74d8be4dac89ce83b20de2492a + sha256: 0449138224adfa125b220154408419ec37c06b0b49f63c5954724325903ecf55 + category: main + optional: false +- name: unicodedata2 + version: 15.1.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + python: '>=3.10,<3.11.0a0' + python_abi: 3.10.* + url: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310h2372a71_0.conda + hash: + md5: 72637c58d36d9475fda24700c9796f19 + sha256: 5ab2f2d4542ba0cc27d222c08ae61706babe7173b0c6dfa748aa37ff2fa9d824 + category: main + optional: false +- name: uri-template + version: 1.3.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda + hash: + md5: 0944dc65cb4a9b5b68522c3bb585d41c + sha256: b76904b53721dc88a46352324c79d2b077c2f74a9f7208ad2c4249892669ae94 + category: main + optional: false +- name: urllib3 + version: 2.1.0 + manager: conda + platform: linux-64 + dependencies: + brotli-python: '>=1.0.9' + pysocks: '>=1.5.6,<2.0,!=1.5.7' + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda + hash: + md5: f8ced8ee63830dec7ecc1be048d1470a + sha256: eff5029820b4eaeab3a291a39854a6cd8fc8c4216264087f68c2d8d59822c869 + category: main + optional: false +- name: wcwidth + version: 0.2.12 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.12-pyhd8ed1ab_0.conda + hash: + md5: bf4a1d1a97ca27b0b65bacd9e238b484 + sha256: ca757d0fc2dbd422af9d3238a8b4b630a6e11df3707a447bd89540656770d1d7 + category: main + optional: false +- name: webcolors + version: '1.13' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.5' + url: https://conda.anaconda.org/conda-forge/noarch/webcolors-1.13-pyhd8ed1ab_0.conda + hash: + md5: 166212fe82dad8735550030488a01d03 + sha256: 6e097d5fe92849ad3af2c2a313771ad2fbf1cadd4dc4afd552303b2bf3f85211 + category: main + optional: false +- name: webencodings + version: 0.5.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=2.6' + url: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda + hash: + md5: daf5160ff9cde3a468556965329085b9 + sha256: 
2adf9bd5482802837bc8814cbe28d7b2a4cbd2e2c52e381329eaa283b3ed1944 + category: main + optional: false +- name: websocket-client + version: 1.7.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.7.0-pyhd8ed1ab_0.conda + hash: + md5: 50ad31e07d706aae88b14a4ac9c73f23 + sha256: d9b537d5b7c5aa7a02a4ce4c6b755e458bd8083b67752a73c92d113ccec6c10f + category: main + optional: false +- name: wheel + version: 0.42.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda + hash: + md5: 1cdea58981c5cbc17b51973bcaddcea7 + sha256: 80be0ccc815ce22f80c141013302839b0ed938a2edb50b846cf48d8a8c1cfa01 + category: main + optional: false +- name: xorg-libxau + version: 1.0.11 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda + hash: + md5: 2c80dc38fface310c9bd81b17037fee5 + sha256: 309751371d525ce50af7c87811b435c176915239fc9e132b99a25d5e1703f2d4 + category: main + optional: false +- name: xorg-libxdmcp + version: 1.1.3 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=9.3.0' + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2 + hash: + md5: be93aabceefa2fac576e971aef407908 + sha256: 4df7c5ee11b8686d3453e7f3f4aa20ceef441262b49860733066c52cfd0e4a77 + category: main + optional: false +- name: xz + version: 5.2.6 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 + hash: + md5: 2161070d867d1b1204ea749c8eec4ef0 + sha256: 03a6d28ded42af8a347345f82f3eebdd6807a08526d47899a42d62d319609162 + category: main + optional: false +- name: yaml + version: 0.2.5 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=9.4.0' + url: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 + hash: + md5: 4cb3ad778ec2d5a7acbdf254eb1c42ae + sha256: a4e34c710eeb26945bdbdaba82d3d74f60a78f54a874ec10d373811a5d217535 + category: main + optional: false +- name: zeromq + version: 4.3.5 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libsodium: '>=1.0.18,<1.0.19.0a0' + libstdcxx-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h59595ed_0.conda + hash: + md5: 8851084c192dbc56215ac4e3c9aa30fa + sha256: 53bf2a18224406e9806adb3b270a2c8a028aca0c89bd40114a85d6446f5c98d1 + category: main + optional: false +- name: zipp + version: 3.17.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda + hash: + md5: 2e4d6bc0b14e10f895fc6791a7d9b26a + sha256: bced1423fdbf77bca0a735187d05d9b9812d2163f60ab426fc10f11f92ecbe26 + category: main + optional: false +- name: zlib + version: 1.2.13 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libzlib: 1.2.13 + url: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda + hash: + md5: 68c34ec6149623be41a1933ab996a209 + sha256: 9887a04d7e7cb14bd2b52fa01858f05a6d7f002c890f618d9fcd864adbfecb1b + category: main + optional: false +- name: zstd + version: 1.5.5 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + libzlib: '>=1.2.13,<1.3.0a0' + url: 
https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda + hash: + md5: 04b88013080254850d6c01ed54810589 + sha256: 607cbeb1a533be98ba96cf5cdf0ddbb101c78019f1fda063261871dad6248609 + category: main + optional: false +- name: adlfs + version: 2023.10.0 + manager: pip + platform: linux-64 + dependencies: + azure-core: '>=1.23.1,<2.0.0' + azure-datalake-store: '>=0.0.46,<0.1' + azure-identity: '*' + azure-storage-blob: '>=12.12.0' + fsspec: '>=2023.9.0' + aiohttp: '>=3.7.0' + url: https://files.pythonhosted.org/packages/b3/17/4c4a670b25d9b1ca80f72f97c72fd0759a1e5f6cbd5e91fe9ff4c28f6c96/adlfs-2023.10.0-py3-none-any.whl + hash: + sha256: dfdc8cc782bd78262435fb1bc2a8cfdbdd80342bb1b1ae9dfff968de912b0b09 + category: main + optional: false +- name: aiobotocore + version: 2.5.4 + manager: pip + platform: linux-64 + dependencies: + botocore: '>=1.31.17,<1.31.18' + aiohttp: '>=3.3.1,<4.0.0' + wrapt: '>=1.10.10,<2.0.0' + aioitertools: '>=0.5.1,<1.0.0' + url: https://files.pythonhosted.org/packages/20/00/01780c5fa93e3feb6d776ac8c7bd05dbe9290165636c13edcbdde6853537/aiobotocore-2.5.4-py3-none-any.whl + hash: + sha256: 4b32218728ca3d0be83835b604603a0cd6c329066e884bb78149334267f92440 + category: main + optional: false +- name: aiohttp + version: 3.9.1 + manager: pip + platform: linux-64 + dependencies: + attrs: '>=17.3.0' + multidict: '>=4.5,<7.0' + yarl: '>=1.0,<2.0' + frozenlist: '>=1.1.1' + aiosignal: '>=1.1.2' + async-timeout: '>=4.0,<5.0' + url: https://files.pythonhosted.org/packages/2f/16/50441c4baa39e5426181c6f630203ab65029f9a9c55d0a1019a31c26d702/aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f + category: main + optional: false +- name: aioitertools + version: 0.11.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/45/66/d1a9fd8e6ff88f2157cb145dd054defb0fd7fe2507fe5a01347e7c690eab/aioitertools-0.11.0-py3-none-any.whl + hash: + sha256: 04b95e3dab25b449def24d7df809411c10e62aab0cbe31a50ca4e68748c43394 + category: main + optional: false +- name: aiosignal + version: 1.3.1 + manager: pip + platform: linux-64 + dependencies: + frozenlist: '>=1.1.0' + url: https://files.pythonhosted.org/packages/76/ac/a7305707cb852b7e16ff80eaf5692309bde30e2b1100a1fcacdc8f731d97/aiosignal-1.3.1-py3-none-any.whl + hash: + sha256: f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17 + category: main + optional: false +- name: antlr4-python3-runtime + version: 4.9.3 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/3e/38/7859ff46355f76f8d19459005ca000b6e7012f2f1ca597746cbcd1fbfe5e/antlr4-python3-runtime-4.9.3.tar.gz + hash: + sha256: f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b + category: main + optional: false +- name: appdirs + version: 1.4.4 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl + hash: + sha256: a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 + category: main + optional: false +- name: async-timeout + version: 4.0.3 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/a7/fa/e01228c2938de91d47b307831c62ab9e4001e747789d0b05baf779a6488c/async_timeout-4.0.3-py3-none-any.whl + hash: + sha256: 
7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028 + category: main + optional: false +- name: azure-core + version: 1.29.5 + manager: pip + platform: linux-64 + dependencies: + requests: '>=2.18.4' + six: '>=1.11.0' + typing-extensions: '>=4.6.0' + url: https://files.pythonhosted.org/packages/9c/f8/1cf23a75cb8c2755c539ac967f3a7f607887c4979d073808134803720f0f/azure_core-1.29.5-py3-none-any.whl + hash: + sha256: 0fa04b7b1f7d44a4fb8468c4093deb2ea01fdf4faddbf802ed9205615f99d68c + category: main + optional: false +- name: azure-datalake-store + version: 0.0.53 + manager: pip + platform: linux-64 + dependencies: + cffi: '*' + msal: '>=1.16.0,<2' + requests: '>=2.20.0' + url: https://files.pythonhosted.org/packages/88/2a/75f56b14f115189155cf12e46b366ad1fe3357af5a1a7c09f7446662d617/azure_datalake_store-0.0.53-py2.py3-none-any.whl + hash: + sha256: a30c902a6e360aa47d7f69f086b426729784e71c536f330b691647a51dc42b2b + category: main + optional: false +- name: azure-identity + version: 1.15.0 + manager: pip + platform: linux-64 + dependencies: + azure-core: '>=1.23.0,<2.0.0' + cryptography: '>=2.5' + msal: '>=1.24.0,<2.0.0' + msal-extensions: '>=0.3.0,<2.0.0' + url: https://files.pythonhosted.org/packages/30/10/5dbf755b368d10a28d55b06ac1f12512a13e88874a23db82defdea9a8cd9/azure_identity-1.15.0-py3-none-any.whl + hash: + sha256: a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912 + category: main + optional: false +- name: azure-storage-blob + version: 12.19.0 + manager: pip + platform: linux-64 + dependencies: + azure-core: '>=1.28.0,<2.0.0' + cryptography: '>=2.1.4' + typing-extensions: '>=4.3.0' + isodate: '>=0.6.1' + url: https://files.pythonhosted.org/packages/f6/82/24b0d7cf67ea63af86f11092756b8fe2adc1d55323241dc4107f5f5748e2/azure_storage_blob-12.19.0-py3-none-any.whl + hash: + sha256: 7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b + category: main + optional: false +- name: binaryornot + version: 0.4.4 + manager: pip + platform: linux-64 + dependencies: + chardet: '>=3.0.2' + url: https://files.pythonhosted.org/packages/24/7e/f7b6f453e6481d1e233540262ccbfcf89adcd43606f44a028d7f5fae5eb2/binaryornot-0.4.4-py2.py3-none-any.whl + hash: + sha256: b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4 + category: main + optional: false +- name: biofluff + version: 3.0.4 + manager: pip + platform: linux-64 + dependencies: + pysam: '*' + htseq: '*' + numpy: '*' + scipy: '*' + scikit-learn: '*' + matplotlib: '*' + palettable: '*' + pybedtools: '*' + pybigwig: '*' + url: https://files.pythonhosted.org/packages/72/a4/be0044b5fae8f93f1ab15e790e5b6ec5fa19e874a8dc6201787a2fb5a38c/biofluff-3.0.4.tar.gz + hash: + sha256: ef7b0a54103a830f197f21aa3d1ade8bdcddf613b437ea38c95260bb45324d6b + category: main + optional: false +- name: biopython + version: '1.81' + manager: pip + platform: linux-64 + dependencies: + numpy: '*' + url: https://files.pythonhosted.org/packages/65/51/05d02d400ee0b45fc7f01ab71d9db0e06cfa8af891fd88557d3e489fd97e/biopython-1.81-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 6ebfbce0d91796c7aef422ee9dffe8827e07e5abaa94545e006f1f20e965c80b + category: bioinformatics + optional: true +- name: biothings-client + version: 0.3.1 + manager: pip + platform: linux-64 + dependencies: + requests: '>=2.3.0' + url: https://files.pythonhosted.org/packages/c2/21/6c4bdb8ba8d2cdeb5ac3a6460ab1cbd841e46cd851d6b00028b327c5deb3/biothings_client-0.3.1-py2.py3-none-any.whl + hash: + sha256: 
c08437f652d9282da785e098288ef7cf3aa2a79f5d90c480eadfce96b846013e + category: main + optional: false +- name: botocore + version: 1.31.17 + manager: pip + platform: linux-64 + dependencies: + jmespath: '>=0.7.1,<2.0.0' + python-dateutil: '>=2.1,<3.0.0' + urllib3: '>=1.25.4,<1.27' + url: https://files.pythonhosted.org/packages/3d/e5/32a88f5a95e3d43c2e3ed86fc1ffdb715547a04f95a51d00e1185af63b0c/botocore-1.31.17-py3-none-any.whl + hash: + sha256: 6ac34a1d34aa3750e78b77b8596617e2bab938964694d651939dba2cbde2c12b + category: main + optional: false +- name: cachetools + version: 5.3.2 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/a2/91/2d843adb9fbd911e0da45fbf6f18ca89d07a087c3daa23e955584f90ebf4/cachetools-5.3.2-py3-none-any.whl + hash: + sha256: 861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 + category: main + optional: false +- name: chardet + version: 5.2.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl + hash: + sha256: e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970 + category: main + optional: false +- name: cloudpickle + version: 3.0.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl + hash: + sha256: 246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7 + category: main + optional: false +- name: colorama + version: 0.4.6 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + hash: + sha256: 4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + category: bioinformatics + optional: true +- name: configparser + version: 6.0.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/81/a3/0e5ed11da4b7770c15f6f319abf053f46b5a06c7d4273c48469b7899bd89/configparser-6.0.0-py3-none-any.whl + hash: + sha256: 900ea2bb01b2540b1a644ad3d5351e9b961a4a012d4732f619375fb8f641ee19 + category: main + optional: false +- name: cookiecutter + version: 2.5.0 + manager: pip + platform: linux-64 + dependencies: + binaryornot: '>=0.4.4' + jinja2: '>=2.7,<4.0.0' + click: '>=7.0,<9.0.0' + pyyaml: '>=5.3.1' + python-slugify: '>=4.0.0' + requests: '>=2.23.0' + arrow: '*' + rich: '*' + url: https://files.pythonhosted.org/packages/d1/32/91af80d3ecbe0113ae26cdaa623a3def2319a02c1eafe91733ac45133afd/cookiecutter-2.5.0-py3-none-any.whl + hash: + sha256: 8aa2f12ed11bc05628651e9dc4353a10571dd9908aaaaeec959a2b9ea465a5d2 + category: main + optional: false +- name: croniter + version: 2.0.1 + manager: pip + platform: linux-64 + dependencies: + python-dateutil: '*' + pytz: '>2021.1' + url: https://files.pythonhosted.org/packages/27/86/c1ce37af79385a106aacdd40b6e25ff25b2d888f2e64a05ee4e5b05f5768/croniter-2.0.1-py2.py3-none-any.whl + hash: + sha256: 4cb064ce2d8f695b3b078be36ff50115cf8ac306c10a7e8653ee2a5b534673d7 + category: main + optional: false +- name: dataclasses-json + version: 0.5.9 + manager: pip + platform: linux-64 + dependencies: + marshmallow: '>=3.3.0,<4.0.0' + marshmallow-enum: '>=1.5.1,<2.0.0' + typing-inspect: '>=0.4.0' + url: 
https://files.pythonhosted.org/packages/eb/04/2851f9fe4b01b5b752c16e41d581f6b9d0ca82e388d7bd58357d758fc6ce/dataclasses_json-0.5.9-py3-none-any.whl + hash: + sha256: 1280542631df1c375b7bc92e5b86d39e06c44760d7e3571a537b3b8acabf2f0c + category: workflows + optional: true +- name: diskcache + version: 5.6.3 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl + hash: + sha256: 5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19 + category: main + optional: false +- name: docker + version: 6.1.3 + manager: pip + platform: linux-64 + dependencies: + packaging: '>=14.0' + requests: '>=2.26.0' + urllib3: '>=1.26.0' + websocket-client: '>=0.32.0' + url: https://files.pythonhosted.org/packages/db/be/3032490fa33b36ddc8c4b1da3252c6f974e7133f1a50de00c6b85cca203a/docker-6.1.3-py3-none-any.whl + hash: + sha256: aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9 + category: main + optional: false +- name: docker-pycreds + version: 0.4.0 + manager: pip + platform: linux-64 + dependencies: + six: '>=1.4.0' + url: https://files.pythonhosted.org/packages/f5/e8/f6bd1eee09314e7e6dee49cbe2c5e22314ccdb38db16c9fc72d2fa80d054/docker_pycreds-0.4.0-py2.py3-none-any.whl + hash: + sha256: 7266112468627868005106ec19cd0d722702d2b7d5912a28e19b826c3d37af49 + category: main + optional: false +- name: docstring-parser + version: '0.15' + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/89/e3/32e272db7adcf90e93f73e9a98fd763049ed7c641fb57ab26cb8f3e7e79c/docstring_parser-0.15-py3-none-any.whl + hash: + sha256: d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9 + category: main + optional: false +- name: feather-format + version: 0.4.1 + manager: pip + platform: linux-64 + dependencies: + pyarrow: '>=0.4.0' + url: https://files.pythonhosted.org/packages/67/e8/ee99f142f19d35588501943510f8217f9dd77184574b0c933c53218e0f19/feather-format-0.4.1.tar.gz + hash: + sha256: 45f67e3745d394d4f160ca6d636bbfd4f8b68d01199dc1649b6e487d3e878903 + category: main + optional: false +- name: flyteidl + version: 1.10.0 + manager: pip + platform: linux-64 + dependencies: + googleapis-common-protos: '*' + protoc-gen-swagger: '*' + protobuf: '>=4.21.1,<5.0.0' + url: https://files.pythonhosted.org/packages/11/e0/04ad067ad1c19223016612f27bacb1d6d1fc309b2cd3a83823d7d47a5e5f/flyteidl-1.10.0-py3-none-any.whl + hash: + sha256: 21d9fcc21217e95fd940964e704cb11e74ce707d625dd954a54e5e7c39143db1 + category: main + optional: false +- name: flytekit + version: 0.0.0+develop + manager: pip + platform: linux-64 + dependencies: + adlfs: '*' + click: '>=6.6,<9.0' + cloudpickle: '>=2.0.0' + cookiecutter: '>=1.7.3' + croniter: '>=0.3.20,<4.0.0' + dataclasses-json: '>=0.5.2,<0.5.12' + diskcache: '>=5.2.1' + docker: '>=4.0.0,<7.0.0' + docstring-parser: '>=0.9.0' + flyteidl: '>=1.10.0' + fsspec: '>=2023.3.0,<=2023.9.2' + gcsfs: '*' + googleapis-common-protos: '>=1.57' + grpcio: '*' + grpcio-status: '*' + importlib-metadata: '*' + joblib: '*' + jsonpickle: '*' + keyring: '>=18.0.1' + kubernetes: '>=12.0.1' + marshmallow-enum: '*' + marshmallow-jsonschema: '>=0.12.0' + mashumaro: '>=3.9.1' + numpy: '*' + pandas: '>=1.0.0,<2.0.0' + protobuf: <4.25.0 + pyarrow: '>=4.0.0' + python-json-logger: '>=2.0.0' + pytimeparse: '>=1.1.8,<2.0.0' + pyyaml: <5.4.0 || >5.4.0,<5.4.1 || >5.4.1,<6.0.0 || >6.0.0 + requests: '>=2.18.4,<3.0.0' + rich: '*' + 
rich-click: '*' + s3fs: '>=0.6.0' + statsd: '>=3.0.0,<4.0.0' + typing-extensions: '*' + urllib3: '>=1.22,<2.0.0' + url: git+https://github.com/cameronraysmith/flytekit@9c8481ec30ad13707c1ebe8e13c9cb6962e75276 + hash: + sha256: 9c8481ec30ad13707c1ebe8e13c9cb6962e75276 + category: main + source: + type: url + url: git+https://github.com/cameronraysmith/flytekit@9c8481ec30ad13707c1ebe8e13c9cb6962e75276 + optional: false +- name: frozenlist + version: 1.4.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/1e/28/74b8b6451c89c070d34e753d8b65a1e4ce508a6808b18529f36e8c0e2184/frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300 + category: main + optional: false +- name: gcsfs + version: 2023.9.2 + manager: pip + platform: linux-64 + dependencies: + aiohttp: <4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1 + decorator: '>4.1.2' + fsspec: 2023.9.2 + google-auth: '>=1.2' + google-auth-oauthlib: '*' + google-cloud-storage: '*' + requests: '*' + url: https://files.pythonhosted.org/packages/5e/36/30df95bf7ea5dc8538276c51f5c4a8a88d3dad397b848084dcc3c2ab31ae/gcsfs-2023.9.2-py2.py3-none-any.whl + hash: + sha256: b3e61d07b0ecf3e04627b0cc0df30ee728bc49e31d42de180815601041e62c1b + category: main + optional: false +- name: genomepy + version: 0.16.1 + manager: pip + platform: linux-64 + dependencies: + appdirs: '*' + biopython: '>=1.73' + click: '*' + colorama: '*' + diskcache: '*' + filelock: '>=3.5' + loguru: '*' + mygene: '*' + mysql-connector-python: '*' + norns: '>=0.1.6' + numpy: '*' + pandas: '*' + pyfaidx: '>=0.7.2.1' + requests: '*' + tqdm: '>=4.51' + url: https://files.pythonhosted.org/packages/ae/35/6643405d6ff16684d5a25391c34ec9d09fc7169c19b081c4e3858d20b93f/genomepy-0.16.1-py3-none-any.whl + hash: + sha256: 820d46bce1503f66aa82e795a9a33e53a89e4d4f3f79b5c105ae452164f47635 + category: bioinformatics + optional: true +- name: gimmemotifs + version: 0+untagged.1965.g7451274 + manager: pip + platform: linux-64 + dependencies: + biofluff: '>=3.0.4' + configparser: '*' + diskcache: '*' + feather-format: '*' + genomepy: '>=0.13.0' + iteround: '*' + jinja2: '*' + logomaker: '*' + loguru: '*' + matplotlib: '>=3.3' + numpy: '>=1.18' + pandas: '>=1.3.0' + pybedtools: '>=0.9.0' + pysam: '>=0.16' + xxhash: '*' + qnorm: '>=0.8.1' + scikit-learn: '>=0.23.2' + scipy: '>=1.5' + seaborn: '>=0.10.1' + statsmodels: '*' + tqdm: '>=4.46.1' + xdg: '*' + url: git+https://github.com/cameronraysmith/gimmemotifs@74512747f54102500a248d56db5b469fc75961bc + hash: + sha256: 74512747f54102500a248d56db5b469fc75961bc + category: main + source: + type: url + url: git+https://github.com/cameronraysmith/gimmemotifs@74512747f54102500a248d56db5b469fc75961bc + optional: false +- name: gitdb + version: 4.0.11 + manager: pip + platform: linux-64 + dependencies: + smmap: '>=3.0.1,<6' + url: https://files.pythonhosted.org/packages/fd/5b/8f0c4a5bb9fd491c277c21eff7ccae71b47d43c4446c9d0c6cff2fe8c2c4/gitdb-4.0.11-py3-none-any.whl + hash: + sha256: 81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4 + category: main + optional: false +- name: gitpython + version: 3.1.40 + manager: pip + platform: linux-64 + dependencies: + gitdb: '>=4.0.1,<5' + url: https://files.pythonhosted.org/packages/8d/c4/82b858fb6483dfb5e338123c154d19c043305b01726a67d89532b8f8f01b/GitPython-3.1.40-py3-none-any.whl + hash: + sha256: 
cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a + category: main + optional: false +- name: google-api-core + version: 2.14.0 + manager: pip + platform: linux-64 + dependencies: + googleapis-common-protos: '>=1.56.2,<2.0.dev0' + protobuf: '>=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 + || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 + || >4.21.5,<5.0.0.dev0' + google-auth: '>=2.14.1,<3.0.dev0' + requests: '>=2.18.0,<3.0.0.dev0' + url: https://files.pythonhosted.org/packages/c4/1e/924dcad4725d2e697888e044edf7a433db84bf9a3e40d3efa38ba859d0ce/google_api_core-2.14.0-py3-none-any.whl + hash: + sha256: de2fb50ed34d47ddbb2bd2dcf680ee8fead46279f4ed6b16de362aca23a18952 + category: main + optional: false +- name: google-auth + version: 2.24.0 + manager: pip + platform: linux-64 + dependencies: + cachetools: '>=2.0.0,<6.0' + pyasn1-modules: '>=0.2.1' + rsa: '>=3.1.4,<5' + url: https://files.pythonhosted.org/packages/ca/7e/2d41727aeba37b84e1ca515fbb2ca0d706c591ca946236466ffe575b2059/google_auth-2.24.0-py2.py3-none-any.whl + hash: + sha256: 9b82d5c8d3479a5391ea0a46d81cca698d328459da31d4a459d4e901a5d927e0 + category: main + optional: false +- name: google-auth-oauthlib + version: 1.1.0 + manager: pip + platform: linux-64 + dependencies: + google-auth: '>=2.15.0' + requests-oauthlib: '>=0.7.0' + url: https://files.pythonhosted.org/packages/ce/33/a907b4b67245647746dde8d61e1643ef5d210c88e090d491efd89eff9f95/google_auth_oauthlib-1.1.0-py2.py3-none-any.whl + hash: + sha256: 089c6e587d36f4803ac7e0720c045c6a8b1fd1790088b8424975b90d0ee61c12 + category: main + optional: false +- name: google-cloud-core + version: 2.3.3 + manager: pip + platform: linux-64 + dependencies: + google-api-core: '>=1.31.6,<2.0.0 || >2.3.0,<3.0.0dev' + google-auth: '>=1.25.0,<3.0dev' + url: https://files.pythonhosted.org/packages/a2/40/02045f776fdb6e44194f34b6375a26ce8a61bd9bd03cd8930ed91cf51a62/google_cloud_core-2.3.3-py2.py3-none-any.whl + hash: + sha256: fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 + category: main + optional: false +- name: google-cloud-storage + version: 2.13.0 + manager: pip + platform: linux-64 + dependencies: + google-auth: '>=2.23.3,<3.0dev' + google-api-core: '>=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev' + google-cloud-core: '>=2.3.0,<3.0dev' + google-resumable-media: '>=2.6.0' + requests: '>=2.18.0,<3.0.0dev' + google-crc32c: '>=1.0,<2.0dev' + url: https://files.pythonhosted.org/packages/04/72/71b1b531cefa1daff8f6a2a70b4d4fa18dd4da851b5486d53578811b0838/google_cloud_storage-2.13.0-py2.py3-none-any.whl + hash: + sha256: ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d + category: main + optional: false +- name: google-crc32c + version: 1.5.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/a9/d0/04f2846f0af1c683eb3b664c9de9543da1e66a791397456a65073b6054a2/google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 + category: main + optional: false +- name: google-resumable-media + version: 2.6.0 + manager: pip + platform: linux-64 + dependencies: + google-crc32c: '>=1.0,<2.0dev' + url: https://files.pythonhosted.org/packages/c7/4f/b8e5e22406e5aeafa46df8799939d5eeee52f18eeed339675167fac6603a/google_resumable_media-2.6.0-py2.py3-none-any.whl + hash: + sha256: fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b + category: main + optional: false 
+- name: googleapis-common-protos + version: 1.61.0 + manager: pip + platform: linux-64 + dependencies: + protobuf: '>=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 + || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0' + url: https://files.pythonhosted.org/packages/21/49/12996dc0238e017504dceea1d121a48bd49fb3f4416f40d59fc3e924b4f3/googleapis_common_protos-1.61.0-py2.py3-none-any.whl + hash: + sha256: 22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 + category: main + optional: false +- name: grpcio + version: 1.59.3 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/d8/d0/0c42b56820f399f9bbcb4441fba1d4e52af3f11fa51c40c553fbd404aa1a/grpcio-1.59.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 60cddafb70f9a2c81ba251b53b4007e07cca7389e704f86266e22c4bffd8bf1d + category: main + optional: false +- name: grpcio-status + version: 1.59.3 + manager: pip + platform: linux-64 + dependencies: + protobuf: '>=4.21.6' + grpcio: '>=1.59.3' + googleapis-common-protos: '>=1.5.5' + url: https://files.pythonhosted.org/packages/0f/31/9f87b4d6a5a03c92bab47d54bf516b7196667441e86550280178714bdb28/grpcio_status-1.59.3-py3-none-any.whl + hash: + sha256: 2fd2eb39ca4e9afb3c874c0878ff75b258db0b7dcc25570fc521f16ae0ab942a + category: main + optional: false +- name: gtfparse + version: 1.3.0 + manager: pip + platform: linux-64 + dependencies: + numpy: '>=1.7' + pandas: '>=0.15' + url: https://files.pythonhosted.org/packages/f5/bb/f97d06b60f32e30b7ba25336f0886c24b13855d7ca8642200e4d70382a45/gtfparse-1.3.0.tar.gz + hash: + sha256: d957f18e5f70413f89a28ef83068c461b6407eb38fd30e99b8da3d69143527b1 + category: bioinformatics + optional: true +- name: htseq + version: 2.0.5 + manager: pip + platform: linux-64 + dependencies: + numpy: '*' + pysam: '*' + url: https://files.pythonhosted.org/packages/5f/a1/f47c2f7f93e2714f7bcd8ad4b5a8073140af0ec7527b0383f2b78fda28f0/HTSeq-2.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: a0441e9f5cc89828c71d9ecb2c3e8e653b0e9ec967c43958103891dafc4d2df0 + category: bioinformatics + optional: true +- name: hydra-core + version: 1.3.2 + manager: pip + platform: linux-64 + dependencies: + omegaconf: '>=2.2,<2.4' + antlr4-python3-runtime: '>=4.9.0,<4.10.0' + packaging: '*' + url: https://files.pythonhosted.org/packages/c6/50/e0edd38dcd63fb26a8547f13d28f7a008bc4a3fd4eb4ff030673f22ad41a/hydra_core-1.3.2-py3-none-any.whl + hash: + sha256: fa0238a9e31df3373b35b0bfb672c34cc92718d21f81311d8996a16de1141d8b + category: workflows + optional: true +- name: hydra-joblib-launcher + version: 1.2.0 + manager: pip + platform: linux-64 + dependencies: + hydra-core: '>=1.1.0.dev7' + joblib: '>=0.14.0' + url: https://files.pythonhosted.org/packages/bd/50/40613fdf65f9227bc0b073c1ee9db2e2298f7b6cd248e8b4825eedc292eb/hydra_joblib_launcher-1.2.0-py3-none-any.whl + hash: + sha256: 57bfd042b015056157297de93e8ec1c6bc75fd39bd3b300e1599db0c5d992eee + category: workflows + optional: true +- name: hydra-zen + version: 0.11.0 + manager: pip + platform: linux-64 + dependencies: + hydra-core: '>=1.2.0' + omegaconf: '>=2.2.1' + typing-extensions: '>=4.1.0,<4.6.0 || >4.6.0' + url: https://files.pythonhosted.org/packages/fd/0a/a8270cf8a00eef614aacc257b60af208f7431c27a093c1f1dee8b475c185/hydra_zen-0.11.0-py3-none-any.whl + hash: + sha256: a99a87ec5ae758f57d43c35ae20e172d849e67c0b916c2f91daa4f90b52dc6e5 + category: workflows + optional: true +- name: isodate 
+ version: 0.6.1 + manager: pip + platform: linux-64 + dependencies: + six: '*' + url: https://files.pythonhosted.org/packages/b6/85/7882d311924cbcfc70b1890780763e36ff0b140c7e51c110fc59a532f087/isodate-0.6.1-py2.py3-none-any.whl + hash: + sha256: 0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 + category: main + optional: false +- name: iteround + version: 1.0.4 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/26/c7/68d920f791cd99919d82dd6db9fc0aca3790dc8d67c69b559a447ca2a914/iteround-1.0.4-py3-none-any.whl + hash: + sha256: 17947dd5479177e6fb186b0a3d5d594b55eedea14dc722c6da7e84bbed45f5b2 + category: main + optional: false +- name: jaraco.classes + version: 3.3.0 + manager: pip + platform: linux-64 + dependencies: + more-itertools: '*' + url: https://files.pythonhosted.org/packages/c7/6b/1bc8fa93ea85146e08f0e0883bc579b7c7328364ed7df90b1628dcb36e10/jaraco.classes-3.3.0-py3-none-any.whl + hash: + sha256: 10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb + category: main + optional: false +- name: jeepney + version: 0.8.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/ae/72/2a1e2290f1ab1e06f71f3d0f1646c9e4634e70e1d37491535e19266e8dc9/jeepney-0.8.0-py3-none-any.whl + hash: + sha256: c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + category: main + optional: false +- name: jmespath + version: 1.0.1 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl + hash: + sha256: 02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 + category: main + optional: false +- name: jsonpickle + version: 3.0.2 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/d3/25/6e0a450430b7aa194b0f515f64820fc619314faa289458b7dfca4a026114/jsonpickle-3.0.2-py3-none-any.whl + hash: + sha256: 4a8442d97ca3f77978afa58068768dba7bff2dbabe79a9647bc3cdafd4ef019f + category: main + optional: false +- name: keyring + version: 24.3.0 + manager: pip + platform: linux-64 + dependencies: + jaraco.classes: '*' + importlib-metadata: '>=4.11.4' + secretstorage: '>=3.2' + jeepney: '>=0.4.2' + url: https://files.pythonhosted.org/packages/e3/e9/c51071308adc273ed612cd308a4b4360ffd291da40b7de2f47c9d6e3a978/keyring-24.3.0-py3-none-any.whl + hash: + sha256: 4446d35d636e6a10b8bce7caa66913dd9eca5fd222ca03a3d42c38608ac30836 + category: main + optional: false +- name: kubernetes + version: 28.1.0 + manager: pip + platform: linux-64 + dependencies: + certifi: '>=14.05.14' + six: '>=1.9.0' + python-dateutil: '>=2.5.3' + pyyaml: '>=5.4.1' + google-auth: '>=1.0.1' + websocket-client: '>=0.32.0,<0.40.0 || >0.40.0,<0.41.0 || >=0.43.0' + requests: '*' + requests-oauthlib: '*' + oauthlib: '>=3.2.2' + urllib3: '>=1.24.2,<2.0' + url: https://files.pythonhosted.org/packages/f5/6a/1f69c2d8b1ff03f8d8e10d801f4ac3016ed4c1b00aa9795732c6ec900bba/kubernetes-28.1.0-py2.py3-none-any.whl + hash: + sha256: 10f56f8160dcb73647f15fafda268e7f60cf7dbc9f8e46d52fcd46d3beb0c18d + category: main + optional: false +- name: llvmlite + version: 0.41.1 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/57/7d/ef28d5812f852b93bd2a583d00cdcde56833d31b645ae0eaa7e71eecfb4e/llvmlite-0.41.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 
0dd0338da625346538f1173a17cabf21d1e315cf387ca21b294ff209d176e244 + category: main + optional: false +- name: logomaker + version: '0.8' + manager: pip + platform: linux-64 + dependencies: + numpy: '*' + matplotlib: '*' + pandas: '*' + url: https://files.pythonhosted.org/packages/87/6d/9a9976674e77de3eab157e8c50667a7091058fa355fd7665eb1ab4b93c5a/logomaker-0.8-py2.py3-none-any.whl + hash: + sha256: 6766a0d83de4990ea859366a661ba72c580a7b73ac3c8b526204a0be7d65a50d + category: main + optional: false +- name: loguru + version: 0.7.2 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/03/0a/4f6fed21aa246c6b49b561ca55facacc2a44b87d65b8b92362a8e99ba202/loguru-0.7.2-py3-none-any.whl + hash: + sha256: 003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb + category: main + optional: false +- name: marshmallow + version: 3.20.1 + manager: pip + platform: linux-64 + dependencies: + packaging: '>=17.0' + url: https://files.pythonhosted.org/packages/ed/3c/cebfdcad015240014ff08b883d1c0c427f2ba45ae8c6572851b6ef136cad/marshmallow-3.20.1-py3-none-any.whl + hash: + sha256: 684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c + category: workflows + optional: true +- name: marshmallow-enum + version: 1.5.1 + manager: pip + platform: linux-64 + dependencies: + marshmallow: '>=2.0.0' + url: https://files.pythonhosted.org/packages/c6/59/ef3a3dc499be447098d4a89399beb869f813fee1b5a57d5d79dee2c1bf51/marshmallow_enum-1.5.1-py2.py3-none-any.whl + hash: + sha256: 57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072 + category: main + optional: false +- name: marshmallow-jsonschema + version: 0.13.0 + manager: pip + platform: linux-64 + dependencies: + marshmallow: '>=3.11' + url: https://files.pythonhosted.org/packages/f6/cf/a620a7b0a5ba2aaa52e70f95795e0cf3a7f6332a7cb432a1223b61ac654e/marshmallow_jsonschema-0.13.0-py3-none-any.whl + hash: + sha256: 2814f2afb94a6e01b3c0a5795b3dfb142b628763655f20378400af5c0a2307fb + category: main + optional: false +- name: memory-efficient-attention-pytorch + version: 0.1.6 + manager: pip + platform: linux-64 + dependencies: + einops: '>=0.4.1' + torch: '>=1.6' + url: https://files.pythonhosted.org/packages/ee/77/482a6a9abcc4b2493f5fa077b05d38c1a384643c22cd42cbdc751d3c4961/memory_efficient_attention_pytorch-0.1.6-py3-none-any.whl + hash: + sha256: efbb2676f8695b21a29d96d83f84818be257a35ac4c89f94d7d93f59819d38ed + category: main + optional: false +- name: more-itertools + version: 10.1.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/5a/cb/6dce742ea14e47d6f565589e859ad225f2a5de576d7696e0623b784e226b/more_itertools-10.1.0-py3-none-any.whl + hash: + sha256: 64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 + category: main + optional: false +- name: msal + version: 1.25.0 + manager: pip + platform: linux-64 + dependencies: + requests: '>=2.0.0,<3' + pyjwt: '>=1.0.0,<3' + cryptography: '>=0.6,<44' + url: https://files.pythonhosted.org/packages/2a/45/d80a35ce701c1b3b53ab57a585813636acba39f3a8ed87ac01e0f1dfa3c1/msal-1.25.0-py2.py3-none-any.whl + hash: + sha256: 386df621becb506bc315a713ec3d4d5b5d6163116955c7dde23622f156b81af6 + category: main + optional: false +- name: msal-extensions + version: 1.0.0 + manager: pip + platform: linux-64 + dependencies: + msal: '>=0.4.1,<2.0.0' + portalocker: '>=1.0,<3' + url: 
https://files.pythonhosted.org/packages/52/34/a8995d6f0fa626ff6b28dbd9c90f6c2a46bd484bc7ab343d078b0c6ff1a7/msal_extensions-1.0.0-py2.py3-none-any.whl + hash: + sha256: 91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee + category: main + optional: false +- name: multidict + version: 6.0.4 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/56/b5/ac112889bfc68e6cf4eda1e4325789b166c51c6cd29d5633e28fb2c2f966/multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93 + category: main + optional: false +- name: mygene + version: 3.2.2 + manager: pip + platform: linux-64 + dependencies: + biothings-client: '>=0.2.6' + url: https://files.pythonhosted.org/packages/a7/b7/132b1673c0ec00881d49d56c09624942fa0ebd2fc21d73d80647efa082e9/mygene-3.2.2-py2.py3-none-any.whl + hash: + sha256: 18d85d1b28ecee2be31d844607fb0c5f7d7c58573278432df819ee2a5e88fe46 + category: bioinformatics + optional: true +- name: mypy-extensions + version: 1.0.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl + hash: + sha256: 4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d + category: main + optional: false +- name: mysql-connector-python + version: 8.0.23 + manager: pip + platform: linux-64 + dependencies: + protobuf: '>=3.0.0' + url: https://files.pythonhosted.org/packages/13/62/5234fb85bf30c28c70c6c7664926bd4b40e3c1d914f6e275174801cf8b8a/mysql_connector_python-8.0.23-py2.py3-none-any.whl + hash: + sha256: c783e1dc8b78a1b1a9ebbf3ccb12d17e4513d91fafeb5b6c06a29f2d5619e285 + category: bioinformatics + optional: true +- name: norns + version: 0.1.6 + manager: pip + platform: linux-64 + dependencies: + nose: '*' + appdirs: '*' + pyyaml: '*' + url: https://files.pythonhosted.org/packages/37/0b/6eb131da9cf8dd0f7eb08add9d65330cf3a3bc2e28a5c7faf2dbcb681a99/norns-0.1.6.tar.gz + hash: + sha256: 1f3c6ccbe79b2cb3076f66a352cd76462593adbabe9ebb262f879a9d0a6634e4 + category: bioinformatics + optional: true +- name: nose + version: 1.3.7 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/15/d8/dd071918c040f50fa1cf80da16423af51ff8ce4a0f2399b7bf8de45ac3d9/nose-1.3.7-py3-none-any.whl + hash: + sha256: 9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac + category: main + optional: false +- name: numba + version: 0.58.1 + manager: pip + platform: linux-64 + dependencies: + llvmlite: '>=0.41.0dev0,<0.42' + numpy: '>=1.22,<1.27' + url: https://files.pythonhosted.org/packages/ed/13/b66627125b35f2987bd9872cf028b5e1e1ffcbc8d1e182ac4e84eed3998f/numba-0.58.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl + hash: + sha256: 4e79b6cc0d2bf064a955934a2e02bf676bc7995ab2db929dbbc62e4c16551be6 + category: main + optional: false +- name: oauthlib + version: 3.2.2 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/7e/80/cab10959dc1faead58dc8384a781dfbf93cb4d33d50988f7a69f1b7c9bbe/oauthlib-3.2.2-py3-none-any.whl + hash: + sha256: 8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca + category: main + optional: false +- name: omegaconf + version: 2.3.0 + manager: pip + platform: linux-64 + dependencies: + antlr4-python3-runtime: '>=4.9.0,<4.10.0' + pyyaml: '>=5.1.0' + url: 
https://files.pythonhosted.org/packages/e3/94/1843518e420fa3ed6919835845df698c7e27e183cb997394e4a670973a65/omegaconf-2.3.0-py3-none-any.whl + hash: + sha256: 7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b + category: workflows + optional: true +- name: palettable + version: 3.3.3 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/cf/f7/3367feadd4ab56783b0971c9b7edfbdd68e0c70ce877949a5dd2117ed4a0/palettable-3.3.3-py2.py3-none-any.whl + hash: + sha256: 74e9e7d7fe5a9be065e02397558ed1777b2df0b793a6f4ce1a5ee74f74fb0caa + category: main + optional: false +- name: pandas + version: 1.5.3 + manager: pip + platform: linux-64 + dependencies: + python-dateutil: '>=2.8.1' + pytz: '>=2020.1' + numpy: '>=1.21.0' + url: https://files.pythonhosted.org/packages/49/e2/79e46612dc25ebc7603dc11c560baa7266c90f9e48537ecf1a02a0dd6bff/pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23 + category: main + optional: false +- name: patsy + version: 0.5.4 + manager: pip + platform: linux-64 + dependencies: + six: '*' + numpy: '>=1.4' + url: https://files.pythonhosted.org/packages/29/ab/373449d6f741732f94e2d15d116a90f050b2857cb727b26d2f7bead50815/patsy-0.5.4-py2.py3-none-any.whl + hash: + sha256: 0486413077a527db51ddea8fa94a5234d0feb17a4f4dc01b59b6086c58a70f80 + category: main + optional: false +- name: portalocker + version: 2.8.2 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/17/9e/87671efcca80ba6203811540ed1f9c0462c1609d2281d7b7f53cef05da3d/portalocker-2.8.2-py3-none-any.whl + hash: + sha256: cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e + category: main + optional: false +- name: protobuf + version: 4.24.4 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/c8/2c/03046cac73f46bfe98fc846ef629cf4f84c2f59258216aa2cc0d22bfca8f/protobuf-4.24.4-cp37-abi3-manylinux2014_x86_64.whl + hash: + sha256: b493cb590960ff863743b9ff1452c413c2ee12b782f48beca77c8da3e2ffe9d9 + category: main + optional: false +- name: protoc-gen-swagger + version: 0.1.0 + manager: pip + platform: linux-64 + dependencies: + protobuf: '>=3.0.0' + url: https://files.pythonhosted.org/packages/7d/7f/d8f8d81a921f07e703cabf8a0b5bb6cbc26e3bce7614db905c3c7637315a/protoc_gen_swagger-0.1.0-py2.py3-none-any.whl + hash: + sha256: cdc043da538865f055a7f22b304a35085cef269dc33e2f3408b12d397e8d8b4b + category: main + optional: false +- name: pyarrow + version: 14.0.1 + manager: pip + platform: linux-64 + dependencies: + numpy: '>=1.16.6' + url: https://files.pythonhosted.org/packages/34/65/204f7c0d507056c37b56dddb3bd60f55744f2609c0f96a5e4ca91c67c42a/pyarrow-14.0.1-cp310-cp310-manylinux_2_28_x86_64.whl + hash: + sha256: 06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93 + category: main + optional: false +- name: pyasn1 + version: 0.5.1 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/d1/75/4686d2872bf2fc0b37917cbc8bbf0dd3a5cdb0990799be1b9cbf1e1eb733/pyasn1-0.5.1-py2.py3-none-any.whl + hash: + sha256: 4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58 + category: main + optional: false +- name: pyasn1-modules + version: 0.3.0 + manager: pip + platform: linux-64 + dependencies: + pyasn1: '>=0.4.6,<0.6.0' + url: 
https://files.pythonhosted.org/packages/cd/8e/bea464350e1b8c6ed0da3a312659cb648804a08af6cacc6435867f74f8bd/pyasn1_modules-0.3.0-py2.py3-none-any.whl + hash: + sha256: d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d + category: main + optional: false +- name: pybedtools + version: 0.9.1 + manager: pip + platform: linux-64 + dependencies: + six: '*' + pysam: '*' + numpy: '*' + url: git+https://github.com/cameronraysmith/pybedtools@9876fa25e80c7547101e662ebe1c6388579405d5 + hash: + sha256: 9876fa25e80c7547101e662ebe1c6388579405d5 + category: main + source: + type: url + url: git+https://github.com/cameronraysmith/pybedtools@9876fa25e80c7547101e662ebe1c6388579405d5 + optional: false +- name: pybigwig + version: 0.3.22 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/3b/9c/b86f24903d39531876b98cb9b2bf17a3d4fcff5fa8a17dab9971bbe920f9/pyBigWig-0.3.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 55031f67de6b117d49ba191738ea9707239bdacbd623a046e03917913257ac29 + category: main + optional: false +- name: pyfaidx + version: 0.7.2.2 + manager: pip + platform: linux-64 + dependencies: + six: '*' + importlib-metadata: '*' + url: https://files.pythonhosted.org/packages/83/bf/269e9b3a18dfda8a22a2d76decf98725f28ff930bd449f87a194625ba913/pyfaidx-0.7.2.2-py3-none-any.whl + hash: + sha256: 4e689bc09f3c5de1d2a1099d059b3b9914629c1c5c3ad08b49ff05af33392e0e + category: bioinformatics + optional: true +- name: pyjwt + version: 2.8.0 + manager: pip + platform: linux-64 + dependencies: + cryptography: '>=3.4.0' + url: https://files.pythonhosted.org/packages/2b/4f/e04a8067c7c96c364cef7ef73906504e2f40d690811c021e1a1901473a19/PyJWT-2.8.0-py3-none-any.whl + hash: + sha256: 59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 + category: main + optional: false +- name: python-slugify + version: 8.0.1 + manager: pip + platform: linux-64 + dependencies: + text-unidecode: '>=1.3' + url: https://files.pythonhosted.org/packages/b4/85/6aa722a11307ec572682023b76cad4c52cda708dfc25fcb4b4a6051da7ab/python_slugify-8.0.1-py2.py3-none-any.whl + hash: + sha256: 70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395 + category: main + optional: false +- name: pytimeparse + version: 1.1.8 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/1b/b4/afd75551a3b910abd1d922dbd45e49e5deeb4d47dc50209ce489ba9844dd/pytimeparse-1.1.8-py2.py3-none-any.whl + hash: + sha256: 04b7be6cc8bd9f5647a6325444926c3ac34ee6bc7e69da4367ba282f076036bd + category: main + optional: false +- name: qnorm + version: 0.8.1 + manager: pip + platform: linux-64 + dependencies: + numba: '*' + numpy: '*' + url: https://files.pythonhosted.org/packages/45/bc/80350513f3abf14f6dd921bf083f241ebc826c94556931c51f05348fd442/qnorm-0.8.1-py3-none-any.whl + hash: + sha256: 9d6ce4e82444155922baf06aa89f9f939b54f53844e340bf2c6d9e7ff8821c41 + category: main + optional: false +- name: requests-oauthlib + version: 1.3.1 + manager: pip + platform: linux-64 + dependencies: + oauthlib: '>=3.0.0' + requests: '>=2.0.0' + url: https://files.pythonhosted.org/packages/6f/bb/5deac77a9af870143c684ab46a7934038a53eb4aa975bc0687ed6ca2c610/requests_oauthlib-1.3.1-py2.py3-none-any.whl + hash: + sha256: 2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 + category: main + optional: false +- name: rich-click + version: 1.7.2 + manager: pip + platform: linux-64 + dependencies: + click: '>=7' + rich: '>=10.7.0' + 
typing-extensions: '*' + url: https://files.pythonhosted.org/packages/72/0c/a7e1b2161edced47101771e1807ac74e921168088c83765eb083c17746ef/rich_click-1.7.2-py3-none-any.whl + hash: + sha256: a42bcdcb8696c4ca7a3b1a39e1aba3d2cb64ad00690b4c022fdcb2cbccebc3fc + category: main + optional: false +- name: rsa + version: '4.9' + manager: pip + platform: linux-64 + dependencies: + pyasn1: '>=0.1.3' + url: https://files.pythonhosted.org/packages/49/97/fa78e3d2f65c02c8e1268b9aba606569fe97f6c8f7c2d74394553347c145/rsa-4.9-py3-none-any.whl + hash: + sha256: 90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 + category: main + optional: false +- name: s3fs + version: 2023.9.2 + manager: pip + platform: linux-64 + dependencies: + aiobotocore: '>=2.5.4,<2.6.0' + fsspec: 2023.9.2 + aiohttp: <4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1 + url: https://files.pythonhosted.org/packages/36/93/8aed66523d90361211a02dc0435855cc1ef357978decc2b05c8291fc515f/s3fs-2023.9.2-py3-none-any.whl + hash: + sha256: d0e0ad0267820f4e9ff16556e004e6759010e92378aebe2ac5d71419a6ff5387 + category: main + optional: false +- name: secretstorage + version: 3.3.3 + manager: pip + platform: linux-64 + dependencies: + cryptography: '>=2.0' + jeepney: '>=0.6' + url: https://files.pythonhosted.org/packages/54/24/b4293291fa1dd830f353d2cb163295742fa87f179fcc8a20a306a81978b7/SecretStorage-3.3.3-py3-none-any.whl + hash: + sha256: f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + category: main + optional: false +- name: sentry-sdk + version: 1.38.0 + manager: pip + platform: linux-64 + dependencies: + certifi: '*' + urllib3: '>=1.26.11' + url: https://files.pythonhosted.org/packages/ee/61/72bf9b0326f77486403f468b0466a3eeb6f7613ba96b714f6974fe6b9c36/sentry_sdk-1.38.0-py2.py3-none-any.whl + hash: + sha256: 0017fa73b8ae2d4e57fd2522ee3df30453715b29d2692142793ec5d5f90b94a6 + category: main + optional: false +- name: setproctitle + version: 1.3.3 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/79/e7/54b36be02aee8ad573be68f6f46fd62838735c2f007b22df50eb5e13a20d/setproctitle-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: fc74e84fdfa96821580fb5e9c0b0777c1c4779434ce16d3d62a9c4d8c710df39 + category: main + optional: false +- name: smmap + version: 5.0.1 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/a7/a5/10f97f73544edcdef54409f1d839f6049a0d79df68adbc1ceb24d1aaca42/smmap-5.0.1-py3-none-any.whl + hash: + sha256: e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da + category: main + optional: false +- name: statsd + version: 3.3.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/47/33/c824f799128dfcfce2142f18d9bc6c55c46a939f6e4250639134222d99eb/statsd-3.3.0-py2.py3-none-any.whl + hash: + sha256: c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa + category: main + optional: false +- name: statsmodels + version: 0.14.0 + manager: pip + platform: linux-64 + dependencies: + numpy: '>=1.18' + scipy: '>=1.4,<1.9.2 || >1.9.2' + pandas: '>=1.0' + patsy: '>=0.5.2' + packaging: '>=21.3' + url: https://files.pythonhosted.org/packages/d5/22/62831c11096ef030e35424326716d47d65d5f7eb323966c9e4725324770d/statsmodels-0.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 9c64ebe9cf376cba0c31aed138e15ed179a1d128612dd241cdf299d159e5e882 + category: main + optional: 
false +- name: text-unidecode + version: '1.3' + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/a6/a5/c0b6468d3824fe3fde30dbb5e1f687b291608f9473681bbf7dabbf5a87d7/text_unidecode-1.3-py2.py3-none-any.whl + hash: + sha256: 1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8 + category: main + optional: false +- name: tqdm + version: 4.66.1 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/00/e5/f12a80907d0884e6dff9c16d0c0114d81b8cd07dc3ae54c5e962cc83037e/tqdm-4.66.1-py3-none-any.whl + hash: + sha256: d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386 + category: main + optional: false +- name: typing-inspect + version: 0.9.0 + manager: pip + platform: linux-64 + dependencies: + mypy-extensions: '>=0.3.0' + typing-extensions: '>=3.7.4' + url: https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl + hash: + sha256: 9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f + category: workflows + optional: true +- name: urllib3 + version: 1.26.18 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/b0/53/aa91e163dcfd1e5b82d8a890ecf13314e3e149c05270cc644581f77f17fd/urllib3-1.26.18-py2.py3-none-any.whl + hash: + sha256: 34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 + category: main + optional: false +- name: wandb + version: 0.16.0 + manager: pip + platform: linux-64 + dependencies: + click: '>=7.1,<8.0.0 || >8.0.0' + gitpython: '>=1.0.0,<3.1.29 || >3.1.29' + requests: '>=2.0.0,<3' + psutil: '>=5.0.0' + sentry-sdk: '>=1.0.0' + docker-pycreds: '>=0.4.0' + pyyaml: '*' + setproctitle: '*' + appdirs: '>=1.4.3' + protobuf: '>=3.19.0,<4.21.0 || >4.21.0,<5' + url: https://files.pythonhosted.org/packages/5c/81/1780aa129564b11709a6d7f0739257174f0a3a1b432ba804b3c6f00e0f88/wandb-0.16.0-py3-none-any.whl + hash: + sha256: e103142a5ecdb158d29441c2bf9f935ae149ed562377f7cebffd2a6f7c9de949 + category: main + optional: false +- name: wrapt + version: 1.16.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/49/83/b40bc1ad04a868b5b5bcec86349f06c1ee1ea7afe51dc3e46131e4f39308/wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf + category: main + optional: false +- name: xdg + version: 6.0.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/dd/54/3516c1cf349060fc3578686d271eba242f10ec00b4530c2985af9faac49b/xdg-6.0.0-py3-none-any.whl + hash: + sha256: df3510755b4395157fc04fc3b02467c777f3b3ca383257397f09ab0d4c16f936 + category: main + optional: false +- name: xxhash + version: 3.4.1 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/80/8a/1dd41557883b6196f8f092011a5c1f72d4d44cf36d7b67d4a5efe3127949/xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b + category: main + optional: false +- name: yarl + version: 1.9.3 + manager: pip + platform: linux-64 + dependencies: + idna: '>=2.0' + multidict: '>=4.0' + url: 
https://files.pythonhosted.org/packages/b6/b2/44b31699e27f82c577143d062a2b58cbe0c6e7a0828d13c0ffd10891ad40/yarl-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: c5f3faeb8100a43adf3e7925d556801d14b5816a0ac9e75e22948e787feec642 + category: main + optional: false diff --git a/environments/conda/environment.yml b/environments/conda/environment.yml index c7a50f83..4f755d36 100644 --- a/environments/conda/environment.yml +++ b/environments/conda/environment.yml @@ -1,4 +1,10 @@ name: dnadiffusion +# this manually curated environment.yml file +# may be deprecated in favor of conda-lock.yml +# see: +# $ poe -d conda-lock +# and pyproject.toml: +# tool.poe.tasks.conda-lock channels: - nodefaults - pytorch diff --git a/environments/conda/virtual-packages.yml b/environments/conda/virtual-packages.yml new file mode 100644 index 00000000..f53c079a --- /dev/null +++ b/environments/conda/virtual-packages.yml @@ -0,0 +1,5 @@ +subdirs: + linux-64: + packages: + __cuda: 12.3.0 + __glibc: 2.35 diff --git a/flake.lock b/flake.lock new file mode 100644 index 00000000..9269d543 --- /dev/null +++ b/flake.lock @@ -0,0 +1,366 @@ +{ + "nodes": { + "devenv": { + "inputs": { + "flake-compat": "flake-compat", + "nix": "nix", + "nixpkgs": "nixpkgs", + "pre-commit-hooks": "pre-commit-hooks" + }, + "locked": { + "lastModified": 1702239828, + "narHash": "sha256-H+z5LY1XslLLIlsh0pirHmveD7Eh6QQUT96VNSRJW9w=", + "owner": "cachix", + "repo": "devenv", + "rev": "895e8403410c3ec14d1e8cae94e88b4e7e2e8c2f", + "type": "github" + }, + "original": { + "owner": "cachix", + "repo": "devenv", + "type": "github" + } + }, + "flake-compat": { + "flake": false, + "locked": { + "lastModified": 1673956053, + "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1685518550, + "narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "flake-utils_2": { + "inputs": { + "systems": "systems_2" + }, + "locked": { + "lastModified": 1694529238, + "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "gitignore": { + "inputs": { + "nixpkgs": [ + "devenv", + "pre-commit-hooks", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1660459072, + "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", + "owner": "hercules-ci", + "repo": "gitignore.nix", + "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "gitignore.nix", + "type": "github" + } + }, + "lowdown-src": { + "flake": false, + "locked": { + "lastModified": 1633514407, + "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", + "owner": "kristapsdz", + "repo": "lowdown", + "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", + "type": "github" + }, + "original": { + "owner": "kristapsdz", + "repo": 
"lowdown", + "type": "github" + } + }, + "nix": { + "inputs": { + "lowdown-src": "lowdown-src", + "nixpkgs": [ + "devenv", + "nixpkgs" + ], + "nixpkgs-regression": "nixpkgs-regression" + }, + "locked": { + "lastModified": 1676545802, + "narHash": "sha256-EK4rZ+Hd5hsvXnzSzk2ikhStJnD63odF7SzsQ8CuSPU=", + "owner": "domenkozar", + "repo": "nix", + "rev": "7c91803598ffbcfe4a55c44ac6d49b2cf07a527f", + "type": "github" + }, + "original": { + "owner": "domenkozar", + "ref": "relaxed-flakes", + "repo": "nix", + "type": "github" + } + }, + "nix-github-actions": { + "inputs": { + "nixpkgs": [ + "poetry2nix", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1698974481, + "narHash": "sha256-yPncV9Ohdz1zPZxYHQf47S8S0VrnhV7nNhCawY46hDA=", + "owner": "nix-community", + "repo": "nix-github-actions", + "rev": "4bb5e752616262457bc7ca5882192a564c0472d2", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "nix-github-actions", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1678875422, + "narHash": "sha256-T3o6NcQPwXjxJMn2shz86Chch4ljXgZn746c2caGxd8=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "126f49a01de5b7e35a43fd43f891ecf6d3a51459", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-regression": { + "locked": { + "lastModified": 1643052045, + "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "github" + }, + "original": { + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "github" + } + }, + "nixpkgs-stable": { + "locked": { + "lastModified": 1685801374, + "narHash": "sha256-otaSUoFEMM+LjBI1XL/xGB5ao6IwnZOXc47qhIgJe8U=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "c37ca420157f4abc31e26f436c1145f8951ff373", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-23.05", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs_2": { + "locked": { + "lastModified": 1702233072, + "narHash": "sha256-H5G2wgbim2Ku6G6w+NSaQaauv6B6DlPhY9fMvArKqRo=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "781e2a9797ecf0f146e81425c822dca69fe4a348", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-23.11", + "repo": "nixpkgs", + "type": "github" + } + }, + "poetry2nix": { + "inputs": { + "flake-utils": "flake-utils_2", + "nix-github-actions": "nix-github-actions", + "nixpkgs": [ + "nixpkgs" + ], + "systems": "systems_3", + "treefmt-nix": "treefmt-nix" + }, + "locked": { + "lastModified": 1702353412, + "narHash": "sha256-pH7+EgqYxrEvBybEmW8sgLdk+L20MAu+VVk7wIcGI7w=", + "owner": "nix-community", + "repo": "poetry2nix", + "rev": "92363e49d033af04ba1531309f62c049e1cdf044", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "poetry2nix", + "type": "github" + } + }, + "pre-commit-hooks": { + "inputs": { + "flake-compat": [ + "devenv", + "flake-compat" + ], + "flake-utils": "flake-utils", + "gitignore": "gitignore", + "nixpkgs": [ + "devenv", + "nixpkgs" + ], + "nixpkgs-stable": "nixpkgs-stable" + }, + "locked": { + "lastModified": 1688056373, + "narHash": "sha256-2+SDlNRTKsgo3LBRiMUcoEUb6sDViRNQhzJquZ4koOI=", + "owner": "cachix", + "repo": "pre-commit-hooks.nix", + "rev": "5843cf069272d92b60c3ed9e55b7a8989c01d4c7", + "type": "github" + }, + "original": { + "owner": "cachix", + "repo": "pre-commit-hooks.nix", + "type": 
"github" + } + }, + "root": { + "inputs": { + "devenv": "devenv", + "nixpkgs": "nixpkgs_2", + "poetry2nix": "poetry2nix", + "systems": "systems_4" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + }, + "systems_2": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + }, + "systems_3": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "id": "systems", + "type": "indirect" + } + }, + "systems_4": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + }, + "treefmt-nix": { + "inputs": { + "nixpkgs": [ + "poetry2nix", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1699786194, + "narHash": "sha256-3h3EH1FXQkIeAuzaWB+nK0XK54uSD46pp+dMD3gAcB4=", + "owner": "numtide", + "repo": "treefmt-nix", + "rev": "e82f32aa7f06bbbd56d7b12186d555223dc399d1", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "treefmt-nix", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 00000000..d2b2ccd7 --- /dev/null +++ b/flake.nix @@ -0,0 +1,169 @@ +{ + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11"; + # nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + systems.url = "github:nix-systems/default"; + # flake-utils.url = github:numtide/flake-utils; + devenv.url = "github:cachix/devenv"; + poetry2nix = { + url = github:nix-community/poetry2nix; + inputs = { + nixpkgs.follows = "nixpkgs"; + # flake-utils.follows = "flake-utils"; + }; + }; + }; + + nixConfig = { + extra-trusted-public-keys = [ + "dnadiffusion.cachix.org-1:P20JWJrVBiN5iPBnzJ4UiqLVghGBCYOicXxltPRLaEY=" + "devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw=" + ]; + extra-substituters = [ + "https://dnadiffusion.cachix.org" + "https://devenv.cachix.org" + ]; + }; + + outputs = { + self, + nixpkgs, + devenv, + systems, + poetry2nix, + ... 
+ } @ inputs: let + forEachSystem = nixpkgs.lib.genAttrs (import systems); + in { + packages = forEachSystem (system: let + pkgs = import nixpkgs { + inherit system; + overlays = [poetry2nix.overlays.default]; + }; + in { + devenv-up = self.devShells.${system}.default.config.procfileScript; + }); + + devShells = + forEachSystem + (system: let + pkgs = import nixpkgs { + inherit system; + overlays = [poetry2nix.overlays.default]; + }; + + pyPkgsBuildRequirements = { + biofluff = ["setuptools"]; + biothings-client = ["setuptools"]; + cloudpickle = ["flit-core"]; + feather-format = ["setuptools"]; + flytekit = ["setuptools"]; + flyteidl = ["setuptools"]; + genomepy = ["hatchling"]; + gimmemotifs = ["setuptools"]; + gtfparse = ["setuptools"]; + htseq = [pkgs.swig]; + hydra-core = ["setuptools"]; + hydra-joblib-launcher = ["setuptools"]; + hydra-zen = ["setuptools"]; + logomaker = ["setuptools"]; + marshmallow-jsonschema = ["setuptools"]; + mygene = ["setuptools"]; + memory-efficient-attention-pytorch = ["setuptools"]; + mysql-connector-python = ["setuptools"]; + norns = ["setuptools"]; + pybedtools = ["setuptools" "cython" pkgs.bedtools pkgs.htslib pkgs.zlib]; + pybigwig = [pkgs.zlib pkgs.curl]; + pysam = [pkgs.bzip2 pkgs.curl pkgs.htslib pkgs.openssl pkgs.xz]; + xdoctest = ["setuptools"]; + }; + + poetry2nixOverrides = pkgs.poetry2nix.overrides.withDefaults ( + self: super: let + buildInputsOverrides = + builtins.mapAttrs ( + package: buildRequirements: + (builtins.getAttr package super).overridePythonAttrs (old: { + buildInputs = + (old.buildInputs or []) + ++ (builtins.map (pkg: + if builtins.isString pkg + then builtins.getAttr pkg super + else pkg) + buildRequirements); + }) + ) + pyPkgsBuildRequirements; + in + buildInputsOverrides + // { + htseq = super.htseq.override {preferWheel = true;}; + hydra-core = super.hydra-core.override {preferWheel = true;}; + hydra-joblib-launcher = super.hydra-joblib-launcher.override {preferWheel = true;}; + pysam = super.pysam.override {preferWheel = true;}; + qnorm = super.qnorm.override {preferWheel = true;}; + scipy = super.scipy.override {preferWheel = true;}; + sourmash = super.sourmash.override {preferWheel = true;}; + yarl = super.yarl.override {preferWheel = true;}; + } + ); + + poetryEnv = pkgs.poetry2nix.mkPoetryEnv { + projectDir = ./.; + python = pkgs.python310; + preferWheels = false; + editablePackageSources = { + dnadiffusion = ./src; + }; + groups = [ + "bioinformatics" + "workflows" + "lint" + "test" + ]; + checkGroups = ["test"]; + extras = []; + overrides = poetry2nixOverrides; + }; + in { + default = devenv.lib.mkShell { + inherit inputs pkgs; + modules = [ + { + packages = with pkgs; [ + poetryEnv + poetry + + atuin + bat + gh + git + gnumake + lazygit + poethepoet + ripgrep + starship + tree + yq-go + zsh + ]; + + dotenv = { + enable = true; + filename = ".env"; + # disableHint = true; + }; + + pre-commit.hooks = { + alejandra.enable = true; + ruff.enable = true; + # pyright.enable = true; + }; + + difftastic.enable = true; + } + ]; + }; + }); + }; +} diff --git a/notebooks/dnadiffusion.py b/notebooks/dnadiffusion.py index 29384c34..8cd42a93 100644 --- a/notebooks/dnadiffusion.py +++ b/notebooks/dnadiffusion.py @@ -65,7 +65,9 @@ def __init__(self, beta): self.step = 0 def update_model_average(self, ma_model, current_model): - for current_params, ma_params in zip(current_model.parameters(), ma_model.parameters()): + for current_params, ma_params in zip( + current_model.parameters(), ma_model.parameters() + ): old_weight, up_weight = 
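The `update_model_average` loop in this hunk pairs each live model parameter with its moving-average copy; with the `EMA(0.995)` instance the trainer constructs later in this file, each pair is blended by a plain exponential moving average. The body of `update_average` is unchanged context the hunk does not show, so the sketch below is an assumption about it, not a quote:

    def update_average(self, old, new):
        # theta_ema <- beta * theta_ema + (1 - beta) * theta, beta = self.beta (0.995)
        if old is None:
            return new
        return old * self.beta + (1 - self.beta) * new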
ma_params.data, current_params.data ma_params.data = self.update_average(old_weight, up_weight) @@ -109,7 +111,9 @@ def encode(seq, alphabet): # Metrics def convert_to_seq(x, nucleotides): - return "".join([nucleotides[s] for s in np.argmax(x.reshape(4, 200), axis=0)]) + return "".join( + [nucleotides[s] for s in np.argmax(x.reshape(4, 200), axis=0)] + ) def create_sample( @@ -134,7 +138,9 @@ def create_sample( sampled = torch.from_numpy(np.random.choice(cell_types, sample_bs)) classes = sampled.float().to(diffusion_model.device) - sampled_images = diffusion_model.sample(classes, (sample_bs, 1, 4, 200), cond_weight_to_metric) + sampled_images = diffusion_model.sample( + classes, (sample_bs, 1, 4, 200), cond_weight_to_metric + ) if save_timestep_dataframe: seqs_to_df = {} @@ -145,7 +151,10 @@ def create_sample( else: for n_b, x in enumerate(sampled_images[-1]): seq_final = f">seq_test_{n_a}_{n_b}\n" + "".join( - [nucleotides[s] for s in np.argmax(x.reshape(4, 200), axis=0)] + [ + nucleotides[s] + for s in np.argmax(x.reshape(4, 200), axis=0) + ] ) final_sequences.append(seq_final) @@ -162,18 +171,33 @@ def create_sample( save_motifs_syn = open("synthetic_motifs.fasta", "w") save_motifs_syn.write("\n".join(final_sequences)) save_motifs_syn.close() - os.system("gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs.bed") - df_results_syn = pd.read_csv("syn_results_motifs.bed", sep="\t", skiprows=5, header=None) + os.system( + "gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs.bed" + ) + df_results_syn = pd.read_csv( + "syn_results_motifs.bed", sep="\t", skiprows=5, header=None + ) - df_results_syn["motifs"] = df_results_syn[8].apply(lambda x: x.split('motif_name "')[1].split('"')[0]) - df_results_syn[0] = df_results_syn[0].apply(lambda x: "_".join(x.split("_")[:-1])) - df_motifs_count_syn = df_results_syn[[0, "motifs"]].drop_duplicates().groupby("motifs").count() + df_results_syn["motifs"] = df_results_syn[8].apply( + lambda x: x.split('motif_name "')[1].split('"')[0] + ) + df_results_syn[0] = df_results_syn[0].apply( + lambda x: "_".join(x.split("_")[:-1]) + ) + df_motifs_count_syn = ( + df_results_syn[[0, "motifs"]] + .drop_duplicates() + .groupby("motifs") + .count() + ) return df_motifs_count_syn def compare_motif_list(df_motifs_a, df_motifs_b): # Using KL divergence to compare motifs lists distribution - set_all_mot = set(df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist()) + set_all_mot = set( + df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist() + ) create_new_matrix = [] for x in set_all_mot: list_in = [] @@ -190,11 +214,19 @@ def compare_motif_list(df_motifs_a, df_motifs_b): create_new_matrix.append(list_in) - df_motifs = pd.DataFrame(create_new_matrix, columns=["motif", "motif_a", "motif_b"]) + df_motifs = pd.DataFrame( + create_new_matrix, columns=["motif", "motif_a", "motif_b"] + ) - df_motifs["Diffusion_seqs"] = df_motifs["motif_a"] / df_motifs["motif_a"].sum() - df_motifs["Training_seqs"] = df_motifs["motif_b"] / df_motifs["motif_b"].sum() - kl_pq = rel_entr(df_motifs["Diffusion_seqs"].values, df_motifs["Training_seqs"].values) + df_motifs["Diffusion_seqs"] = ( + df_motifs["motif_a"] / df_motifs["motif_a"].sum() + ) + df_motifs["Training_seqs"] = ( + df_motifs["motif_b"] / df_motifs["motif_b"].sum() + ) + kl_pq = rel_entr( + df_motifs["Diffusion_seqs"].values, df_motifs["Training_seqs"].values + ) return np.sum(kl_pq) @@ -217,7 +249,9 @@ def generate_heatmap(df_heat, 
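`compare_motif_list` above reduces two `gimme scan` motif-count tables to probability vectors and sums `rel_entr`, i.e. KL(diffusion sequences || training sequences). A self-contained sketch of the same reduction, assuming two pandas Series of per-motif counts; `counts_a`/`counts_b` are hypothetical names, and the pseudo-count of 1 for motifs absent from one table is also an assumption, since that branch is unchanged context in the hunk:

    import numpy as np
    import pandas as pd
    from scipy.special import rel_entr

    def motif_kl(counts_a: pd.Series, counts_b: pd.Series) -> float:
        # Align both count vectors on the union of motif names.
        motifs = counts_a.index.union(counts_b.index)
        a = counts_a.reindex(motifs, fill_value=1).astype(float)
        b = counts_b.reindex(motifs, fill_value=1).astype(float)
        # Normalize to distributions; KL(a || b) = sum_i a_i * log(a_i / b_i).
        return float(np.sum(rel_entr(a / a.sum(), b / b.sum())))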
x_label, y_label, cell_components): df_plot.columns = [x.split("_")[0] for x in cell_components] df_plot.index = df_plot.columns sns.heatmap(df_plot, cmap="Blues_r", annot=True, lw=0.1, vmax=1, vmin=0) - plt.title(f"Kl divergence \n {x_label} sequences x {y_label} sequences \n MOTIFS probabilities") + plt.title( + f"Kl divergence \n {x_label} sequences x {y_label} sequences \n MOTIFS probabilities" + ) plt.xlabel(f"{x_label} Sequences \n(motifs dist)") plt.ylabel(f"{y_label} \n (motifs dist)") plt.grid(False) @@ -228,7 +262,11 @@ def generate_heatmap(df_heat, x_label, y_label, cell_components): def generate_similarity_metric(nucleotides): """Capture the syn_motifs.fasta and compare with the dataset motifs""" seqs_file = open("synthetic_motifs.fasta").readlines() - seqs_to_hotencoder = [one_hot_encode(s.replace("\n", ""), nucleotides, 200).T for s in seqs_file if ">" not in s] + seqs_to_hotencoder = [ + one_hot_encode(s.replace("\n", ""), nucleotides, 200).T + for s in seqs_file + if ">" not in s + ] return seqs_to_hotencoder @@ -238,7 +276,9 @@ def get_best_match(db, x_seq): # transforming in a function def calculate_mean_similarity(database, input_query_seqs, seq_len=200): - final_base_max_match = np.mean([get_best_match(database, x) for x in tqdm(input_query_seqs)]) + final_base_max_match = np.mean( + [get_best_match(database, x) for x in tqdm(input_query_seqs)] + ) return final_base_max_match / seq_len @@ -246,7 +286,9 @@ def generate_similarity_using_train(X_train_in, nucleotides): convert_X_train = X_train_in.copy() convert_X_train[convert_X_train == -1] = 0 generated_seqs_to_similarity = generate_similarity_metric(nucleotides) - return calculate_mean_similarity(convert_X_train, generated_seqs_to_similarity) + return calculate_mean_similarity( + convert_X_train, generated_seqs_to_similarity + ) # Linear Beta Schedule @@ -338,7 +380,11 @@ def __init__(self, input_dim, emb_dim): generic one layer FC NN for embedding things """ self.input_dim = input_dim - layers = [nn.Linear(input_dim, emb_dim), nn.GELU(), nn.Linear(emb_dim, emb_dim)] + layers = [ + nn.Linear(input_dim, emb_dim), + nn.GELU(), + nn.Linear(emb_dim, emb_dim), + ] self.model = nn.Sequential(*layers) def forward(self, x): @@ -431,11 +477,17 @@ def forward(self, x, scale_shift=None): class ResnetBlock(nn.Module): def __init__(self, dim, dim_out, *, time_emb_dim=None, groups=8): super().__init__() - self.mlp = nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) if exists(time_emb_dim) else None + self.mlp = ( + nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) + if exists(time_emb_dim) + else None + ) self.block1 = Block(dim, dim_out, groups=groups) self.block2 = Block(dim_out, dim_out, groups=groups) - self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity() + self.res_conv = ( + nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity() + ) def forward(self, x, time_emb=None): scale_shift = None @@ -452,7 +504,16 @@ def forward(self, x, time_emb=None): class ResnetBlockClassConditioned(ResnetBlock): - def __init__(self, dim, dim_out, *, num_classes, class_embed_dim, time_emb_dim=None, groups=8): + def __init__( + self, + dim, + dim_out, + *, + num_classes, + class_embed_dim, + time_emb_dim=None, + groups=8, + ): super().__init__( dim=dim + class_embed_dim, dim_out=dim_out, @@ -477,12 +538,17 @@ def __init__(self, dim, heads=4, dim_head=32): self.heads = heads hidden_dim = dim_head * heads self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False) - self.to_out = 
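The `ResnetBlock.mlp` built in the hunk above projects the time embedding to `dim_out * 2` channels so it can be split into a (scale, shift) pair applied inside the block, FiLM-style. `Block.forward` only appears here as the signature `forward(self, x, scale_shift=None)`, so the usual lucidrains-style application below is an assumption; shapes are illustrative:

    import torch
    from torch import nn

    time_emb_dim, dim_out = 64, 32
    mlp = nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2))

    t_emb = torch.randn(8, time_emb_dim)          # (batch, time_emb_dim)
    scale_shift = mlp(t_emb)[:, :, None, None]    # broadcast over H, W
    scale, shift = scale_shift.chunk(2, dim=1)    # two (batch, dim_out, 1, 1)
    h = torch.randn(8, dim_out, 4, 200)           # normalized block activations
    h = h * (scale + 1) + shift                   # time-conditioned activations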
nn.Sequential(nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim)) + self.to_out = nn.Sequential( + nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim) + ) def forward(self, x): b, c, h, w = x.shape qkv = self.to_qkv(x).chunk(3, dim=1) - q, k, v = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv) + q, k, v = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv + ) q = q.softmax(dim=-2) k = k.softmax(dim=-1) @@ -493,7 +559,9 @@ def forward(self, x): context = torch.einsum("b h d n, b h e n -> b h d e", k, v) out = torch.einsum("b h d e, b h d n -> b h e n", context, q) - out = rearrange(out, "b h c (x y) -> b (h c) x y", h=self.heads, x=h, y=w) + out = rearrange( + out, "b h c (x y) -> b (h c) x y", h=self.heads, x=h, y=w + ) return self.to_out(out) @@ -509,7 +577,10 @@ def __init__(self, dim, heads=4, dim_head=32, scale=10): def forward(self, x): b, c, h, w = x.shape qkv = self.to_qkv(x).chunk(3, dim=1) - q, k, v = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv) + q, k, v = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv + ) q, k = map(l2norm, (q, k)) @@ -536,9 +607,15 @@ def forward(self, x, y): qkv_x = self.to_qkv(x).chunk(3, dim=1) qkv_y = self.to_qkv(y).chunk(3, dim=1) - q_x, k_x, v_x = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv_x) + q_x, k_x, v_x = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv_x + ) - q_y, k_y, v_y = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv_y) + q_y, k_y, v_y = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv_y + ) q, k = map(l2norm, (q_x, k_y)) @@ -611,7 +688,9 @@ def __init__( block_klass(dim_in, dim_in, time_emb_dim=time_dim), block_klass(dim_in, dim_in, time_emb_dim=time_dim), Residual(PreNorm(dim_in, LinearAttention(dim_in))), - Downsample(dim_in, dim_out) if not is_last else nn.Conv2d(dim_in, dim_out, 3, padding=1), + Downsample(dim_in, dim_out) + if not is_last + else nn.Conv2d(dim_in, dim_out, 3, padding=1), ] ) ) @@ -626,10 +705,16 @@ def __init__( self.ups.append( nn.ModuleList( [ - block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim), - block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim), + block_klass( + dim_out + dim_in, dim_out, time_emb_dim=time_dim + ), + block_klass( + dim_out + dim_in, dim_out, time_emb_dim=time_dim + ), Residual(PreNorm(dim_out, LinearAttention(dim_out))), - Upsample(dim_out, dim_in) if not is_last else nn.Conv2d(dim_out, dim_in, 3, padding=1), + Upsample(dim_out, dim_in) + if not is_last + else nn.Conv2d(dim_out, dim_in, 3, padding=1), ] ) ) @@ -708,20 +793,40 @@ def forward(self, x, time, classes, x_self_cond=None): # Loading data and Motifs def motifs_from_fasta(fasta: str): print("Computing Motifs....") - os.system(f"gimme scan {fasta} -p JASPAR2020_vertebrates -g hg38 > train_results_motifs.bed") - df_results_seq_guime = pd.read_csv("train_results_motifs.bed", sep="\t", skiprows=5, header=None) - df_results_seq_guime["motifs"] = df_results_seq_guime[8].apply(lambda x: x.split('motif_name "')[1].split('"')[0]) + os.system( + f"gimme scan {fasta} -p JASPAR2020_vertebrates -g hg38 > train_results_motifs.bed" + ) + df_results_seq_guime = pd.read_csv( + "train_results_motifs.bed", sep="\t", skiprows=5, header=None + ) + df_results_seq_guime["motifs"] = df_results_seq_guime[8].apply( + lambda x: x.split('motif_name "')[1].split('"')[0] + ) - df_results_seq_guime[0] = df_results_seq_guime[0].apply(lambda x: 
"_".join(x.split("_")[:-1])) - df_results_seq_guime_count_out = df_results_seq_guime[[0, "motifs"]].drop_duplicates().groupby("motifs").count() + df_results_seq_guime[0] = df_results_seq_guime[0].apply( + lambda x: "_".join(x.split("_")[:-1]) + ) + df_results_seq_guime_count_out = ( + df_results_seq_guime[[0, "motifs"]] + .drop_duplicates() + .groupby("motifs") + .count() + ) plt.rcParams["figure.figsize"] = (30, 2) - df_results_seq_guime_count_out.sort_values(0, ascending=False).head(50)[0].plot.bar() + df_results_seq_guime_count_out.sort_values(0, ascending=False).head(50)[ + 0 + ].plot.bar() plt.title("Top 50 MOTIFS on component 0 ") plt.show() return df_results_seq_guime_count_out -def save_fasta(df: pd.DataFrame, name: str, num_sequences: int, seq_to_subset_comp: bool = False) -> str: +def save_fasta( + df: pd.DataFrame, + name: str, + num_sequences: int, + seq_to_subset_comp: bool = False, +) -> str: fasta_path = f"{name}.fasta" save_fasta_file = open(fasta_path, "w") num_to_sample = df.shape[0] @@ -745,14 +850,21 @@ def save_fasta(df: pd.DataFrame, name: str, num_sequences: int, seq_to_subset_co def generate_motifs_and_fastas( - df: pd.DataFrame, name: str, num_sequences: int, subset_list: list | None = None + df: pd.DataFrame, + name: str, + num_sequences: int, + subset_list: list | None = None, ) -> dict[str, Any]: print("Generating Motifs and Fastas...", name) print("---" * 10) # Saving fasta if subset_list: - fasta_path = save_fasta(df, f"{name}_{'_'.join([str(c) for c in subset_list])}", num_sequences) + fasta_path = save_fasta( + df, + f"{name}_{'_'.join([str(c) for c in subset_list])}", + num_sequences, + ) else: fasta_path = save_fasta(df, name, num_sequences) @@ -763,7 +875,9 @@ def generate_motifs_and_fastas( final_subset_motifs = {} for comp, v_comp in df.groupby("TAG"): print(comp) - c_fasta = save_fasta(v_comp, f"{name}_{comp}", num_sequences, seq_to_subset_comp=True) + c_fasta = save_fasta( + v_comp, f"{name}_{comp}", num_sequences, seq_to_subset_comp=True + ) final_subset_motifs[comp] = motifs_from_fasta(c_fasta) return { @@ -798,15 +912,21 @@ def preprocess_data( # Creating train/test/shuffle groups df_test = df[df["chr"] == "chr1"].reset_index(drop=True) df_train_shuffled = df[df["chr"] == "chr2"].reset_index(drop=True) - df_train = df_train = df[(df["chr"] != "chr1") & (df["chr"] != "chr2")].reset_index(drop=True) + df_train = df[(df["chr"] != "chr1") & (df["chr"] != "chr2")].reset_index( + drop=True + ) df_train_shuffled["sequence"] = df_train_shuffled["sequence"].apply( lambda x: "".join(random.sample(list(x), len(x))) ) # Getting motif information from the sequences - train = generate_motifs_and_fastas(df_train, "train", number_of_sequences_to_motif_creation, subset_list) - test = generate_motifs_and_fastas(df_test, "test", number_of_sequences_to_motif_creation, subset_list) + train = generate_motifs_and_fastas( + df_train, "train", number_of_sequences_to_motif_creation, subset_list + ) + test = generate_motifs_and_fastas( + df_test, "test", number_of_sequences_to_motif_creation, subset_list + ) train_shuffled = generate_motifs_and_fastas( df_train_shuffled, "train_shuffled", @@ -814,7 +934,11 @@ def preprocess_data( subset_list, ) - combined_dict = {"train": train, "test": test, "train_shuffled": train_shuffled} + combined_dict = { + "train": train, + "test": test, + "train_shuffled": train_shuffled, + } # Writing to pickle if save_output: @@ -886,12 +1010,20 @@ def load_data( test_motifs_cell_specific = encode_data["test"]["final_subset_motifs"] shuffle_motifs 
= encode_data["train_shuffled"]["motifs"] - shuffle_motifs_cell_specific = encode_data["train_shuffled"]["final_subset_motifs"] + shuffle_motifs_cell_specific = encode_data["train_shuffled"][ + "final_subset_motifs" + ] # Creating sequence dataset df = encode_data["train"]["df"] nucleotides = ["A", "C", "G", "T"] - x_train_seq = np.array([one_hot_encode(x, nucleotides, 200) for x in df["sequence"] if "N" not in x]) + x_train_seq = np.array( + [ + one_hot_encode(x, nucleotides, 200) + for x in df["sequence"] + if "N" not in x + ] + ) X_train = np.array([x.T.tolist() for x in x_train_seq]) X_train[X_train == 0] = -1 @@ -903,8 +1035,12 @@ def load_data( # Wrapping data into dataloader tf = T.Compose([T.ToTensor()]) - seq_dataset = SequenceDataset(seqs=X_train, c=x_train_cell_type, transform=tf) - train_dl = DataLoader(seq_dataset, batch_size, shuffle=True, num_workers=96, pin_memory=True) + seq_dataset = SequenceDataset( + seqs=X_train, c=x_train_cell_type, transform=tf + ) + train_dl = DataLoader( + seq_dataset, batch_size, shuffle=True, num_workers=96, pin_memory=True + ) # Collecting variables into a dict encode_data_dict = { @@ -944,7 +1080,9 @@ def __init__( self.register_buffer("alphas_cumprod_prev", alphas_cumprod_prev) self.register_buffer("sqrt_recip_alphas", torch.sqrt(1.0 / alphas)) self.register_buffer("sqrt_alphas_cumprod", torch.sqrt(alphas_cumprod)) - self.register_buffer("sqrt_one_minus_alphas_cumprod", torch.sqrt(1.0 - alphas_cumprod)) + self.register_buffer( + "sqrt_one_minus_alphas_cumprod", torch.sqrt(1.0 - alphas_cumprod) + ) self.register_buffer( "posterior_variance", betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod), @@ -963,7 +1101,9 @@ def sample(self, classes, shape, cond_weight): ) @torch.no_grad() - def p_sample_loop(self, classes, image_size, cond_weight, get_cross_map=False): + def p_sample_loop( + self, classes, image_size, cond_weight, get_cross_map=False + ): b = image_size[0] device = self.device @@ -990,7 +1130,11 @@ def p_sample_loop(self, classes, image_size, cond_weight, get_cross_map=False): sampling_fn = partial(self.p_sample) for i in reversed(range(0, self.timesteps)): - img, cross_matrix = sampling_fn(x=img, t=torch.full((b,), i, device=device, dtype=torch.long), t_index=i) + img, cross_matrix = sampling_fn( + x=img, + t=torch.full((b,), i, device=device, dtype=torch.long), + t_index=i, + ) imgs.append(img.cpu().numpy()) cross_images_final.append(cross_matrix.cpu().numpy()) @@ -1002,12 +1146,17 @@ def p_sample_loop(self, classes, image_size, cond_weight, get_cross_map=False): @torch.no_grad() def p_sample(self, x, t, t_index): betas_t = extract(self.betas, t, x.shape) - sqrt_one_minus_alphas_cumprod_t = extract(self.sqrt_one_minus_alphas_cumprod, t, x.shape) + sqrt_one_minus_alphas_cumprod_t = extract( + self.sqrt_one_minus_alphas_cumprod, t, x.shape + ) sqrt_recip_alphas_t = extract(self.sqrt_recip_alphas, t, x.shape) # Equation 11 in the paper # Use our model (noise predictor) to predict the mean - model_mean = sqrt_recip_alphas_t * (x - betas_t * self.model(x, time=t) / sqrt_one_minus_alphas_cumprod_t) + model_mean = sqrt_recip_alphas_t * ( + x + - betas_t * self.model(x, time=t) / sqrt_one_minus_alphas_cumprod_t + ) if t_index == 0: return model_mean @@ -1018,7 +1167,9 @@ def p_sample(self, x, t, t_index): return model_mean + torch.sqrt(posterior_variance_t) * noise @torch.no_grad() - def p_sample_guided(self, x, classes, t, t_index, context_mask, cond_weight): + def p_sample_guided( + self, x, classes, t, t_index, context_mask, 
cond_weight + ): # adapted from: https://openreview.net/pdf?id=qw8AKxfYbI batch_size = x.shape[0] device = self.device @@ -1026,15 +1177,21 @@ def p_sample_guided(self, x, classes, t, t_index, context_mask, cond_weight): t_double = t.repeat(2).to(device) x_double = x.repeat(2, 1, 1, 1).to(device) betas_t = extract(self.betas, t_double, x_double.shape, device) - sqrt_one_minus_alphas_cumprod_t = extract(self.sqrt_one_minus_alphas_cumprod, t_double, x_double.shape, device) - sqrt_recip_alphas_t = extract(self.sqrt_recip_alphas, t_double, x_double.shape, device) + sqrt_one_minus_alphas_cumprod_t = extract( + self.sqrt_one_minus_alphas_cumprod, t_double, x_double.shape, device + ) + sqrt_recip_alphas_t = extract( + self.sqrt_recip_alphas, t_double, x_double.shape, device + ) # classifier free sampling interpolates between guided and non guided using `cond_weight` classes_masked = classes * context_mask classes_masked = classes_masked.type(torch.long) # model = self.accelerator.unwrap_model(self.model) self.model.output_attention = True - preds, cross_map_full = self.model(x_double, time=t_double, classes=classes_masked) + preds, cross_map_full = self.model( + x_double, time=t_double, classes=classes_masked + ) self.model.output_attention = False cross_map = cross_map_full[:batch_size] eps1 = (1 + cond_weight) * preds[:batch_size] @@ -1044,33 +1201,52 @@ def p_sample_guided(self, x, classes, t, t_index, context_mask, cond_weight): # Equation 11 in the paper # Use our model (noise predictor) to predict the mean model_mean = sqrt_recip_alphas_t[:batch_size] * ( - x - betas_t[:batch_size] * x_t / sqrt_one_minus_alphas_cumprod_t[:batch_size] + x + - betas_t[:batch_size] + * x_t + / sqrt_one_minus_alphas_cumprod_t[:batch_size] ) if t_index == 0: return model_mean, cross_map else: - posterior_variance_t = extract(self.posterior_variance, t, x.shape, device) + posterior_variance_t = extract( + self.posterior_variance, t, x.shape, device + ) noise = torch.randn_like(x) # Algorithm 2 line 4: - return model_mean + torch.sqrt(posterior_variance_t) * noise, cross_map + return ( + model_mean + torch.sqrt(posterior_variance_t) * noise, + cross_map, + ) def q_sample(self, x_start, t, noise=None): noise = default(noise, torch.randn_like(x_start)) device = self.device - sqrt_alphas_cumprod_t = extract(self.sqrt_alphas_cumprod, t, x_start.shape, device) - sqrt_one_minus_alphas_cumprod_t = extract(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape, device) + sqrt_alphas_cumprod_t = extract( + self.sqrt_alphas_cumprod, t, x_start.shape, device + ) + sqrt_one_minus_alphas_cumprod_t = extract( + self.sqrt_one_minus_alphas_cumprod, t, x_start.shape, device + ) - return sqrt_alphas_cumprod_t * x_start + sqrt_one_minus_alphas_cumprod_t * noise + return ( + sqrt_alphas_cumprod_t * x_start + + sqrt_one_minus_alphas_cumprod_t * noise + ) - def p_losses(self, x_start, t, classes, noise=None, loss_type="huber", p_uncond=0.1): + def p_losses( + self, x_start, t, classes, noise=None, loss_type="huber", p_uncond=0.1 + ): device = self.device noise = default(noise, torch.randn_like(x_start)) x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) - context_mask = torch.bernoulli(torch.zeros(classes.shape[0]) + (1 - p_uncond)).to(device) + context_mask = torch.bernoulli( + torch.zeros(classes.shape[0]) + (1 - p_uncond) + ).to(device) # Mask for unconditional guidance classes = classes * context_mask @@ -1155,7 +1331,9 @@ def __init__( ) # Preparing model/optimizer/EMA/dataloader - model = Unet_lucas(dim=200, channels=1, 
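The guidance step reformatted in `p_sample_guided` above is classifier-free guidance: the batch is duplicated, one copy receives zeroed class labels via `context_mask`, and the two noise predictions are blended with the weight `cond_weight`. Only the `eps1` line appears in the hunk; the complementary term is unchanged context, so this sketch restates the full blend under that assumption:

    # preds stacks conditional then unconditional predictions for the doubled batch.
    eps_cond = preds[:batch_size]      # class labels visible
    eps_uncond = preds[batch_size:]    # class labels masked to 0
    # w = cond_weight: w = 0 is purely conditional; larger w pushes samples
    # toward the class-consistent mode at the cost of diversity.
    x_t = (1 + cond_weight) * eps_cond - cond_weight * eps_uncond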
dim_mults=(1, 2, 4), resnet_block_groups=4) + model = Unet_lucas( + dim=200, channels=1, dim_mults=(1, 2, 4), resnet_block_groups=4 + ) # Creating diffusion_model self.diffusion_model = Diffusion( @@ -1167,11 +1345,17 @@ def __init__( if self.accelerator.is_main_process: self.ema = EMA(0.995) - self.ema_model = copy.deepcopy(self.diffusion_model).eval().requires_grad_(False) + self.ema_model = ( + copy.deepcopy(self.diffusion_model).eval().requires_grad_(False) + ) self.train_kl, self.test_kl, self.shuffle_kl = 1, 1, 1 self.seq_similarity = 0.38 - self.diffusion_model, self.optimizer, self.train_dl = self.accelerator.prepare( + ( + self.diffusion_model, + self.optimizer, + self.train_dl, + ) = self.accelerator.prepare( self.diffusion_model, self.optimizer, self.train_dl ) @@ -1221,26 +1405,46 @@ def train(self): print(f" Epoch {epoch} Loss:", loss.item()) # if epoch != 0 and epoch % self.save_and_sample_every == 0 and self.accelerator.is_main_process: - if epoch != 0 and epoch % self.save_and_sample_every == 0 and self.accelerator.is_main_process: + if ( + epoch != 0 + and epoch % self.save_and_sample_every == 0 + and self.accelerator.is_main_process + ): self.diffusion_model.eval() print("saving") synt_df = create_sample( self.accelerator.unwrap_model(self.diffusion_model), - conditional_numeric_to_tag=self.encode_data["numeric_to_tag"], + conditional_numeric_to_tag=self.encode_data[ + "numeric_to_tag" + ], cell_types=self.encode_data["cell_types"], - number_of_samples=int(self.num_sampling_to_compare_cells / 10), + number_of_samples=int( + self.num_sampling_to_compare_cells / 10 + ), + ) + self.train_kl = compare_motif_list( + synt_df, self.encode_data["train_motifs"] + ) + self.test_kl = compare_motif_list( + synt_df, self.encode_data["test_motifs"] + ) + self.shuffle_kl = compare_motif_list( + synt_df, self.encode_data["shuffle_motifs"] ) - self.train_kl = compare_motif_list(synt_df, self.encode_data["train_motifs"]) - self.test_kl = compare_motif_list(synt_df, self.encode_data["test_motifs"]) - self.shuffle_kl = compare_motif_list(synt_df, self.encode_data["shuffle_motifs"]) print("Similarity", self.seq_similarity, "Similarity") print("KL_TRAIN", self.train_kl, "KL") print("KL_TEST", self.test_kl, "KL") print("KL_SHUFFLE", self.shuffle_kl, "KL") - if epoch != 0 and epoch % 500 == 0 and self.accelerator.is_main_process: - model_path = f"dnadiffusion/checkpoints/epoch_{epoch}_" + self.model_name + if ( + epoch != 0 + and epoch % 500 == 0 + and self.accelerator.is_main_process + ): + model_path = ( + f"dnadiffusion/checkpoints/epoch_{epoch}_" + self.model_name + ) self.save(epoch, model_path) # Saving model diff --git a/notebooks/experiments/conditional_diffusion/VQ_VAE_LATENT_SPACE_WITH_METRICS.ipynb b/notebooks/experiments/conditional_diffusion/VQ_VAE_LATENT_SPACE_WITH_METRICS.ipynb index 30dfae15..56bdc66f 100644 --- a/notebooks/experiments/conditional_diffusion/VQ_VAE_LATENT_SPACE_WITH_METRICS.ipynb +++ b/notebooks/experiments/conditional_diffusion/VQ_VAE_LATENT_SPACE_WITH_METRICS.ipynb @@ -226,7 +226,9 @@ }, "outputs": [], "source": [ - "def one_hot_encode_sequences(seq: str, include_n: Optional[bool] = False) -> torch.Tensor:\n", + "def one_hot_encode_sequences(\n", + " seq: str, include_n: Optional[bool] = False\n", + ") -> torch.Tensor:\n", " \"\"\"\n", " Converts a sequence of nucleotides to a one-hot encoded tensor.\n", "\n", @@ -240,7 +242,11 @@ " num_seq = []\n", " for nucleotide in seq:\n", " num_seq.append(NUCLEOTIDES[nucleotide])\n", - " return 
(F.one_hot(torch.tensor(num_seq).to(torch.int64), num_classes=len(NUCLEOTIDES))).T\n", + " return (\n", + " F.one_hot(\n", + " torch.tensor(num_seq).to(torch.int64), num_classes=len(NUCLEOTIDES)\n", + " )\n", + " ).T\n", "\n", "\n", "class PeaksDataset(Dataset):\n", @@ -316,8 +322,16 @@ "\n", "\n", "df = pd.read_csv('train_all_classifier_WM20220916.csv', delimiter='\\t')\n", - "train_df = df[(df['seqname'] != 'chr3') & (df['seqname'] != 'chr15') & (df['seqname'] != 'chr7')]\n", - "test_df = df[(df['seqname'] == 'chr3') | (df['seqname'] == 'chr15') | (df['seqname'] == 'chr7')]\n", + "train_df = df[\n", + " (df['seqname'] != 'chr3')\n", + " & (df['seqname'] != 'chr15')\n", + " & (df['seqname'] != 'chr7')\n", + "]\n", + "test_df = df[\n", + " (df['seqname'] == 'chr3')\n", + " | (df['seqname'] == 'chr15')\n", + " | (df['seqname'] == 'chr7')\n", + "]\n", "\n", "peaks_data = PeaksDataset(train_df)\n", "peaks_dl = DataLoader(peaks_data, batch_size=BATCH_SIZE, shuffle=True)" @@ -398,14 +412,20 @@ " space, with shape (batch_size, 1).\n", " \"\"\"\n", "\n", - " def __init__(self, num_embeddings: int, embedding_dim: int, commitment_cost: float) -> None:\n", + " def __init__(\n", + " self, num_embeddings: int, embedding_dim: int, commitment_cost: float\n", + " ) -> None:\n", " super(VectorQuantizer, self).__init__()\n", "\n", " self._embedding_dim = embedding_dim\n", " self._num_embeddings = num_embeddings\n", "\n", - " self._embedding = nn.Embedding(self._num_embeddings, self._embedding_dim)\n", - " self._embedding.weight.data.uniform_(-1 / self._num_embeddings, 1 / self._num_embeddings)\n", + " self._embedding = nn.Embedding(\n", + " self._num_embeddings, self._embedding_dim\n", + " )\n", + " self._embedding.weight.data.uniform_(\n", + " -1 / self._num_embeddings, 1 / self._num_embeddings\n", + " )\n", " self._commitment_cost = commitment_cost\n", "\n", " def forward(self, inputs: torch.Tensor) -> Tuple[torch.Tensor]:\n", @@ -425,11 +445,17 @@ "\n", " # Encoding\n", " encoding_indices = torch.argmin(distances, dim=1).unsqueeze(1)\n", - " encodings = torch.zeros(encoding_indices.shape[0], self._num_embeddings, device=inputs.device)\n", + " encodings = torch.zeros(\n", + " encoding_indices.shape[0],\n", + " self._num_embeddings,\n", + " device=inputs.device,\n", + " )\n", " encodings.scatter_(1, encoding_indices, 1)\n", "\n", " # Quantize and unflatten\n", - " quantized = torch.matmul(encodings, self._embedding.weight).view(input_shape)\n", + " quantized = torch.matmul(encodings, self._embedding.weight).view(\n", + " input_shape\n", + " )\n", "\n", " # Loss\n", " e_latent_loss = F.mse_loss(quantized.detach(), inputs)\n", @@ -438,7 +464,9 @@ "\n", " quantized = inputs + (quantized - inputs).detach()\n", " avg_probs = torch.mean(encodings, dim=0)\n", - " perplexity = torch.exp(-torch.sum(avg_probs * torch.log(avg_probs + 1e-10)))\n", + " perplexity = torch.exp(\n", + " -torch.sum(avg_probs * torch.log(avg_probs + 1e-10))\n", + " )\n", "\n", " # convert quantized from BHWC -> BCHW\n", " return (\n", @@ -504,12 +532,16 @@ " self._embedding_dim = embedding_dim\n", " self._num_embeddings = num_embeddings\n", "\n", - " self._embedding = nn.Embedding(self._num_embeddings, self._embedding_dim)\n", + " self._embedding = nn.Embedding(\n", + " self._num_embeddings, self._embedding_dim\n", + " )\n", " self._embedding.weight.data.normal_()\n", " self._commitment_cost = commitment_cost\n", "\n", " self.register_buffer(\"_ema_cluster_size\", torch.zeros(num_embeddings))\n", - " self._ema_w = 
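Two idioms in the quantizer forward pass above deserve a note: the straight-through estimator, which makes the non-differentiable codebook lookup transparent to gradients, and the perplexity, which is exp of the entropy of average code usage. A minimal illustration with hypothetical shapes:

    import torch

    inputs = torch.randn(8, 16, requires_grad=True)  # flattened encoder outputs
    quantized = torch.randn(8, 16)                   # nearest codebook vectors

    # Forward value is `quantized`, but d(out)/d(inputs) = I, so encoder
    # gradients bypass the argmin lookup entirely.
    out = inputs + (quantized - inputs).detach()

    # Perplexity ranges from 1 (one code dominates) to num_embeddings
    # (uniform usage); the 1e-10 guards against log(0).
    avg_probs = torch.tensor([0.5, 0.25, 0.25, 0.0])
    perplexity = torch.exp(-torch.sum(avg_probs * torch.log(avg_probs + 1e-10)))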
nn.Parameter(torch.Tensor(num_embeddings, self._embedding_dim))\n", + " self._ema_w = nn.Parameter(\n", + " torch.Tensor(num_embeddings, self._embedding_dim)\n", + " )\n", " self._ema_w.data.normal_()\n", "\n", " self._decay = decay\n", @@ -532,26 +564,40 @@ "\n", " # Encoding\n", " encoding_indices = torch.argmin(distances, dim=1).unsqueeze(1)\n", - " encodings = torch.zeros(encoding_indices.shape[0], self._num_embeddings, device=inputs.device)\n", + " encodings = torch.zeros(\n", + " encoding_indices.shape[0],\n", + " self._num_embeddings,\n", + " device=inputs.device,\n", + " )\n", " encodings.scatter_(1, encoding_indices, 1)\n", "\n", " # Quantize and unflatten\n", - " quantized = torch.matmul(encodings, self._embedding.weight).view(input_shape)\n", + " quantized = torch.matmul(encodings, self._embedding.weight).view(\n", + " input_shape\n", + " )\n", "\n", " # Use EMA to update the embedding vectors\n", " if self.training:\n", - " self._ema_cluster_size = self._ema_cluster_size * self._decay + (1 - self._decay) * torch.sum(encodings, 0)\n", + " self._ema_cluster_size = self._ema_cluster_size * self._decay + (\n", + " 1 - self._decay\n", + " ) * torch.sum(encodings, 0)\n", "\n", " # Laplace smoothing of the cluster size\n", " n = torch.sum(self._ema_cluster_size.data)\n", " self._ema_cluster_size = (\n", - " (self._ema_cluster_size + self._epsilon) / (n + self._num_embeddings * self._epsilon) * n\n", + " (self._ema_cluster_size + self._epsilon)\n", + " / (n + self._num_embeddings * self._epsilon)\n", + " * n\n", " )\n", "\n", " dw = torch.matmul(encodings.t(), flat_input)\n", - " self._ema_w = nn.Parameter(self._ema_w * self._decay + (1 - self._decay) * dw)\n", + " self._ema_w = nn.Parameter(\n", + " self._ema_w * self._decay + (1 - self._decay) * dw\n", + " )\n", "\n", - " self._embedding.weight = nn.Parameter(self._ema_w / self._ema_cluster_size.unsqueeze(1))\n", + " self._embedding.weight = nn.Parameter(\n", + " self._ema_w / self._ema_cluster_size.unsqueeze(1)\n", + " )\n", "\n", " # Loss\n", " e_latent_loss = F.mse_loss(quantized.detach(), inputs)\n", @@ -560,7 +606,9 @@ " # Straight Through Estimator\n", " quantized = inputs + (quantized - inputs).detach()\n", " avg_probs = torch.mean(encodings, dim=0)\n", - " perplexity = torch.exp(-torch.sum(avg_probs * torch.log(avg_probs + 1e-10)))\n", + " perplexity = torch.exp(\n", + " -torch.sum(avg_probs * torch.log(avg_probs + 1e-10))\n", + " )\n", "\n", " # convert quantized from BHWC -> BCHW\n", " return (\n", @@ -599,7 +647,9 @@ " - y (torch.Tensor): The output tensor with shape (batch_size, num_hiddens, height, width).\n", " \"\"\"\n", "\n", - " def __init__(self, in_channels: int, num_hiddens: int, num_residual_hiddens: int) -> None:\n", + " def __init__(\n", + " self, in_channels: int, num_hiddens: int, num_residual_hiddens: int\n", + " ) -> None:\n", " super(Residual, self).__init__()\n", " self._block = nn.Sequential(\n", " nn.ReLU(True),\n", @@ -655,7 +705,10 @@ " super(ResidualStack, self).__init__()\n", " self._num_residual_layers = num_residual_layers\n", " self._layers = nn.ModuleList(\n", - " [Residual(in_channels, num_hiddens, num_residual_hiddens) for _ in range(self._num_residual_layers)]\n", + " [\n", + " Residual(in_channels, num_hiddens, num_residual_hiddens)\n", + " for _ in range(self._num_residual_layers)\n", + " ]\n", " )\n", "\n", " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", @@ -845,13 +898,29 @@ " ) -> None:\n", " super(Model, self).__init__()\n", "\n", - " self._encoder = Encoder(1, num_hiddens, 
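The `VectorQuantizerEMA` variant reformatted above updates the codebook without a gradient: per-code assignment counts and assigned-input sums are tracked as exponential moving averages, Laplace-smoothed, and their ratio becomes the new embedding. Condensed into one function with the notebook's tensor names; a sketch of one step, not the class itself:

    import torch

    def ema_codebook_step(ema_cluster_size, ema_w, encodings, flat_input,
                          decay=0.99, epsilon=1e-5):
        num_embeddings = ema_cluster_size.shape[0]
        # N_i <- decay * N_i + (1 - decay) * (#inputs assigned to code i)
        ema_cluster_size = ema_cluster_size * decay + (1 - decay) * encodings.sum(0)
        # Laplace smoothing keeps rarely used codes from collapsing to zero.
        n = ema_cluster_size.sum()
        ema_cluster_size = ((ema_cluster_size + epsilon)
                            / (n + num_embeddings * epsilon) * n)
        # m_i <- decay * m_i + (1 - decay) * (sum of inputs assigned to code i)
        ema_w = ema_w * decay + (1 - decay) * encodings.t() @ flat_input
        # e_i <- m_i / N_i
        embedding_weight = ema_w / ema_cluster_size.unsqueeze(1)
        return embedding_weight, ema_cluster_size, ema_w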
num_residual_layers, num_residual_hiddens)\n", - " self._pre_vq_conv = nn.Conv2d(in_channels=num_hiddens, out_channels=embedding_dim, kernel_size=1, stride=1)\n", + " self._encoder = Encoder(\n", + " 1, num_hiddens, num_residual_layers, num_residual_hiddens\n", + " )\n", + " self._pre_vq_conv = nn.Conv2d(\n", + " in_channels=num_hiddens,\n", + " out_channels=embedding_dim,\n", + " kernel_size=1,\n", + " stride=1,\n", + " )\n", " if decay > 0.0:\n", - " self._vq_vae = VectorQuantizerEMA(num_embeddings, embedding_dim, commitment_cost, decay)\n", + " self._vq_vae = VectorQuantizerEMA(\n", + " num_embeddings, embedding_dim, commitment_cost, decay\n", + " )\n", " else:\n", - " self._vq_vae = VectorQuantizer(num_embeddings, embedding_dim, commitment_cost)\n", - " self._decoder = Decoder(embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens)\n", + " self._vq_vae = VectorQuantizer(\n", + " num_embeddings, embedding_dim, commitment_cost\n", + " )\n", + " self._decoder = Decoder(\n", + " embedding_dim,\n", + " num_hiddens,\n", + " num_residual_layers,\n", + " num_residual_hiddens,\n", + " )\n", "\n", " def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor]:\n", " z = self._encoder(x)\n", @@ -993,7 +1062,13 @@ " data = data.float()\n", " optimizer.zero_grad()\n", "\n", - " vq_loss, data_recon, perplexity, hidden_quantized, encoding_index = model(data)\n", + " (\n", + " vq_loss,\n", + " data_recon,\n", + " perplexity,\n", + " hidden_quantized,\n", + " encoding_index,\n", + " ) = model(data)\n", " recon_error = F.mse_loss(data_recon, data) / 1\n", " loss = recon_error + vq_loss\n", " loss.backward()\n", @@ -1076,10 +1151,15 @@ " # Iterate over the items in the batch of data\n", " for i in range(original_valid_data.shape[0]):\n", " # Increment the number of correct predictions if the predicted label for this item matches the ground truth label\n", - " num_correct_predictions += int((prediction[i][0] == ground_truth[i][0]).sum())\n", + " num_correct_predictions += int(\n", + " (prediction[i][0] == ground_truth[i][0]).sum()\n", + " )\n", " if int((prediction[i][0] == ground_truth[i][0]).sum()) == 200:\n", " num_correct_predictions_full += 1\n", - " elif int((prediction[i][0] == ground_truth[i][0]).sum()) > RECOVERY_THRESHOLD:\n", + " elif (\n", + " int((prediction[i][0] == ground_truth[i][0]).sum())\n", + " > RECOVERY_THRESHOLD\n", + " ):\n", " num_correct_predictions_partially += 1\n", "\n", " # Increment the total number of predictions\n", @@ -1120,7 +1200,9 @@ } ], "source": [ - "percentage_correct_predictions_nucleotides = num_correct_predictions / num_total_predictions\n", + "percentage_correct_predictions_nucleotides = (\n", + " num_correct_predictions / num_total_predictions\n", + ")\n", "percentage_correct_predictions_nucleotides" ] }, @@ -1156,7 +1238,9 @@ } ], "source": [ - "percentage_correct_predictions_full = num_correct_predictions_full / num_total_predictions_full\n", + "percentage_correct_predictions_full = (\n", + " num_correct_predictions_full / num_total_predictions_full\n", + ")\n", "percentage_correct_predictions_full" ] }, @@ -1192,7 +1276,9 @@ } ], "source": [ - "percentage_correct_predictions_partially = num_correct_predictions_partially / num_total_predictions_full\n", + "percentage_correct_predictions_partially = (\n", + " num_correct_predictions_partially / num_total_predictions_full\n", + ")\n", "percentage_correct_predictions_partially" ] }, @@ -1364,7 +1450,9 @@ } ], "source": [ - "tsne = TSNE(n_components=2, 
random_state=0).fit_transform(model._vq_vae._embedding.weight.data.cpu())\n", + "tsne = TSNE(n_components=2, random_state=0).fit_transform(\n", + " model._vq_vae._embedding.weight.data.cpu()\n", + ")\n", "x, y = list(zip(*tsne))\n", "\n", "plt.figure(figsize=(9, 6))\n", @@ -1462,7 +1550,9 @@ " for i, motif in enumerate(motifs):\n", " for j in range(200 // MOTIF_LENGTH):\n", " # Write the motif to the file\n", - " f.write(f\">motif_{i+1}_{j+1}\\n{motif[j*MOTIF_LENGTH:(j+1)*MOTIF_LENGTH]}\\n\")\n", + " f.write(\n", + " f\">motif_{i+1}_{j+1}\\n{motif[j*MOTIF_LENGTH:(j+1)*MOTIF_LENGTH]}\\n\"\n", + " )\n", "\n", "\n", "save_motifs_to_fasta(df_raw_sequences, \"recoded_motifs_seqs.fasta\")" @@ -1482,7 +1572,9 @@ " \"\"\"\n", " print('Computing Motifs....')\n", " !gimme scan $fasta -p JASPAR2020_vertebrates -g hg38 > train_results_motifs.bed\n", - " df_results_seq_guime = pd.read_csv('train_results_motifs.bed', sep='\\t', skiprows=5, header=None)\n", + " df_results_seq_guime = pd.read_csv(\n", + " 'train_results_motifs.bed', sep='\\t', skiprows=5, header=None\n", + " )\n", " return df_results_seq_guime" ] }, @@ -1859,7 +1951,9 @@ "sns.countplot(\n", " y=\"motifs\",\n", " data=df_results_seq_guime,\n", - " order=pd.value_counts(df_results_seq_guime[\"motifs\"]).iloc[:N_MOTIF_OCCURANCES_TO_PLOT].index,\n", + " order=pd.value_counts(df_results_seq_guime[\"motifs\"])\n", + " .iloc[:N_MOTIF_OCCURANCES_TO_PLOT]\n", + " .index,\n", ")" ] } diff --git a/notebooks/experiments/conditional_diffusion/accelerate_diffusion_conditional_4_cells.ipynb b/notebooks/experiments/conditional_diffusion/accelerate_diffusion_conditional_4_cells.ipynb index 96645cd3..03d20ca7 100644 --- a/notebooks/experiments/conditional_diffusion/accelerate_diffusion_conditional_4_cells.ipynb +++ b/notebooks/experiments/conditional_diffusion/accelerate_diffusion_conditional_4_cells.ipynb @@ -486,7 +486,9 @@ " self.step = 0\n", "\n", " def update_model_average(self, ma_model, current_model):\n", - " for current_params, ma_params in zip(current_model.parameters(), ma_model.parameters()):\n", + " for current_params, ma_params in zip(\n", + " current_model.parameters(), ma_model.parameters()\n", + " ):\n", " old_weight, up_weight = ma_params.data, current_params.data\n", " ma_params.data = self.update_average(old_weight, up_weight)\n", "\n", @@ -684,11 +686,17 @@ "outputs": [], "source": [ "def sampling_to_metric(\n", - " number_of_samples=20, specific_group=False, group_number=None, cond_weight_to_metric=0, additional_variables=None\n", + " number_of_samples=20,\n", + " specific_group=False,\n", + " group_number=None,\n", + " cond_weight_to_metric=0,\n", + " additional_variables=None,\n", "):\n", " # Sampling regions using the trained model\n", " final_sequences = []\n", - " for n_a in tqdm_notebook(range(number_of_samples)): # generating number_of_samples *10 sequences\n", + " for n_a in tqdm_notebook(\n", + " range(number_of_samples)\n", + " ): # generating number_of_samples *10 sequences\n", " # sampled_images = bit_diffusion.sample(batch_size = 4)\n", " print(n_a)\n", " sample_bs = 10\n", @@ -726,10 +734,21 @@ " save_motifs_syn.close()\n", " # Scan for motifs\n", " !gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs.bed\n", - " df_results_syn = pd.read_csv('syn_results_motifs.bed', sep='\\t', skiprows=5, header=None)\n", - " df_results_syn['motifs'] = df_results_syn[8].apply(lambda x: x.split('motif_name \"')[1].split('\"')[0])\n", - " df_results_syn[0] = df_results_syn[0].apply(lambda x: 
'_'.join(x.split('_')[:-1]))\n", - " df_motifs_count_syn = df_results_syn[[0, 'motifs']].drop_duplicates().groupby('motifs').count()\n", + " df_results_syn = pd.read_csv(\n", + " 'syn_results_motifs.bed', sep='\\t', skiprows=5, header=None\n", + " )\n", + " df_results_syn['motifs'] = df_results_syn[8].apply(\n", + " lambda x: x.split('motif_name \"')[1].split('\"')[0]\n", + " )\n", + " df_results_syn[0] = df_results_syn[0].apply(\n", + " lambda x: '_'.join(x.split('_')[:-1])\n", + " )\n", + " df_motifs_count_syn = (\n", + " df_results_syn[[0, 'motifs']]\n", + " .drop_duplicates()\n", + " .groupby('motifs')\n", + " .count()\n", + " )\n", " plt.rcParams[\"figure.figsize\"] = (30, 2)\n", " df_motifs_count_syn.sort_values(0, ascending=False).head(50)[0].plot.bar()\n", " plt.show()\n", @@ -791,7 +810,9 @@ "# Not using the total number of motifs but the count of the occurence aka: percentage of the sequences with a given motif.\n", "def compare_motif_list(df_motifs_a, df_motifs_b):\n", " # Using KL divergence to compare motifs lists distribution\n", - " set_all_mot = set(df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist())\n", + " set_all_mot = set(\n", + " df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist()\n", + " )\n", " create_new_matrix = []\n", " for x in set_all_mot:\n", " list_in = []\n", @@ -808,10 +829,16 @@ "\n", " create_new_matrix.append(list_in)\n", "\n", - " df_motifs = pd.DataFrame(create_new_matrix, columns=['motif', 'motif_a', 'motif_b'])\n", + " df_motifs = pd.DataFrame(\n", + " create_new_matrix, columns=['motif', 'motif_a', 'motif_b']\n", + " )\n", "\n", - " df_motifs['Diffusion_seqs'] = df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", - " df_motifs['Training_seqs'] = df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " df_motifs['Diffusion_seqs'] = (\n", + " df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", + " )\n", + " df_motifs['Training_seqs'] = (\n", + " df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " )\n", " plt.rcParams[\"figure.figsize\"] = (3, 3)\n", " sns.regplot(x='Diffusion_seqs', y='Training_seqs', data=df_motifs)\n", " plt.xlabel('Diffusion Seqs')\n", @@ -820,7 +847,9 @@ " plt.show()\n", "\n", " display(df_motifs)\n", - " kl_pq = rel_entr(df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values)\n", + " kl_pq = rel_entr(\n", + " df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values\n", + " )\n", " return np.sum(kl_pq)\n", "\n", "\n", @@ -884,13 +913,17 @@ "@torch.no_grad()\n", "def p_sample(model, x, t, t_index):\n", " betas_t = extract(betas, t, x.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x.shape\n", + " )\n", " # print (x.shape, 'x_shape')\n", " sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t, x.shape)\n", "\n", " # Equation 11 in the paper\n", " # Use our model (noise predictor) to predict the mean\n", - " model_mean = sqrt_recip_alphas_t * (x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t)\n", + " model_mean = sqrt_recip_alphas_t * (\n", + " x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t\n", + " )\n", "\n", " if t_index == 0:\n", " return model_mean\n", @@ -926,8 +959,12 @@ " betas = betas.to(device)\n", " sqrt_one_minus_alphas_cumprod = sqrt_one_minus_alphas_cumprod.to(device)\n", " betas_t = extract(betas, t_double, x_double.shape, device=device)\n", - " 
sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t_double, x_double.shape, device=device)\n", - " sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t_double, x_double.shape, device=device)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t_double, x_double.shape, device=device\n", + " )\n", + " sqrt_recip_alphas_t = extract(\n", + " sqrt_recip_alphas, t_double, x_double.shape, device=device\n", + " )\n", "\n", " # classifier free sampling interpolates between guided and non guided using `cond_weight`\n", " classes_masked = classes * context_mask\n", @@ -944,7 +981,9 @@ " # print (show_out_test[1])\n", " # print (show_out_test[2])\n", " # print (show_out_test[3])\n", - " preds, cross_map_full = model(x_double, time=t_double, classes=classes_masked) # I added cross_map\n", + " preds, cross_map_full = model(\n", + " x_double, time=t_double, classes=classes_masked\n", + " ) # I added cross_map\n", " model.output_attention = False\n", " cross_map = cross_map_full[:batch_size]\n", " eps1 = (1 + cond_weight) * preds[:batch_size]\n", @@ -954,13 +993,18 @@ " # Equation 11 in the paper\n", " # Use our model (noise predictor) to predict the mean\n", " model_mean = sqrt_recip_alphas_t[:batch_size] * (\n", - " x - betas_t[:batch_size] * x_t / sqrt_one_minus_alphas_cumprod_t[:batch_size]\n", + " x\n", + " - betas_t[:batch_size]\n", + " * x_t\n", + " / sqrt_one_minus_alphas_cumprod_t[:batch_size]\n", " )\n", "\n", " if t_index == 0:\n", " return model_mean, cross_map\n", " else:\n", - " posterior_variance_t = extract(posterior_variance, t, x.shape, device=device)\n", + " posterior_variance_t = extract(\n", + " posterior_variance, t, x.shape, device=device\n", + " )\n", " noise = torch.randn_like(x)\n", " # Algorithm 2 line 4:\n", " return model_mean + torch.sqrt(posterior_variance_t) * noise, cross_map\n", @@ -1013,8 +1057,17 @@ " else:\n", " sampling_fn = partial(p_sample)\n", "\n", - " for i in tqdm(reversed(range(0, timesteps)), desc='sampling loop time step', total=timesteps):\n", - " img, cross_matrix = sampling_fn(model, x=img, t=torch.full((b,), i, device=device, dtype=torch.long), t_index=i)\n", + " for i in tqdm(\n", + " reversed(range(0, timesteps)),\n", + " desc='sampling loop time step',\n", + " total=timesteps,\n", + " ):\n", + " img, cross_matrix = sampling_fn(\n", + " model,\n", + " x=img,\n", + " t=torch.full((b,), i, device=device, dtype=torch.long),\n", + " t_index=i,\n", + " )\n", " imgs.append(img.cpu().numpy())\n", " cross_images_final.append(cross_matrix.cpu().numpy())\n", " if get_cross_map:\n", @@ -1142,7 +1195,9 @@ " \"\"\"\n", " steps = timesteps + 1\n", " x = torch.linspace(0, timesteps, steps)\n", - " alphas_cumprod = torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " alphas_cumprod = (\n", + " torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " )\n", " alphas_cumprod = alphas_cumprod / alphas_cumprod[0]\n", " betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1])\n", " return torch.clip(betas, 0.0001, 0.9999)\n", @@ -1209,19 +1264,33 @@ "outputs": [], "source": [ "# forward diffusion\n", - "def q_sample(x_start, t, sqrt_alphas_cumprod, sqrt_one_minus_alphas_cumprod, noise=None, device=None):\n", + "def q_sample(\n", + " x_start,\n", + " t,\n", + " sqrt_alphas_cumprod,\n", + " sqrt_one_minus_alphas_cumprod,\n", + " noise=None,\n", + " device=None,\n", + "):\n", " if noise is None:\n", " noise = torch.randn_like(x_start)\n", "\n", - " sqrt_alphas_cumprod_t = 
extract(sqrt_alphas_cumprod, t, x_start.shape).to(device)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x_start.shape).to(device)\n", + " sqrt_alphas_cumprod_t = extract(sqrt_alphas_cumprod, t, x_start.shape).to(\n", + " device\n", + " )\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x_start.shape\n", + " ).to(device)\n", "\n", " # print (sqrt_alphas_cumprod_t , sqrt_one_minus_alphas_cumprod_t , t)\n", " # print (sqrt_alphas_cumprod_t.device, 'sqrt_alphas_cumprod_t')\n", " # print (x_start.device, 'x_start' )\n", " # print (sqrt_one_minus_alphas_cumprod_t.device , 'sqrt_one_minus_alphas_cumprod_t')\n", " # print (noise.device , 'noise.device')\n", - " return sqrt_alphas_cumprod_t * x_start + sqrt_one_minus_alphas_cumprod_t * noise" + " return (\n", + " sqrt_alphas_cumprod_t * x_start\n", + " + sqrt_one_minus_alphas_cumprod_t * noise\n", + " )" ] }, { @@ -1365,7 +1434,9 @@ " ) # this is the auto generated noise given t and Noise\n", " # print('max_q_sample', x_noisy.max(), 'mean_q_sample',x_noisy.mean() )\n", "\n", - " context_mask = torch.bernoulli(torch.zeros(classes.shape[0]) + (1 - p_uncond)).to(device)\n", + " context_mask = torch.bernoulli(\n", + " torch.zeros(classes.shape[0]) + (1 - p_uncond)\n", + " ).to(device)\n", " # print ('context mask', context_mask)\n", " # print ('classes', classes)\n", "\n", @@ -1374,7 +1445,9 @@ " # nn.Embedding needs type to be long, multiplying with mask changes type\n", " classes = classes.type(torch.long)\n", " # print ('final class',classes )\n", - " predicted_noise = denoise_model(x_noisy, t, classes) # this is the predicted noise given the model and step t\n", + " predicted_noise = denoise_model(\n", + " x_noisy, t, classes\n", + " ) # this is the predicted noise given the model and step t\n", " # print('max_predicted', x_noisy.max(), 'mean_predicted',x_noisy.mean() )\n", "\n", " # #predicted is ok (clipped)\n", @@ -1522,7 +1595,12 @@ " def __init__(self):\n", " super().__init__()\n", "\n", - " self.res = nn.Sequential(ResBlock(1, 2, 1), ResBlock(1, 2, 1), ResBlock(1, 2, 1), ResBlock(1, 2, 1))\n", + " self.res = nn.Sequential(\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " )\n", "\n", " self.conv = nn.Sequential(\n", " ConvBlock_2d(in_channels=1, out_channels=2),\n", @@ -1541,7 +1619,9 @@ " self.fc = nn.Sequential(\n", " nn.Linear(800, 800),\n", " # nn.GELU(),\n", - " nn.BatchNorm1d(800), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", + " nn.BatchNorm1d(\n", + " 800\n", + " ), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", " # nn.Linear(400, 400),\n", " # nn.BatchNorm1d(400),\n", " # nn.GELU(),\n", @@ -1551,7 +1631,9 @@ " self.fc2 = nn.Sequential(\n", " nn.Linear(400, 800),\n", " # nn.GELU(),\n", - " nn.BatchNorm1d(800), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", + " nn.BatchNorm1d(\n", + " 800\n", + " ), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", " # nn.Linear(400, 400),\n", " # nn.GELU(),\n", " # nn.BatchNorm1d(400),\n", @@ -1690,7 +1772,11 @@ " generic one layer FC NN for embedding things \n", " '''\n", " self.input_dim = input_dim\n", - " layers = [nn.Linear(input_dim, emb_dim), nn.GELU(), nn.Linear(emb_dim, emb_dim)]\n", + " layers = [\n", + " nn.Linear(input_dim, emb_dim),\n", + " nn.GELU(),\n", + " nn.Linear(emb_dim, emb_dim),\n", + " ]\n", " self.model = nn.Sequential(*layers)\n", "\n", " def forward(self, x):\n", @@ -1783,7 +1869,8 @@ "\n", "def 
Upsample(dim, dim_out=None):\n", " return nn.Sequential(\n", - " nn.Upsample(scale_factor=2, mode='nearest'), nn.Conv2d(dim, default(dim_out, dim), 3, padding=1)\n", + " nn.Upsample(scale_factor=2, mode='nearest'),\n", + " nn.Conv2d(dim, default(dim_out, dim), 3, padding=1),\n", " )\n", "\n", "\n", @@ -1861,11 +1948,17 @@ "class ResnetBlock(nn.Module):\n", " def __init__(self, dim, dim_out, *, time_emb_dim=None, groups=8):\n", " super().__init__()\n", - " self.mlp = nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) if exists(time_emb_dim) else None\n", + " self.mlp = (\n", + " nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2))\n", + " if exists(time_emb_dim)\n", + " else None\n", + " )\n", "\n", " self.block1 = Block(dim, dim_out, groups=groups)\n", " self.block2 = Block(dim_out, dim_out, groups=groups)\n", - " self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " self.res_conv = (\n", + " nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " )\n", "\n", " def forward(self, x, time_emb=None):\n", " scale_shift = None\n", @@ -1882,8 +1975,22 @@ "\n", "\n", "class ResnetBlockClassConditioned(ResnetBlock):\n", - " def __init__(self, dim, dim_out, *, num_classes, class_embed_dim, time_emb_dim=None, groups=8):\n", - " super().__init__(dim=dim + class_embed_dim, dim_out=dim_out, time_emb_dim=time_emb_dim, groups=groups)\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " dim_out,\n", + " *,\n", + " num_classes,\n", + " class_embed_dim,\n", + " time_emb_dim=None,\n", + " groups=8,\n", + " ):\n", + " super().__init__(\n", + " dim=dim + class_embed_dim,\n", + " dim_out=dim_out,\n", + " time_emb_dim=time_emb_dim,\n", + " groups=groups,\n", + " )\n", " # print ('n_classes', num_classes, 'class_embed_dim', class_embed_dim)\n", " self.class_mlp = EmbedFC(num_classes, class_embed_dim)\n", "\n", @@ -1905,12 +2012,17 @@ " self.heads = heads\n", " hidden_dim = dim_head * heads\n", " self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False)\n", - " self.to_out = nn.Sequential(nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim))\n", + " self.to_out = nn.Sequential(\n", + " nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim)\n", + " )\n", "\n", " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q = q.softmax(dim=-2)\n", " k = k.softmax(dim=-1)\n", @@ -1921,7 +2033,9 @@ " context = torch.einsum('b h d n, b h e n -> b h d e', k, v)\n", "\n", " out = torch.einsum('b h d e, b h d n -> b h e n', context, q)\n", - " out = rearrange(out, 'b h c (x y) -> b (h c) x y', h=self.heads, x=h, y=w)\n", + " out = rearrange(\n", + " out, 'b h c (x y) -> b (h c) x y', h=self.heads, x=h, y=w\n", + " )\n", " return self.to_out(out)\n", "\n", "\n", @@ -1937,7 +2051,10 @@ " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q, k = map(l2norm, (q, k))\n", "\n", @@ -1964,9 +2081,15 @@ " qkv_x = self.to_qkv(x).chunk(3, dim=1)\n", " qkv_y = self.to_qkv(y).chunk(3, dim=1)\n", "\n", - " q_x, k_x, v_x = map(lambda t: rearrange(t, 'b (h 
c) x y -> b h c (x y)', h=self.heads), qkv_x)\n", + " q_x, k_x, v_x = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv_x,\n", + " )\n", "\n", - " q_y, k_y, v_y = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv_y)\n", + " q_y, k_y, v_y = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv_y,\n", + " )\n", "\n", " q, k = map(l2norm, (q_x, k_y))\n", "\n", @@ -2004,7 +2127,9 @@ "\n", "\n", "def log_snr_to_alpha_sigma(log_snr):\n", - " return torch.sqrt(torch.sigmoid(log_snr)), torch.sqrt(torch.sigmoid(-log_snr))\n", + " return torch.sqrt(torch.sigmoid(log_snr)), torch.sqrt(\n", + " torch.sigmoid(-log_snr)\n", + " )\n", "\n", "\n", "class Unet_lucas(nn.Module):\n", @@ -2055,7 +2180,10 @@ " fourier_dim = learned_sinusoidal_dim + 1\n", "\n", " self.time_mlp = nn.Sequential(\n", - " sinu_pos_emb, nn.Linear(fourier_dim, time_dim), nn.GELU(), nn.Linear(time_dim, time_dim)\n", + " sinu_pos_emb,\n", + " nn.Linear(fourier_dim, time_dim),\n", + " nn.GELU(),\n", + " nn.Linear(time_dim, time_dim),\n", " )\n", "\n", " if num_classes is not None:\n", @@ -2076,7 +2204,9 @@ " block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n", " block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n", " Residual(PreNorm(dim_in, LinearAttention(dim_in))),\n", - " Downsample(dim_in, dim_out) if not is_last else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n", + " Downsample(dim_in, dim_out)\n", + " if not is_last\n", + " else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n", " ]\n", " )\n", " )\n", @@ -2092,10 +2222,16 @@ " self.ups.append(\n", " nn.ModuleList(\n", " [\n", - " block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n", - " block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n", + " block_klass(\n", + " dim_out + dim_in, dim_out, time_emb_dim=time_dim\n", + " ),\n", + " block_klass(\n", + " dim_out + dim_in, dim_out, time_emb_dim=time_dim\n", + " ),\n", " Residual(PreNorm(dim_out, LinearAttention(dim_out))),\n", - " Upsample(dim_out, dim_in) if not is_last else nn.Conv2d(dim_out, dim_in, 3, padding=1),\n", + " Upsample(dim_out, dim_in)\n", + " if not is_last\n", + " else nn.Conv2d(dim_out, dim_in, 3, padding=1),\n", " ]\n", " )\n", " )\n", @@ -2108,7 +2244,12 @@ " # print('self.final_conv' , self.final_conv)\n", "\n", " self.cross_attn = EfficientAttention(\n", - " dim=200, dim_head=64, heads=1, memory_efficient=True, q_bucket_size=1024, k_bucket_size=2048\n", + " dim=200,\n", + " dim_head=64,\n", + " heads=1,\n", + " memory_efficient=True,\n", + " q_bucket_size=1024,\n", + " k_bucket_size=2048,\n", " )\n", "\n", " # mask = torch.ones(1, 65536).bool().cuda()\n", @@ -2186,7 +2327,8 @@ " # crossattention_out = self.cross_attention(x_reshaped, t_cross_reshaped)\n", "\n", " crossattention_out = self.cross_attn(\n", - " self.norm_to_cross(x_reshaped.reshape(-1, 800)).reshape(-1, 4, 200), context=t_cross_reshaped\n", + " self.norm_to_cross(x_reshaped.reshape(-1, 800)).reshape(-1, 4, 200),\n", + " context=t_cross_reshaped,\n", " ) # (-1,1, 4, 200)\n", " crossattention_out = x.view(-1, 1, 4, 200)\n", " # print ('crossattention_out', crossattention_out.shape)\n", @@ -5331,11 +5473,15 @@ "\n", "final_comp_values_trian = encode_data.train['motifs_per_components_dict']\n", "final_comp_values_test = encode_data.test['motifs_per_components_dict']\n", - "final_comp_values_shuffle = encode_data.train_shuffle['motifs_per_components_dict']\n", + "final_comp_values_shuffle = encode_data.train_shuffle[\n", 
+ " 'motifs_per_components_dict'\n", + "]\n", "\n", "\n", "df = encode_data.train['dataset']\n", - "cell_components = df.sort_values('TAG')['TAG'].unique().tolist() # I need to add this function inside the dataloader" + "cell_components = (\n", + " df.sort_values('TAG')['TAG'].unique().tolist()\n", + ") # I need to add this function inside the dataloader" ] }, { @@ -5469,7 +5615,9 @@ " df_plot.columns = [x.split('_')[0] for x in cell_components]\n", " df_plot.index = df_plot.columns\n", " sns.heatmap(df_plot, cmap='Blues_r', annot=True, lw=0.1, vmax=1, vmin=0)\n", - " plt.title(f'Kl divergence \\n {x_label} sequences x {y_label} sequences \\n MOTIFS probabilities')\n", + " plt.title(\n", + " f'Kl divergence \\n {x_label} sequences x {y_label} sequences \\n MOTIFS probabilities'\n", + " )\n", " plt.xlabel(f'{x_label} Sequences \\n(motifs dist)')\n", " plt.ylabel(f'{y_label} \\n (motifs dist)')" ] @@ -7929,7 +8077,9 @@ } ], "source": [ - "heat_train_test = kl_comparison_between_dataset(final_comp_values_trian, final_comp_values_test)" + "heat_train_test = kl_comparison_between_dataset(\n", + " final_comp_values_trian, final_comp_values_test\n", + ")" ] }, { @@ -10502,7 +10652,9 @@ } ], "source": [ - "heat_train_shuffle = kl_comparison_between_dataset(final_comp_values_trian, final_comp_values_shuffle)" + "heat_train_shuffle = kl_comparison_between_dataset(\n", + " final_comp_values_trian, final_comp_values_shuffle\n", + ")" ] }, { @@ -10660,7 +10812,13 @@ ], "source": [ "dna_alphabet = ['A', 'C', 'T', 'G']\n", - "x_train_seq = np.array([one_hot_encode(x, dna_alphabet, 200) for x in tqdm_notebook(df['sequence']) if 'N' not in x])\n", + "x_train_seq = np.array(\n", + " [\n", + " one_hot_encode(x, dna_alphabet, 200)\n", + " for x in tqdm_notebook(df['sequence'])\n", + " if 'N' not in x\n", + " ]\n", + ")\n", "X_train = x_train_seq\n", "X_train = np.array([x.T.tolist() for x in X_train])\n", "X_train[X_train == 0] = -1\n", @@ -10803,8 +10961,12 @@ } ], "source": [ - "conditional_tag_to_numeric = {x: n + 1 for n, x in enumerate(df.TAG.unique())} # check if this is changing order\n", - "conditional_numeric_to_tag = {n + 1: x for n, x in enumerate(df.TAG.unique())} # check if this is changing order\n", + "conditional_tag_to_numeric = {\n", + " x: n + 1 for n, x in enumerate(df.TAG.unique())\n", + "} # check if this is changing order\n", + "conditional_numeric_to_tag = {\n", + " n + 1: x for n, x in enumerate(df.TAG.unique())\n", + "} # check if this is changing order\n", "\n", "list(conditional_tag_to_numeric.keys())" ] @@ -10882,11 +11044,15 @@ "source": [ "# this needs to be changed is preety uggly\n", "# conditional training init\n", - "conditional_tags_to_numeric = {n + 1: x for n, x in enumerate(df.TAG.unique())} # check if this is changing order\n", + "conditional_tags_to_numeric = {\n", + " n + 1: x for n, x in enumerate(df.TAG.unique())\n", + "} # check if this is changing order\n", "cell_types = sorted(list(conditional_numeric_to_tag.keys()))\n", "print(cell_types)\n", "TOTAL_class_number = 17\n", - "x_train_cell_type = torch.from_numpy(df[\"TAG\"].apply(lambda x: conditional_tag_to_numeric[x]).to_numpy())" + "x_train_cell_type = torch.from_numpy(\n", + " df[\"TAG\"].apply(lambda x: conditional_tag_to_numeric[x]).to_numpy()\n", + ")" ] }, { @@ -11380,7 +11546,9 @@ "from pathlib import Path\n", "\n", "\n", - "def save_model(milestone, step, accelerator, opt, model, ema_model, folder_path_string=''):\n", + "def save_model(\n", + " milestone, step, accelerator, opt, model, ema_model, 
folder_path_string=''\n", + "):\n", " results_folder = Path(folder_path_string)\n", "\n", " data = {\n", @@ -11395,7 +11563,13 @@ "\n", "\n", "def recreating_models():\n", - " model = Unet_lucas(dim=200, channels=1, dim_mults=(1, 2, 4), resnet_block_groups=4, num_classes=TOTAL_class_number)\n", + " model = Unet_lucas(\n", + " dim=200,\n", + " channels=1,\n", + " dim_mults=(1, 2, 4),\n", + " resnet_block_groups=4,\n", + " num_classes=TOTAL_class_number,\n", + " )\n", "\n", " # ema = EMA(0.995)\n", " # ema_model = copy.deepcopy(model).eval().requires_grad_(False)\n", @@ -12147,7 +12321,9 @@ }, "outputs": [], "source": [ - "model_loaded, step = load_model('model_48k_sequences_per_group_K562_hESCT0_HepG2_GM12878_12k.pt')" + "model_loaded, step = load_model(\n", + " 'model_48k_sequences_per_group_K562_hESCT0_HepG2_GM12878_12k.pt'\n", + ")" ] }, { @@ -12184,7 +12360,9 @@ "# sqrt_one_minus_alphas_cumprod = torch.sqrt(1. - alphas_cumprod)\n", "sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod)\n", "# calculations for posterior q(x_{t-1} | x_t, x_0)\n", - "posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + "posterior_variance = (\n", + " betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + ")\n", "print(torch.cuda.is_initialized())\n", "\n", "loaded_additional_variables = {\n", @@ -12203,7 +12381,12 @@ "\n", "class_in = torch.Tensor([2]).cuda()\n", "sample_test_out, attention_out = sample(\n", - " classes=class_in, batch_size=1, channels=1, cond_weight=1, get_cross_map=True, **loaded_additional_variables\n", + " classes=class_in,\n", + " batch_size=1,\n", + " channels=1,\n", + " cond_weight=1,\n", + " get_cross_map=True,\n", + " **loaded_additional_variables,\n", ")\n", "model_loaded.output_attention = False\n", "\n", @@ -12336,7 +12519,10 @@ "outputs": [], "source": [ "def kl_comparison_generated_sequences(\n", - " components_list, dict_targer_components, additional_variables=None, number_of_sequences_to_sample_by_component=500\n", + " components_list,\n", + " dict_targer_components,\n", + " additional_variables=None,\n", + " number_of_sequences_to_sample_by_component=500,\n", "):\n", " '''\n", " ex: components_list = [3, 8, 12, 15]\n", diff --git a/notebooks/experiments/conditional_diffusion/dna_diff_baseline_conditional_UNET.ipynb b/notebooks/experiments/conditional_diffusion/dna_diff_baseline_conditional_UNET.ipynb index a1a76108..95528bd7 100644 --- a/notebooks/experiments/conditional_diffusion/dna_diff_baseline_conditional_UNET.ipynb +++ b/notebooks/experiments/conditional_diffusion/dna_diff_baseline_conditional_UNET.ipynb @@ -383,7 +383,9 @@ " '\\n'\n", ")\n", "# Cell names\n", - "CELL_NAMES = {int(x.split(' ')[0]): x.split(' ')[1] for x in ENUMARATED_CELL_NAME}\n", + "CELL_NAMES = {\n", + " int(x.split(' ')[0]): x.split(' ')[1] for x in ENUMARATED_CELL_NAME\n", + "}\n", "# Number of epochs to train for\n", "EPOCHS = 10000\n", "# save and compare metrics after specified epoch\n", @@ -547,7 +549,9 @@ }, "outputs": [], "source": [ - "def motif_scoring_KL_divergence(original: pd.Series, generated: pd.Series) -> torch.Tensor:\n", + "def motif_scoring_KL_divergence(\n", + " original: pd.Series, generated: pd.Series\n", + ") -> torch.Tensor:\n", " \"\"\"\n", " This function encapsulates the logic of evaluating the KL divergence metric\n", " between two sequences.\n", @@ -572,7 +576,10 @@ "outputs": [], "source": [ "def compare_motif_list(\n", - " df_motifs_a, df_motifs_b, motif_scoring_metric=motif_scoring_KL_divergence, 
plot_motif_probs=False\n", + " df_motifs_a,\n", + " df_motifs_b,\n", + " motif_scoring_metric=motif_scoring_KL_divergence,\n", + " plot_motif_probs=False,\n", "):\n", " \"\"\"\n", " This function encapsulates the logic of evaluating the difference between the distribution\n", @@ -582,7 +589,9 @@ " for that is that they dont satisfy certain properties, such as in KL case, the simmetry property.\n", " Hence it makes a big difference what are the positions of input.\n", " \"\"\"\n", - " set_all_mot = set(df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist())\n", + " set_all_mot = set(\n", + " df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist()\n", + " )\n", " create_new_matrix = []\n", " for x in set_all_mot:\n", " list_in = []\n", @@ -599,10 +608,16 @@ "\n", " create_new_matrix.append(list_in)\n", "\n", - " df_motifs = pd.DataFrame(create_new_matrix, columns=['motif', 'motif_a', 'motif_b'])\n", + " df_motifs = pd.DataFrame(\n", + " create_new_matrix, columns=['motif', 'motif_a', 'motif_b']\n", + " )\n", "\n", - " df_motifs['Diffusion_seqs'] = df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", - " df_motifs['Training_seqs'] = df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " df_motifs['Diffusion_seqs'] = (\n", + " df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", + " )\n", + " df_motifs['Training_seqs'] = (\n", + " df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " )\n", " if plot_motif_probs:\n", " plt.rcParams[\"figure.figsize\"] = (3, 3)\n", " sns.regplot(x='Diffusion_seqs', y='Training_seqs', data=df_motifs)\n", @@ -611,7 +626,9 @@ " plt.title('Motifs Probs')\n", " plt.show()\n", "\n", - " return motif_scoring_metric(df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values)" + " return motif_scoring_metric(\n", + " df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values\n", + " )" ] }, { @@ -623,7 +640,9 @@ }, "outputs": [], "source": [ - "def metric_comparison_between_components(original_data, generated_data, x_label_plot, y_label_plot):\n", + "def metric_comparison_between_components(\n", + " original_data, generated_data, x_label_plot, y_label_plot\n", + "):\n", " \"\"\"\n", " This functions takes as inputs dictionaries, which contain as keys different components (cell types)\n", " and as values the distribution of occurances of different motifs. 
These two dictionaries represent two different datasets, i.e.\n", @@ -636,7 +655,9 @@ " for components_1, motif_occurance_frequency in original_data.items():\n", " comparisons_single_component = []\n", " for components_2 in generated_data.keys():\n", - " compared_motifs_occurances = compare_motif_list(motif_occurance_frequency, generated_data[components_2])\n", + " compared_motifs_occurances = compare_motif_list(\n", + " motif_occurance_frequency, generated_data[components_2]\n", + " )\n", " comparisons_single_component.append(compared_motifs_occurances)\n", "\n", " final_comparison_all_components.append(comparisons_single_component)\n", @@ -646,7 +667,9 @@ " df_plot.columns = [CELL_NAMES[x] for x in cell_components]\n", " df_plot.index = df_plot.columns\n", " sns.heatmap(df_plot, cmap='Blues_r', annot=True, lw=0.1, vmax=1, vmin=0)\n", - " plt.title(f'Kl divergence \\n {x_label_plot} sequences x {y_label_plot} sequences \\n MOTIFS probabilities')\n", + " plt.title(\n", + " f'Kl divergence \\n {x_label_plot} sequences x {y_label_plot} sequences \\n MOTIFS probabilities'\n", + " )\n", " plt.xlabel(f'{x_label_plot} Sequences \\n(motifs dist)')\n", " plt.ylabel(f'{y_label_plot} \\n (motifs dist)')" ] @@ -718,7 +741,9 @@ " self.step = 0\n", "\n", " def update_model_average(self, ma_model, current_model):\n", - " for current_params, ma_params in zip(current_model.parameters(), ma_model.parameters()):\n", + " for current_params, ma_params in zip(\n", + " current_model.parameters(), ma_model.parameters()\n", + " ):\n", " old_weight, up_weight = ma_params.data, current_params.data\n", " ma_params.data = self.update_average(old_weight, up_weight)\n", "\n", @@ -786,7 +811,11 @@ " self.change_comp_index = change_component_index\n", " self.data = self.read_csv()\n", " self.df_generate = self.create_subsetted_components_df()\n", - " self.df_train_raw_grouped, self.df_test_raw_grouped, self.df_shuffled_raw_grouped = self.create_train_groups()\n", + " (\n", + " self.df_train_raw_grouped,\n", + " self.df_test_raw_grouped,\n", + " self.df_shuffled_raw_grouped,\n", + " ) = self.create_train_groups()\n", "\n", " def read_csv(self):\n", " \"\"\"\n", @@ -802,16 +831,25 @@ " Subset the raw csv based on components.\n", " \"\"\"\n", " df_subsetted_components = self.data.copy()\n", - " if self.subset_components != None and type(self.subset_components) == list:\n", + " if (\n", + " self.subset_components != None\n", + " and type(self.subset_components) == list\n", + " ):\n", " df_subsetted_components = df_subsetted_components.query(\n", - " ' or '.join([f'component == {c}' for c in self.subset_components])\n", + " ' or '.join(\n", + " [f'component == {c}' for c in self.subset_components]\n", + " )\n", " ).copy()\n", " print('Subseting...')\n", "\n", " if self.plot_components_distribution:\n", " (\n", - " df_subsetted_components.groupby('component').count()['raw_sequence']\n", - " / df_subsetted_components.groupby('component').count()['raw_sequence'].sum()\n", + " df_subsetted_components.groupby('component').count()[\n", + " 'raw_sequence'\n", + " ]\n", + " / df_subsetted_components.groupby('component')\n", + " .count()['raw_sequence']\n", + " .sum()\n", " ).plot.bar()\n", " plt.title('Component % on Training Sample')\n", " plt.show()\n", @@ -822,13 +860,15 @@ " \"\"\"\n", " Split the subsetted df into train test and suffled.\n", " \"\"\"\n", - " df_sampled = self.df_generate.sample(self.sample_number * 2) # getting train and test\n", + " df_sampled = self.df_generate.sample(\n", + " self.sample_number * 2\n", + " ) 
# getting train and test\n", " df_train = df_sampled.iloc[: self.sample_number].copy()\n", " df_test = df_sampled.iloc[self.sample_number :].copy()\n", " df_train_shuffled = df_train.copy()\n", - " df_train_shuffled['raw_sequence'] = df_train_shuffled['raw_sequence'].apply(\n", - " lambda x: ''.join(random.sample(list(x), len(x)))\n", - " )\n", + " df_train_shuffled['raw_sequence'] = df_train_shuffled[\n", + " 'raw_sequence'\n", + " ].apply(lambda x: ''.join(random.sample(list(x), len(x))))\n", " return df_train, df_test, df_train_shuffled\n", "\n", "\n", @@ -844,7 +884,12 @@ " \"\"\"\n", "\n", " def __init__(\n", - " self, df_train_raw_grouped, df_test_raw_grouped, df_shuffled_raw_grouped, subset_components, sample_number\n", + " self,\n", + " df_train_raw_grouped,\n", + " df_test_raw_grouped,\n", + " df_shuffled_raw_grouped,\n", + " subset_components,\n", + " sample_number,\n", " ):\n", " \"\"\" \"\"\"\n", " self.df_train_raw_grouped = df_train_raw_grouped\n", @@ -861,9 +906,15 @@ " \"\"\"\n", " Fetch the motifs and generate fastas for train, test and shuffled.\n", " \"\"\"\n", - " self.train = self.generate_motifs_and_fastas(self.df_train_raw_grouped, 'train')\n", - " self.test = self.generate_motifs_and_fastas(self.df_test_raw_grouped, 'test')\n", - " self.train_shuffle = self.generate_motifs_and_fastas(self.df_shuffled_raw_grouped, 'train_shuffle')\n", + " self.train = self.generate_motifs_and_fastas(\n", + " self.df_train_raw_grouped, 'train'\n", + " )\n", + " self.test = self.generate_motifs_and_fastas(\n", + " self.df_test_raw_grouped, 'test'\n", + " )\n", + " self.train_shuffle = self.generate_motifs_and_fastas(\n", + " self.df_shuffled_raw_grouped, 'train_shuffle'\n", + " )\n", "\n", " def generate_motifs_and_fastas(self, df, name):\n", " \"\"\"\n", @@ -876,7 +927,8 @@ " print('Generating Fasta and Motis:', name)\n", " print('---' * 10)\n", " fasta_saved = self.save_fasta(\n", - " df, f\"{name}_{self.sample_number}_{'_'.join([str(c) for c in self.subset_components])}\"\n", + " df,\n", + " f\"{name}_{self.sample_number}_{'_'.join([str(c) for c in self.subset_components])}\",\n", " )\n", " print('Generating Motifs (all seqs)')\n", " motif_all_components = self.motifs_from_fasta(fasta_saved)\n", @@ -896,13 +948,22 @@ " \"\"\"\n", " print('Computing Motifs....')\n", " !gimme scan $fasta -p JASPAR2020_vertebrates -g hg38 > train_results_motifs.bed\n", - " df_results_seq_guime = pd.read_csv('train_results_motifs.bed', sep='\\t', skiprows=5, header=None)\n", + " df_results_seq_guime = pd.read_csv(\n", + " 'train_results_motifs.bed', sep='\\t', skiprows=5, header=None\n", + " )\n", " df_results_seq_guime['motifs'] = df_results_seq_guime[8].apply(\n", " lambda x: x.split('motif_name \"')[1].split('\"')[0]\n", " )\n", "\n", - " df_results_seq_guime[0] = df_results_seq_guime[0].apply(lambda x: '_'.join(x.split('_')[:-1]))\n", - " df_results_seq_guime_count_out = df_results_seq_guime[[0, 'motifs']].drop_duplicates().groupby('motifs').count()\n", + " df_results_seq_guime[0] = df_results_seq_guime[0].apply(\n", + " lambda x: '_'.join(x.split('_')[:-1])\n", + " )\n", + " df_results_seq_guime_count_out = (\n", + " df_results_seq_guime[[0, 'motifs']]\n", + " .drop_duplicates()\n", + " .groupby('motifs')\n", + " .count()\n", + " )\n", " return df_results_seq_guime_count_out\n", "\n", " def save_fasta(self, df, name_fasta):\n", @@ -1052,7 +1113,9 @@ "source": [ "# Note that this .csv dataset should be downloaded from here: !wget 
https://www.dropbox.com/s/db6up7c0d4jwdp4/train_all_classifier_WM20220916.csv.gz?dl=2\n", "# And then potentially saved locally (in gc drive)\n", - "raw_data = DataLoading(\"train_all_classifier_WM20220916.csv\", subset_components=[3, 8, 12, 15])\n", + "raw_data = DataLoading(\n", + " \"train_all_classifier_WM20220916.csv\", subset_components=[3, 8, 12, 15]\n", + ")\n", "preprocessed_data = DataPreprocessing(\n", " raw_data.df_train_raw_grouped,\n", " raw_data.df_test_raw_grouped,\n", @@ -1172,7 +1235,9 @@ " \"\"\"\n", " steps = timesteps + 1\n", " x = torch.linspace(0, timesteps, steps)\n", - " alphas_cumprod = torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " alphas_cumprod = (\n", + " torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " )\n", " alphas_cumprod = alphas_cumprod / alphas_cumprod[0]\n", " betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1])\n", " return torch.clip(betas, 0.0001, 0.9999)\n", @@ -1210,7 +1275,9 @@ "# sqrt_one_minus_alphas_cumprod = torch.sqrt(1. - alphas_cumprod)\n", "sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod)\n", "# calculations for posterior q(x_{t-1} | x_t, x_0)\n", - "posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + "posterior_variance = (\n", + " betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + ")\n", "\n", "\n", "def extract(a, t, x_shape):\n", @@ -1225,13 +1292,17 @@ "@torch.no_grad()\n", "def p_sample(model, x, t, t_index):\n", " betas_t = extract(betas, t, x.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x.shape\n", + " )\n", " # print (x.shape, 'x_shape')\n", " sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t, x.shape)\n", "\n", " # Equation 11 in the paper\n", " # Use our model (noise predictor) to predict the mean\n", - " model_mean = sqrt_recip_alphas_t * (x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t)\n", + " model_mean = sqrt_recip_alphas_t * (\n", + " x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t\n", + " )\n", "\n", " if t_index == 0:\n", " return model_mean\n", @@ -1249,9 +1320,17 @@ "def p_ddim_sample(model, x, t, t_index, eta=0, temp=1.0):\n", " alpha_t = extract(alphas_cumprod, t, x.shape)\n", " alpha_prev_t = extract(alphas_cumprod_prev, t, x.shape)\n", - " sigma = eta * ((1 - alpha_prev_t) / (1 - alpha_t) * (1 - alpha_t / alpha_prev_t)) ** 0.5\n", - " sqrt_one_minus_alphas_cumprod = extract(sqrt_one_minus_alphas_cumprod, t, x.shape)\n", - " pred_x0 = (x - sqrt_one_minus_alphas_cumprod * model(x, time=t)) / (alpha_t**0.5)\n", + " sigma = (\n", + " eta\n", + " * ((1 - alpha_prev_t) / (1 - alpha_t) * (1 - alpha_t / alpha_prev_t))\n", + " ** 0.5\n", + " )\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x.shape\n", + " ) # local _t name: rebinding the global name made it local (UnboundLocalError)\n", + " pred_x0 = (x - sqrt_one_minus_alphas_cumprod_t * model(x, time=t)) / (\n", + " alpha_t**0.5\n", + " )\n", " dir_xt = (1.0 - alpha_prev_t - sigma**2).sqrt() * model(x, time=t)\n", " if sigma == 0.0:\n", " noise = 0.0\n", @@ -1265,7 +1344,9 @@ "\n", "@torch.no_grad()\n", - "def p_sample_guided(model, x, classes, t, t_index, context_mask, cond_weight=0.0):\n", + "def p_sample_guided(\n", + " model, x, classes, t, t_index, context_mask, cond_weight=0.0\n", + "):\n", " # adapted from: https://openreview.net/pdf?id=qw8AKxfYbI\n", " # print (classes[0])\n", " batch_size = 
x.shape[0]\n", @@ -1273,7 +1354,9 @@ " t_double = t.repeat(2)\n", " x_double = x.repeat(2, 1, 1, 1)\n", " betas_t = extract(betas, t_double, x_double.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t_double, x_double.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t_double, x_double.shape\n", + " )\n", " sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t_double, x_double.shape)\n", "\n", " # classifier free sampling interpolates between guided and non guided using `cond_weight`\n", @@ -1288,7 +1371,10 @@ " # Equation 11 in the paper\n", " # Use our model (noise predictor) to predict the mean\n", " model_mean = sqrt_recip_alphas_t[:batch_size] * (\n", - " x - betas_t[:batch_size] * x_t / sqrt_one_minus_alphas_cumprod_t[:batch_size]\n", + " x\n", + " - betas_t[:batch_size]\n", + " * x_t\n", + " / sqrt_one_minus_alphas_cumprod_t[:batch_size]\n", " )\n", "\n", " if t_index == 0:\n", @@ -1318,19 +1404,40 @@ " classes = classes.repeat(2)\n", " context_mask = context_mask.repeat(2)\n", " context_mask[n_sample:] = 0.0 # makes second half of batch context free\n", - " sampling_fn = partial(p_sample_guided, classes=classes, cond_weight=cond_weight, context_mask=context_mask)\n", + " sampling_fn = partial(\n", + " p_sample_guided,\n", + " classes=classes,\n", + " cond_weight=cond_weight,\n", + " context_mask=context_mask,\n", + " )\n", " else:\n", " sampling_fn = partial(p_sample)\n", "\n", - " for i in tqdm(reversed(range(0, TIMESTEPS)), desc='sampling loop time step', total=TIMESTEPS):\n", - " img = sampling_fn(model, x=img, t=torch.full((b,), i, device=device, dtype=torch.long), t_index=i)\n", + " for i in tqdm(\n", + " reversed(range(0, TIMESTEPS)),\n", + " desc='sampling loop time step',\n", + " total=TIMESTEPS,\n", + " ):\n", + " img = sampling_fn(\n", + " model,\n", + " x=img,\n", + " t=torch.full((b,), i, device=device, dtype=torch.long),\n", + " t_index=i,\n", + " )\n", " imgs.append(img.cpu().numpy())\n", " return imgs\n", "\n", "\n", "@torch.no_grad()\n", - "def sample(model, image_size, classes=None, batch_size=16, channels=3, cond_weight=0):\n", - " return p_sample_loop(model, classes=classes, shape=(batch_size, channels, 4, image_size), cond_weight=cond_weight)" + "def sample(\n", + " model, image_size, classes=None, batch_size=16, channels=3, cond_weight=0\n", + "):\n", + " return p_sample_loop(\n", + " model,\n", + " classes=classes,\n", + " shape=(batch_size, channels, 4, image_size),\n", + " cond_weight=cond_weight,\n", + " )" ] }, { @@ -1360,21 +1467,32 @@ " noise = torch.randn_like(x_start)\n", "\n", " sqrt_alphas_cumprod_t = extract(sqrt_alphas_cumprod, t, x_start.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x_start.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x_start.shape\n", + " )\n", "\n", - " return sqrt_alphas_cumprod_t * x_start + sqrt_one_minus_alphas_cumprod_t * noise\n", + " return (\n", + " sqrt_alphas_cumprod_t * x_start\n", + " + sqrt_one_minus_alphas_cumprod_t * noise\n", + " )\n", "\n", "\n", - "def p_losses(denoise_model, x_start, t, classes, noise=None, loss_type=\"l1\", p_uncond=0.1):\n", + "def p_losses(\n", + " denoise_model, x_start, t, classes, noise=None, loss_type=\"l1\", p_uncond=0.1\n", + "):\n", " \"\"\"\n", " Calculate the loss conditioned and noise injected.\n", " \"\"\"\n", " device = x_start.device\n", " if noise is None:\n", " noise = torch.randn_like(x_start) # 
gauss noise\n", - " x_noisy = q_sample(x_start=x_start, t=t, noise=noise) # this is the auto generated noise given t and Noise\n", + " x_noisy = q_sample(\n", + " x_start=x_start, t=t, noise=noise\n", + " ) # this is the auto generated noise given t and Noise\n", "\n", - " context_mask = torch.bernoulli(torch.zeros(classes.shape[0]) + (1 - p_uncond)).to(device)\n", + " context_mask = torch.bernoulli(\n", + " torch.zeros(classes.shape[0]) + (1 - p_uncond)\n", + " ).to(device)\n", "\n", " # mask for unconditinal guidance\n", " classes = classes * context_mask\n", @@ -1486,7 +1604,8 @@ "\n", "def Upsample(dim, dim_out=None):\n", " return nn.Sequential(\n", - " nn.Upsample(scale_factor=2, mode='nearest'), nn.Conv2d(dim, default(dim_out, dim), 3, padding=1)\n", + " nn.Upsample(scale_factor=2, mode='nearest'),\n", + " nn.Conv2d(dim, default(dim_out, dim), 3, padding=1),\n", " )\n", "\n", "\n", @@ -1546,7 +1665,11 @@ " generic one layer FC NN for embedding things \n", " '''\n", " self.input_dim = input_dim\n", - " layers = [nn.Linear(input_dim, emb_dim), nn.GELU(), nn.Linear(emb_dim, emb_dim)]\n", + " layers = [\n", + " nn.Linear(input_dim, emb_dim),\n", + " nn.GELU(),\n", + " nn.Linear(emb_dim, emb_dim),\n", + " ]\n", " self.model = nn.Sequential(*layers)\n", "\n", " def forward(self, x):\n", @@ -1581,11 +1704,17 @@ "class ResnetBlock(nn.Module):\n", " def __init__(self, dim, dim_out, *, time_emb_dim=None, groups=8):\n", " super().__init__()\n", - " self.mlp = nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) if exists(time_emb_dim) else None\n", + " self.mlp = (\n", + " nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2))\n", + " if exists(time_emb_dim)\n", + " else None\n", + " )\n", "\n", " self.block1 = Block(dim, dim_out, groups=groups)\n", " self.block2 = Block(dim_out, dim_out, groups=groups)\n", - " self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " self.res_conv = (\n", + " nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " )\n", "\n", " def forward(self, x, time_emb=None):\n", " scale_shift = None\n", @@ -1605,8 +1734,22 @@ "\n", "\n", "class ResnetBlockClassConditioned(ResnetBlock):\n", - " def __init__(self, dim, dim_out, *, num_classes, class_embed_dim, time_emb_dim=None, groups=8):\n", - " super().__init__(dim=dim + class_embed_dim, dim_out=dim_out, time_emb_dim=time_emb_dim, groups=groups)\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " dim_out,\n", + " *,\n", + " num_classes,\n", + " class_embed_dim,\n", + " time_emb_dim=None,\n", + " groups=8,\n", + " ):\n", + " super().__init__(\n", + " dim=dim + class_embed_dim,\n", + " dim_out=dim_out,\n", + " time_emb_dim=time_emb_dim,\n", + " groups=groups,\n", + " )\n", " self.class_mlp = EmbedFC(num_classes, class_embed_dim)\n", "\n", " def forward(self, x, time_emb=None, c=None):\n", @@ -1628,12 +1771,17 @@ " self.heads = heads\n", " hidden_dim = dim_head * heads\n", " self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False)\n", - " self.to_out = nn.Sequential(nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim))\n", + " self.to_out = nn.Sequential(\n", + " nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim)\n", + " )\n", "\n", " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q = 
q.softmax(dim=-2)\n", " k = k.softmax(dim=-1)\n", @@ -1644,7 +1792,9 @@ " context = torch.einsum('b h d n, b h e n -> b h d e', k, v)\n", "\n", " out = torch.einsum('b h d e, b h d n -> b h e n', context, q)\n", - " out = rearrange(out, 'b h c (x y) -> b (h c) x y', h=self.heads, x=h, y=w)\n", + " out = rearrange(\n", + " out, 'b h c (x y) -> b (h c) x y', h=self.heads, x=h, y=w\n", + " )\n", " return self.to_out(out)\n", "\n", "\n", @@ -1660,7 +1810,10 @@ " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q, k = map(l2norm, (q, k))\n", "\n", @@ -1710,7 +1863,10 @@ " fourier_dim = learned_sinusoidal_dim + 1\n", "\n", " self.time_mlp = nn.Sequential(\n", - " sinu_pos_emb, nn.Linear(fourier_dim, time_dim), nn.GELU(), nn.Linear(time_dim, time_dim)\n", + " sinu_pos_emb,\n", + " nn.Linear(fourier_dim, time_dim),\n", + " nn.GELU(),\n", + " nn.Linear(time_dim, time_dim),\n", " )\n", "\n", " if num_classes is not None:\n", @@ -1730,7 +1886,9 @@ " block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n", " block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n", " Residual(PreNorm(dim_in, LinearAttention(dim_in))),\n", - " Downsample(dim_in, dim_out) if not is_last else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n", + " Downsample(dim_in, dim_out)\n", + " if not is_last\n", + " else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n", " ]\n", " )\n", " )\n", @@ -1746,10 +1904,16 @@ " self.ups.append(\n", " nn.ModuleList(\n", " [\n", - " block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n", - " block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n", + " block_klass(\n", + " dim_out + dim_in, dim_out, time_emb_dim=time_dim\n", + " ),\n", + " block_klass(\n", + " dim_out + dim_in, dim_out, time_emb_dim=time_dim\n", + " ),\n", " Residual(PreNorm(dim_out, LinearAttention(dim_out))),\n", - " Upsample(dim_out, dim_in) if not is_last else nn.Conv2d(dim_out, dim_in, 3, padding=1),\n", + " Upsample(dim_out, dim_in)\n", + " if not is_last\n", + " else nn.Conv2d(dim_out, dim_in, 3, padding=1),\n", " ]\n", " )\n", " )\n", @@ -1871,7 +2035,9 @@ "\n", "optimizer = Adam(model.parameters(), lr=LEARNING_RATE)\n", "\n", - "live_kl = PlotLosses(groups={'KL': ['train', 'test', 'shuffle'], 'DiffusionLoss': ['loss']})\n", + "live_kl = PlotLosses(\n", + " groups={'KL': ['train', 'test', 'shuffle'], 'DiffusionLoss': ['loss']}\n", + ")\n", "train_kl, test_kl, shuffle_kl = 1, 1, 1" ] }, @@ -1955,10 +2121,14 @@ "\n", "final_comp_values_train = preprocessed_data.train['motifs_per_components_dict']\n", "final_comp_values_test = preprocessed_data.test['motifs_per_components_dict']\n", - "final_comp_values_shuffle = preprocessed_data.train_shuffle['motifs_per_components_dict']\n", + "final_comp_values_shuffle = preprocessed_data.train_shuffle[\n", + " 'motifs_per_components_dict'\n", + "]\n", "\n", "raw_dataset = preprocessed_data.train['dataset']\n", - "cell_components = raw_dataset.sort_values('component')['component'].unique().tolist()" + "cell_components = (\n", + " raw_dataset.sort_values('component')['component'].unique().tolist()\n", + ")" ] }, { @@ -2023,7 +2193,11 @@ ], "source": [ "X_train = np.array(\n", - " [one_hot_encode(x, NUCLEOTIDES, 200) for x in tqdm_notebook(raw_dataset['raw_sequence']) if 'N' not in x]\n", + " [\n", + " 
one_hot_encode(x, NUCLEOTIDES, 200)\n", + " for x in tqdm_notebook(raw_dataset['raw_sequence'])\n", + " if 'N' not in x\n", + " ]\n", ")\n", "X_train = np.array([x.T.tolist() for x in X_train])\n", "X_train[X_train == 0] = -1\n", @@ -2053,7 +2227,9 @@ "source": [ "tf = T.Compose([T.ToTensor()])\n", "seq_dataset = SequenceDataset(seqs=X_train, c=x_train_cell_type, transform=tf)\n", - "train_dl = DataLoader(seq_dataset, BATCH_SIZE, shuffle=True, num_workers=2, pin_memory=True)" + "train_dl = DataLoader(\n", + " seq_dataset, BATCH_SIZE, shuffle=True, num_workers=2, pin_memory=True\n", + ")" ] }, { @@ -2075,7 +2251,12 @@ }, "outputs": [], "source": [ - "def sampling_to_metric(number_of_samples=20, specific_group=False, group_number=None, cond_weight_to_metric=0):\n", + "def sampling_to_metric(\n", + " number_of_samples=20,\n", + " specific_group=False,\n", + " group_number=None,\n", + " cond_weight_to_metric=0,\n", + "):\n", " \"\"\"\n", " This function encapsulates the logic of sampling from the trained model in order to generate counts of the motifs.\n", " The reasoning is that we are interested only in calculating the evaluation metric\n", @@ -2111,10 +2292,21 @@ " save_motifs_syn.close()\n", " # Scan for motifs\n", " !gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs.bed\n", - " df_results_syn = pd.read_csv('syn_results_motifs.bed', sep='\\t', skiprows=5, header=None)\n", - " df_results_syn['motifs'] = df_results_syn[8].apply(lambda x: x.split('motif_name \"')[1].split('\"')[0])\n", - " df_results_syn[0] = df_results_syn[0].apply(lambda x: '_'.join(x.split('_')[:-1]))\n", - " df_motifs_count_syn = df_results_syn[[0, 'motifs']].drop_duplicates().groupby('motifs').count()\n", + " df_results_syn = pd.read_csv(\n", + " 'syn_results_motifs.bed', sep='\\t', skiprows=5, header=None\n", + " )\n", + " df_results_syn['motifs'] = df_results_syn[8].apply(\n", + " lambda x: x.split('motif_name \"')[1].split('\"')[0]\n", + " )\n", + " df_results_syn[0] = df_results_syn[0].apply(\n", + " lambda x: '_'.join(x.split('_')[:-1])\n", + " )\n", + " df_motifs_count_syn = (\n", + " df_results_syn[[0, 'motifs']]\n", + " .drop_duplicates()\n", + " .groupby('motifs')\n", + " .count()\n", + " )\n", " plt.rcParams[\"figure.figsize\"] = (30, 2)\n", " df_motifs_count_syn.sort_values(0, ascending=False).head(50)[0].plot.bar()\n", " plt.show()\n", @@ -2567,7 +2759,9 @@ " batch_size = x.shape[0]\n", "\n", " # Algorithm 1 line 3: sample t uniformally for every example in the batch\n", - " t = torch.randint(0, TIMESTEPS, (batch_size,), device=device).long() # sampling a t to generate t and t+1\n", + " t = torch.randint(\n", + " 0, TIMESTEPS, (batch_size,), device=device\n", + " ).long() # sampling a t to generate t and t+1\n", " loss = p_losses(model, x, t, y, loss_type=\"huber\")\n", " optimizer.zero_grad()\n", " loss.backward()\n", @@ -2587,7 +2781,14 @@ " sampled = torch.from_numpy(np.random.choice(cell_types, sample_bs))\n", " random_classes = sampled.cuda()\n", "\n", - " samples = sample(model, classes=random_classes, image_size=IMAGE_SIZE, batch_size=2, channels=1, cond_weight=1)\n", + " samples = sample(\n", + " model,\n", + " classes=random_classes,\n", + " image_size=IMAGE_SIZE,\n", + " batch_size=2,\n", + " channels=1,\n", + " cond_weight=1,\n", + " )\n", " n_print = 0\n", " for image, class_show in zip(samples[-1], random_classes):\n", " if n_print < 4:\n", @@ -2606,7 +2807,9 @@ " synt_df = sampling_to_metric(20)\n", " train_kl = compare_motif_list(synt_df, 
df_results_seq_guime_count_train)\n", " test_kl = compare_motif_list(synt_df, df_results_seq_guime_count_test)\n", - " shuffle_kl = compare_motif_list(synt_df, df_results_seq_guime_count_shuffle)\n", + " shuffle_kl = compare_motif_list(\n", + " synt_df, df_results_seq_guime_count_shuffle\n", + " )\n", " print('KL_TRAIN', train_kl, 'KL')\n", " print('KL_TEST', test_kl, 'KL')\n", " print('KL_SHUFFLE', shuffle_kl, 'KL')\n", @@ -2614,7 +2817,14 @@ " print('test_kl', test_kl)\n", " print('shuffle_kl', shuffle_kl)\n", " print('loss', loss.item())\n", - " live_kl.update({'train': train_kl, 'test': test_kl, 'shuffle': shuffle_kl, 'loss': loss.item()})\n", + " live_kl.update(\n", + " {\n", + " 'train': train_kl,\n", + " 'test': test_kl,\n", + " 'shuffle': shuffle_kl,\n", + " 'loss': loss.item(),\n", + " }\n", + " )\n", " # DO NOTE ONE THING: THE X AXIS IS NOT THE EPOCHS, but epochs*save_and_sample_every\n", " live_kl.send()" ] @@ -2656,7 +2866,9 @@ } ], "source": [ - "metric_comparison_between_components(final_comp_values_train, final_comp_values_test, \"Train\", \"Test\")" + "metric_comparison_between_components(\n", + " final_comp_values_train, final_comp_values_test, \"Train\", \"Test\"\n", + ")" ] }, { @@ -2686,7 +2898,9 @@ } ], "source": [ - "metric_comparison_between_components(final_comp_values_train, final_comp_values_shuffle, \"Train\", \"Shuffle\")" + "metric_comparison_between_components(\n", + " final_comp_values_train, final_comp_values_shuffle, \"Train\", \"Shuffle\"\n", + ")" ] }, { @@ -2716,7 +2930,9 @@ } ], "source": [ - "metric_comparison_between_components(final_comp_values_test, final_comp_values_shuffle, \"Test\", \"Shuffle\")" + "metric_comparison_between_components(\n", + " final_comp_values_test, final_comp_values_shuffle, \"Test\", \"Shuffle\"\n", + ")" ] } ], diff --git a/notebooks/experiments/conditional_diffusion/dna_diff_baseline_conditional_UNET_with_time_warping.ipynb b/notebooks/experiments/conditional_diffusion/dna_diff_baseline_conditional_UNET_with_time_warping.ipynb index ebc073f4..bcb0e415 100644 --- a/notebooks/experiments/conditional_diffusion/dna_diff_baseline_conditional_UNET_with_time_warping.ipynb +++ b/notebooks/experiments/conditional_diffusion/dna_diff_baseline_conditional_UNET_with_time_warping.ipynb @@ -525,7 +525,9 @@ " '\\n'\n", ")\n", "# Cell names\n", - "CELL_NAMES = {int(x.split(' ')[0]): x.split(' ')[1] for x in ENUMARATED_CELL_NAME}\n", + "CELL_NAMES = {\n", + " int(x.split(' ')[0]): x.split(' ')[1] for x in ENUMARATED_CELL_NAME\n", + "}\n", "# Number of epochs to train for\n", "EPOCHS = 1000\n", "# save and compare metrics after specified epoch\n", @@ -641,7 +643,9 @@ }, "outputs": [], "source": [ - "def motif_scoring_KL_divergence(original: pd.Series, generated: pd.Series) -> torch.Tensor:\n", + "def motif_scoring_KL_divergence(\n", + " original: pd.Series, generated: pd.Series\n", + ") -> torch.Tensor:\n", " \"\"\"\n", " This function encapsulates the logic of evaluating the KL divergence metric\n", " between two sequences.\n", @@ -656,7 +660,9 @@ " return np.sum(kl_pq)\n", "\n", "\n", - "def motif_scoring_JS_divergence(original: pd.Series, generated: pd.Series) -> torch.Tensor:\n", + "def motif_scoring_JS_divergence(\n", + " original: pd.Series, generated: pd.Series\n", + ") -> torch.Tensor:\n", " \"\"\"\n", " This function encapsulates the logic of evaluating the Jensen-Shannon divergence metric\n", " between two sequences.\n", @@ -681,7 +687,10 @@ "outputs": [], "source": [ "def compare_motif_list(\n", - " df_motifs_a, df_motifs_b, 
motif_scoring_metric=motif_scoring_KL_divergence, plot_motif_probs=True\n", + " df_motifs_a,\n", + " df_motifs_b,\n", + " motif_scoring_metric=motif_scoring_KL_divergence,\n", + " plot_motif_probs=True,\n", "):\n", " \"\"\"\n", " This function encapsulates the logic of evaluating the difference between the distribution\n", @@ -691,7 +700,9 @@ " for that is that they dont satisfy certain properties, such as in KL case, the simmetry property.\n", " Hence it makes a big difference what are the positions of input.\n", " \"\"\"\n", - " set_all_mot = set(df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist())\n", + " set_all_mot = set(\n", + " df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist()\n", + " )\n", " create_new_matrix = []\n", " for x in set_all_mot:\n", " list_in = []\n", @@ -708,10 +719,16 @@ "\n", " create_new_matrix.append(list_in)\n", "\n", - " df_motifs = pd.DataFrame(create_new_matrix, columns=['motif', 'motif_a', 'motif_b'])\n", + " df_motifs = pd.DataFrame(\n", + " create_new_matrix, columns=['motif', 'motif_a', 'motif_b']\n", + " )\n", "\n", - " df_motifs['Diffusion_seqs'] = df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", - " df_motifs['Training_seqs'] = df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " df_motifs['Diffusion_seqs'] = (\n", + " df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", + " )\n", + " df_motifs['Training_seqs'] = (\n", + " df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " )\n", " if plot_motif_probs:\n", " plt.rcParams[\"figure.figsize\"] = (3, 3)\n", " sns.regplot(x='Diffusion_seqs', y='Training_seqs', data=df_motifs)\n", @@ -720,7 +737,9 @@ " plt.title('Motifs Probs')\n", " plt.show()\n", "\n", - " return motif_scoring_metric(df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values)" + " return motif_scoring_metric(\n", + " df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values\n", + " )" ] }, { @@ -732,7 +751,9 @@ }, "outputs": [], "source": [ - "def metric_comparison_between_components(original_data, generated_data, cell_components, x_label_plot, y_label_plot):\n", + "def metric_comparison_between_components(\n", + " original_data, generated_data, cell_components, x_label_plot, y_label_plot\n", + "):\n", " \"\"\"\n", " This functions takes as inputs dictionaries, which contain as keys different components (cell types)\n", " and as values the distribution of occurances of different motifs. 
These two dictionaries represent two different datasets, i.e.\n", @@ -745,7 +766,9 @@ " for components_1, motif_occurance_frequency in original_data.items():\n", " comparisons_single_component = []\n", " for components_2 in generated_data.keys():\n", - " compared_motifs_occurances = compare_motif_list(motif_occurance_frequency, generated_data[components_2])\n", + " compared_motifs_occurances = compare_motif_list(\n", + " motif_occurance_frequency, generated_data[components_2]\n", + " )\n", " comparisons_single_component.append(compared_motifs_occurances)\n", "\n", " final_comparison_all_components.append(comparisons_single_component)\n", @@ -755,7 +778,9 @@ " df_plot.columns = [CELL_NAMES[x] for x in cell_components]\n", " df_plot.index = df_plot.columns\n", " sns.heatmap(df_plot, cmap='Blues_r', annot=True, lw=0.1, vmax=1, vmin=0)\n", - " plt.title(f'Kl divergence \\n {x_label_plot} sequences x {y_label_plot} sequences \\n MOTIFS probabilities')\n", + " plt.title(\n", + " f'Kl divergence \\n {x_label_plot} sequences x {y_label_plot} sequences \\n MOTIFS probabilities'\n", + " )\n", " plt.xlabel(f'{x_label_plot} Sequences \\n(motifs dist)')\n", " plt.ylabel(f'{y_label_plot} \\n (motifs dist)')" ] @@ -827,7 +852,9 @@ " self.step = 0\n", "\n", " def update_model_average(self, ma_model, current_model):\n", - " for current_params, ma_params in zip(current_model.parameters(), ma_model.parameters()):\n", + " for current_params, ma_params in zip(\n", + " current_model.parameters(), ma_model.parameters()\n", + " ):\n", " old_weight, up_weight = ma_params.data, current_params.data\n", " ma_params.data = self.update_average(old_weight, up_weight)\n", "\n", @@ -890,14 +917,20 @@ " \"\"\" \"\"\"\n", "\n", " self.csv = input_csv\n", - " self.plot_components_distribution = plot_components_distribution # lucas change\n", + " self.plot_components_distribution = (\n", + " plot_components_distribution # lucas change\n", + " )\n", " self.limit_total_sequences = limit_total_sequences\n", " self.sample_number = sample_number\n", " self.subset_components = subset_components\n", " self.change_comp_index = change_component_index\n", " self.data = self.read_csv()\n", " self.df_generate = self.create_subsetted_components_df()\n", - " self.df_train_raw_grouped, self.df_test_raw_grouped, self.df_shuffled_raw_grouped = self.create_train_groups()\n", + " (\n", + " self.df_train_raw_grouped,\n", + " self.df_test_raw_grouped,\n", + " self.df_shuffled_raw_grouped,\n", + " ) = self.create_train_groups()\n", "\n", " def read_csv(self):\n", " \"\"\"\n", @@ -911,7 +944,9 @@ " if self.limit_total_sequences:\n", " print(f'Limiting total sequences {self.limit_total_sequences}')\n", " df = df.sample(self.limit_total_sequences)\n", - " df.columns = [c.replace('seqname', 'chr') for c in df.columns.values] # change this in simon original table\n", + " df.columns = [\n", + " c.replace('seqname', 'chr') for c in df.columns.values\n", + " ] # change this in simon original table\n", "\n", " return df\n", "\n", @@ -920,7 +955,10 @@ " Subset the raw csv based on components.\n", " \"\"\"\n", " df_subsetted_components = self.data.copy()\n", - " if self.subset_components != None and type(self.subset_components) == list:\n", + " if (\n", + " self.subset_components != None\n", + " and type(self.subset_components) == list\n", + " ):\n", " df_subsetted_components = df_subsetted_components.query(\n", " ' or '.join([f'TAG == \"{c}\" ' for c in self.subset_components])\n", " ).copy()\n", @@ -929,7 +967,9 @@ " if 
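The EMA helper above implements the usual shadow-weights update applied after each optimiser step. A self-contained sketch (beta and the stand-in model are illustrative):

import copy
import torch
import torch.nn as nn

model = nn.Linear(4, 2)           # stand-in for the denoising U-Net
ema_model = copy.deepcopy(model)  # shadow copy holding the averaged weights
beta = 0.995

def ema_update(ema_model, model, beta):
    # new_average = beta * old_average + (1 - beta) * current_weights
    for p_ema, p in zip(ema_model.parameters(), model.parameters()):
        p_ema.data.mul_(beta).add_(p.data, alpha=1 - beta)

# Call after every optimiser step; beta close to 1 gives a slowly-moving average.
ema_update(ema_model, model, beta)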
self.plot_components_distribution:\n", " (\n", " df_subsetted_components.groupby('TAG').count()['sequence']\n", - " / df_subsetted_components.groupby('TAG').count()['sequence'].sum()\n", + " / df_subsetted_components.groupby('TAG')\n", + " .count()['sequence']\n", + " .sum()\n", " ).plot.bar()\n", " plt.title('Component % on Training Sample')\n", " plt.show()\n", @@ -962,7 +1002,12 @@ " \"\"\"\n", "\n", " def __init__(\n", - " self, df_train_raw_grouped, df_test_raw_grouped, df_shuffled_raw_grouped, subset_components, sample_number\n", + " self,\n", + " df_train_raw_grouped,\n", + " df_test_raw_grouped,\n", + " df_shuffled_raw_grouped,\n", + " subset_components,\n", + " sample_number,\n", " ):\n", " \"\"\" \"\"\"\n", " self.df_train_raw_grouped = df_train_raw_grouped\n", @@ -979,9 +1024,15 @@ " \"\"\"\n", " Fetch the motifs and generate fastas for train, test and shuffled.\n", " \"\"\"\n", - " self.train = self.generate_motifs_and_fastas(self.df_train_raw_grouped, 'train')\n", - " self.test = self.generate_motifs_and_fastas(self.df_test_raw_grouped, 'test')\n", - " self.train_shuffle = self.generate_motifs_and_fastas(self.df_shuffled_raw_grouped, 'train_shuffle')\n", + " self.train = self.generate_motifs_and_fastas(\n", + " self.df_train_raw_grouped, 'train'\n", + " )\n", + " self.test = self.generate_motifs_and_fastas(\n", + " self.df_test_raw_grouped, 'test'\n", + " )\n", + " self.train_shuffle = self.generate_motifs_and_fastas(\n", + " self.df_shuffled_raw_grouped, 'train_shuffle'\n", + " )\n", "\n", " def generate_motifs_and_fastas(self, df, name):\n", " \"\"\"\n", @@ -994,7 +1045,8 @@ " print('Generating Fasta and Motis:', name)\n", " print('---' * 10)\n", " fasta_saved = self.save_fasta(\n", - " df, f\"{name}_{self.sample_number}_{'_'.join([str(c) for c in self.subset_components])}\"\n", + " df,\n", + " f\"{name}_{self.sample_number}_{'_'.join([str(c) for c in self.subset_components])}\",\n", " )\n", " print('Generating Motifs (all seqs)')\n", " motif_all_components = self.motifs_from_fasta(fasta_saved)\n", @@ -1014,13 +1066,22 @@ " \"\"\"\n", " print('Computing Motifs....')\n", " !gimme scan $fasta -p JASPAR2020_vertebrates -g hg38 > train_results_motifs.bed\n", - " df_results_seq_guime = pd.read_csv('train_results_motifs.bed', sep='\\t', skiprows=5, header=None)\n", + " df_results_seq_guime = pd.read_csv(\n", + " 'train_results_motifs.bed', sep='\\t', skiprows=5, header=None\n", + " )\n", " df_results_seq_guime['motifs'] = df_results_seq_guime[8].apply(\n", " lambda x: x.split('motif_name \"')[1].split('\"')[0]\n", " )\n", "\n", - " df_results_seq_guime[0] = df_results_seq_guime[0].apply(lambda x: '_'.join(x.split('_')[:-1]))\n", - " df_results_seq_guime_count_out = df_results_seq_guime[[0, 'motifs']].drop_duplicates().groupby('motifs').count()\n", + " df_results_seq_guime[0] = df_results_seq_guime[0].apply(\n", + " lambda x: '_'.join(x.split('_')[:-1])\n", + " )\n", + " df_results_seq_guime_count_out = (\n", + " df_results_seq_guime[[0, 'motifs']]\n", + " .drop_duplicates()\n", + " .groupby('motifs')\n", + " .count()\n", + " )\n", " return df_results_seq_guime_count_out\n", "\n", " def save_fasta(self, df, name_fasta):\n", @@ -1030,7 +1091,9 @@ " fasta_final_name = name_fasta + '.fasta'\n", " save_fasta_file = open(fasta_final_name, 'w')\n", " write_fasta_component = '\\n'.join(\n", - " df[['dhs_id', 'sequence', 'TAG']].apply(lambda x: f'>{x[0]}_TAG_{x[2]}\\n{x[1]}', axis=1).values.tolist()\n", + " df[['dhs_id', 'sequence', 'TAG']]\n", + " .apply(lambda x: 
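The BED post-processing inside motifs_from_fasta can be exercised without running gimme scan. The sketch below assumes a gimme-style attributes column (column 8) and omits the five header lines that motivate skiprows=5 above; the rows are fabricated:

import io
import pandas as pd

bed = io.StringIO(
    'seq_1_A\tgimme\tmisc_feature\t10\t25\t7.2\t+\t.\tmotif_name "CTCF" ; motif_instance "AGGTGGC"\n'
    'seq_1_A\tgimme\tmisc_feature\t40\t55\t6.1\t+\t.\tmotif_name "CTCF" ; motif_instance "AGGTGCC"\n'
    'seq_2_A\tgimme\tmisc_feature\t12\t30\t5.0\t-\t.\tmotif_name "GATA1" ; motif_instance "TTATCT"\n'
)
df = pd.read_csv(bed, sep='\t', header=None)

# Pull the motif name out of the attributes column ...
df['motifs'] = df[8].apply(lambda x: x.split('motif_name "')[1].split('"')[0])
# ... collapse the per-hit id back to the per-sequence id ...
df[0] = df[0].apply(lambda x: '_'.join(x.split('_')[:-1]))
# ... and count, per motif, how many distinct sequences contain it.
counts = df[[0, 'motifs']].drop_duplicates().groupby('motifs').count()
print(counts)  # CTCF -> 1 sequence, GATA1 -> 1 sequence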
f'>{x[0]}_TAG_{x[2]}\\n{x[1]}', axis=1)\n", + " .values.tolist()\n", " )\n", " save_fasta_file.write(write_fasta_component)\n", " save_fasta_file.close()\n", @@ -1251,13 +1314,23 @@ "\n", "raw_data = DataLoading(\n", " \"selected_K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt\",\n", - " subset_components=['GM12878_ENCLB441ZZZ', 'hESCT0_ENCLB449ZZZ', 'K562_ENCLB843GMH', 'HepG2_ENCLB029COU'],\n", + " subset_components=[\n", + " 'GM12878_ENCLB441ZZZ',\n", + " 'hESCT0_ENCLB449ZZZ',\n", + " 'K562_ENCLB843GMH',\n", + " 'HepG2_ENCLB029COU',\n", + " ],\n", ") # , limit_total_sequences = 1000)\n", "preprocessed_data = DataPreprocessing(\n", " raw_data.df_train_raw_grouped,\n", " raw_data.df_test_raw_grouped,\n", " raw_data.df_shuffled_raw_grouped,\n", - " subset_components=['GM12878_ENCLB441ZZZ', 'hESCT0_ENCLB449ZZZ', 'K562_ENCLB843GMH', 'HepG2_ENCLB029COU'],\n", + " subset_components=[\n", + " 'GM12878_ENCLB441ZZZ',\n", + " 'hESCT0_ENCLB449ZZZ',\n", + " 'K562_ENCLB843GMH',\n", + " 'HepG2_ENCLB029COU',\n", + " ],\n", " sample_number=raw_data.sample_number,\n", ")" ] @@ -1342,7 +1415,9 @@ " \"\"\"\n", " steps = timesteps + 1\n", " x = torch.linspace(0, timesteps, steps)\n", - " alphas_cumprod = torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " alphas_cumprod = (\n", + " torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " )\n", " alphas_cumprod = alphas_cumprod / alphas_cumprod[0]\n", " betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1])\n", " return torch.clip(betas, 0.0001, 0.9999)\n", @@ -1380,7 +1455,9 @@ "# sqrt_one_minus_alphas_cumprod = torch.sqrt(1. - alphas_cumprod)\n", "sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod)\n", "# calculations for posterior q(x_{t-1} | x_t, x_0)\n", - "posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + "posterior_variance = (\n", + " betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + ")\n", "\n", "\n", "def extract(a, t, x_shape):\n", @@ -1395,13 +1472,17 @@ "@torch.no_grad()\n", "def p_sample(model, x, t, t_index):\n", " betas_t = extract(betas, t, x.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x.shape\n", + " )\n", " # print (x.shape, 'x_shape')\n", " sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t, x.shape)\n", "\n", " # Equation 11 in the paper\n", " # Use our model (noise predictor) to predict the mean\n", - " model_mean = sqrt_recip_alphas_t * (x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t)\n", + " model_mean = sqrt_recip_alphas_t * (\n", + " x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t\n", + " )\n", "\n", " if t_index == 0:\n", " return model_mean\n", @@ -1413,7 +1494,9 @@ "\n", "\n", "@torch.no_grad()\n", - "def p_sample_guided(model, x, classes, t, t_index, context_mask, cond_weight=0.0):\n", + "def p_sample_guided(\n", + " model, x, classes, t, t_index, context_mask, cond_weight=0.0\n", + "):\n", " # adapted from: https://openreview.net/pdf?id=qw8AKxfYbI\n", " # print (classes[0])\n", " batch_size = x.shape[0]\n", @@ -1421,7 +1504,9 @@ " t_double = t.repeat(2)\n", " x_double = x.repeat(2, 1, 1, 1)\n", " betas_t = extract(betas, t_double, x_double.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t_double, x_double.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, 
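For reference, the cosine schedule and the buffers it feeds into q_sample/p_sample can be computed stand-alone; this consolidates the scattered notebook cells into one runnable block (the timesteps value is illustrative):

import torch
import torch.nn.functional as F

def cosine_beta_schedule(timesteps, s=0.008):
    steps = timesteps + 1
    x = torch.linspace(0, timesteps, steps)
    alphas_cumprod = torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2
    alphas_cumprod = alphas_cumprod / alphas_cumprod[0]
    betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1])
    return torch.clip(betas, 0.0001, 0.9999)

T = 50
betas = cosine_beta_schedule(T)
alphas = 1.0 - betas
alphas_cumprod = torch.cumprod(alphas, dim=0)
alphas_cumprod_prev = F.pad(alphas_cumprod[:-1], (1, 0), value=1.0)
sqrt_recip_alphas = torch.sqrt(1.0 / alphas)
sqrt_alphas_cumprod = torch.sqrt(alphas_cumprod)
sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod)
# variance of the posterior q(x_{t-1} | x_t, x_0)
posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)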
t_double, x_double.shape\n", + " )\n", " sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t_double, x_double.shape)\n", "\n", " # classifier free sampling interpolates between guided and non guided using `cond_weight`\n", @@ -1436,7 +1521,10 @@ " # Equation 11 in the paper\n", " # Use our model (noise predictor) to predict the mean\n", " model_mean = sqrt_recip_alphas_t[:batch_size] * (\n", - " x - betas_t[:batch_size] * x_t / sqrt_one_minus_alphas_cumprod_t[:batch_size]\n", + " x\n", + " - betas_t[:batch_size]\n", + " * x_t\n", + " / sqrt_one_minus_alphas_cumprod_t[:batch_size]\n", " )\n", "\n", " if t_index == 0:\n", @@ -1466,19 +1554,40 @@ " classes = classes.repeat(2)\n", " context_mask = context_mask.repeat(2)\n", " context_mask[n_sample:] = 0.0 # makes second half of batch context free\n", - " sampling_fn = partial(p_sample_guided, classes=classes, cond_weight=cond_weight, context_mask=context_mask)\n", + " sampling_fn = partial(\n", + " p_sample_guided,\n", + " classes=classes,\n", + " cond_weight=cond_weight,\n", + " context_mask=context_mask,\n", + " )\n", " else:\n", " sampling_fn = partial(p_sample)\n", "\n", - " for i in tqdm(reversed(range(0, TIMESTEPS)), desc='sampling loop time step', total=TIMESTEPS):\n", - " img = sampling_fn(model, x=img, t=torch.full((b,), i, device=device, dtype=torch.long), t_index=i)\n", + " for i in tqdm(\n", + " reversed(range(0, TIMESTEPS)),\n", + " desc='sampling loop time step',\n", + " total=TIMESTEPS,\n", + " ):\n", + " img = sampling_fn(\n", + " model,\n", + " x=img,\n", + " t=torch.full((b,), i, device=device, dtype=torch.long),\n", + " t_index=i,\n", + " )\n", " imgs.append(img.cpu().numpy())\n", " return imgs\n", "\n", "\n", "@torch.no_grad()\n", - "def sample(model, image_size, classes=None, batch_size=16, channels=3, cond_weight=0):\n", - " return p_sample_loop(model, classes=classes, shape=(batch_size, channels, 4, image_size), cond_weight=cond_weight)" + "def sample(\n", + " model, image_size, classes=None, batch_size=16, channels=3, cond_weight=0\n", + "):\n", + " return p_sample_loop(\n", + " model,\n", + " classes=classes,\n", + " shape=(batch_size, channels, 4, image_size),\n", + " cond_weight=cond_weight,\n", + " )" ] }, { @@ -1508,21 +1617,32 @@ " noise = torch.randn_like(x_start)\n", "\n", " sqrt_alphas_cumprod_t = extract(sqrt_alphas_cumprod, t, x_start.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x_start.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x_start.shape\n", + " )\n", "\n", - " return sqrt_alphas_cumprod_t * x_start + sqrt_one_minus_alphas_cumprod_t * noise\n", + " return (\n", + " sqrt_alphas_cumprod_t * x_start\n", + " + sqrt_one_minus_alphas_cumprod_t * noise\n", + " )\n", "\n", "\n", - "def p_losses(denoise_model, x_start, t, classes, noise=None, loss_type=\"l1\", p_uncond=0.1):\n", + "def p_losses(\n", + " denoise_model, x_start, t, classes, noise=None, loss_type=\"l1\", p_uncond=0.1\n", + "):\n", " \"\"\"\n", " Calculate the loss conditioned and noise injected.\n", " \"\"\"\n", " device = x_start.device\n", " if noise is None:\n", " noise = torch.randn_like(x_start) # gauss noise\n", - " x_noisy = q_sample(x_start=x_start, t=t, noise=noise) # this is the auto generated noise given t and Noise\n", + " x_noisy = q_sample(\n", + " x_start=x_start, t=t, noise=noise\n", + " ) # this is the auto generated noise given t and Noise\n", "\n", - " context_mask = torch.bernoulli(torch.zeros(classes.shape[0]) + (1 - 
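The classifier-free-guidance arithmetic inside p_sample_guided, isolated with toy tensors and no model; eps1/eps2 above correspond to eps_cond/eps_uncond here (the doubled batch runs the conditional and the context-masked pass together):

import torch

batch_size, cond_weight = 4, 0.8
eps_cond = torch.randn(batch_size, 1, 4, 200)    # prediction with class labels
eps_uncond = torch.randn(batch_size, 1, 4, 200)  # prediction with classes masked to 0

# eps = (1 + w) * eps_cond - w * eps_uncond; w = 0 recovers plain conditional
# sampling, larger w pushes samples harder towards the requested class.
eps = (1 + cond_weight) * eps_cond - cond_weight * eps_uncond
print(eps.shape)  # torch.Size([4, 1, 4, 200])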
p_uncond)).to(device)\n", + " context_mask = torch.bernoulli(\n", + " torch.zeros(classes.shape[0]) + (1 - p_uncond)\n", + " ).to(device)\n", "\n", " # mask for unconditinal guidance\n", " classes = classes * context_mask\n", @@ -1634,7 +1754,8 @@ "\n", "def Upsample(dim, dim_out=None):\n", " return nn.Sequential(\n", - " nn.Upsample(scale_factor=2, mode='nearest'), nn.Conv2d(dim, default(dim_out, dim), 3, padding=1)\n", + " nn.Upsample(scale_factor=2, mode='nearest'),\n", + " nn.Conv2d(dim, default(dim_out, dim), 3, padding=1),\n", " )\n", "\n", "\n", @@ -1694,7 +1815,11 @@ " generic one layer FC NN for embedding things \n", " '''\n", " self.input_dim = input_dim\n", - " layers = [nn.Linear(input_dim, emb_dim), nn.GELU(), nn.Linear(emb_dim, emb_dim)]\n", + " layers = [\n", + " nn.Linear(input_dim, emb_dim),\n", + " nn.GELU(),\n", + " nn.Linear(emb_dim, emb_dim),\n", + " ]\n", " self.model = nn.Sequential(*layers)\n", "\n", " def forward(self, x):\n", @@ -1729,11 +1854,17 @@ "class ResnetBlock(nn.Module):\n", " def __init__(self, dim, dim_out, *, time_emb_dim=None, groups=8):\n", " super().__init__()\n", - " self.mlp = nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) if exists(time_emb_dim) else None\n", + " self.mlp = (\n", + " nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2))\n", + " if exists(time_emb_dim)\n", + " else None\n", + " )\n", "\n", " self.block1 = Block(dim, dim_out, groups=groups)\n", " self.block2 = Block(dim_out, dim_out, groups=groups)\n", - " self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " self.res_conv = (\n", + " nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " )\n", "\n", " def forward(self, x, time_emb=None):\n", " scale_shift = None\n", @@ -1753,8 +1884,22 @@ "\n", "\n", "class ResnetBlockClassConditioned(ResnetBlock):\n", - " def __init__(self, dim, dim_out, *, num_classes, class_embed_dim, time_emb_dim=None, groups=8):\n", - " super().__init__(dim=dim + class_embed_dim, dim_out=dim_out, time_emb_dim=time_emb_dim, groups=groups)\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " dim_out,\n", + " *,\n", + " num_classes,\n", + " class_embed_dim,\n", + " time_emb_dim=None,\n", + " groups=8,\n", + " ):\n", + " super().__init__(\n", + " dim=dim + class_embed_dim,\n", + " dim_out=dim_out,\n", + " time_emb_dim=time_emb_dim,\n", + " groups=groups,\n", + " )\n", " self.class_mlp = EmbedFC(num_classes, class_embed_dim)\n", "\n", " def forward(self, x, time_emb=None, c=None):\n", @@ -1776,12 +1921,17 @@ " self.heads = heads\n", " hidden_dim = dim_head * heads\n", " self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False)\n", - " self.to_out = nn.Sequential(nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim))\n", + " self.to_out = nn.Sequential(\n", + " nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim)\n", + " )\n", "\n", " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q = q.softmax(dim=-2)\n", " k = k.softmax(dim=-1)\n", @@ -1792,7 +1942,9 @@ " context = torch.einsum('b h d n, b h e n -> b h d e', k, v)\n", "\n", " out = torch.einsum('b h d e, b h d n -> b h e n', context, q)\n", - " out = rearrange(out, 'b h c (x y) -> b (h c) x y', h=self.heads, x=h, y=w)\n", + " out = rearrange(\n", + " out, 'b h c (x y) -> b (h c) 
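q_sample's closed-form forward diffusion, reproduced on a toy batch so the per-sample broadcasting done by extract is visible (schedule and shapes are illustrative):

import torch

T = 50
betas = torch.linspace(1e-4, 0.02, T)        # any schedule works here
alphas_cumprod = torch.cumprod(1.0 - betas, dim=0)

x_start = torch.randn(8, 1, 4, 200).sign()   # stand-in for the ±1 one-hot DNA batch
t = torch.randint(0, T, (8,))
noise = torch.randn_like(x_start)

# `extract` gathers the per-sample coefficient at index t and reshapes it so
# it broadcasts over (channels, height, width).
coef1 = alphas_cumprod.sqrt().gather(0, t).view(-1, 1, 1, 1)
coef2 = (1.0 - alphas_cumprod).sqrt().gather(0, t).view(-1, 1, 1, 1)
x_noisy = coef1 * x_start + coef2 * noise    # x_t ~ q(x_t | x_0)
print(x_noisy.shape)  # torch.Size([8, 1, 4, 200])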
x y', h=self.heads, x=h, y=w\n", + " )\n", " return self.to_out(out)\n", "\n", "\n", @@ -1808,7 +1960,10 @@ " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q, k = map(l2norm, (q, k))\n", "\n", @@ -1858,7 +2013,10 @@ " fourier_dim = learned_sinusoidal_dim + 1\n", "\n", " self.time_mlp = nn.Sequential(\n", - " sinu_pos_emb, nn.Linear(fourier_dim, time_dim), nn.GELU(), nn.Linear(time_dim, time_dim)\n", + " sinu_pos_emb,\n", + " nn.Linear(fourier_dim, time_dim),\n", + " nn.GELU(),\n", + " nn.Linear(time_dim, time_dim),\n", " )\n", "\n", " if num_classes is not None:\n", @@ -1878,7 +2036,9 @@ " block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n", " block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n", " Residual(PreNorm(dim_in, LinearAttention(dim_in))),\n", - " Downsample(dim_in, dim_out) if not is_last else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n", + " Downsample(dim_in, dim_out)\n", + " if not is_last\n", + " else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n", " ]\n", " )\n", " )\n", @@ -1894,10 +2054,16 @@ " self.ups.append(\n", " nn.ModuleList(\n", " [\n", - " block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n", - " block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n", + " block_klass(\n", + " dim_out + dim_in, dim_out, time_emb_dim=time_dim\n", + " ),\n", + " block_klass(\n", + " dim_out + dim_in, dim_out, time_emb_dim=time_dim\n", + " ),\n", " Residual(PreNorm(dim_out, LinearAttention(dim_out))),\n", - " Upsample(dim_out, dim_in) if not is_last else nn.Conv2d(dim_out, dim_in, 3, padding=1),\n", + " Upsample(dim_out, dim_in)\n", + " if not is_last\n", + " else nn.Conv2d(dim_out, dim_in, 3, padding=1),\n", " ]\n", " )\n", " )\n", @@ -2027,7 +2193,9 @@ "\n", "optimizer = Adam(model.parameters(), lr=LEARNING_RATE)\n", "\n", - "live_kl = PlotLosses(groups={'KL': ['train', 'test', 'shuffle'], 'DiffusionLoss': ['loss']})\n", + "live_kl = PlotLosses(\n", + " groups={'KL': ['train', 'test', 'shuffle'], 'DiffusionLoss': ['loss']}\n", + ")\n", "train_kl, test_kl, shuffle_kl = 1, 1, 1" ] }, @@ -2111,7 +2279,9 @@ "\n", "final_comp_values_train = preprocessed_data.train['motifs_per_components_dict']\n", "final_comp_values_test = preprocessed_data.test['motifs_per_components_dict']\n", - "final_comp_values_shuffle = preprocessed_data.train_shuffle['motifs_per_components_dict']\n", + "final_comp_values_shuffle = preprocessed_data.train_shuffle[\n", + " 'motifs_per_components_dict'\n", + "]\n", "\n", "raw_dataset = preprocessed_data.train['dataset']\n", "# cell_components = raw_dataset.sort_values('component')['component'].unique().tolist()\n", @@ -2183,7 +2353,11 @@ ], "source": [ "X_train = np.array(\n", - " [one_hot_encode(x, NUCLEOTIDES, 200) for x in tqdm_notebook(raw_dataset['sequence']) if 'N' not in x]\n", + " [\n", + " one_hot_encode(x, NUCLEOTIDES, 200)\n", + " for x in tqdm_notebook(raw_dataset['sequence'])\n", + " if 'N' not in x\n", + " ]\n", ")\n", "X_train = np.array([x.T.tolist() for x in X_train])\n", "X_train[X_train == 0] = -1\n", @@ -2204,7 +2378,9 @@ "cell_types = sorted(list(conditional_numeric_to_tag.keys()))\n", "\n", "# cell_types = sorted(list(raw_dataset['TAG'].unique()))\n", - "x_train_cell_type = torch.from_numpy(raw_dataset[\"TAG\"].apply(lambda x: 
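A shape walk-through of the LinearAttention forward pass above, assuming a small feature map; einops rearrange splits the heads out of the channel dimension (the per-query scaling used by the sibling Attention class is omitted here):

import torch
from einops import rearrange

b, c, h, w, heads, dim_head = 2, 32, 4, 50, 4, 8
x = torch.randn(b, c, h, w)
to_qkv = torch.nn.Conv2d(c, dim_head * heads * 3, 1, bias=False)

qkv = to_qkv(x).chunk(3, dim=1)  # 3 tensors of shape (b, heads*dim_head, h, w)
q, k, v = (rearrange(t, 'b (h c) x y -> b h c (x y)', h=heads) for t in qkv)

q = q.softmax(dim=-2)  # normalise over the feature dimension
k = k.softmax(dim=-1)  # normalise over the h*w positions
context = torch.einsum('b h d n, b h e n -> b h d e', k, v)
out = torch.einsum('b h d e, b h d n -> b h e n', context, q)
out = rearrange(out, 'b h c (x y) -> b (h c) x y', h=heads, x=h, y=w)
print(out.shape)  # torch.Size([2, 32, 4, 50]) — same spatial shape as the input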
conditional_tag_to_numeric[x]).to_numpy())\n", + "x_train_cell_type = torch.from_numpy(\n", + " raw_dataset[\"TAG\"].apply(lambda x: conditional_tag_to_numeric[x]).to_numpy()\n", + ")\n", "# x_train_cell_type = torch.from_numpy(raw_dataset[\"TAG\"].to_numpy())" ] }, @@ -2229,7 +2405,9 @@ "source": [ "tf = T.Compose([T.ToTensor()])\n", "seq_dataset = SequenceDataset(seqs=X_train, c=x_train_cell_type, transform=tf)\n", - "train_dl = DataLoader(seq_dataset, BATCH_SIZE, shuffle=True) # , num_workers=3, pin_memory=True)" + "train_dl = DataLoader(\n", + " seq_dataset, BATCH_SIZE, shuffle=True\n", + ") # , num_workers=3, pin_memory=True)" ] }, { @@ -2251,7 +2429,12 @@ }, "outputs": [], "source": [ - "def sampling_to_metric(number_of_samples=20, specific_group=False, group_number=None, cond_weight_to_metric=0):\n", + "def sampling_to_metric(\n", + " number_of_samples=20,\n", + " specific_group=False,\n", + " group_number=None,\n", + " cond_weight_to_metric=0,\n", + "):\n", " \"\"\"\n", " This function encapsulates the logic of sampling from the trained model in order to generate counts of the motifs.\n", " The reasoning is that we are interested only in calculating the evaluation metric\n", @@ -2287,10 +2470,21 @@ " save_motifs_syn.close()\n", " # Scan for motifs\n", " !gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs.bed\n", - " df_results_syn = pd.read_csv('syn_results_motifs.bed', sep='\\t', skiprows=5, header=None)\n", - " df_results_syn['motifs'] = df_results_syn[8].apply(lambda x: x.split('motif_name \"')[1].split('\"')[0])\n", - " df_results_syn[0] = df_results_syn[0].apply(lambda x: '_'.join(x.split('_')[:-1]))\n", - " df_motifs_count_syn = df_results_syn[[0, 'motifs']].drop_duplicates().groupby('motifs').count()\n", + " df_results_syn = pd.read_csv(\n", + " 'syn_results_motifs.bed', sep='\\t', skiprows=5, header=None\n", + " )\n", + " df_results_syn['motifs'] = df_results_syn[8].apply(\n", + " lambda x: x.split('motif_name \"')[1].split('\"')[0]\n", + " )\n", + " df_results_syn[0] = df_results_syn[0].apply(\n", + " lambda x: '_'.join(x.split('_')[:-1])\n", + " )\n", + " df_motifs_count_syn = (\n", + " df_results_syn[[0, 'motifs']]\n", + " .drop_duplicates()\n", + " .groupby('motifs')\n", + " .count()\n", + " )\n", " plt.rcParams[\"figure.figsize\"] = (30, 2)\n", " df_motifs_count_syn.sort_values(0, ascending=False).head(50)[0].plot.bar()\n", " plt.show()\n", @@ -27906,7 +28100,9 @@ " x, y = batch\n", " x = x.type(torch.float32).to(device)\n", " y = y.type(torch.long).to(device)\n", - " t = torch.randint(0, TIMESTEPS, (x.shape[0],), device=device).long() # *np.random.uniform()\n", + " t = torch.randint(\n", + " 0, TIMESTEPS, (x.shape[0],), device=device\n", + " ).long() # *np.random.uniform()\n", "\n", " if TIME_WARPING == True and step >= N_STEPS:\n", " # sort the epoch losses so that one can take the t-s with the biggest losses\n", @@ -27915,13 +28111,20 @@ "\n", " # take the t-s for the 5 biggest losses (5 was taken as example, no extensive optimization)\n", " last_n_t = sorted_t[-5:]\n", - " unnested_last_n_t = [item for sublist in last_n_t for item in sublist]\n", + " unnested_last_n_t = [\n", + " item for sublist in last_n_t for item in sublist\n", + " ]\n", "\n", " # take x.shape[0] number of t-s for the 5 biggest losses\n", - " t_not_random = torch.tensor(np.random.choice(unnested_last_n_t, size=x.shape[0]), device=\"cpu\")\n", + " t_not_random = torch.tensor(\n", + " np.random.choice(unnested_last_n_t, size=x.shape[0]),\n", + " 
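one_hot_encode itself is defined elsewhere in the notebook; a plausible minimal version consistent with the call above follows (the zero-padding behaviour for short sequences is an assumption), including the transpose and the {0,1} -> {-1,1} remap applied to X_train:

import numpy as np

NUCLEOTIDES = ['A', 'C', 'G', 'T']

def one_hot_encode(seq, alphabet, length):
    # (length, 4) matrix; positions past the end of `seq` stay all-zero
    arr = np.zeros((length, len(alphabet)), dtype=np.float32)
    for i, base in enumerate(seq[:length]):
        arr[i, alphabet.index(base)] = 1.0
    return arr

X = np.array([one_hot_encode(s, NUCLEOTIDES, 200)
              for s in ['ACGT' * 50, 'TTGACA' + 'A' * 194] if 'N' not in s])
X = np.array([x.T.tolist() for x in X])  # -> (batch, 4, 200), a 4 x 200 "image"
X[X == 0] = -1                           # {0,1} -> {-1,1}, matching X_train
print(X.shape)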
device=\"cpu\",\n", + " )\n", " # pick between t generated above and t_not_random (to increase exploration, and not to get stuck\n", " # in the same t-s)\n", - " t = np.random.choice([t.cpu().detach(), t_not_random.cpu().detach()])\n", + " t = np.random.choice(\n", + " [t.cpu().detach(), t_not_random.cpu().detach()]\n", + " )\n", " t = t.to(device)\n", "\n", " loss = p_losses(model, x, t, y, loss_type=\"huber\")\n", @@ -27947,7 +28150,14 @@ " sampled = torch.from_numpy(np.random.choice(cell_types, sample_bs))\n", " random_classes = sampled.cuda()\n", "\n", - " samples = sample(model, classes=random_classes, image_size=IMAGE_SIZE, batch_size=2, channels=1, cond_weight=1)\n", + " samples = sample(\n", + " model,\n", + " classes=random_classes,\n", + " image_size=IMAGE_SIZE,\n", + " batch_size=2,\n", + " channels=1,\n", + " cond_weight=1,\n", + " )\n", " n_print = 0\n", " for image, class_show in zip(samples[-1], random_classes):\n", " if n_print < 4:\n", @@ -27966,7 +28176,9 @@ " synt_df = sampling_to_metric(20)\n", " train_kl = compare_motif_list(synt_df, df_results_seq_guime_count_train)\n", " test_kl = compare_motif_list(synt_df, df_results_seq_guime_count_test)\n", - " shuffle_kl = compare_motif_list(synt_df, df_results_seq_guime_count_shuffle)\n", + " shuffle_kl = compare_motif_list(\n", + " synt_df, df_results_seq_guime_count_shuffle\n", + " )\n", " print('KL_TRAIN', train_kl, 'KL')\n", " print('KL_TEST', test_kl, 'KL')\n", " print('KL_SHUFFLE', shuffle_kl, 'KL')\n", @@ -28047,7 +28259,9 @@ " print(r), 'component'\n", " comp_array = []\n", " group_compare = r\n", - " synt_df_cond = sampling_to_metric(20, True, group_compare, cond_weight_to_metric=1)\n", + " synt_df_cond = sampling_to_metric(\n", + " 20, True, group_compare, cond_weight_to_metric=1\n", + " )\n", " for k in use_comp_list:\n", " v = dict_targer_components[k]\n", " kl_out = compare_motif_list(synt_df_cond, v)\n", @@ -34086,7 +34300,9 @@ "source": [ "use_comp = [3, 8, 12, 15]\n", "\n", - "heat_new_sequences_test = kl_comparison_generated_sequences(use_comp, final_comp_values_train)" + "heat_new_sequences_test = kl_comparison_generated_sequences(\n", + " use_comp, final_comp_values_train\n", + ")" ] } ], diff --git a/notebooks/experiments/conditional_diffusion/easy_training_Conditional_Code_to_refactor_UNET_ANNOTATED_v4 (2).ipynb b/notebooks/experiments/conditional_diffusion/easy_training_Conditional_Code_to_refactor_UNET_ANNOTATED_v4 (2).ipynb index 2d261ae3..96a3709a 100644 --- a/notebooks/experiments/conditional_diffusion/easy_training_Conditional_Code_to_refactor_UNET_ANNOTATED_v4 (2).ipynb +++ b/notebooks/experiments/conditional_diffusion/easy_training_Conditional_Code_to_refactor_UNET_ANNOTATED_v4 (2).ipynb @@ -176,7 +176,9 @@ " self.step = 0\n", "\n", " def update_model_average(self, ma_model, current_model):\n", - " for current_params, ma_params in zip(current_model.parameters(), ma_model.parameters()):\n", + " for current_params, ma_params in zip(\n", + " current_model.parameters(), ma_model.parameters()\n", + " ):\n", " old_weight, up_weight = ma_params.data, current_params.data\n", " ma_params.data = self.update_average(old_weight, up_weight)\n", "\n", @@ -284,10 +286,17 @@ }, "outputs": [], "source": [ - "def sampling_to_metric(number_of_samples=20, specific_group=False, group_number=None, cond_weight_to_metric=0):\n", + "def sampling_to_metric(\n", + " number_of_samples=20,\n", + " specific_group=False,\n", + " group_number=None,\n", + " cond_weight_to_metric=0,\n", + "):\n", " # Sampling regions using the 
trained model\n", " final_sequences = []\n", - " for n_a in tqdm_notebook(range(number_of_samples)): # generating 20*10 sequences\n", + " for n_a in tqdm_notebook(\n", + " range(number_of_samples)\n", + " ): # generating 20*10 sequences\n", " # sampled_images = bit_diffusion.sample(batch_size = 4)\n", " sample_bs = 10\n", " if specific_group:\n", @@ -322,10 +331,21 @@ " save_motifs_syn.close()\n", " # Scan for motifs\n", " !gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs.bed\n", - " df_results_syn = pd.read_csv('syn_results_motifs.bed', sep='\\t', skiprows=5, header=None)\n", - " df_results_syn['motifs'] = df_results_syn[8].apply(lambda x: x.split('motif_name \"')[1].split('\"')[0])\n", - " df_results_syn[0] = df_results_syn[0].apply(lambda x: '_'.join(x.split('_')[:-1]))\n", - " df_motifs_count_syn = df_results_syn[[0, 'motifs']].drop_duplicates().groupby('motifs').count()\n", + " df_results_syn = pd.read_csv(\n", + " 'syn_results_motifs.bed', sep='\\t', skiprows=5, header=None\n", + " )\n", + " df_results_syn['motifs'] = df_results_syn[8].apply(\n", + " lambda x: x.split('motif_name \"')[1].split('\"')[0]\n", + " )\n", + " df_results_syn[0] = df_results_syn[0].apply(\n", + " lambda x: '_'.join(x.split('_')[:-1])\n", + " )\n", + " df_motifs_count_syn = (\n", + " df_results_syn[[0, 'motifs']]\n", + " .drop_duplicates()\n", + " .groupby('motifs')\n", + " .count()\n", + " )\n", " plt.rcParams[\"figure.figsize\"] = (30, 2)\n", " df_motifs_count_syn.sort_values(0, ascending=False).head(50)[0].plot.bar()\n", " plt.show()\n", @@ -359,7 +379,9 @@ "# Not using the total number of motifs but the count of the occurence aka: percentage of the sequences with a given motif.\n", "def compare_motif_list(df_motifs_a, df_motifs_b):\n", " # Using KL divergence to compare motifs lists distribution\n", - " set_all_mot = set(df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist())\n", + " set_all_mot = set(\n", + " df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist()\n", + " )\n", " create_new_matrix = []\n", " for x in set_all_mot:\n", " list_in = []\n", @@ -376,10 +398,16 @@ "\n", " create_new_matrix.append(list_in)\n", "\n", - " df_motifs = pd.DataFrame(create_new_matrix, columns=['motif', 'motif_a', 'motif_b'])\n", + " df_motifs = pd.DataFrame(\n", + " create_new_matrix, columns=['motif', 'motif_a', 'motif_b']\n", + " )\n", "\n", - " df_motifs['Diffusion_seqs'] = df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", - " df_motifs['Training_seqs'] = df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " df_motifs['Diffusion_seqs'] = (\n", + " df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", + " )\n", + " df_motifs['Training_seqs'] = (\n", + " df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " )\n", " plt.rcParams[\"figure.figsize\"] = (3, 3)\n", " sns.regplot(x='Diffusion_seqs', y='Training_seqs', data=df_motifs)\n", " plt.xlabel('Diffusion Seqs')\n", @@ -388,7 +416,9 @@ " plt.show()\n", "\n", " display(df_motifs)\n", - " kl_pq = rel_entr(df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values)\n", + " kl_pq = rel_entr(\n", + " df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values\n", + " )\n", " return np.sum(kl_pq)\n", "\n", "\n", @@ -421,13 +451,17 @@ "@torch.no_grad()\n", "def p_sample(model, x, t, t_index):\n", " betas_t = extract(betas, t, x.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = 
extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x.shape\n", + " )\n", " # print (x.shape, 'x_shape')\n", " sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t, x.shape)\n", "\n", " # Equation 11 in the paper\n", " # Use our model (noise predictor) to predict the mean\n", - " model_mean = sqrt_recip_alphas_t * (x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t)\n", + " model_mean = sqrt_recip_alphas_t * (\n", + " x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t\n", + " )\n", "\n", " if t_index == 0:\n", " return model_mean\n", @@ -439,7 +473,9 @@ "\n", "\n", "@torch.no_grad()\n", - "def p_sample_guided(model, x, classes, t, t_index, context_mask, cond_weight=0.0):\n", + "def p_sample_guided(\n", + " model, x, classes, t, t_index, context_mask, cond_weight=0.0\n", + "):\n", " # adapted from: https://openreview.net/pdf?id=qw8AKxfYbI\n", " # print (classes[0])\n", " batch_size = x.shape[0]\n", @@ -447,7 +483,9 @@ " t_double = t.repeat(2)\n", " x_double = x.repeat(2, 1, 1, 1)\n", " betas_t = extract(betas, t_double, x_double.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t_double, x_double.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t_double, x_double.shape\n", + " )\n", " sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t_double, x_double.shape)\n", "\n", " # classifier free sampling interpolates between guided and non guided using `cond_weight`\n", @@ -462,7 +500,10 @@ " # Equation 11 in the paper\n", " # Use our model (noise predictor) to predict the mean\n", " model_mean = sqrt_recip_alphas_t[:batch_size] * (\n", - " x - betas_t[:batch_size] * x_t / sqrt_one_minus_alphas_cumprod_t[:batch_size]\n", + " x\n", + " - betas_t[:batch_size]\n", + " * x_t\n", + " / sqrt_one_minus_alphas_cumprod_t[:batch_size]\n", " )\n", "\n", " if t_index == 0:\n", @@ -492,19 +533,40 @@ " classes = classes.repeat(2)\n", " context_mask = context_mask.repeat(2)\n", " context_mask[n_sample:] = 0.0 # makes second half of batch context free\n", - " sampling_fn = partial(p_sample_guided, classes=classes, cond_weight=cond_weight, context_mask=context_mask)\n", + " sampling_fn = partial(\n", + " p_sample_guided,\n", + " classes=classes,\n", + " cond_weight=cond_weight,\n", + " context_mask=context_mask,\n", + " )\n", " else:\n", " sampling_fn = partial(p_sample)\n", "\n", - " for i in tqdm(reversed(range(0, timesteps)), desc='sampling loop time step', total=timesteps):\n", - " img = sampling_fn(model, x=img, t=torch.full((b,), i, device=device, dtype=torch.long), t_index=i)\n", + " for i in tqdm(\n", + " reversed(range(0, timesteps)),\n", + " desc='sampling loop time step',\n", + " total=timesteps,\n", + " ):\n", + " img = sampling_fn(\n", + " model,\n", + " x=img,\n", + " t=torch.full((b,), i, device=device, dtype=torch.long),\n", + " t_index=i,\n", + " )\n", " imgs.append(img.cpu().numpy())\n", " return imgs\n", "\n", "\n", "@torch.no_grad()\n", - "def sample(model, image_size, classes=None, batch_size=16, channels=3, cond_weight=0):\n", - " return p_sample_loop(model, classes=classes, shape=(batch_size, channels, 4, image_size), cond_weight=cond_weight)" + "def sample(\n", + " model, image_size, classes=None, batch_size=16, channels=3, cond_weight=0\n", + "):\n", + " return p_sample_loop(\n", + " model,\n", + " classes=classes,\n", + " shape=(batch_size, channels, 4, image_size),\n", + " cond_weight=cond_weight,\n", + " )" ] }, { @@ -532,7 +594,9 @@ " \"\"\"\n", " steps = timesteps + 1\n", " x 
= torch.linspace(0, timesteps, steps)\n", - " alphas_cumprod = torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " alphas_cumprod = (\n", + " torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " )\n", " alphas_cumprod = alphas_cumprod / alphas_cumprod[0]\n", " betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1])\n", " return torch.clip(betas, 0.0001, 0.9999)\n", @@ -582,11 +646,16 @@ " noise = torch.randn_like(x_start)\n", "\n", " sqrt_alphas_cumprod_t = extract(sqrt_alphas_cumprod, t, x_start.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x_start.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x_start.shape\n", + " )\n", "\n", " # print (sqrt_alphas_cumprod_t , sqrt_one_minus_alphas_cumprod_t , t)\n", "\n", - " return sqrt_alphas_cumprod_t * x_start + sqrt_one_minus_alphas_cumprod_t * noise" + " return (\n", + " sqrt_alphas_cumprod_t * x_start\n", + " + sqrt_one_minus_alphas_cumprod_t * noise\n", + " )" ] }, { @@ -632,14 +701,20 @@ "# THIS function changed to accomodate the conditional\n", "\n", "\n", - "def p_losses(denoise_model, x_start, t, classes, noise=None, loss_type=\"l1\", p_uncond=0.1):\n", + "def p_losses(\n", + " denoise_model, x_start, t, classes, noise=None, loss_type=\"l1\", p_uncond=0.1\n", + "):\n", " device = x_start.device\n", " if noise is None:\n", " noise = torch.randn_like(x_start) # guass noise\n", - " x_noisy = q_sample(x_start=x_start, t=t, noise=noise) # this is the auto generated noise given t and Noise\n", + " x_noisy = q_sample(\n", + " x_start=x_start, t=t, noise=noise\n", + " ) # this is the auto generated noise given t and Noise\n", " # print('max_q_sample', x_noisy.max(), 'mean_q_sample',x_noisy.mean() )\n", "\n", - " context_mask = torch.bernoulli(torch.zeros(classes.shape[0]) + (1 - p_uncond)).to(device)\n", + " context_mask = torch.bernoulli(\n", + " torch.zeros(classes.shape[0]) + (1 - p_uncond)\n", + " ).to(device)\n", " # print ('context mask', context_mask)\n", " # print ('classes', classes)\n", "\n", @@ -648,7 +723,9 @@ " # nn.Embedding needs type to be long, multiplying with mask changes type\n", " classes = classes.type(torch.long)\n", " # print ('final class',classes )\n", - " predicted_noise = denoise_model(x_noisy, t, classes) # this is the predicted noise given the model and step t\n", + " predicted_noise = denoise_model(\n", + " x_noisy, t, classes\n", + " ) # this is the predicted noise given the model and step t\n", " # print('max_predicted', x_noisy.max(), 'mean_predicted',x_noisy.mean() )\n", "\n", " # #predicted is ok (clipped)\n", @@ -698,7 +775,9 @@ " device = time.device\n", " half_dim = self.dim // 2\n", " embeddings = math.log(10000) / (half_dim - 1)\n", - " embeddings = torch.exp(torch.arange(half_dim, device=device) * -embeddings)\n", + " embeddings = torch.exp(\n", + " torch.arange(half_dim, device=device) * -embeddings\n", + " )\n", " embeddings = time[:, None] * embeddings[None, :]\n", " embeddings = torch.cat((embeddings.sin(), embeddings.cos()), dim=-1)\n", " return embeddings\n", @@ -772,7 +851,12 @@ " def __init__(self):\n", " super().__init__()\n", "\n", - " self.res = nn.Sequential(ResBlock(1, 2, 1), ResBlock(1, 2, 1), ResBlock(1, 2, 1), ResBlock(1, 2, 1))\n", + " self.res = nn.Sequential(\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " )\n", "\n", " self.conv = nn.Sequential(\n", " ConvBlock_2d(in_channels=1, 
out_channels=2),\n", @@ -791,7 +875,9 @@ " self.fc = nn.Sequential(\n", " nn.Linear(800, 800),\n", " # nn.GELU(),\n", - " nn.BatchNorm1d(800), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", + " nn.BatchNorm1d(\n", + " 800\n", + " ), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", " # nn.Linear(400, 400),\n", " # nn.BatchNorm1d(400),\n", " # nn.GELU(),\n", @@ -801,7 +887,9 @@ " self.fc2 = nn.Sequential(\n", " nn.Linear(400, 800),\n", " # nn.GELU(),\n", - " nn.BatchNorm1d(800), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", + " nn.BatchNorm1d(\n", + " 800\n", + " ), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", " # nn.Linear(400, 400),\n", " # nn.GELU(),\n", " # nn.BatchNorm1d(400),\n", @@ -882,7 +970,11 @@ " generic one layer FC NN for embedding things \n", " '''\n", " self.input_dim = input_dim\n", - " layers = [nn.Linear(input_dim, emb_dim), nn.GELU(), nn.Linear(emb_dim, emb_dim)]\n", + " layers = [\n", + " nn.Linear(input_dim, emb_dim),\n", + " nn.GELU(),\n", + " nn.Linear(emb_dim, emb_dim),\n", + " ]\n", " self.model = nn.Sequential(*layers)\n", "\n", " def forward(self, x):\n", @@ -960,7 +1052,8 @@ "\n", "def Upsample(dim, dim_out=None):\n", " return nn.Sequential(\n", - " nn.Upsample(scale_factor=2, mode='nearest'), nn.Conv2d(dim, default(dim_out, dim), 3, padding=1)\n", + " nn.Upsample(scale_factor=2, mode='nearest'),\n", + " nn.Conv2d(dim, default(dim_out, dim), 3, padding=1),\n", " )\n", "\n", "\n", @@ -1038,11 +1131,17 @@ "class ResnetBlock(nn.Module):\n", " def __init__(self, dim, dim_out, *, time_emb_dim=None, groups=8):\n", " super().__init__()\n", - " self.mlp = nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) if exists(time_emb_dim) else None\n", + " self.mlp = (\n", + " nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2))\n", + " if exists(time_emb_dim)\n", + " else None\n", + " )\n", "\n", " self.block1 = Block(dim, dim_out, groups=groups)\n", " self.block2 = Block(dim_out, dim_out, groups=groups)\n", - " self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " self.res_conv = (\n", + " nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " )\n", "\n", " def forward(self, x, time_emb=None):\n", " scale_shift = None\n", @@ -1059,8 +1158,22 @@ "\n", "\n", "class ResnetBlockClassConditioned(ResnetBlock):\n", - " def __init__(self, dim, dim_out, *, num_classes, class_embed_dim, time_emb_dim=None, groups=8):\n", - " super().__init__(dim=dim + class_embed_dim, dim_out=dim_out, time_emb_dim=time_emb_dim, groups=groups)\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " dim_out,\n", + " *,\n", + " num_classes,\n", + " class_embed_dim,\n", + " time_emb_dim=None,\n", + " groups=8,\n", + " ):\n", + " super().__init__(\n", + " dim=dim + class_embed_dim,\n", + " dim_out=dim_out,\n", + " time_emb_dim=time_emb_dim,\n", + " groups=groups,\n", + " )\n", " # print ('n_classes', num_classes, 'class_embed_dim', class_embed_dim)\n", " self.class_mlp = EmbedFC(num_classes, class_embed_dim)\n", "\n", @@ -1082,12 +1195,17 @@ " self.heads = heads\n", " hidden_dim = dim_head * heads\n", " self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False)\n", - " self.to_out = nn.Sequential(nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim))\n", + " self.to_out = nn.Sequential(\n", + " nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim)\n", + " )\n", "\n", " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c 
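SinusoidalPositionEmbeddings above follows the standard transformer recipe for encoding the timestep; the same computation as a plain function, with illustrative dim and batch size:

import math
import torch

def sinusoidal_embedding(time, dim):
    half_dim = dim // 2
    freqs = math.log(10000) / (half_dim - 1)
    freqs = torch.exp(torch.arange(half_dim, device=time.device) * -freqs)
    args = time[:, None].float() * freqs[None, :]
    # interleave sin and cos halves -> (batch, dim)
    return torch.cat((args.sin(), args.cos()), dim=-1)

t = torch.randint(0, 50, (8,))
emb = sinusoidal_embedding(t, 128)
print(emb.shape)  # torch.Size([8, 128])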
(x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q = q.softmax(dim=-2)\n", " k = k.softmax(dim=-1)\n", @@ -1098,7 +1216,9 @@ " context = torch.einsum('b h d n, b h e n -> b h d e', k, v)\n", "\n", " out = torch.einsum('b h d e, b h d n -> b h e n', context, q)\n", - " out = rearrange(out, 'b h c (x y) -> b (h c) x y', h=self.heads, x=h, y=w)\n", + " out = rearrange(\n", + " out, 'b h c (x y) -> b (h c) x y', h=self.heads, x=h, y=w\n", + " )\n", " return self.to_out(out)\n", "\n", "\n", @@ -1114,7 +1234,10 @@ " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q, k = map(l2norm, (q, k))\n", "\n", @@ -1152,7 +1275,9 @@ "\n", "\n", "def log_snr_to_alpha_sigma(log_snr):\n", - " return torch.sqrt(torch.sigmoid(log_snr)), torch.sqrt(torch.sigmoid(-log_snr))\n", + " return torch.sqrt(torch.sigmoid(log_snr)), torch.sqrt(\n", + " torch.sigmoid(-log_snr)\n", + " )\n", "\n", "\n", "class Unet_lucas(nn.Module):\n", @@ -1201,7 +1326,10 @@ " fourier_dim = learned_sinusoidal_dim + 1\n", "\n", " self.time_mlp = nn.Sequential(\n", - " sinu_pos_emb, nn.Linear(fourier_dim, time_dim), nn.GELU(), nn.Linear(time_dim, time_dim)\n", + " sinu_pos_emb,\n", + " nn.Linear(fourier_dim, time_dim),\n", + " nn.GELU(),\n", + " nn.Linear(time_dim, time_dim),\n", " )\n", "\n", " if num_classes is not None:\n", @@ -1222,7 +1350,9 @@ " block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n", " block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n", " Residual(PreNorm(dim_in, LinearAttention(dim_in))),\n", - " Downsample(dim_in, dim_out) if not is_last else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n", + " Downsample(dim_in, dim_out)\n", + " if not is_last\n", + " else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n", " ]\n", " )\n", " )\n", @@ -1238,10 +1368,16 @@ " self.ups.append(\n", " nn.ModuleList(\n", " [\n", - " block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n", - " block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n", + " block_klass(\n", + " dim_out + dim_in, dim_out, time_emb_dim=time_dim\n", + " ),\n", + " block_klass(\n", + " dim_out + dim_in, dim_out, time_emb_dim=time_dim\n", + " ),\n", " Residual(PreNorm(dim_out, LinearAttention(dim_out))),\n", - " Upsample(dim_out, dim_in) if not is_last else nn.Conv2d(dim_out, dim_in, 3, padding=1),\n", + " Upsample(dim_out, dim_in)\n", + " if not is_last\n", + " else nn.Conv2d(dim_out, dim_in, 3, padding=1),\n", " ]\n", " )\n", " )\n", @@ -1780,7 +1916,9 @@ "\n", "final_comp_values_trian = encode_data.train['motifs_per_components_dict']\n", "final_comp_values_test = encode_data.test['motifs_per_components_dict']\n", - "final_comp_values_shuffle = encode_data.train_shuffle['motifs_per_components_dict']\n", + "final_comp_values_shuffle = encode_data.train_shuffle[\n", + " 'motifs_per_components_dict'\n", + "]\n", "\n", "\n", "df = encode_data.train['dataset']\n", @@ -4357,7 +4495,9 @@ " df_plot.columns = [labels_test[x] for x in cell_components]\n", " df_plot.index = df_plot.columns\n", " sns.heatmap(df_plot, cmap='Blues_r', annot=True, lw=0.1, vmax=1, vmin=0)\n", - " plt.title(f'Kl divergence \\n {x_label} sequences x {y_label} sequences \\n MOTIFS probabilities')\n", + " 
plt.title(\n", + " f'Kl divergence \\n {x_label} sequences x {y_label} sequences \\n MOTIFS probabilities'\n", + " )\n", " plt.xlabel(f'{x_label} Sequences \\n(motifs dist)')\n", " plt.ylabel(f'{y_label} \\n (motifs dist)')" ] @@ -6808,7 +6948,9 @@ } ], "source": [ - "heat_train_test = kl_comparison_between_dataset(final_comp_values_trian, final_comp_values_test)" + "heat_train_test = kl_comparison_between_dataset(\n", + " final_comp_values_trian, final_comp_values_test\n", + ")" ] }, { @@ -9280,7 +9422,9 @@ } ], "source": [ - "heat_train_shuffle = kl_comparison_between_dataset(final_comp_values_trian, final_comp_values_shuffle)" + "heat_train_shuffle = kl_comparison_between_dataset(\n", + " final_comp_values_trian, final_comp_values_shuffle\n", + ")" ] }, { @@ -9367,7 +9511,11 @@ "source": [ "dna_alphabet = ['A', 'C', 'T', 'G']\n", "x_train_seq = np.array(\n", - " [one_hot_encode(x, dna_alphabet, 200) for x in tqdm_notebook(df['raw_sequence']) if 'N' not in x]\n", + " [\n", + " one_hot_encode(x, dna_alphabet, 200)\n", + " for x in tqdm_notebook(df['raw_sequence'])\n", + " if 'N' not in x\n", + " ]\n", ")\n", "X_train = x_train_seq\n", "X_train = np.array([x.T.tolist() for x in X_train])\n", @@ -9475,7 +9623,9 @@ "tf = T.Compose([T.ToTensor()])\n", "batch_size = 256\n", "seq_dataset = SequenceDataset(seqs=X_train, c=x_train_cell_type, transform=tf)\n", - "train_dl = DataLoader(seq_dataset, batch_size, shuffle=True, num_workers=3, pin_memory=True)" + "train_dl = DataLoader(\n", + " seq_dataset, batch_size, shuffle=True, num_workers=3, pin_memory=True\n", + ")" ] }, { @@ -9503,7 +9653,9 @@ "# sqrt_one_minus_alphas_cumprod = torch.sqrt(1. - alphas_cumprod)\n", "sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod)\n", "# calculations for posterior q(x_{t-1} | x_t, x_0)\n", - "posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + "posterior_variance = (\n", + " betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + ")\n", "\n", "\n", "def extract(a, t, x_shape):\n", @@ -9751,7 +9903,11 @@ "channels = 1\n", "\n", "model = Unet_lucas(\n", - " dim=200, channels=1, dim_mults=(1, 2, 4), resnet_block_groups=4, num_classes=TOTAL_class_number\n", + " dim=200,\n", + " channels=1,\n", + " dim_mults=(1, 2, 4),\n", + " resnet_block_groups=4,\n", + " num_classes=TOTAL_class_number,\n", ").cuda()\n", "\n", "# model = Classifier() # 2d conv\n", @@ -9763,7 +9919,9 @@ "from livelossplot import PlotLosses\n", "\n", "train_kl, test_kl, shuffle_kl = 1, 1, 1\n", - "live_kl = PlotLosses(groups={'KL': ['train', 'test', 'shuffle'], 'DiffusionLoss': ['loss']})\n", + "live_kl = PlotLosses(\n", + " groups={'KL': ['train', 'test', 'shuffle'], 'DiffusionLoss': ['loss']}\n", + ")\n", "# ,'Diffusion_Loss':['loss']\n", "# live_kl.update({'train':train_kl, 'test':test_kl , 'shuffle':shuffle_kl , 'loss': 100})\n", "train_kl, test_kl, shuffle_kl = 1, 1, 1" @@ -9911,7 +10069,9 @@ " # batch_size = batch[\"pixel_values\"].shape[0]\n", " # batch = batch[\"pixel_values\"].to(device)\n", " # Algorithm 1 line 3: sample t uniformally for every example in the batch\n", - " t = torch.randint(0, timesteps, (batch_size,), device=device).long() # sampling a t to generate t and t+1\n", + " t = torch.randint(\n", + " 0, timesteps, (batch_size,), device=device\n", + " ).long() # sampling a t to generate t and t+1\n", " # print (t.dtype)\n", " # loss = p_losses(model, batch, t, loss_type=\"l2\")\n", " # print (y)\n", @@ -9936,7 +10096,14 @@ " sampled = 
torch.from_numpy(np.random.choice(cell_types, sample_bs))\n", " random_classes = sampled.cuda()\n", "\n", - " samples = sample(model, classes=random_classes, image_size=image_size, batch_size=2, channels=1, cond_weight=1)\n", + " samples = sample(\n", + " model,\n", + " classes=random_classes,\n", + " image_size=image_size,\n", + " batch_size=2,\n", + " channels=1,\n", + " cond_weight=1,\n", + " )\n", " n_print = 0\n", " for image, class_show in zip(samples[-1], random_classes):\n", " if n_print < 4:\n", @@ -9954,8 +10121,17 @@ " synt_df = sampling_to_metric(20)\n", " train_kl = compare_motif_list(synt_df, df_results_seq_guime_count_train)\n", " test_kl = compare_motif_list(synt_df, df_results_seq_guime_count_test)\n", - " shuffle_kl = compare_motif_list(synt_df, df_results_seq_guime_count_shuffle)\n", - " live_kl.update({'train': train_kl, 'test': test_kl, 'shuffle': shuffle_kl, 'loss': loss.item()})\n", + " shuffle_kl = compare_motif_list(\n", + " synt_df, df_results_seq_guime_count_shuffle\n", + " )\n", + " live_kl.update(\n", + " {\n", + " 'train': train_kl,\n", + " 'test': test_kl,\n", + " 'shuffle': shuffle_kl,\n", + " 'loss': loss.item(),\n", + " }\n", + " )\n", " live_kl.send()\n", " print('KL_TRAIN', train_kl, 'KL')\n", " print('KL_TEST', test_kl, 'KL')\n", @@ -10024,7 +10200,9 @@ " print(r), 'component'\n", " comp_array = []\n", " group_compare = r\n", - " synt_df_cond = sampling_to_metric(20, True, group_compare, cond_weight_to_metric=1)\n", + " synt_df_cond = sampling_to_metric(\n", + " 20, True, group_compare, cond_weight_to_metric=1\n", + " )\n", " for k in use_comp_list:\n", " v = dict_targer_components[k]\n", "\n", @@ -14438,7 +14616,9 @@ ], "source": [ "use_comp = [3, 8, 12, 15]\n", - "heat_new_sequences_test = kl_comparison_generated_sequences(use_comp, final_comp_values_trian)" + "heat_new_sequences_test = kl_comparison_generated_sequences(\n", + " use_comp, final_comp_values_trian\n", + ")" ] }, { diff --git a/notebooks/experiments/conditional_diffusion/full_script_version_from_accelerate_notebook/dnadiffusion.py b/notebooks/experiments/conditional_diffusion/full_script_version_from_accelerate_notebook/dnadiffusion.py index a096732c..afa1072a 100644 --- a/notebooks/experiments/conditional_diffusion/full_script_version_from_accelerate_notebook/dnadiffusion.py +++ b/notebooks/experiments/conditional_diffusion/full_script_version_from_accelerate_notebook/dnadiffusion.py @@ -5,7 +5,6 @@ import random from functools import partial from itertools import cycle -from pathlib import Path import matplotlib.pyplot as plt import numpy as np @@ -15,7 +14,6 @@ import torch.nn.functional as F import torchvision.transforms as T from accelerate import Accelerator, DistributedDataParallelKwargs -from accelerate.utils import set_seed from einops import rearrange from memory_efficient_attention_pytorch import Attention as EfficientAttention from scipy.special import rel_entr @@ -91,7 +89,9 @@ def __init__(self, beta): self.step = 0 def update_model_average(self, ma_model, current_model): - for current_params, ma_params in zip(current_model.parameters(), ma_model.parameters()): + for current_params, ma_params in zip( + current_model.parameters(), ma_model.parameters() + ): old_weight, up_weight = ma_params.data, current_params.data ma_params.data = self.update_average(old_weight, up_weight) @@ -144,7 +144,9 @@ def sampling_to_metric( # Sampling regions using the trained model final_sequences = [] # for n_a in tqdm(range(number_of_samples)): # generating number_of_samples *10 sequences - for 
n_a in range(number_of_samples): # generating number_of_samples *10 sequences + for n_a in range( + number_of_samples + ): # generating number_of_samples *10 sequences print(n_a) sample_bs = 10 if specific_group: @@ -178,17 +180,35 @@ def sampling_to_metric( os.system( f"gimme scan synthetic_motifs_{current_cell}.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs_{current_cell}.bed" ) - df_results_syn = pd.read_csv(f"syn_results_motifs_{current_cell}.bed", sep="\t", skiprows=5, header=None) + df_results_syn = pd.read_csv( + f"syn_results_motifs_{current_cell}.bed", + sep="\t", + skiprows=5, + header=None, + ) else: save_motifs_syn = open("synthetic_motifs.fasta", "w") save_motifs_syn.write("\n".join(final_sequences)) save_motifs_syn.close() - os.system("gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs.bed") - df_results_syn = pd.read_csv("new_syn_results_motifs.bed", sep="\t", skiprows=5, header=None) + os.system( + "gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs.bed" + ) + df_results_syn = pd.read_csv( + "new_syn_results_motifs.bed", sep="\t", skiprows=5, header=None + ) - df_results_syn["motifs"] = df_results_syn[8].apply(lambda x: x.split('motif_name "')[1].split('"')[0]) - df_results_syn[0] = df_results_syn[0].apply(lambda x: "_".join(x.split("_")[:-1])) - df_motifs_count_syn = df_results_syn[[0, "motifs"]].drop_duplicates().groupby("motifs").count() + df_results_syn["motifs"] = df_results_syn[8].apply( + lambda x: x.split('motif_name "')[1].split('"')[0] + ) + df_results_syn[0] = df_results_syn[0].apply( + lambda x: "_".join(x.split("_")[:-1]) + ) + df_motifs_count_syn = ( + df_results_syn[[0, "motifs"]] + .drop_duplicates() + .groupby("motifs") + .count() + ) # plt.rcParams["figure.figsize"] = (30,2) # df_motifs_count_syn.sort_values(0, ascending=False).head(50)[0].plot.bar() # plt.show() @@ -198,7 +218,9 @@ def sampling_to_metric( def compare_motif_list(df_motifs_a, df_motifs_b): # Using KL divergence to compare motifs lists distribution - set_all_mot = set(df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist()) + set_all_mot = set( + df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist() + ) create_new_matrix = [] for x in set_all_mot: list_in = [] @@ -215,10 +237,16 @@ def compare_motif_list(df_motifs_a, df_motifs_b): create_new_matrix.append(list_in) - df_motifs = pd.DataFrame(create_new_matrix, columns=["motif", "motif_a", "motif_b"]) + df_motifs = pd.DataFrame( + create_new_matrix, columns=["motif", "motif_a", "motif_b"] + ) - df_motifs["Diffusion_seqs"] = df_motifs["motif_a"] / df_motifs["motif_a"].sum() - df_motifs["Training_seqs"] = df_motifs["motif_b"] / df_motifs["motif_b"].sum() + df_motifs["Diffusion_seqs"] = ( + df_motifs["motif_a"] / df_motifs["motif_a"].sum() + ) + df_motifs["Training_seqs"] = ( + df_motifs["motif_b"] / df_motifs["motif_b"].sum() + ) """ plt.rcParams["figure.figsize"] = (3,3) sns.regplot(x='Diffusion_seqs', y='Training_seqs',data=df_motifs) @@ -227,7 +255,9 @@ def compare_motif_list(df_motifs_a, df_motifs_b): plt.title('Motifs Probs') plt.show() """ - kl_pq = rel_entr(df_motifs["Diffusion_seqs"].values, df_motifs["Training_seqs"].values) + kl_pq = rel_entr( + df_motifs["Diffusion_seqs"].values, df_motifs["Training_seqs"].values + ) return np.sum(kl_pq) @@ -278,7 +308,9 @@ def generate_heatmap(df_heat, x_label, y_label): df_plot.columns = [x.split("_")[0] for x in cell_components] df_plot.index = df_plot.columns sns.heatmap(df_plot, 
cmap="Blues_r", annot=True, lw=0.1, vmax=1, vmin=0) - plt.title(f"Kl divergence \n {x_label} sequences x {y_label} sequences \n MOTIFS probabilities") + plt.title( + f"Kl divergence \n {x_label} sequences x {y_label} sequences \n MOTIFS probabilities" + ) plt.xlabel(f"{x_label} Sequences \n(motifs dist)") plt.ylabel(f"{y_label} \n (motifs dist)") plt.grid(False) @@ -289,7 +321,11 @@ def generate_heatmap(df_heat, x_label, y_label): def generate_similarity_metric(): """Capture the syn_motifs.fasta and compare with the dataset motifs""" seqs_file = open("synthetic_motifs.fasta").readlines() - seqs_to_hotencoder = [one_hot_encode(s.replace("\n", ""), nucleotides, 200).T for s in seqs_file if ">" not in s] + seqs_to_hotencoder = [ + one_hot_encode(s.replace("\n", ""), nucleotides, 200).T + for s in seqs_file + if ">" not in s + ] return seqs_to_hotencoder @@ -299,7 +335,9 @@ def get_best_match(db, x_seq): # transforming in a function def calculate_mean_similarity(database, input_query_seqs, seq_len=200): - final_base_max_match = np.mean([get_best_match(database, x) for x in tqdm(input_query_seqs)]) + final_base_max_match = np.mean( + [get_best_match(database, x) for x in tqdm(input_query_seqs)] + ) return final_base_max_match / seq_len @@ -307,19 +345,25 @@ def generate_similarity_using_train(X_train_in): convert_X_train = X_train_in.copy() convert_X_train[convert_X_train == -1] = 0 generated_seqs_to_similarity = generate_similarity_metric() - return calculate_mean_similarity(convert_X_train, generated_seqs_to_similarity) + return calculate_mean_similarity( + convert_X_train, generated_seqs_to_similarity + ) # Sampling Loop @torch.no_grad() def p_sample(model, x, t, t_index): betas_t = extract(betas, t, x.shape) - sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x.shape) + sqrt_one_minus_alphas_cumprod_t = extract( + sqrt_one_minus_alphas_cumprod, t, x.shape + ) sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t, x.shape) # Equation 11 in the paper # Use our model (noise predictor) to predict the mean - model_mean = sqrt_recip_alphas_t * (x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t) + model_mean = sqrt_recip_alphas_t * ( + x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t + ) if t_index == 0: return model_mean @@ -354,8 +398,12 @@ def p_sample_guided( betas = betas.to(device) sqrt_one_minus_alphas_cumprod = sqrt_one_minus_alphas_cumprod.to(device) betas_t = extract(betas, t_double, x_double.shape, device=device) - sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t_double, x_double.shape, device=device) - sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t_double, x_double.shape, device=device) + sqrt_one_minus_alphas_cumprod_t = extract( + sqrt_one_minus_alphas_cumprod, t_double, x_double.shape, device=device + ) + sqrt_recip_alphas_t = extract( + sqrt_recip_alphas, t_double, x_double.shape, device=device + ) # classifier free sampling interpolates between guided and non guided using `cond_weight` classes_masked = classes * context_mask @@ -364,7 +412,9 @@ def p_sample_guided( model = accelerator.unwrap_model(model) model.output_attention = True show_out_test = model(x_double, time=t_double, classes=classes_masked) - preds, cross_map_full = model(x_double, time=t_double, classes=classes_masked) # I added cross_map + preds, cross_map_full = model( + x_double, time=t_double, classes=classes_masked + ) # I added cross_map model.output_attention = False cross_map = cross_map_full[:batch_size] eps1 = (1 + 
cond_weight) * preds[:batch_size] @@ -374,13 +424,18 @@ def p_sample_guided( # Equation 11 in the paper # Use our model (noise predictor) to predict the mean model_mean = sqrt_recip_alphas_t[:batch_size] * ( - x - betas_t[:batch_size] * x_t / sqrt_one_minus_alphas_cumprod_t[:batch_size] + x + - betas_t[:batch_size] + * x_t + / sqrt_one_minus_alphas_cumprod_t[:batch_size] ) if t_index == 0: return model_mean, cross_map else: - posterior_variance_t = extract(posterior_variance, t, x.shape, device=device) + posterior_variance_t = extract( + posterior_variance, t, x.shape, device=device + ) noise = torch.randn_like(x) # Algorithm 2 line 4: return model_mean + torch.sqrt(posterior_variance_t) * noise, cross_map @@ -492,10 +547,17 @@ def q_sample( if noise is None: noise = torch.randn_like(x_start) - sqrt_alphas_cumprod_t = extract(sqrt_alphas_cumprod, t, x_start.shape).to(device) - sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x_start.shape).to(device) + sqrt_alphas_cumprod_t = extract(sqrt_alphas_cumprod, t, x_start.shape).to( + device + ) + sqrt_one_minus_alphas_cumprod_t = extract( + sqrt_one_minus_alphas_cumprod, t, x_start.shape + ).to(device) - return sqrt_alphas_cumprod_t * x_start + sqrt_one_minus_alphas_cumprod_t * noise + return ( + sqrt_alphas_cumprod_t * x_start + + sqrt_one_minus_alphas_cumprod_t * noise + ) def p_losses( @@ -521,7 +583,9 @@ def p_losses( device=device, ) # this is the auto generated noise given t and Noise - context_mask = torch.bernoulli(torch.zeros(classes.shape[0]) + (1 - p_uncond)).to(device) + context_mask = torch.bernoulli( + torch.zeros(classes.shape[0]) + (1 - p_uncond) + ).to(device) # mask for unconditinal guidance classes = classes * context_mask @@ -630,7 +694,11 @@ def __init__(self, input_dim, emb_dim): generic one layer FC NN for embedding things """ self.input_dim = input_dim - layers = [nn.Linear(input_dim, emb_dim), nn.GELU(), nn.Linear(emb_dim, emb_dim)] + layers = [ + nn.Linear(input_dim, emb_dim), + nn.GELU(), + nn.Linear(emb_dim, emb_dim), + ] self.model = nn.Sequential(*layers) def forward(self, x): @@ -723,11 +791,17 @@ def forward(self, x, scale_shift=None): class ResnetBlock(nn.Module): def __init__(self, dim, dim_out, *, time_emb_dim=None, groups=8): super().__init__() - self.mlp = nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) if exists(time_emb_dim) else None + self.mlp = ( + nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) + if exists(time_emb_dim) + else None + ) self.block1 = Block(dim, dim_out, groups=groups) self.block2 = Block(dim_out, dim_out, groups=groups) - self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity() + self.res_conv = ( + nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity() + ) def forward(self, x, time_emb=None): scale_shift = None @@ -744,7 +818,16 @@ def forward(self, x, time_emb=None): class ResnetBlockClassConditioned(ResnetBlock): - def __init__(self, dim, dim_out, *, num_classes, class_embed_dim, time_emb_dim=None, groups=8): + def __init__( + self, + dim, + dim_out, + *, + num_classes, + class_embed_dim, + time_emb_dim=None, + groups=8, + ): super().__init__( dim=dim + class_embed_dim, dim_out=dim_out, @@ -769,12 +852,17 @@ def __init__(self, dim, heads=4, dim_head=32): self.heads = heads hidden_dim = dim_head * heads self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False) - self.to_out = nn.Sequential(nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim)) + self.to_out = nn.Sequential( + nn.Conv2d(hidden_dim, dim, 
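Annotation: q_sample, reformatted in this region, is the closed-form forward-diffusion draw x_t = sqrt(a_bar_t) * x_0 + sqrt(1 - a_bar_t) * eps. A minimal stand-alone version, with extract's gather-and-reshape inlined as indexing:

import torch

def q_sample_closed_form(x0, t, sqrt_alphas_cumprod, sqrt_one_minus_alphas_cumprod, noise=None):
    # Any timestep can be sampled directly, without iterating the chain.
    if noise is None:
        noise = torch.randn_like(x0)
    shape = (-1,) + (1,) * (x0.dim() - 1)  # broadcast per-sample scalars
    a = sqrt_alphas_cumprod[t].view(shape)
    b = sqrt_one_minus_alphas_cumprod[t].view(shape)
    return a * x0 + b * noise

# Toy usage with a linear beta schedule:
T = 50
alphas_cumprod = torch.cumprod(1.0 - torch.linspace(1e-4, 0.02, T), dim=0)
x0 = torch.randn(2, 1, 4, 200)  # one-hot-style sequence "images", as above
xt = q_sample_closed_form(x0, torch.randint(0, T, (2,)),
                          alphas_cumprod.sqrt(), (1 - alphas_cumprod).sqrt())

The Bernoulli context_mask in the neighbouring p_losses hunk zeroes class labels with probability p_uncond during training, which is what later lets p_sample_guided form the unconditional half of its doubled batch.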
1), LayerNorm(dim) + ) def forward(self, x): b, c, h, w = x.shape qkv = self.to_qkv(x).chunk(3, dim=1) - q, k, v = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv) + q, k, v = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv + ) q = q.softmax(dim=-2) k = k.softmax(dim=-1) @@ -785,7 +873,9 @@ def forward(self, x): context = torch.einsum("b h d n, b h e n -> b h d e", k, v) out = torch.einsum("b h d e, b h d n -> b h e n", context, q) - out = rearrange(out, "b h c (x y) -> b (h c) x y", h=self.heads, x=h, y=w) + out = rearrange( + out, "b h c (x y) -> b (h c) x y", h=self.heads, x=h, y=w + ) return self.to_out(out) @@ -801,7 +891,10 @@ def __init__(self, dim, heads=4, dim_head=32, scale=10): def forward(self, x): b, c, h, w = x.shape qkv = self.to_qkv(x).chunk(3, dim=1) - q, k, v = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv) + q, k, v = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv + ) q, k = map(l2norm, (q, k)) @@ -828,9 +921,15 @@ def forward(self, x, y): qkv_x = self.to_qkv(x).chunk(3, dim=1) qkv_y = self.to_qkv(y).chunk(3, dim=1) - q_x, k_x, v_x = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv_x) + q_x, k_x, v_x = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv_x + ) - q_y, k_y, v_y = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv_y) + q_y, k_y, v_y = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv_y + ) q, k = map(l2norm, (q_x, k_y)) @@ -857,11 +956,15 @@ def beta_linear_log_snr(t): def alpha_cosine_log_snr(t, s: float = 0.008): - return -log((torch.cos((t + s) / (1 + s) * math.pi * 0.5) ** -2) - 1, eps=1e-5) + return -log( + (torch.cos((t + s) / (1 + s) * math.pi * 0.5) ** -2) - 1, eps=1e-5 + ) def log_snr_to_alpha_sigma(log_snr): - return torch.sqrt(torch.sigmoid(log_snr)), torch.sqrt(torch.sigmoid(-log_snr)) + return torch.sqrt(torch.sigmoid(log_snr)), torch.sqrt( + torch.sigmoid(-log_snr) + ) # Unet Model @@ -926,7 +1029,9 @@ def __init__( block_klass(dim_in, dim_in, time_emb_dim=time_dim), block_klass(dim_in, dim_in, time_emb_dim=time_dim), Residual(PreNorm(dim_in, LinearAttention(dim_in))), - Downsample(dim_in, dim_out) if not is_last else nn.Conv2d(dim_in, dim_out, 3, padding=1), + Downsample(dim_in, dim_out) + if not is_last + else nn.Conv2d(dim_in, dim_out, 3, padding=1), ] ) ) @@ -941,10 +1046,16 @@ def __init__( self.ups.append( nn.ModuleList( [ - block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim), - block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim), + block_klass( + dim_out + dim_in, dim_out, time_emb_dim=time_dim + ), + block_klass( + dim_out + dim_in, dim_out, time_emb_dim=time_dim + ), Residual(PreNorm(dim_out, LinearAttention(dim_out))), - Upsample(dim_out, dim_in) if not is_last else nn.Conv2d(dim_out, dim_in, 3, padding=1), + Upsample(dim_out, dim_in) + if not is_last + else nn.Conv2d(dim_out, dim_in, 3, padding=1), ] ) ) @@ -1023,16 +1134,31 @@ def forward(self, x, time, classes, x_self_cond=None): # Loading data and Motifs def motifs_from_fasta(fasta, generate_heatmap=True): print("Computing Motifs....") - os.system(f"gimme scan {fasta} -p JASPAR2020_vertebrates -g hg38 > train_results_motifs.bed") - df_results_seq_guime = pd.read_csv("train_results_motifs.bed", sep="\t", skiprows=5, header=None) - df_results_seq_guime["motifs"] = df_results_seq_guime[8].apply(lambda x: x.split('motif_name "')[1].split('"')[0]) + os.system( + f"gimme 
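Annotation: the LinearAttention hunks above only re-wrap long lines; functionally the block computes attention in O(n * d^2) by normalising q over the feature axis and k over the positions. A sketch, with the output projection and LayerNorm omitted:

import torch
from einops import rearrange

def linear_attention(x, to_qkv, heads=4):
    # x: (b, c, h, w); to_qkv: 1x1 conv producing 3 * heads * dim_head channels.
    b, c, h, w = x.shape
    q, k, v = to_qkv(x).chunk(3, dim=1)
    q, k, v = (rearrange(t, "b (h c) x y -> b h c (x y)", h=heads) for t in (q, k, v))
    q = q.softmax(dim=-2)  # normalise over features
    k = k.softmax(dim=-1)  # normalise over positions
    # Aggregate values first (a d x e matrix per head), then read out per query:
    context = torch.einsum("b h d n, b h e n -> b h d e", k, v)
    out = torch.einsum("b h d e, b h d n -> b h e n", context, q)
    return rearrange(out, "b h c (x y) -> b (h c) x y", h=heads, x=h, y=w)

dim, dim_head, heads = 32, 8, 4
to_qkv = torch.nn.Conv2d(dim, dim_head * heads * 3, 1, bias=False)
y = linear_attention(torch.randn(2, dim, 4, 16), to_qkv, heads=heads)  # (2, 32, 4, 16)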
scan {fasta} -p JASPAR2020_vertebrates -g hg38 > train_results_motifs.bed" + ) + df_results_seq_guime = pd.read_csv( + "train_results_motifs.bed", sep="\t", skiprows=5, header=None + ) + df_results_seq_guime["motifs"] = df_results_seq_guime[8].apply( + lambda x: x.split('motif_name "')[1].split('"')[0] + ) # if generate_heatmap: # generate_heatmap_motifs(df_results_seq_guime) - df_results_seq_guime[0] = df_results_seq_guime[0].apply(lambda x: "_".join(x.split("_")[:-1])) - df_results_seq_guime_count_out = df_results_seq_guime[[0, "motifs"]].drop_duplicates().groupby("motifs").count() + df_results_seq_guime[0] = df_results_seq_guime[0].apply( + lambda x: "_".join(x.split("_")[:-1]) + ) + df_results_seq_guime_count_out = ( + df_results_seq_guime[[0, "motifs"]] + .drop_duplicates() + .groupby("motifs") + .count() + ) plt.rcParams["figure.figsize"] = (30, 2) - df_results_seq_guime_count_out.sort_values(0, ascending=False).head(50)[0].plot.bar() + df_results_seq_guime_count_out.sort_values(0, ascending=False).head(50)[ + 0 + ].plot.bar() plt.title("Top 50 MOTIFS on component 0 ") plt.show() return df_results_seq_guime_count_out @@ -1068,7 +1194,9 @@ def __init__( self.df_test_in, self.df_train_shuffled_in, ) = self.create_train_groups() - self.number_of_sequences_to_motif_creation = number_of_sequences_to_motif_creation + self.number_of_sequences_to_motif_creation = ( + number_of_sequences_to_motif_creation + ) self.train = None self.test = None self.train_shuffle = None @@ -1083,22 +1211,30 @@ def read_csv(self): print(f"Limiting total sequences {self.limit_total_sequences}") df = df.sample(self.limit_total_sequences) - df.columns = [c.replace("seqname", "chr") for c in df.columns.values] # change this in simon original table + df.columns = [ + c.replace("seqname", "chr") for c in df.columns.values + ] # change this in simon original table return df def experiment(self): df_generate = self.data.copy() print(df_generate.head().columns) print(list(self.subset_components)) - if self.subset_components != None and type(self.subset_components) == list: + if ( + self.subset_components != None + and type(self.subset_components) == list + ): print(" or ".join([f"TAG == {c}" for c in self.subset_components])) - df_generate = df_generate.query(" or ".join([f'TAG == "{c}" ' for c in self.subset_components])).copy() + df_generate = df_generate.query( + " or ".join([f'TAG == "{c}" ' for c in self.subset_components]) + ).copy() print("Subseting...") if self.plot: print(df_generate.head()) ( - df_generate.groupby("TAG").count()["sequence"] / df_generate.groupby("TAG").count()["sequence"].sum() + df_generate.groupby("TAG").count()["sequence"] + / df_generate.groupby("TAG").count()["sequence"].sum() ).plot.bar() plt.title("Component % on Training Sample") plt.show() @@ -1120,13 +1256,17 @@ def create_train_groups(self): def get_motif(self): self.train = self.generate_motifs_and_fastas(self.df_train_in, "train") self.test = self.generate_motifs_and_fastas(self.df_test_in, "test") - self.train_shuffle = self.generate_motifs_and_fastas(self.df_train_shuffled_in, "train_shuffle") + self.train_shuffle = self.generate_motifs_and_fastas( + self.df_train_shuffled_in, "train_shuffle" + ) def generate_motifs_and_fastas(self, df, name): """return fasta anem , and dict with components motifs""" print("Generating Fasta and Motis:", name) print("---" * 10) - fasta_saved = self.save_fasta(df, f"{name}_{'_'.join([str(c) for c in self.subset_components])}") + fasta_saved = self.save_fasta( + df, f"{name}_{'_'.join([str(c) for c 
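Annotation: motifs_from_fasta, reflowed here, boils down to a small pandas pipeline over gimme scan's GFF-style output; isolated for reference, with skiprows=5 and the column layout taken from the calls in this diff:

import pandas as pd

def count_sequences_per_motif(bed_path: str) -> pd.DataFrame:
    # Column 8 carries attributes such as: motif_name "CTCF_MA0139.1" ; ...
    df = pd.read_csv(bed_path, sep="\t", skiprows=5, header=None)
    df["motifs"] = df[8].apply(lambda x: x.split('motif_name "')[1].split('"')[0])
    # Strip the trailing _<n> so every hit maps back to its source sequence id.
    df[0] = df[0].apply(lambda x: "_".join(x.split("_")[:-1]))
    # drop_duplicates keeps one (sequence, motif) row, so the groupby count is
    # "number of sequences containing the motif", not total hit counts.
    return df[[0, "motifs"]].drop_duplicates().groupby("motifs").count()

# e.g. count_sequences_per_motif("train_results_motifs.bed")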
in self.subset_components])}" + ) print("Generating Motifs (all seqs)") motif_all_components = motifs_from_fasta(fasta_saved, False) print("Generating Motifs per component") @@ -1144,7 +1284,10 @@ def save_fasta(self, df, name_fasta, to_seq_groups_comparison=False): save_fasta_file = open(fasta_final_name, "w") number_to_sample = df.shape[0] - if to_seq_groups_comparison and self.number_of_sequences_to_motif_creation: + if ( + to_seq_groups_comparison + and self.number_of_sequences_to_motif_creation + ): number_to_sample = self.number_of_sequences_to_motif_creation print(number_to_sample, "#seq used") @@ -1163,7 +1306,9 @@ def generate_motifs_components(self, df): for comp, v_comp in df.groupby("TAG"): print(comp) print("number of sequences used to generate the motifs") - name_c_fasta = self.save_fasta(v_comp, "temp_component", to_seq_groups_comparison=True) + name_c_fasta = self.save_fasta( + v_comp, "temp_component", to_seq_groups_comparison=True + ) final_comp_values[comp] = motifs_from_fasta(name_c_fasta, False) return final_comp_values @@ -1226,7 +1371,9 @@ def __init__( # set_seed(12345) ddp_kwargs = DistributedDataParallelKwargs(find_unused_parameters=True) - self.accelerator = Accelerator(kwargs_handlers=[ddp_kwargs], split_batches=True, log_with=["wandb"]) + self.accelerator = Accelerator( + kwargs_handlers=[ddp_kwargs], split_batches=True, log_with=["wandb"] + ) self.device = self.accelerator.device if load_saved_data: @@ -1248,24 +1395,46 @@ def __init__( # Splitting encode data into train/test/shuffle self.df_results_seq_guime_count_train = encode_data.train["motifs"] self.df_results_seq_guime_count_test = encode_data.test["motifs"] - self.df_results_seq_guime_count_shuffle = encode_data.train_shuffle["motifs"] - - self.final_comp_values_train = encode_data.train["motifs_per_components_dict"] - self.final_comp_values_test = encode_data.test["motifs_per_components_dict"] - self.final_comp_values_shuffle = encode_data.train_shuffle["motifs_per_components_dict"] + self.df_results_seq_guime_count_shuffle = encode_data.train_shuffle[ + "motifs" + ] + + self.final_comp_values_train = encode_data.train[ + "motifs_per_components_dict" + ] + self.final_comp_values_test = encode_data.test[ + "motifs_per_components_dict" + ] + self.final_comp_values_shuffle = encode_data.train_shuffle[ + "motifs_per_components_dict" + ] # Dataset used for sequences df = encode_data.train["dataset"] cell_components = df.sort_values("TAG")["TAG"].unique().tolist() - self.conditional_tag_to_numeric = {x: n + 1 for n, x in enumerate(df.TAG.unique())} - self.conditional_numeric_to_tag = {n + 1: x for n, x in enumerate(df.TAG.unique())} - conditional_tags_to_numeric = {n + 1: x for n, x in enumerate(df.TAG.unique())} + self.conditional_tag_to_numeric = { + x: n + 1 for n, x in enumerate(df.TAG.unique()) + } + self.conditional_numeric_to_tag = { + n + 1: x for n, x in enumerate(df.TAG.unique()) + } + conditional_tags_to_numeric = { + n + 1: x for n, x in enumerate(df.TAG.unique()) + } cell_types = sorted(conditional_numeric_to_tag.keys()) - x_train_cell_type = torch.from_numpy(df["TAG"].apply(lambda x: conditional_tag_to_numeric[x]).to_numpy()) + x_train_cell_type = torch.from_numpy( + df["TAG"].apply(lambda x: conditional_tag_to_numeric[x]).to_numpy() + ) # Creating X_train for sequence similarity dna_alphabet = ["A", "C", "T", "G"] - x_train_seq = np.array([one_hot_encode(x, dna_alphabet, 200) for x in tqdm(df["sequence"]) if "N" not in x]) + x_train_seq = np.array( + [ + one_hot_encode(x, dna_alphabet, 200) + 
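Annotation: one_hot_encode is called throughout this file but defined elsewhere; a hypothetical stand-in with the shape the surrounding code expects ((seq_len, 4), later transposed and mapped 0 -> -1) would look like:

import numpy as np

def one_hot_encode(seq, alphabet, seq_len):
    # Hypothetical stand-in for the helper used above (its definition is
    # outside this diff): rows are positions, columns follow `alphabet` order.
    index = {nuc: i for i, nuc in enumerate(alphabet)}
    out = np.zeros((seq_len, len(alphabet)), dtype=np.float32)
    for i, nuc in enumerate(seq[:seq_len]):
        out[i, index[nuc]] = 1.0
    return out

x = one_hot_encode("ACGT" * 50, ["A", "C", "T", "G"], 200)  # (200, 4)
# The trainer then takes x.T and sets zeros to -1 before batching.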
for x in tqdm(df["sequence"]) + if "N" not in x + ] + ) X_train = x_train_seq X_train = np.array([x.T.tolist() for x in X_train]) X_train[X_train == 0] = -1 @@ -1273,20 +1442,34 @@ def __init__( # Sequence dataset loading tf = T.Compose([T.ToTensor()]) - seq_dataset = SequenceDataset(seqs=X_train, c=x_train_cell_type, transform=tf) - train_dl = DataLoader(seq_dataset, batch_size, shuffle=True, num_workers=48, pin_memory=True) + seq_dataset = SequenceDataset( + seqs=X_train, c=x_train_cell_type, transform=tf + ) + train_dl = DataLoader( + seq_dataset, + batch_size, + shuffle=True, + num_workers=48, + pin_memory=True, + ) # Preparing model/optimizer/EMA/dataloader - self.model = Unet_lucas(dim=200, channels=1, dim_mults=(1, 2, 4), resnet_block_groups=4) + self.model = Unet_lucas( + dim=200, channels=1, dim_mults=(1, 2, 4), resnet_block_groups=4 + ) self.optimizer = Adam(self.model.parameters(), lr=1e-4) if self.accelerator.is_main_process: self.ema = EMA(0.995) - self.ema_model = copy.deepcopy(self.model).eval().requires_grad_(False) + self.ema_model = ( + copy.deepcopy(self.model).eval().requires_grad_(False) + ) self.start_epoch = 0 self.train_kl, self.test_kl, self.shuffle_kl = 1, 1, 1 self.seq_similarity = 0.38 - self.model, self.optimizer, self.train_dl = self.accelerator.prepare(self.model, self.optimizer, train_dl) + self.model, self.optimizer, self.train_dl = self.accelerator.prepare( + self.model, self.optimizer, train_dl + ) # Saving model def save(self, epoch, results_path): @@ -1315,7 +1498,9 @@ def load(self, model_path, model_name): # Recreating EMA if self.accelerator.is_main_process: self.ema = EMA(0.995) - self.ema_model = copy.deepcopy(self.model).eval().requires_grad_(False) + self.ema_model = ( + copy.deepcopy(self.model).eval().requires_grad_(False) + ) self.ema_model.load_state_dict(checkpoint_dict["ema_model"]) self.train_kl = checkpoint_dict["train_kl"] @@ -1324,7 +1509,9 @@ def load(self, model_path, model_name): self.seq_similarity = checkpoint_dict["seq_similarity"] # Continue training - self.model, self.optimizer = self.accelerator.prepare(self.model, self.optimizer) + self.model, self.optimizer = self.accelerator.prepare( + self.model, self.optimizer + ) self.train() def create_samples(self, model_path, model_name): @@ -1341,7 +1528,9 @@ def create_samples(self, model_path, model_name): sqrt_alphas_cumprod = torch.sqrt(alphas_cumprod) sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod) # calculations for posterior q(x_{t-1} | x_t, x_0) - posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod) + posterior_variance = ( + betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod) + ) # Recreating model checkpoint_dict = torch.load(model_path + model_name) @@ -1384,7 +1573,9 @@ def create_samples(self, model_path, model_name): additional_variables=additional_variables, number_of_sequences_sample_per_cell=self.num_sampling_to_compare_cells, ) - generate_heatmap(heat_new_sequences_shuffle, "DNADIFFUSION", "Shuffle") + generate_heatmap( + heat_new_sequences_shuffle, "DNADIFFUSION", "Shuffle" + ) def train(self): # define beta schedule @@ -1399,12 +1590,16 @@ def train(self): sqrt_alphas_cumprod = torch.sqrt(alphas_cumprod) sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod) # calculations for posterior q(x_{t-1} | x_t, x_0) - posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod) + posterior_variance = ( + betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod) + ) if 
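Annotation: the EMA(0.995)/ema_model pairing recreated in these hunks keeps a smoothed copy of the weights for sampling. The EMA class itself is outside the diff, but the averaging rule it applies each step is the usual one (warm-up handling, if any, omitted):

import copy
import torch

@torch.no_grad()
def ema_update(ema_model: torch.nn.Module, model: torch.nn.Module, beta: float = 0.995):
    # new_ema = beta * old_ema + (1 - beta) * current, parameter by parameter.
    for ema_p, p in zip(ema_model.parameters(), model.parameters()):
        ema_p.mul_(beta).add_(p, alpha=1.0 - beta)

model = torch.nn.Linear(4, 4)
ema_model = copy.deepcopy(model).eval().requires_grad_(False)
ema_update(ema_model, model)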
self.accelerator.is_main_process: self.accelerator.init_trackers( "dnadiffusion", - init_kwargs={"wandb": {"notes": "testing wandb accelerate script"}}, + init_kwargs={ + "wandb": {"notes": "testing wandb accelerate script"} + }, ) for epoch in tqdm(range(self.start_epoch, self.epochs)): @@ -1439,7 +1634,10 @@ def train(self): self.accelerator.wait_for_everyone() if self.accelerator.is_main_process: - self.ema.step_ema(self.ema_model, self.accelerator.unwrap_model(self.model)) + self.ema.step_ema( + self.ema_model, + self.accelerator.unwrap_model(self.model), + ) if (epoch % self.epochs_loss_show) == 0: if self.accelerator.is_main_process: @@ -1455,12 +1653,18 @@ def train(self): ) print(f" Epoch {epoch} Loss:", loss.item()) - if epoch != 0 and epoch % self.save_and_sample_every == 0 and self.accelerator.is_main_process: + if ( + epoch != 0 + and epoch % self.save_and_sample_every == 0 + and self.accelerator.is_main_process + ): self.model.eval() print("saving") sample_bs = 2 - sampled = torch.from_numpy(np.random.choice(cell_types, sample_bs)) + sampled = torch.from_numpy( + np.random.choice(cell_types, sample_bs) + ) random_classes = sampled.to(self.device) additional_variables = { "model": self.model, @@ -1477,15 +1681,25 @@ def train(self): int(self.num_sampling_to_compare_cells / 10), additional_variables=additional_variables, ) - self.train_kl = compare_motif_list(synt_df, self.df_results_seq_guime_count_train) - self.test_kl = compare_motif_list(synt_df, self.df_results_seq_guime_count_test) - self.shuffle_kl = compare_motif_list(synt_df, self.df_results_seq_guime_count_shuffle) + self.train_kl = compare_motif_list( + synt_df, self.df_results_seq_guime_count_train + ) + self.test_kl = compare_motif_list( + synt_df, self.df_results_seq_guime_count_test + ) + self.shuffle_kl = compare_motif_list( + synt_df, self.df_results_seq_guime_count_shuffle + ) print("Similarity", self.seq_similarity, "Similarity") print("KL_TRAIN", self.train_kl, "KL") print("KL_TEST", self.test_kl, "KL") print("KL_SHUFFLE", self.shuffle_kl, "KL") - if epoch != 0 and epoch % 500 == 0 and self.accelerator.is_main_process: + if ( + epoch != 0 + and epoch % 500 == 0 + and self.accelerator.is_main_process + ): model_path = f"./models/epoch_{epoch!s}_{self.model_name}.pt" self.save(epoch, model_path) @@ -1494,11 +1708,19 @@ def train(self): encode_data = np.load("./encode_data.npy", allow_pickle=True).item() df = encode_data.train["dataset"] cell_components = df.sort_values("TAG")["TAG"].unique().tolist() - conditional_tag_to_numeric = {x: n + 1 for n, x in enumerate(df.TAG.unique())} - conditional_numeric_to_tag = {n + 1: x for n, x in enumerate(df.TAG.unique())} - conditional_tags_to_numeric = {n + 1: x for n, x in enumerate(df.TAG.unique())} # check if this is changing order + conditional_tag_to_numeric = { + x: n + 1 for n, x in enumerate(df.TAG.unique()) + } + conditional_numeric_to_tag = { + n + 1: x for n, x in enumerate(df.TAG.unique()) + } + conditional_tags_to_numeric = { + n + 1: x for n, x in enumerate(df.TAG.unique()) + } # check if this is changing order cell_types = sorted(conditional_numeric_to_tag.keys()) - x_train_cell_type = torch.from_numpy(df["TAG"].apply(lambda x: conditional_tag_to_numeric[x]).to_numpy()) + x_train_cell_type = torch.from_numpy( + df["TAG"].apply(lambda x: conditional_tag_to_numeric[x]).to_numpy() + ) nucleotides = ["A", "C", "T", "G"] trainer = Trainer() diff --git a/notebooks/experiments/conditional_diffusion/previous_version/Conditional_Code_to_refactor_UNET_ANNOTATED_v3 
(2).ipynb b/notebooks/experiments/conditional_diffusion/previous_version/Conditional_Code_to_refactor_UNET_ANNOTATED_v3 (2).ipynb index 5e13244e..260c5c62 100644 --- a/notebooks/experiments/conditional_diffusion/previous_version/Conditional_Code_to_refactor_UNET_ANNOTATED_v3 (2).ipynb +++ b/notebooks/experiments/conditional_diffusion/previous_version/Conditional_Code_to_refactor_UNET_ANNOTATED_v3 (2).ipynb @@ -275,16 +275,30 @@ "def sampling_to_metric(number_of_samples=20, specific_group=False):\n", " # Sampling regions using the trained model\n", " final_sequences = []\n", - " for n_a in tqdm_notebook(range(number_of_samples)): # generating 20*10 sequences\n", + " for n_a in tqdm_notebook(\n", + " range(number_of_samples)\n", + " ): # generating 20*10 sequences\n", " # sampled_images = bit_diffusion.sample(batch_size = 4)\n", " sample_bs = 10\n", " if specific_group:\n", " sampled = torch.from_numpy(np.array([specific_group] * sample_bs))\n", " else:\n", - " sampled = torch.from_numpy(np.random.randint(0, len(cell_types), size=(sample_bs)))\n", + " sampled = torch.from_numpy(\n", + " np.random.randint(0, len(cell_types), size=(sample_bs))\n", + " )\n", " random_classes = torch.zeros((sample_bs, len(cell_types)))\n", - " random_classes = random_classes.scatter_(1, sampled.unsqueeze(dim=1), 1).float().cuda()\n", - " sampled_images = sample(model, classes=random_classes, image_size=image_size, batch_size=sample_bs, channels=1)\n", + " random_classes = (\n", + " random_classes.scatter_(1, sampled.unsqueeze(dim=1), 1)\n", + " .float()\n", + " .cuda()\n", + " )\n", + " sampled_images = sample(\n", + " model,\n", + " classes=random_classes,\n", + " image_size=image_size,\n", + " batch_size=sample_bs,\n", + " channels=1,\n", + " )\n", " # sampled_images = sampled_images\n", " for n_b, x in enumerate(sampled_images[-1]):\n", " # x = x[-1]\n", @@ -300,10 +314,21 @@ " save_motifs_syn.close()\n", " # Scan for motifs\n", " !gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs.bed\n", - " df_results_syn = pd.read_csv('syn_results_motifs.bed', sep='\\t', skiprows=5, header=None)\n", - " df_results_syn['motifs'] = df_results_syn[8].apply(lambda x: x.split('motif_name \"')[1].split('\"')[0])\n", - " df_results_syn[0] = df_results_syn[0].apply(lambda x: '_'.join(x.split('_')[:-1]))\n", - " df_motifs_count_syn = df_results_syn[[0, 'motifs']].drop_duplicates().groupby('motifs').count()\n", + " df_results_syn = pd.read_csv(\n", + " 'syn_results_motifs.bed', sep='\\t', skiprows=5, header=None\n", + " )\n", + " df_results_syn['motifs'] = df_results_syn[8].apply(\n", + " lambda x: x.split('motif_name \"')[1].split('\"')[0]\n", + " )\n", + " df_results_syn[0] = df_results_syn[0].apply(\n", + " lambda x: '_'.join(x.split('_')[:-1])\n", + " )\n", + " df_motifs_count_syn = (\n", + " df_results_syn[[0, 'motifs']]\n", + " .drop_duplicates()\n", + " .groupby('motifs')\n", + " .count()\n", + " )\n", " plt.rcParams[\"figure.figsize\"] = (30, 2)\n", " df_motifs_count_syn.sort_values(0, ascending=False).head(50)[0].plot.bar()\n", " plt.show()\n", @@ -324,7 +349,9 @@ "# Not using the total number of motifs but the count of the occurence aka: percentage of the sequences with a given motif.\n", "def compare_motif_list(df_motifs_a, df_motifs_b):\n", " # Using KL divergence to compare motifs lists distribution\n", - " set_all_mot = set(df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist())\n", + " set_all_mot = set(\n", + " df_motifs_a.index.values.tolist() + 
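Annotation: the p_sample hunks that follow (mirroring the script above) implement the DDPM reverse step the comments call "Equation 11 in the paper"; stripped of the extract bookkeeping it is:

import torch

@torch.no_grad()
def reverse_step(x, eps, betas_t, sqrt_one_minus_acp_t, sqrt_recip_alphas_t,
                 posterior_variance_t, last_step):
    # mu(x_t, t) = 1/sqrt(alpha_t) * (x_t - beta_t / sqrt(1 - alpha_bar_t) * eps)
    mean = sqrt_recip_alphas_t * (x - betas_t * eps / sqrt_one_minus_acp_t)
    if last_step:
        return mean  # t == 0: the final draw adds no noise
    # Algorithm 2, line 4: add scaled Gaussian noise on intermediate steps.
    return mean + posterior_variance_t.sqrt() * torch.randn_like(x)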
df_motifs_b.index.values.tolist()\n", + " )\n", " create_new_matrix = []\n", " for x in set_all_mot:\n", " list_in = []\n", @@ -341,10 +368,16 @@ "\n", " create_new_matrix.append(list_in)\n", "\n", - " df_motifs = pd.DataFrame(create_new_matrix, columns=['motif', 'motif_a', 'motif_b'])\n", + " df_motifs = pd.DataFrame(\n", + " create_new_matrix, columns=['motif', 'motif_a', 'motif_b']\n", + " )\n", "\n", - " df_motifs['Diffusion_seqs'] = df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", - " df_motifs['Training_seqs'] = df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " df_motifs['Diffusion_seqs'] = (\n", + " df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", + " )\n", + " df_motifs['Training_seqs'] = (\n", + " df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " )\n", " plt.rcParams[\"figure.figsize\"] = (3, 3)\n", " sns.regplot(x='Diffusion_seqs', y='Training_seqs', data=df_motifs)\n", " plt.xlabel('Diffusion Seqs')\n", @@ -353,7 +386,9 @@ " plt.show()\n", "\n", " display(df_motifs)\n", - " kl_pq = rel_entr(df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values)\n", + " kl_pq = rel_entr(\n", + " df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values\n", + " )\n", " return np.sum(kl_pq)\n", "\n", "\n", @@ -420,14 +455,19 @@ "@torch.no_grad()\n", "def p_sample(model, x, classes, t, t_index):\n", " betas_t = extract(betas, t, x.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x.shape\n", + " )\n", " # print (x.shape, 'x_shape')\n", " sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t, x.shape)\n", "\n", " # Equation 11 in the paper\n", " # Use our model (noise predictor) to predict the mean\n", " model_mean = sqrt_recip_alphas_t * (\n", - " x - betas_t * model(x, classes=classes, time=t) / sqrt_one_minus_alphas_cumprod_t\n", + " x\n", + " - betas_t\n", + " * model(x, classes=classes, time=t)\n", + " / sqrt_one_minus_alphas_cumprod_t\n", " )\n", "\n", " if t_index == 0:\n", @@ -453,8 +493,18 @@ " img = torch.randn(shape, device=device)\n", " imgs = []\n", "\n", - " for i in tqdm(reversed(range(0, timesteps)), desc='sampling loop time step', total=timesteps):\n", - " img = p_sample(model, x=img, classes=classes, t=torch.full((b,), i, device=device, dtype=torch.long), t_index=i)\n", + " for i in tqdm(\n", + " reversed(range(0, timesteps)),\n", + " desc='sampling loop time step',\n", + " total=timesteps,\n", + " ):\n", + " img = p_sample(\n", + " model,\n", + " x=img,\n", + " classes=classes,\n", + " t=torch.full((b,), i, device=device, dtype=torch.long),\n", + " t_index=i,\n", + " )\n", " imgs.append(img.cpu().numpy())\n", " return imgs\n", "\n", @@ -464,7 +514,9 @@ "\n", "@torch.no_grad()\n", "def sample(model, classes, image_size, batch_size=16, channels=3):\n", - " return p_sample_loop(model, classes=classes, shape=(batch_size, channels, 4, image_size))" + " return p_sample_loop(\n", + " model, classes=classes, shape=(batch_size, channels, 4, image_size)\n", + " )" ] }, { @@ -492,7 +544,9 @@ " \"\"\"\n", " steps = timesteps + 1\n", " x = torch.linspace(0, timesteps, steps)\n", - " alphas_cumprod = torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " alphas_cumprod = (\n", + " torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " )\n", " alphas_cumprod = alphas_cumprod / alphas_cumprod[0]\n", " betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1])\n", " return 
torch.clip(betas, 0.0001, 0.9999)\n", @@ -542,11 +596,16 @@ " noise = torch.randn_like(x_start)\n", "\n", " sqrt_alphas_cumprod_t = extract(sqrt_alphas_cumprod, t, x_start.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x_start.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x_start.shape\n", + " )\n", "\n", " # print (sqrt_alphas_cumprod_t , sqrt_one_minus_alphas_cumprod_t , t)\n", "\n", - " return sqrt_alphas_cumprod_t * x_start + sqrt_one_minus_alphas_cumprod_t * noise" + " return (\n", + " sqrt_alphas_cumprod_t * x_start\n", + " + sqrt_one_minus_alphas_cumprod_t * noise\n", + " )" ] }, { @@ -574,9 +633,13 @@ "def p_losses(denoise_model, x_start, t, classes, noise=None, loss_type=\"l1\"): #\n", " if noise is None:\n", " noise = torch.randn_like(x_start) # guass noise\n", - " x_noisy = q_sample(x_start=x_start, t=t, noise=noise) # this is the auto generated noise given t and Noise\n", + " x_noisy = q_sample(\n", + " x_start=x_start, t=t, noise=noise\n", + " ) # this is the auto generated noise given t and Noise\n", " # print('max_q_sample', x_noisy.max(), 'mean_q_sample',x_noisy.mean() )\n", - " predicted_noise = denoise_model(x_noisy, t, classes) # this is the predicted noise given the model and step t\n", + " predicted_noise = denoise_model(\n", + " x_noisy, t, classes\n", + " ) # this is the predicted noise given the model and step t\n", " # print('max_predicted', x_noisy.max(), 'mean_predicted',x_noisy.mean() )\n", "\n", " # #predicted is ok (clipped)\n", @@ -637,7 +700,9 @@ " device = time.device\n", " half_dim = self.dim // 2\n", " embeddings = math.log(10000) / (half_dim - 1)\n", - " embeddings = torch.exp(torch.arange(half_dim, device=device) * -embeddings)\n", + " embeddings = torch.exp(\n", + " torch.arange(half_dim, device=device) * -embeddings\n", + " )\n", " embeddings = time[:, None] * embeddings[None, :]\n", " embeddings = torch.cat((embeddings.sin(), embeddings.cos()), dim=-1)\n", " return embeddings\n", @@ -711,7 +776,12 @@ " def __init__(self):\n", " super().__init__()\n", "\n", - " self.res = nn.Sequential(ResBlock(1, 2, 1), ResBlock(1, 2, 1), ResBlock(1, 2, 1), ResBlock(1, 2, 1))\n", + " self.res = nn.Sequential(\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " )\n", "\n", " self.conv = nn.Sequential(\n", " ConvBlock_2d(in_channels=1, out_channels=2),\n", @@ -730,7 +800,9 @@ " self.fc = nn.Sequential(\n", " nn.Linear(800, 800),\n", " # nn.GELU(),\n", - " nn.BatchNorm1d(800), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", + " nn.BatchNorm1d(\n", + " 800\n", + " ), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", " # nn.Linear(400, 400),\n", " # nn.BatchNorm1d(400),\n", " # nn.GELU(),\n", @@ -740,7 +812,9 @@ " self.fc2 = nn.Sequential(\n", " nn.Linear(400, 800),\n", " # nn.GELU(),\n", - " nn.BatchNorm1d(800), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", + " nn.BatchNorm1d(\n", + " 800\n", + " ), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", " # nn.Linear(400, 400),\n", " # nn.GELU(),\n", " # nn.BatchNorm1d(400),\n", @@ -908,7 +982,8 @@ "\n", "def Upsample(dim, dim_out=None):\n", " return nn.Sequential(\n", - " nn.Upsample(scale_factor=2, mode='nearest'), nn.Conv2d(dim, default(dim_out, dim), 3, padding=1)\n", + " nn.Upsample(scale_factor=2, mode='nearest'),\n", + " nn.Conv2d(dim, default(dim_out, dim), 3, padding=1),\n", " )\n", "\n", "\n", @@ -986,11 +1061,17 @@ "class 
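Annotation: SinusoidalPositionEmbeddings, re-wrapped above, builds the standard transformer-style time embedding; the whole construction fits in a few lines:

import math
import torch

def sinusoidal_time_embedding(time: torch.Tensor, dim: int) -> torch.Tensor:
    # Geometric ladder of frequencies from 1 down to 1/10000, then
    # concatenated sin/cos features, exactly as in the hunk above.
    half_dim = dim // 2
    freqs = torch.exp(torch.arange(half_dim, device=time.device)
                      * -(math.log(10000) / (half_dim - 1)))
    args = time[:, None].float() * freqs[None, :]
    return torch.cat((args.sin(), args.cos()), dim=-1)

emb = sinusoidal_time_embedding(torch.arange(8), dim=32)  # (8, 32)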
ResnetBlock(nn.Module):\n", " def __init__(self, dim, dim_out, *, time_emb_dim=None, groups=8):\n", " super().__init__()\n", - " self.mlp = nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) if exists(time_emb_dim) else None\n", + " self.mlp = (\n", + " nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2))\n", + " if exists(time_emb_dim)\n", + " else None\n", + " )\n", "\n", " self.block1 = Block(dim, dim_out, groups=groups)\n", " self.block2 = Block(dim_out, dim_out, groups=groups)\n", - " self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " self.res_conv = (\n", + " nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " )\n", "\n", " def forward(self, x, time_emb=None):\n", " scale_shift = None\n", @@ -1010,8 +1091,22 @@ "\n", "\n", "class ResnetBlockClassConditioned(ResnetBlock):\n", - " def __init__(self, dim, dim_out, *, num_classes, class_embed_dim, time_emb_dim=None, groups=8):\n", - " super().__init__(dim=dim + class_embed_dim, dim_out=dim_out, time_emb_dim=time_emb_dim, groups=groups)\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " dim_out,\n", + " *,\n", + " num_classes,\n", + " class_embed_dim,\n", + " time_emb_dim=None,\n", + " groups=8,\n", + " ):\n", + " super().__init__(\n", + " dim=dim + class_embed_dim,\n", + " dim_out=dim_out,\n", + " time_emb_dim=time_emb_dim,\n", + " groups=groups,\n", + " )\n", " self.class_mlp = EmbedFC(num_classes, class_embed_dim)\n", "\n", " def forward(self, x, time_emb=None, c=None, mask=None):\n", @@ -1037,12 +1132,17 @@ " hidden_dim = dim_head * heads\n", " self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False)\n", "\n", - " self.to_out = nn.Sequential(nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim))\n", + " self.to_out = nn.Sequential(\n", + " nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim)\n", + " )\n", "\n", " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q = q.softmax(dim=-2)\n", " k = k.softmax(dim=-1)\n", @@ -1053,7 +1153,9 @@ " context = torch.einsum('b h d n, b h e n -> b h d e', k, v)\n", "\n", " out = torch.einsum('b h d e, b h d n -> b h e n', context, q)\n", - " out = rearrange(out, 'b h c (x y) -> b (h c) x y', h=self.heads, x=h, y=w)\n", + " out = rearrange(\n", + " out, 'b h c (x y) -> b (h c) x y', h=self.heads, x=h, y=w\n", + " )\n", " return self.to_out(out)\n", "\n", "\n", @@ -1069,7 +1171,10 @@ " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q, k = map(l2norm, (q, k))\n", "\n", @@ -1107,7 +1212,9 @@ "\n", "\n", "def log_snr_to_alpha_sigma(log_snr):\n", - " return torch.sqrt(torch.sigmoid(log_snr)), torch.sqrt(torch.sigmoid(-log_snr))\n", + " return torch.sqrt(torch.sigmoid(log_snr)), torch.sqrt(\n", + " torch.sigmoid(-log_snr)\n", + " )\n", "\n", "\n", "# ClASS CHANGED ADD CONDITIONING\n", @@ -1161,7 +1268,10 @@ " fourier_dim = learned_sinusoidal_dim + 1\n", "\n", " self.time_mlp = nn.Sequential(\n", - " sinu_pos_emb, nn.Linear(fourier_dim, time_dim), nn.GELU(), nn.Linear(time_dim, time_dim)\n", + " 
sinu_pos_emb,\n", + " nn.Linear(fourier_dim, time_dim),\n", + " nn.GELU(),\n", + " nn.Linear(time_dim, time_dim),\n", " )\n", "\n", " # layers\n", @@ -1179,7 +1289,9 @@ " block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n", " block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n", " Residual(PreNorm(dim_in, LinearAttention(dim_in))),\n", - " Downsample(dim_in, dim_out) if not is_last else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n", + " Downsample(dim_in, dim_out)\n", + " if not is_last\n", + " else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n", " ]\n", " )\n", " )\n", @@ -1195,10 +1307,16 @@ " self.ups.append(\n", " nn.ModuleList(\n", " [\n", - " block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n", - " block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n", + " block_klass(\n", + " dim_out + dim_in, dim_out, time_emb_dim=time_dim\n", + " ),\n", + " block_klass(\n", + " dim_out + dim_in, dim_out, time_emb_dim=time_dim\n", + " ),\n", " Residual(PreNorm(dim_out, LinearAttention(dim_out))),\n", - " Upsample(dim_out, dim_in) if not is_last else nn.Conv2d(dim_out, dim_in, 3, padding=1),\n", + " Upsample(dim_out, dim_in)\n", + " if not is_last\n", + " else nn.Conv2d(dim_out, dim_in, 3, padding=1),\n", " ]\n", " )\n", " )\n", @@ -1319,7 +1437,10 @@ } ], "source": [ - "(df.groupby('component').count()['raw_sequence'] / df.groupby('component').count()['raw_sequence'].sum()).plot.bar()\n", + "(\n", + " df.groupby('component').count()['raw_sequence']\n", + " / df.groupby('component').count()['raw_sequence'].sum()\n", + ").plot.bar()\n", "plt.title('Component % on Training Sample')" ] }, @@ -1759,14 +1880,21 @@ "source": [ "def generate_heatmap_motifs(df_results_seq_matrix):\n", " print('Generating Matrix....')\n", - " all_motifs = {m_init: 0 for m_init in set(df_results_seq_matrix['motifs'].values)}\n", - " sequen_dic = {seq_init: all_motifs.copy() for seq_init in set(df_results_seq_matrix[0].values)}\n", + " all_motifs = {\n", + " m_init: 0 for m_init in set(df_results_seq_matrix['motifs'].values)\n", + " }\n", + " sequen_dic = {\n", + " seq_init: all_motifs.copy()\n", + " for seq_init in set(df_results_seq_matrix[0].values)\n", + " }\n", "\n", " for k, v in df_results_seq_matrix.groupby([0, 'motifs']):\n", " sequen_dic[k[0]][k[1]] = v.count()[0]\n", "\n", " df_motifs_matrix = pd.DataFrame(sequen_dic).T\n", - " df_motifs_matrix['comps'] = [int(comp_i.split('_')[-1]) for comp_i in df_motifs_matrix.index.values]\n", + " df_motifs_matrix['comps'] = [\n", + " int(comp_i.split('_')[-1]) for comp_i in df_motifs_matrix.index.values\n", + " ]\n", " df_motifs_matrix = df_motifs_matrix.sort_values(['comps'])\n", " comp_rows = df_motifs_matrix['comps'].values\n", " del df_motifs_matrix['comps']\n", @@ -1775,7 +1903,10 @@ " pal_hls = sns.hls_palette(16).as_hex()\n", " {e: c for e, c in enumerate(pal_hls[:])}\n", " sns.clustermap(\n", - " df_motifs_matrix, cmap='Reds', row_cluster=True, row_colors=[pal_hls[c_extract] for c_extract in comp_rows]\n", + " df_motifs_matrix,\n", + " cmap='Reds',\n", + " row_cluster=True,\n", + " row_colors=[pal_hls[c_extract] for c_extract in comp_rows],\n", " )\n", " plt.show()" ] @@ -1798,15 +1929,28 @@ "def motifs_from_fasta(fasta, generate_heatmap=True):\n", " print('Computing Motifs....')\n", " !gimme scan $fasta -p JASPAR2020_vertebrates -g hg38 > train_results_motifs.bed\n", - " df_results_seq_guime = pd.read_csv('train_results_motifs.bed', sep='\\t', skiprows=5, header=None)\n", - " df_results_seq_guime['motifs'] = 
df_results_seq_guime[8].apply(lambda x: x.split('motif_name \"')[1].split('\"')[0])\n", + " df_results_seq_guime = pd.read_csv(\n", + " 'train_results_motifs.bed', sep='\\t', skiprows=5, header=None\n", + " )\n", + " df_results_seq_guime['motifs'] = df_results_seq_guime[8].apply(\n", + " lambda x: x.split('motif_name \"')[1].split('\"')[0]\n", + " )\n", " if generate_heatmap:\n", " generate_heatmap_motifs(df_results_seq_guime)\n", "\n", - " df_results_seq_guime[0] = df_results_seq_guime[0].apply(lambda x: '_'.join(x.split('_')[:-1]))\n", - " df_results_seq_guime_count_out = df_results_seq_guime[[0, 'motifs']].drop_duplicates().groupby('motifs').count()\n", + " df_results_seq_guime[0] = df_results_seq_guime[0].apply(\n", + " lambda x: '_'.join(x.split('_')[:-1])\n", + " )\n", + " df_results_seq_guime_count_out = (\n", + " df_results_seq_guime[[0, 'motifs']]\n", + " .drop_duplicates()\n", + " .groupby('motifs')\n", + " .count()\n", + " )\n", " plt.rcParams[\"figure.figsize\"] = (30, 2)\n", - " df_results_seq_guime_count_out.sort_values(0, ascending=False).head(50)[0].plot.bar()\n", + " df_results_seq_guime_count_out.sort_values(0, ascending=False).head(50)[\n", + " 0\n", + " ].plot.bar()\n", " plt.title('Top 50 MOTIFS on component 0 ')\n", " plt.show()\n", " return df_results_seq_guime_count_out\n", @@ -2436,7 +2580,9 @@ "for comp, v_comp in df.groupby('component'):\n", " print(comp)\n", " save_fasta(v_comp)\n", - " final_comp_values[comp] = motifs_from_fasta('fastq_train_component.fasta', False)" + " final_comp_values[comp] = motifs_from_fasta(\n", + " 'fastq_train_component.fasta', False\n", + " )" ] }, { @@ -2518,7 +2664,11 @@ "source": [ "dna_alphabet = ['A', 'C', 'T', 'G']\n", "x_train_seq = np.array(\n", - " [one_hot_encode(x, dna_alphabet, 200) for x in tqdm_notebook(df['raw_sequence']) if 'N' not in x]\n", + " [\n", + " one_hot_encode(x, dna_alphabet, 200)\n", + " for x in tqdm_notebook(df['raw_sequence'])\n", + " if 'N' not in x\n", + " ]\n", ")\n", "X_train = x_train_seq\n", "X_train = np.array([x.T.tolist() for x in X_train])\n", @@ -2587,7 +2737,9 @@ "# changes to conditioning\n", "# conditional training init\n", "cell_types = sorted(list(df.component.unique()))\n", - "x_train_cell_type = F.one_hot(torch.from_numpy(df[\"component\"].to_numpy()), len(cell_types))\n", + "x_train_cell_type = F.one_hot(\n", + " torch.from_numpy(df[\"component\"].to_numpy()), len(cell_types)\n", + ")\n", "x_train_cell_type.shape" ] }, @@ -2605,7 +2757,9 @@ "tf = T.Compose([T.ToTensor()])\n", "batch_size = 64\n", "seq_dataset = SequenceDataset(seqs=X_train, c=x_train_cell_type, transform=tf)\n", - "train_dl = DataLoader(seq_dataset, batch_size, shuffle=True, num_workers=2, pin_memory=True)" + "train_dl = DataLoader(\n", + " seq_dataset, batch_size, shuffle=True, num_workers=2, pin_memory=True\n", + ")" ] }, { @@ -2633,7 +2787,9 @@ "# sqrt_one_minus_alphas_cumprod = torch.sqrt(1. 
- alphas_cumprod)\n", "sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod)\n", "# calculations for posterior q(x_{t-1} | x_t, x_0)\n", - "posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + "posterior_variance = (\n", + " betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + ")\n", "\n", "\n", "def extract(a, t, x_shape):\n", @@ -3260,7 +3416,13 @@ "image_size = 200\n", "channels = 1\n", "\n", - "model = Unet_lucas(dim=200, channels=1, dim_mults=(1, 2, 4), resnet_block_groups=1, num_classes=len(cell_types)).cuda()\n", + "model = Unet_lucas(\n", + " dim=200,\n", + " channels=1,\n", + " dim_mults=(1, 2, 4),\n", + " resnet_block_groups=1,\n", + " num_classes=len(cell_types),\n", + ").cuda()\n", "\n", "# model = Classifier() # 2d conv\n", "\n", @@ -6694,7 +6856,9 @@ " # batch_size = batch[\"pixel_values\"].shape[0]\n", " # batch = batch[\"pixel_values\"].to(device)\n", " # Algorithm 1 line 3: sample t uniformally for every example in the batch\n", - " t = torch.randint(0, timesteps, (batch_size,), device=device).long() # sampling a t to generate t and t+1\n", + " t = torch.randint(\n", + " 0, timesteps, (batch_size,), device=device\n", + " ).long() # sampling a t to generate t and t+1\n", " # print (t.dtype)\n", " # loss = p_losses(model, batch, t, loss_type=\"l2\")\n", " loss = p_losses(model, x, t, y, loss_type=\"huber\")\n", @@ -6709,13 +6873,27 @@ " print('saving')\n", " milestone = step // save_and_sample_every\n", " sample_bs = 2\n", - " sampled = torch.from_numpy(np.random.randint(0, len(cell_types), size=(sample_bs)))\n", + " sampled = torch.from_numpy(\n", + " np.random.randint(0, len(cell_types), size=(sample_bs))\n", + " )\n", " random_classes = torch.zeros((sample_bs, len(cell_types)))\n", - " random_classes = random_classes.scatter_(1, sampled.unsqueeze(dim=1), 1).float().cuda()\n", + " random_classes = (\n", + " random_classes.scatter_(1, sampled.unsqueeze(dim=1), 1)\n", + " .float()\n", + " .cuda()\n", + " )\n", "\n", - " samples = sample(model, classes=random_classes, image_size=image_size, batch_size=2, channels=1)\n", + " samples = sample(\n", + " model,\n", + " classes=random_classes,\n", + " image_size=image_size,\n", + " batch_size=2,\n", + " channels=1,\n", + " )\n", " n_print = 0\n", - " for image, class_show in zip(samples[-1], random_classes.argmax(1).tolist()):\n", + " for image, class_show in zip(\n", + " samples[-1], random_classes.argmax(1).tolist()\n", + " ):\n", " if n_print < 4:\n", " plt.rcParams[\"figure.figsize\"] = (20, 1)\n", " pd_seq = pd.DataFrame(image.reshape(4, 200))\n", @@ -16945,7 +17123,14 @@ " frames = [Image.open(f'image_temp/{image}') for image in images_reverse]\n", " # print (len(frames))\n", " frame_one = frames[0]\n", - " frame_one.save(\"diff_first.gif\", format=\"GIF\", append_images=frames, save_all=True, duration=30, loop=0)" + " frame_one.save(\n", + " \"diff_first.gif\",\n", + " format=\"GIF\",\n", + " append_images=frames,\n", + " save_all=True,\n", + " duration=30,\n", + " loop=0,\n", + " )" ] }, { diff --git a/notebooks/experiments/conditional_diffusion/vq_vae_accelerate_diffusion_conditional_4_cells.ipynb b/notebooks/experiments/conditional_diffusion/vq_vae_accelerate_diffusion_conditional_4_cells.ipynb index ca782159..c756f703 100644 --- a/notebooks/experiments/conditional_diffusion/vq_vae_accelerate_diffusion_conditional_4_cells.ipynb +++ b/notebooks/experiments/conditional_diffusion/vq_vae_accelerate_diffusion_conditional_4_cells.ipynb @@ -124,7 +124,9 @@ "\n", 
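Annotation: both notebooks and the script recompute the same schedule constants before training and sampling; gathered once for reference, with the cosine schedule and the posterior variance of q(x_{t-1} | x_t, x_0) spelled out:

import torch
import torch.nn.functional as F

def diffusion_constants(timesteps: int = 200, s: float = 0.008):
    # Cosine schedule (Nichol & Dhariwal, 2021), as in cosine_beta_schedule.
    x = torch.linspace(0, timesteps, timesteps + 1)
    alphas_cumprod = torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2
    alphas_cumprod = alphas_cumprod / alphas_cumprod[0]
    betas = torch.clip(1 - (alphas_cumprod[1:] / alphas_cumprod[:-1]), 0.0001, 0.9999)
    alphas = 1.0 - betas
    alphas_cumprod = torch.cumprod(alphas, dim=0)
    alphas_cumprod_prev = F.pad(alphas_cumprod[:-1], (1, 0), value=1.0)
    # Variance of the true posterior q(x_{t-1} | x_t, x_0):
    #   beta_t * (1 - alpha_bar_{t-1}) / (1 - alpha_bar_t)
    posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)
    return betas, alphas_cumprod, posterior_variance

betas, alphas_cumprod, posterior_variance = diffusion_constants()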
"BATCH_SIZE = 240\n", "LIMIT_TOTAL_SEQUENCES = False # False # OR False to use everything\n", - "VQ_VAE_MODEL_NAME = '../../../dnadiffusion/data/model4cells_train_split_3_50_dims.pkl'\n", + "VQ_VAE_MODEL_NAME = (\n", + " '../../../dnadiffusion/data/model4cells_train_split_3_50_dims.pkl'\n", + ")\n", "MODEL_NAME = 'model_48k_sequences_per_group_K562_hESCT0_HepG2_GM12878_12k'\n", "NUMBER_OF_GPUS = 8\n", "\n", @@ -440,7 +442,9 @@ " self.step = 0\n", "\n", " def update_model_average(self, ma_model, current_model):\n", - " for current_params, ma_params in zip(current_model.parameters(), ma_model.parameters()):\n", + " for current_params, ma_params in zip(\n", + " current_model.parameters(), ma_model.parameters()\n", + " ):\n", " old_weight, up_weight = ma_params.data, current_params.data\n", " ma_params.data = self.update_average(old_weight, up_weight)\n", "\n", @@ -638,12 +642,18 @@ "outputs": [], "source": [ "def sampling_to_metric(\n", - " number_of_samples=20, specific_group=False, group_number=None, cond_weight_to_metric=0, additional_variables=None\n", + " number_of_samples=20,\n", + " specific_group=False,\n", + " group_number=None,\n", + " cond_weight_to_metric=0,\n", + " additional_variables=None,\n", "):\n", " # Sampling regions using the trained model\n", " final_sequences = []\n", " model_vq = None\n", - " for n_a in tqdm_notebook(range(number_of_samples)): # generating number_of_samples *10 sequences\n", + " for n_a in tqdm_notebook(\n", + " range(number_of_samples)\n", + " ): # generating number_of_samples *10 sequences\n", " # sampled_images = bit_diffusion.sample(batch_size = 4)\n", " print(n_a)\n", " sample_bs = 10\n", @@ -699,10 +709,21 @@ " save_motifs_syn.close()\n", " # Scan for motifs\n", " !gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs.bed\n", - " df_results_syn = pd.read_csv('syn_results_motifs.bed', sep='\\t', skiprows=5, header=None)\n", - " df_results_syn['motifs'] = df_results_syn[8].apply(lambda x: x.split('motif_name \"')[1].split('\"')[0])\n", - " df_results_syn[0] = df_results_syn[0].apply(lambda x: '_'.join(x.split('_')[:-1]))\n", - " df_motifs_count_syn = df_results_syn[[0, 'motifs']].drop_duplicates().groupby('motifs').count()\n", + " df_results_syn = pd.read_csv(\n", + " 'syn_results_motifs.bed', sep='\\t', skiprows=5, header=None\n", + " )\n", + " df_results_syn['motifs'] = df_results_syn[8].apply(\n", + " lambda x: x.split('motif_name \"')[1].split('\"')[0]\n", + " )\n", + " df_results_syn[0] = df_results_syn[0].apply(\n", + " lambda x: '_'.join(x.split('_')[:-1])\n", + " )\n", + " df_motifs_count_syn = (\n", + " df_results_syn[[0, 'motifs']]\n", + " .drop_duplicates()\n", + " .groupby('motifs')\n", + " .count()\n", + " )\n", " plt.rcParams[\"figure.figsize\"] = (30, 2)\n", " df_motifs_count_syn.sort_values(0, ascending=False).head(50)[0].plot.bar()\n", " plt.show()\n", @@ -764,7 +785,9 @@ "# Not using the total number of motifs but the count of the occurence aka: percentage of the sequences with a given motif.\n", "def compare_motif_list(df_motifs_a, df_motifs_b):\n", " # Using KL divergence to compare motifs lists distribution\n", - " set_all_mot = set(df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist())\n", + " set_all_mot = set(\n", + " df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist()\n", + " )\n", " create_new_matrix = []\n", " for x in set_all_mot:\n", " list_in = []\n", @@ -781,10 +804,16 @@ "\n", " create_new_matrix.append(list_in)\n", "\n", - " df_motifs = 
pd.DataFrame(create_new_matrix, columns=['motif', 'motif_a', 'motif_b'])\n", + " df_motifs = pd.DataFrame(\n", + " create_new_matrix, columns=['motif', 'motif_a', 'motif_b']\n", + " )\n", "\n", - " df_motifs['Diffusion_seqs'] = df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", - " df_motifs['Training_seqs'] = df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " df_motifs['Diffusion_seqs'] = (\n", + " df_motifs['motif_a'] / df_motifs['motif_a'].sum()\n", + " )\n", + " df_motifs['Training_seqs'] = (\n", + " df_motifs['motif_b'] / df_motifs['motif_b'].sum()\n", + " )\n", " plt.rcParams[\"figure.figsize\"] = (3, 3)\n", " sns.regplot(x='Diffusion_seqs', y='Training_seqs', data=df_motifs)\n", " plt.xlabel('Diffusion Seqs')\n", @@ -793,7 +822,9 @@ " plt.show()\n", "\n", " display(df_motifs)\n", - " kl_pq = rel_entr(df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values)\n", + " kl_pq = rel_entr(\n", + " df_motifs['Diffusion_seqs'].values, df_motifs['Training_seqs'].values\n", + " )\n", " return np.sum(kl_pq)\n", "\n", "\n", @@ -854,8 +885,12 @@ " self._embedding_dim = embedding_dim\n", " self._num_embeddings = num_embeddings\n", "\n", - " self._embedding = nn.Embedding(self._num_embeddings, self._embedding_dim)\n", - " self._embedding.weight.data.uniform_(-1 / self._num_embeddings, 1 / self._num_embeddings)\n", + " self._embedding = nn.Embedding(\n", + " self._num_embeddings, self._embedding_dim\n", + " )\n", + " self._embedding.weight.data.uniform_(\n", + " -1 / self._num_embeddings, 1 / self._num_embeddings\n", + " )\n", " self._commitment_cost = commitment_cost\n", "\n", " def forward(self, inputs):\n", @@ -875,11 +910,17 @@ "\n", " # Encoding\n", " encoding_indices = torch.argmin(distances, dim=1).unsqueeze(1)\n", - " encodings = torch.zeros(encoding_indices.shape[0], self._num_embeddings, device=inputs.device)\n", + " encodings = torch.zeros(\n", + " encoding_indices.shape[0],\n", + " self._num_embeddings,\n", + " device=inputs.device,\n", + " )\n", " encodings.scatter_(1, encoding_indices, 1)\n", "\n", " # Quantize and unflatten\n", - " quantized = torch.matmul(encodings, self._embedding.weight).view(input_shape)\n", + " quantized = torch.matmul(encodings, self._embedding.weight).view(\n", + " input_shape\n", + " )\n", "\n", " # Loss\n", " e_latent_loss = F.mse_loss(quantized.detach(), inputs)\n", @@ -888,10 +929,18 @@ "\n", " quantized = inputs + (quantized - inputs).detach()\n", " avg_probs = torch.mean(encodings, dim=0)\n", - " perplexity = torch.exp(-torch.sum(avg_probs * torch.log(avg_probs + 1e-10)))\n", + " perplexity = torch.exp(\n", + " -torch.sum(avg_probs * torch.log(avg_probs + 1e-10))\n", + " )\n", "\n", " # convert quantized from BHWC -> BCHW\n", - " return loss, quantized.permute(0, 3, 1, 2).contiguous(), perplexity, encodings, encoding_indices\n", + " return (\n", + " loss,\n", + " quantized.permute(0, 3, 1, 2).contiguous(),\n", + " perplexity,\n", + " encodings,\n", + " encoding_indices,\n", + " )\n", "\n", "\n", "class VectorQuantizerEMA(nn.Module):\n", @@ -927,18 +976,29 @@ " space, with shape (batch_size, 1).\n", " \"\"\"\n", "\n", - " def __init__(self, num_embeddings, embedding_dim, commitment_cost, decay, epsilon=1e-5):\n", + " def __init__(\n", + " self,\n", + " num_embeddings,\n", + " embedding_dim,\n", + " commitment_cost,\n", + " decay,\n", + " epsilon=1e-5,\n", + " ):\n", " super(VectorQuantizerEMA, self).__init__()\n", "\n", " self._embedding_dim = embedding_dim\n", " self._num_embeddings = num_embeddings\n", "\n", - " self._embedding = 
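Annotation: the VectorQuantizer.forward being re-wrapped here (and its EMA variant just below) pivots on the straight-through estimator. Condensed into one function, with the exact loss weighting assumed from standard VQ-VAE since it falls between hunks:

import torch
import torch.nn.functional as F

def vq_nearest_code(inputs, codebook, commitment_cost=0.25):
    # inputs: (N, D) flattened latents; codebook: (K, D) embedding weights.
    distances = (inputs.pow(2).sum(1, keepdim=True)
                 - 2 * inputs @ codebook.t()
                 + codebook.pow(2).sum(1))
    idx = distances.argmin(dim=1)
    quantized = codebook[idx]
    # Codebook term pulls codes toward latents; commitment term does the reverse.
    loss = (F.mse_loss(quantized, inputs.detach())
            + commitment_cost * F.mse_loss(quantized.detach(), inputs))
    # Straight-through estimator: the forward pass uses the code, the backward
    # pass treats quantization as identity so gradients reach the encoder.
    quantized = inputs + (quantized - inputs).detach()
    # Perplexity = exp(entropy of code usage); low values signal codebook collapse.
    avg_probs = F.one_hot(idx, codebook.shape[0]).float().mean(0)
    perplexity = torch.exp(-torch.sum(avg_probs * torch.log(avg_probs + 1e-10)))
    return quantized, loss, perplexity

q, loss, ppl = vq_nearest_code(torch.randn(16, 8), torch.randn(32, 8, requires_grad=True))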
nn.Embedding(self._num_embeddings, self._embedding_dim)\n", + " self._embedding = nn.Embedding(\n", + " self._num_embeddings, self._embedding_dim\n", + " )\n", " self._embedding.weight.data.normal_()\n", " self._commitment_cost = commitment_cost\n", "\n", " self.register_buffer('_ema_cluster_size', torch.zeros(num_embeddings))\n", - " self._ema_w = nn.Parameter(torch.Tensor(num_embeddings, self._embedding_dim))\n", + " self._ema_w = nn.Parameter(\n", + " torch.Tensor(num_embeddings, self._embedding_dim)\n", + " )\n", " self._ema_w.data.normal_()\n", "\n", " self._decay = decay\n", @@ -961,26 +1021,40 @@ "\n", " # Encoding\n", " encoding_indices = torch.argmin(distances, dim=1).unsqueeze(1)\n", - " encodings = torch.zeros(encoding_indices.shape[0], self._num_embeddings, device=inputs.device)\n", + " encodings = torch.zeros(\n", + " encoding_indices.shape[0],\n", + " self._num_embeddings,\n", + " device=inputs.device,\n", + " )\n", " encodings.scatter_(1, encoding_indices, 1)\n", "\n", " # Quantize and unflatten\n", - " quantized = torch.matmul(encodings, self._embedding.weight).view(input_shape)\n", + " quantized = torch.matmul(encodings, self._embedding.weight).view(\n", + " input_shape\n", + " )\n", "\n", " # Use EMA to update the embedding vectors\n", " if self.training:\n", - " self._ema_cluster_size = self._ema_cluster_size * self._decay + (1 - self._decay) * torch.sum(encodings, 0)\n", + " self._ema_cluster_size = self._ema_cluster_size * self._decay + (\n", + " 1 - self._decay\n", + " ) * torch.sum(encodings, 0)\n", "\n", " # Laplace smoothing of the cluster size\n", " n = torch.sum(self._ema_cluster_size.data)\n", " self._ema_cluster_size = (\n", - " (self._ema_cluster_size + self._epsilon) / (n + self._num_embeddings * self._epsilon) * n\n", + " (self._ema_cluster_size + self._epsilon)\n", + " / (n + self._num_embeddings * self._epsilon)\n", + " * n\n", " )\n", "\n", " dw = torch.matmul(encodings.t(), flat_input)\n", - " self._ema_w = nn.Parameter(self._ema_w * self._decay + (1 - self._decay) * dw)\n", + " self._ema_w = nn.Parameter(\n", + " self._ema_w * self._decay + (1 - self._decay) * dw\n", + " )\n", "\n", - " self._embedding.weight = nn.Parameter(self._ema_w / self._ema_cluster_size.unsqueeze(1))\n", + " self._embedding.weight = nn.Parameter(\n", + " self._ema_w / self._ema_cluster_size.unsqueeze(1)\n", + " )\n", "\n", " # Loss\n", " e_latent_loss = F.mse_loss(quantized.detach(), inputs)\n", @@ -989,10 +1063,18 @@ " # Straight Through Estimator\n", " quantized = inputs + (quantized - inputs).detach()\n", " avg_probs = torch.mean(encodings, dim=0)\n", - " perplexity = torch.exp(-torch.sum(avg_probs * torch.log(avg_probs + 1e-10)))\n", + " perplexity = torch.exp(\n", + " -torch.sum(avg_probs * torch.log(avg_probs + 1e-10))\n", + " )\n", "\n", " # convert quantized from BHWC -> BCHW\n", - " return loss, quantized.permute(0, 3, 1, 2).contiguous(), perplexity, encodings, encoding_indices\n", + " return (\n", + " loss,\n", + " quantized.permute(0, 3, 1, 2).contiguous(),\n", + " perplexity,\n", + " encodings,\n", + " encoding_indices,\n", + " )\n", "\n", "\n", "class Residual(nn.Module):\n", @@ -1027,7 +1109,13 @@ " bias=False,\n", " ),\n", " nn.ReLU(True),\n", - " nn.Conv2d(in_channels=num_residual_hiddens, out_channels=num_hiddens, kernel_size=1, stride=1, bias=False),\n", + " nn.Conv2d(\n", + " in_channels=num_residual_hiddens,\n", + " out_channels=num_hiddens,\n", + " kernel_size=1,\n", + " stride=1,\n", + " bias=False,\n", + " ),\n", " )\n", "\n", " def forward(self, x):\n", @@ 
-1054,11 +1142,20 @@ " - y (torch.Tensor): The output tensor with shape (batch_size, num_hiddens, height, width).\n", " \"\"\"\n", "\n", - " def __init__(self, in_channels, num_hiddens, num_residual_layers, num_residual_hiddens):\n", + " def __init__(\n", + " self,\n", + " in_channels,\n", + " num_hiddens,\n", + " num_residual_layers,\n", + " num_residual_hiddens,\n", + " ):\n", " super(ResidualStack, self).__init__()\n", " self._num_residual_layers = num_residual_layers\n", " self._layers = nn.ModuleList(\n", - " [Residual(in_channels, num_hiddens, num_residual_hiddens) for _ in range(self._num_residual_layers)]\n", + " [\n", + " Residual(in_channels, num_hiddens, num_residual_hiddens)\n", + " for _ in range(self._num_residual_layers)\n", + " ]\n", " )\n", "\n", " def forward(self, x):\n", @@ -1068,16 +1165,36 @@ "\n", "\n", "class Encoder(nn.Module):\n", - " def __init__(self, in_channels, num_hiddens, num_residual_layers, num_residual_hiddens):\n", + " def __init__(\n", + " self,\n", + " in_channels,\n", + " num_hiddens,\n", + " num_residual_layers,\n", + " num_residual_hiddens,\n", + " ):\n", " super(Encoder, self).__init__()\n", "\n", " self._conv_1 = nn.Conv2d(\n", - " in_channels=in_channels, out_channels=num_hiddens // 2, kernel_size=(4, 10), stride=2, padding=(1, 4)\n", + " in_channels=in_channels,\n", + " out_channels=num_hiddens // 2,\n", + " kernel_size=(4, 10),\n", + " stride=2,\n", + " padding=(1, 4),\n", " )\n", " self._conv_2 = nn.Conv2d(\n", - " in_channels=num_hiddens // 2, out_channels=num_hiddens, kernel_size=4, stride=2, padding=1\n", + " in_channels=num_hiddens // 2,\n", + " out_channels=num_hiddens,\n", + " kernel_size=4,\n", + " stride=2,\n", + " padding=1,\n", + " )\n", + " self._conv_3 = nn.Conv2d(\n", + " in_channels=num_hiddens,\n", + " out_channels=num_hiddens,\n", + " kernel_size=3,\n", + " stride=1,\n", + " padding=1,\n", " )\n", - " self._conv_3 = nn.Conv2d(in_channels=num_hiddens, out_channels=num_hiddens, kernel_size=3, stride=1, padding=1)\n", " self._residual_stack = ResidualStack(\n", " in_channels=num_hiddens,\n", " num_hiddens=num_hiddens,\n", @@ -1118,10 +1235,22 @@ " and width of the decoded tensor, respectively.\n", " \"\"\"\n", "\n", - " def __init__(self, in_channels, num_hiddens, num_residual_layers, num_residual_hiddens):\n", + " def __init__(\n", + " self,\n", + " in_channels,\n", + " num_hiddens,\n", + " num_residual_layers,\n", + " num_residual_hiddens,\n", + " ):\n", " super(Decoder, self).__init__()\n", "\n", - " self._conv_1 = nn.Conv2d(in_channels=in_channels, out_channels=num_hiddens, kernel_size=3, stride=1, padding=1)\n", + " self._conv_1 = nn.Conv2d(\n", + " in_channels=in_channels,\n", + " out_channels=num_hiddens,\n", + " kernel_size=3,\n", + " stride=1,\n", + " padding=1,\n", + " )\n", "\n", " self._residual_stack = ResidualStack(\n", " in_channels=num_hiddens,\n", @@ -1131,11 +1260,19 @@ " )\n", "\n", " self._conv_trans_1 = nn.ConvTranspose2d(\n", - " in_channels=num_hiddens, out_channels=num_hiddens // 2, kernel_size=4, stride=2, padding=1\n", + " in_channels=num_hiddens,\n", + " out_channels=num_hiddens // 2,\n", + " kernel_size=4,\n", + " stride=2,\n", + " padding=1,\n", " )\n", "\n", " self._conv_trans_2 = nn.ConvTranspose2d(\n", - " in_channels=num_hiddens // 2, out_channels=1, kernel_size=(2, 12), stride=2, padding=(0, 5)\n", + " in_channels=num_hiddens // 2,\n", + " out_channels=1,\n", + " kernel_size=(2, 12),\n", + " stride=2,\n", + " padding=(0, 5),\n", " )\n", "\n", " def forward(self, inputs):\n", @@ -1184,13 +1321,29 
@@ " ):\n", " super(Model, self).__init__()\n", "\n", - " self._encoder = Encoder(1, num_hiddens, num_residual_layers, num_residual_hiddens)\n", - " self._pre_vq_conv = nn.Conv2d(in_channels=num_hiddens, out_channels=embedding_dim, kernel_size=1, stride=1)\n", + " self._encoder = Encoder(\n", + " 1, num_hiddens, num_residual_layers, num_residual_hiddens\n", + " )\n", + " self._pre_vq_conv = nn.Conv2d(\n", + " in_channels=num_hiddens,\n", + " out_channels=embedding_dim,\n", + " kernel_size=1,\n", + " stride=1,\n", + " )\n", " if decay > 0.0:\n", - " self._vq_vae = VectorQuantizerEMA(num_embeddings, embedding_dim, commitment_cost, decay)\n", + " self._vq_vae = VectorQuantizerEMA(\n", + " num_embeddings, embedding_dim, commitment_cost, decay\n", + " )\n", " else:\n", - " self._vq_vae = VectorQuantizer(num_embeddings, embedding_dim, commitment_cost)\n", - " self._decoder = Decoder(embedding_dim, num_hiddens, num_residual_layers, num_residual_hiddens)\n", + " self._vq_vae = VectorQuantizer(\n", + " num_embeddings, embedding_dim, commitment_cost\n", + " )\n", + " self._decoder = Decoder(\n", + " embedding_dim,\n", + " num_hiddens,\n", + " num_residual_layers,\n", + " num_residual_hiddens,\n", + " )\n", "\n", " def forward(self, x):\n", " z = self._encoder(x)\n", @@ -1226,7 +1379,11 @@ " num_seq = []\n", " for nucleotide in seq:\n", " num_seq.append(NUCLEOTIDES[nucleotide])\n", - " return (F.one_hot(torch.tensor(num_seq).to(torch.int64), num_classes=len(NUCLEOTIDES))).T\n", + " return (\n", + " F.one_hot(\n", + " torch.tensor(num_seq).to(torch.int64), num_classes=len(NUCLEOTIDES)\n", + " )\n", + " ).T\n", "\n", "\n", "class PeaksDataset(Dataset):\n", @@ -1276,7 +1433,8 @@ " model_vq = pickle.load(f)\n", "\n", "train_df = pd.read_csv(\n", - " \"../../../dnadiffusion/data/K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt\", delim_whitespace=True\n", + " \"../../../dnadiffusion/data/K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt\",\n", + " delim_whitespace=True,\n", ")\n", "train_df = train_df[train_df.data_label == 'training']\n", "train_df[\"raw_sequence\"] = train_df[\"sequence\"]\n", @@ -1355,13 +1513,17 @@ "@torch.no_grad()\n", "def p_sample(model, x, t, t_index):\n", " betas_t = extract(betas, t, x.shape)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x.shape)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x.shape\n", + " )\n", " # print (x.shape, 'x_shape')\n", " sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t, x.shape)\n", "\n", " # Equation 11 in the paper\n", " # Use our model (noise predictor) to predict the mean\n", - " model_mean = sqrt_recip_alphas_t * (x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t)\n", + " model_mean = sqrt_recip_alphas_t * (\n", + " x - betas_t * model(x, time=t) / sqrt_one_minus_alphas_cumprod_t\n", + " )\n", "\n", " if t_index == 0:\n", " return model_mean\n", @@ -1397,8 +1559,12 @@ " betas = betas.to(device)\n", " sqrt_one_minus_alphas_cumprod = sqrt_one_minus_alphas_cumprod.to(device)\n", " betas_t = extract(betas, t_double, x_double.shape, device=device)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t_double, x_double.shape, device=device)\n", - " sqrt_recip_alphas_t = extract(sqrt_recip_alphas, t_double, x_double.shape, device=device)\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t_double, x_double.shape, device=device\n", + " )\n", + " sqrt_recip_alphas_t = 
extract(\n", + " sqrt_recip_alphas, t_double, x_double.shape, device=device\n", + " )\n", "\n", " # classifier free sampling interpolates between guided and non guided using `cond_weight`\n", " classes_masked = classes * context_mask\n", @@ -1415,7 +1581,9 @@ " # print (show_out_test[1])\n", " # print (show_out_test[2])\n", " # print (show_out_test[3])\n", - " preds, cross_map_full = model(x_double, time=t_double, classes=classes_masked) # I added cross_map\n", + " preds, cross_map_full = model(\n", + " x_double, time=t_double, classes=classes_masked\n", + " ) # I added cross_map\n", " model.output_attention = False\n", " cross_map = cross_map_full[:batch_size]\n", " eps1 = (1 + cond_weight) * preds[:batch_size]\n", @@ -1425,13 +1593,18 @@ " # Equation 11 in the paper\n", " # Use our model (noise predictor) to predict the mean\n", " model_mean = sqrt_recip_alphas_t[:batch_size] * (\n", - " x - betas_t[:batch_size] * x_t / sqrt_one_minus_alphas_cumprod_t[:batch_size]\n", + " x\n", + " - betas_t[:batch_size]\n", + " * x_t\n", + " / sqrt_one_minus_alphas_cumprod_t[:batch_size]\n", " )\n", "\n", " if t_index == 0:\n", " return model_mean, cross_map\n", " else:\n", - " posterior_variance_t = extract(posterior_variance, t, x.shape, device=device)\n", + " posterior_variance_t = extract(\n", + " posterior_variance, t, x.shape, device=device\n", + " )\n", " noise = torch.randn_like(x)\n", " # Algorithm 2 line 4:\n", " return model_mean + torch.sqrt(posterior_variance_t) * noise, cross_map\n", @@ -1484,8 +1657,17 @@ " else:\n", " sampling_fn = partial(p_sample)\n", "\n", - " for i in tqdm(reversed(range(0, timesteps)), desc='sampling loop time step', total=timesteps):\n", - " img, cross_matrix = sampling_fn(model, x=img, t=torch.full((b,), i, device=device, dtype=torch.long), t_index=i)\n", + " for i in tqdm(\n", + " reversed(range(0, timesteps)),\n", + " desc='sampling loop time step',\n", + " total=timesteps,\n", + " ):\n", + " img, cross_matrix = sampling_fn(\n", + " model,\n", + " x=img,\n", + " t=torch.full((b,), i, device=device, dtype=torch.long),\n", + " t_index=i,\n", + " )\n", " imgs.append(img.cpu().numpy())\n", " cross_images_final.append(cross_matrix.cpu().numpy())\n", " if get_cross_map:\n", @@ -1617,7 +1799,9 @@ " \"\"\"\n", " steps = timesteps + 1\n", " x = torch.linspace(0, timesteps, steps)\n", - " alphas_cumprod = torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " alphas_cumprod = (\n", + " torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2\n", + " )\n", " alphas_cumprod = alphas_cumprod / alphas_cumprod[0]\n", " betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1])\n", " return torch.clip(betas, 0.0001, 0.9999)\n", @@ -1684,19 +1868,33 @@ "outputs": [], "source": [ "# forward diffusion\n", - "def q_sample(x_start, t, sqrt_alphas_cumprod, sqrt_one_minus_alphas_cumprod, noise=None, device=None):\n", + "def q_sample(\n", + " x_start,\n", + " t,\n", + " sqrt_alphas_cumprod,\n", + " sqrt_one_minus_alphas_cumprod,\n", + " noise=None,\n", + " device=None,\n", + "):\n", " if noise is None:\n", " noise = torch.randn_like(x_start)\n", "\n", - " sqrt_alphas_cumprod_t = extract(sqrt_alphas_cumprod, t, x_start.shape).to(device)\n", - " sqrt_one_minus_alphas_cumprod_t = extract(sqrt_one_minus_alphas_cumprod, t, x_start.shape).to(device)\n", + " sqrt_alphas_cumprod_t = extract(sqrt_alphas_cumprod, t, x_start.shape).to(\n", + " device\n", + " )\n", + " sqrt_one_minus_alphas_cumprod_t = extract(\n", + " sqrt_one_minus_alphas_cumprod, t, x_start.shape\n", + " 
).to(device)\n", "\n", " # print (sqrt_alphas_cumprod_t , sqrt_one_minus_alphas_cumprod_t , t)\n", " # print (sqrt_alphas_cumprod_t.device, 'sqrt_alphas_cumprod_t')\n", " # print (x_start.device, 'x_start' )\n", " # print (sqrt_one_minus_alphas_cumprod_t.device , 'sqrt_one_minus_alphas_cumprod_t')\n", " # print (noise.device , 'noise.device')\n", - " return sqrt_alphas_cumprod_t * x_start + sqrt_one_minus_alphas_cumprod_t * noise" + " return (\n", + " sqrt_alphas_cumprod_t * x_start\n", + " + sqrt_one_minus_alphas_cumprod_t * noise\n", + " )" ] }, { @@ -1840,7 +2038,9 @@ " ) # this is the auto generated noise given t and Noise\n", " # print('max_q_sample', x_noisy.max(), 'mean_q_sample',x_noisy.mean() )\n", "\n", - " context_mask = torch.bernoulli(torch.zeros(classes.shape[0]) + (1 - p_uncond)).to(device)\n", + " context_mask = torch.bernoulli(\n", + " torch.zeros(classes.shape[0]) + (1 - p_uncond)\n", + " ).to(device)\n", " # print ('context mask', context_mask)\n", " # print ('classes', classes)\n", "\n", @@ -1849,7 +2049,9 @@ " # nn.Embedding needs type to be long, multiplying with mask changes type\n", " classes = classes.type(torch.long)\n", " # print ('final class',classes )\n", - " predicted_noise = denoise_model(x_noisy, t, classes) # this is the predicted noise given the model and step t\n", + " predicted_noise = denoise_model(\n", + " x_noisy, t, classes\n", + " ) # this is the predicted noise given the model and step t\n", " # print('max_predicted', x_noisy.max(), 'mean_predicted',x_noisy.mean() )\n", "\n", " # #predicted is ok (clipped)\n", @@ -1997,7 +2199,12 @@ " def __init__(self):\n", " super().__init__()\n", "\n", - " self.res = nn.Sequential(ResBlock(1, 2, 1), ResBlock(1, 2, 1), ResBlock(1, 2, 1), ResBlock(1, 2, 1))\n", + " self.res = nn.Sequential(\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " ResBlock(1, 2, 1),\n", + " )\n", "\n", " self.conv = nn.Sequential(\n", " ConvBlock_2d(in_channels=1, out_channels=2),\n", @@ -2016,7 +2223,9 @@ " self.fc = nn.Sequential(\n", " nn.Linear(800, 800),\n", " # nn.GELU(),\n", - " nn.BatchNorm1d(800), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", + " nn.BatchNorm1d(\n", + " 800\n", + " ), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", " # nn.Linear(400, 400),\n", " # nn.BatchNorm1d(400),\n", " # nn.GELU(),\n", @@ -2026,7 +2235,9 @@ " self.fc2 = nn.Sequential(\n", " nn.Linear(400, 800),\n", " # nn.GELU(),\n", - " nn.BatchNorm1d(800), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", + " nn.BatchNorm1d(\n", + " 800\n", + " ), # ALWAYS BATCHNORM THIS CHANGES A LOT THE RESULTS\n", " # nn.Linear(400, 400),\n", " # nn.GELU(),\n", " # nn.BatchNorm1d(400),\n", @@ -2165,7 +2376,11 @@ " generic one layer FC NN for embedding things \n", " '''\n", " self.input_dim = input_dim\n", - " layers = [nn.Linear(input_dim, emb_dim), nn.GELU(), nn.Linear(emb_dim, emb_dim)]\n", + " layers = [\n", + " nn.Linear(input_dim, emb_dim),\n", + " nn.GELU(),\n", + " nn.Linear(emb_dim, emb_dim),\n", + " ]\n", " self.model = nn.Sequential(*layers)\n", "\n", " def forward(self, x):\n", @@ -2258,7 +2473,8 @@ "\n", "def Upsample(dim, dim_out=None, kernel_upsample=3):\n", " return nn.Sequential(\n", - " nn.Upsample(scale_factor=2, mode='nearest'), nn.Conv2d(dim, default(dim_out, dim), kernel_upsample, padding=1)\n", + " nn.Upsample(scale_factor=2, mode='nearest'),\n", + " nn.Conv2d(dim, default(dim_out, dim), kernel_upsample, padding=1),\n", " )\n", "\n", "\n", @@ -2336,11 +2552,17 @@ "class 
ResnetBlock(nn.Module):\n", " def __init__(self, dim, dim_out, *, time_emb_dim=None, groups=8):\n", " super().__init__()\n", - " self.mlp = nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) if exists(time_emb_dim) else None\n", + " self.mlp = (\n", + " nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2))\n", + " if exists(time_emb_dim)\n", + " else None\n", + " )\n", "\n", " self.block1 = Block(dim, dim_out, groups=groups)\n", " self.block2 = Block(dim_out, dim_out, groups=groups)\n", - " self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " self.res_conv = (\n", + " nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity()\n", + " )\n", "\n", " def forward(self, x, time_emb=None):\n", " scale_shift = None\n", @@ -2357,8 +2579,22 @@ "\n", "\n", "class ResnetBlockClassConditioned(ResnetBlock):\n", - " def __init__(self, dim, dim_out, *, num_classes, class_embed_dim, time_emb_dim=None, groups=8):\n", - " super().__init__(dim=dim + class_embed_dim, dim_out=dim_out, time_emb_dim=time_emb_dim, groups=groups)\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " dim_out,\n", + " *,\n", + " num_classes,\n", + " class_embed_dim,\n", + " time_emb_dim=None,\n", + " groups=8,\n", + " ):\n", + " super().__init__(\n", + " dim=dim + class_embed_dim,\n", + " dim_out=dim_out,\n", + " time_emb_dim=time_emb_dim,\n", + " groups=groups,\n", + " )\n", " # print ('n_classes', num_classes, 'class_embed_dim', class_embed_dim)\n", " self.class_mlp = EmbedFC(num_classes, class_embed_dim)\n", "\n", @@ -2380,12 +2616,17 @@ " self.heads = heads\n", " hidden_dim = dim_head * heads\n", " self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False)\n", - " self.to_out = nn.Sequential(nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim))\n", + " self.to_out = nn.Sequential(\n", + " nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim)\n", + " )\n", "\n", " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q = q.softmax(dim=-2)\n", " k = k.softmax(dim=-1)\n", @@ -2396,7 +2637,9 @@ " context = torch.einsum('b h d n, b h e n -> b h d e', k, v)\n", "\n", " out = torch.einsum('b h d e, b h d n -> b h e n', context, q)\n", - " out = rearrange(out, 'b h c (x y) -> b (h c) x y', h=self.heads, x=h, y=w)\n", + " out = rearrange(\n", + " out, 'b h c (x y) -> b (h c) x y', h=self.heads, x=h, y=w\n", + " )\n", " return self.to_out(out)\n", "\n", "\n", @@ -2412,7 +2655,10 @@ " def forward(self, x):\n", " b, c, h, w = x.shape\n", " qkv = self.to_qkv(x).chunk(3, dim=1)\n", - " q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv)\n", + " q, k, v = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv,\n", + " )\n", "\n", " q, k = map(l2norm, (q, k))\n", "\n", @@ -2439,9 +2685,15 @@ " qkv_x = self.to_qkv(x).chunk(3, dim=1)\n", " qkv_y = self.to_qkv(y).chunk(3, dim=1)\n", "\n", - " q_x, k_x, v_x = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv_x)\n", + " q_x, k_x, v_x = map(\n", + " lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n", + " qkv_x,\n", + " )\n", "\n", - " q_y, k_y, v_y = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads), qkv_y)\n", + " q_y, k_y, v_y = map(\n", + " lambda 
t: rearrange(t, 'b (h c) x y -> b h c (x y)', h=self.heads),\n",
+    "            qkv_y,\n",
+    "        )\n",
     "\n",
     "        q, k = map(l2norm, (q_x, k_y))\n",
     "\n",
@@ -2479,7 +2731,9 @@
     "\n",
     "\n",
     "def log_snr_to_alpha_sigma(log_snr):\n",
-    "    return torch.sqrt(torch.sigmoid(log_snr)), torch.sqrt(torch.sigmoid(-log_snr))\n",
+    "    return torch.sqrt(torch.sigmoid(log_snr)), torch.sqrt(\n",
+    "        torch.sigmoid(-log_snr)\n",
+    "    )\n",
     "\n",
     "\n",
     "class Unet_lucas(nn.Module):\n",
@@ -2534,7 +2788,10 @@
     "        fourier_dim = learned_sinusoidal_dim + 1\n",
     "\n",
     "        self.time_mlp = nn.Sequential(\n",
-    "            sinu_pos_emb, nn.Linear(fourier_dim, time_dim), nn.GELU(), nn.Linear(time_dim, time_dim)\n",
+    "            sinu_pos_emb,\n",
+    "            nn.Linear(fourier_dim, time_dim),\n",
+    "            nn.GELU(),\n",
+    "            nn.Linear(time_dim, time_dim),\n",
     "        )\n",
     "\n",
     "        if num_classes is not None:\n",
@@ -2555,7 +2812,9 @@
     "                        block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n",
     "                        block_klass(dim_in, dim_in, time_emb_dim=time_dim),\n",
     "                        Residual(PreNorm(dim_in, LinearAttention(dim_in))),\n",
-    "                        Downsample(dim_in, dim_out) if not is_last else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n",
+    "                        Downsample(dim_in, dim_out)\n",
+    "                        if not is_last\n",
+    "                        else nn.Conv2d(dim_in, dim_out, 3, padding=1),\n",
     "                    ]\n",
     "                )\n",
     "            )\n",
@@ -2571,8 +2830,12 @@
     "            self.ups.append(\n",
     "                nn.ModuleList(\n",
     "                    [\n",
-    "                        block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n",
-    "                        block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim),\n",
+    "                        block_klass(\n",
+    "                            dim_out + dim_in, dim_out, time_emb_dim=time_dim\n",
+    "                        ),\n",
+    "                        block_klass(\n",
+    "                            dim_out + dim_in, dim_out, time_emb_dim=time_dim\n",
+    "                        ),\n",
     "                        Residual(PreNorm(dim_out, LinearAttention(dim_out))),\n",
     "                        Upsample(dim_out, dim_in, kernel_upsample)\n",
     "                        if not is_last\n",
@@ -2589,7 +2852,12 @@
     "        # print('self.final_conv' , self.final_conv)\n",
     "\n",
     "        self.cross_attn = EfficientAttention(\n",
-    "            dim=200, dim_head=64, heads=1, memory_efficient=True, q_bucket_size=1024, k_bucket_size=2048\n",
+    "            dim=200,\n",
+    "            dim_head=64,\n",
+    "            heads=1,\n",
+    "            memory_efficient=True,\n",
+    "            q_bucket_size=1024,\n",
+    "            k_bucket_size=2048,\n",
     "        )\n",
     "\n",
     "        # mask = torch.ones(1, 65536).bool().cuda()\n",
@@ -3393,11 +3661,15 @@
     "\n",
     "final_comp_values_train = encode_data.train['motifs_per_components_dict']\n",
     "final_comp_values_test = encode_data.test['motifs_per_components_dict']\n",
-    "final_comp_values_shuffle = encode_data.train_shuffle['motifs_per_components_dict']\n",
+    "final_comp_values_shuffle = encode_data.train_shuffle[\n",
+    "    'motifs_per_components_dict'\n",
+    "]\n",
     "\n",
     "\n",
     "df = encode_data.train['dataset']\n",
-    "cell_components = df.sort_values('TAG')['TAG'].unique().tolist() # I need to add this function inside the dataloader"
+    "cell_components = (\n",
+    "    df.sort_values('TAG')['TAG'].unique().tolist()\n",
+    ") # I need to add this function inside the dataloader"
    ]
   },
   {
@@ -3531,7 +3803,9 @@
     "    df_plot.columns = [x.split('_')[0] for x in cell_components]\n",
     "    df_plot.index = df_plot.columns\n",
     "    sns.heatmap(df_plot, cmap='Blues_r', annot=True, lw=0.1, vmax=1, vmin=0)\n",
-    "    plt.title(f'Kl divergence \\n {x_label} sequences x {y_label} sequences \\n MOTIFS probabilities')\n",
+    "    plt.title(\n",
+    "        f'KL divergence \\n {x_label} sequences x {y_label} sequences \\n MOTIFS probabilities'\n",
+    "    )\n",
     "    plt.xlabel(f'{x_label} Sequences \\n(motifs dist)')\n",
     "    plt.ylabel(f'{y_label} \\n (motifs dist)')"
    ]
@@ -5991,7 +6265,9 @@
     }
    ],
    "source": [
-    "heat_train_test = 
kl_comparison_between_dataset(final_comp_values_train, final_comp_values_test)" + "heat_train_test = kl_comparison_between_dataset(\n", + " final_comp_values_train, final_comp_values_test\n", + ")" ] }, { @@ -8564,7 +8840,9 @@ } ], "source": [ - "heat_train_shuffle = kl_comparison_between_dataset(final_comp_values_train, final_comp_values_shuffle)" + "heat_train_shuffle = kl_comparison_between_dataset(\n", + " final_comp_values_train, final_comp_values_shuffle\n", + ")" ] }, { @@ -8727,7 +9005,13 @@ " train_data_quantized = train_data_quantized.flatten(start_dim=2)\n", " X_train = train_data_quantized.cpu().detach().numpy()\n", " else:\n", - " X_train = np.array([one_hot_encode(x, nucleotides, 200) for x in tqdm_notebook(df['sequence']) if 'N' not in x])\n", + " X_train = np.array(\n", + " [\n", + " one_hot_encode(x, nucleotides, 200)\n", + " for x in tqdm_notebook(df['sequence'])\n", + " if 'N' not in x\n", + " ]\n", + " )\n", " X_train = np.array([x.T.tolist() for x in X_train])\n", " X_train[X_train == 0] = -1\n", "\n", @@ -8735,12 +9019,16 @@ " conditional_tag_to_numeric = {\n", " x: n + 1 for n, x in enumerate(df['TAG'].unique())\n", " } # check if this is changing order during different executions\n", - " conditional_numeric_to_tag = {n + 1: x for n, x in enumerate(df['TAG'].unique())} # check if this is changing order\n", + " conditional_numeric_to_tag = {\n", + " n + 1: x for n, x in enumerate(df['TAG'].unique())\n", + " } # check if this is changing order\n", "\n", " cell_types = sorted(list(conditional_numeric_to_tag.keys()))\n", "\n", " # cell_types = sorted(list(df['TAG'].unique()))\n", - " x_train_cell_type = torch.from_numpy(df[\"TAG\"].apply(lambda x: conditional_tag_to_numeric[x]).to_numpy())\n", + " x_train_cell_type = torch.from_numpy(\n", + " df[\"TAG\"].apply(lambda x: conditional_tag_to_numeric[x]).to_numpy()\n", + " )\n", " # x_train_cell_type = torch.from_numpy(df[\"TAG\"].to_numpy())\n", "\n", " return X_train, cell_types, x_train_cell_type\n", @@ -8816,7 +9104,9 @@ "from pathlib import Path\n", "\n", "\n", - "def save_model(milestone, step, accelerator, opt, model, ema_model, folder_path_string=''):\n", + "def save_model(\n", + " milestone, step, accelerator, opt, model, ema_model, folder_path_string=''\n", + "):\n", " results_folder = Path(folder_path_string)\n", "\n", " data = {\n", @@ -8833,11 +9123,20 @@ "def recreating_models(latent=False):\n", " if latent:\n", " model = Unet_lucas(\n", - " dim=50, channels=1, dim_mults=(1, 2), resnet_block_groups=2, num_classes=TOTAL_class_number, latent=True\n", + " dim=50,\n", + " channels=1,\n", + " dim_mults=(1, 2),\n", + " resnet_block_groups=2,\n", + " num_classes=TOTAL_class_number,\n", + " latent=True,\n", " )\n", " else:\n", " model = Unet_lucas(\n", - " dim=200, channels=1, dim_mults=(1, 2, 4), resnet_block_groups=4, num_classes=TOTAL_class_number\n", + " dim=200,\n", + " channels=1,\n", + " dim_mults=(1, 2, 4),\n", + " resnet_block_groups=4,\n", + " num_classes=TOTAL_class_number,\n", " )\n", "\n", " # ema = EMA(0.995)\n", @@ -9360,7 +9659,9 @@ }, "outputs": [], "source": [ - "model_loaded, step = load_model('model_48k_sequences_per_group_K562_hESCT0_HepG2_GM12878_12k.pt')" + "model_loaded, step = load_model(\n", + " 'model_48k_sequences_per_group_K562_hESCT0_HepG2_GM12878_12k.pt'\n", + ")" ] }, { @@ -9397,7 +9698,9 @@ "# sqrt_one_minus_alphas_cumprod = torch.sqrt(1. 
- alphas_cumprod)\n", "sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod)\n", "# calculations for posterior q(x_{t-1} | x_t, x_0)\n", - "posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + "posterior_variance = (\n", + " betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n", + ")\n", "print(torch.cuda.is_initialized())\n", "\n", "loaded_additional_variables = {\n", @@ -9416,7 +9719,12 @@ "\n", "class_in = torch.Tensor([2]).cuda()\n", "sample_test_out, attention_out = sample(\n", - " classes=class_in, batch_size=1, channels=1, cond_weight=1, get_cross_map=True, **loaded_additional_variables\n", + " classes=class_in,\n", + " batch_size=1,\n", + " channels=1,\n", + " cond_weight=1,\n", + " get_cross_map=True,\n", + " **loaded_additional_variables,\n", ")\n", "model_loaded.output_attention = False\n", "\n", @@ -9549,7 +9857,10 @@ "outputs": [], "source": [ "def kl_comparison_generated_sequences(\n", - " components_list, dict_targer_components, additional_variables=None, number_of_sequences_to_sample_by_component=500\n", + " components_list,\n", + " dict_targer_components,\n", + " additional_variables=None,\n", + " number_of_sequences_to_sample_by_component=500,\n", "):\n", " '''\n", " ex: components_list = [3, 8, 12, 15]\n", diff --git a/notebooks/filter_master.ipynb b/notebooks/filter_master.ipynb index f69129a1..0328a7fc 100644 --- a/notebooks/filter_master.ipynb +++ b/notebooks/filter_master.ipynb @@ -442,9 +442,13 @@ "\n", " def _test_data_structure(self):\n", " # Ensures all columns after the 11th are named cell names\n", - " assert all('_ENCL' in x for x in self.df.columns[11:]), '_ENCL not in all columns after 11th'\n", + " assert all(\n", + " '_ENCL' in x for x in self.df.columns[11:]\n", + " ), '_ENCL not in all columns after 11th'\n", "\n", - " def filter_exclusive_replicates(self, sort: bool = False, balance: bool = True):\n", + " def filter_exclusive_replicates(\n", + " self, sort: bool = False, balance: bool = True\n", + " ):\n", " \"\"\"Given a specific set of samples (one per cell type),\n", " capture the exclusive peaks of each samples (the ones matching just one sample for the whole set)\n", " and then filter the dataset to keep only these peaks.\n", @@ -458,7 +462,9 @@ " # Creating a new dataframe with only the columns corresponding to the cell types\n", " df_subset = self.df[subset_cols]\n", " # Creating a new column for each cell type with the exclusive peaks or 'NO_TAG' if not exclusive\n", - " df_subset['TAG'] = df_subset[self.cell_list].apply(lambda x: 'NO_TAG' if x.sum() != 1 else x.idxmax(), axis=1)\n", + " df_subset['TAG'] = df_subset[self.cell_list].apply(\n", + " lambda x: 'NO_TAG' if x.sum() != 1 else x.idxmax(), axis=1\n", + " )\n", "\n", " # Creating a new dataframe with only the rows with exclusive peaks\n", " new_df_list = []\n", @@ -466,10 +472,15 @@ " if k != 'NO_TAG':\n", " cell, replicate = '_'.join(k.split('_')[:-1]), k.split('_')[-1]\n", " v['additional_replicates_with_peak'] = (\n", - " df[df.filter(like=cell).columns].apply(lambda x: x.sum(), axis=1) - 1\n", + " df[df.filter(like=cell).columns].apply(\n", + " lambda x: x.sum(), axis=1\n", + " )\n", + " - 1\n", " )\n", " temp_df = self.df.filter(like=cell)\n", - " print(f'Cell type: {cell}, Replicate: {replicate}, Number of exclusive peaks: {v.shape[0]}')\n", + " print(\n", + " f'Cell type: {cell}, Replicate: {replicate}, Number of exclusive peaks: {v.shape[0]}'\n", + " )\n", " else:\n", " v['additional_replicates_with_peak'] = 0\n", " 
new_df_list.append(v)\n", @@ -483,7 +494,10 @@ " new_df = pd.concat(\n", " [\n", " x_v.sort_values(\n", - " by=['additional_replicates_with_peak', 'other_samples_with_peak_not_considering_reps'],\n", + " by=[\n", + " 'additional_replicates_with_peak',\n", + " 'other_samples_with_peak_not_considering_reps',\n", + " ],\n", " ascending=[False, True],\n", " )\n", " for x_k, x_v in new_df.groupby('TAG')\n", @@ -495,7 +509,11 @@ " if balance:\n", " lowest_peak_count = new_df.groupby('TAG').count()['sequence'].min()\n", " new_df = pd.concat(\n", - " [v_bal.head(lowest_peak_count) for k_bal, v_bal in new_df.groupby('TAG') if k_bal != 'NO_TAG']\n", + " [\n", + " v_bal.head(lowest_peak_count)\n", + " for k_bal, v_bal in new_df.groupby('TAG')\n", + " if k_bal != 'NO_TAG'\n", + " ]\n", " )\n", "\n", " return new_df" @@ -538,7 +556,13 @@ ], "source": [ "test_df = FilteringData(\n", - " df, cell_list=['K562_ENCLB843GMH', 'hESCT0_ENCLB449ZZZ', 'HepG2_ENCLB029COU', 'GM12878_ENCLB441ZZZ']\n", + " df,\n", + " cell_list=[\n", + " 'K562_ENCLB843GMH',\n", + " 'hESCT0_ENCLB449ZZZ',\n", + " 'HepG2_ENCLB029COU',\n", + " 'GM12878_ENCLB441ZZZ',\n", + " ],\n", ").filter_exclusive_replicates(sort=True, balance=True)" ] }, diff --git a/notebooks/master_dataset.ipynb b/notebooks/master_dataset.ipynb index 925fdbe8..5f56a822 100644 --- a/notebooks/master_dataset.ipynb +++ b/notebooks/master_dataset.ipynb @@ -81,7 +81,10 @@ "\n", " @classmethod\n", " def from_path(cls, path):\n", - " genome_dict = {record.id: str(record.seq).upper() for record in SeqIO.parse(path, \"fasta\")}\n", + " genome_dict = {\n", + " record.id: str(record.seq).upper()\n", + " for record in SeqIO.parse(path, \"fasta\")\n", + " }\n", " return cls(genome_dict, path)\n", "\n", " @classmethod\n", @@ -481,7 +484,9 @@ "!wget https://www.meuleman.org/DHS_Index_and_Vocabulary_metadata.tsv\n", "\n", "# Last row is empty\n", - "DHS_Index_and_Vocabulary_metadata = pd.read_table('./DHS_Index_and_Vocabulary_metadata.tsv').iloc[:-1]\n", + "DHS_Index_and_Vocabulary_metadata = pd.read_table(\n", + " './DHS_Index_and_Vocabulary_metadata.tsv'\n", + ").iloc[:-1]\n", "with pd.option_context('display.max_rows', 5, 'display.max_columns', None):\n", " display(DHS_Index_and_Vocabulary_metadata)" ] @@ -496,7 +501,9 @@ "# Used later to map component number within metadata dataframe and find proportion for given component\n", "\n", "# Downloading basis\n", - "basis_array = requests.get(\"https://zenodo.org/record/3838751/files/2018-06-08NC16_NNDSVD_Basis.npy.gz?download=1\")\n", + "basis_array = requests.get(\n", + " \"https://zenodo.org/record/3838751/files/2018-06-08NC16_NNDSVD_Basis.npy.gz?download=1\"\n", + ")\n", "\n", "with open('2018-06-08NC16_NNDSVD_Basis.npy.gz', 'wb') as f:\n", " f.write(basis_array.content)\n", @@ -513,7 +520,9 @@ "\n", "\n", "# Joining metadata with component presence matrix\n", - "DHS_Index_and_Vocabulary_metadata = pd.concat([DHS_Index_and_Vocabulary_metadata, nmf_loadings], axis=1)" + "DHS_Index_and_Vocabulary_metadata = pd.concat(\n", + " [DHS_Index_and_Vocabulary_metadata, nmf_loadings], axis=1\n", + ")" ] }, { @@ -758,7 +767,9 @@ "]\n", "\n", "DHS_Index_and_Vocabulary_metadata['component'] = (\n", - " DHS_Index_and_Vocabulary_metadata[COMPONENT_COLUMNS].idxmax(axis=1).apply(lambda x: int(x[1:]))\n", + " DHS_Index_and_Vocabulary_metadata[COMPONENT_COLUMNS]\n", + " .idxmax(axis=1)\n", + " .apply(lambda x: int(x[1:]))\n", ")" ] }, @@ -785,7 +796,9 @@ "outputs": [], "source": [ "# File loaded from drive available from below link\n", - 
"mixture_array = requests.get(\"https://zenodo.org/record/3838751/files/2018-06-08NC16_NNDSVD_Mixture.npy.gz?download=1\")\n", + "mixture_array = requests.get(\n", + " \"https://zenodo.org/record/3838751/files/2018-06-08NC16_NNDSVD_Mixture.npy.gz?download=1\"\n", + ")\n", "\n", "# Downloading mixture array that contains 3.5M x 16 matrix of peak presence/absence decomposed into 16 components\n", "with open('2018-06-08NC16_NNDSVD_Mixture.npy.gz', 'wb') as f:\n", @@ -797,7 +810,9 @@ "np.savetxt(\"2018-06-08NC16_NNDSVD_Mixture.csv\", mixture_array, delimiter=\",\")\n", "\n", "# Creating nmf_loadings matrix from csv and renaming columns\n", - "nmf_loadings = pd.read_csv('2018-06-08NC16_NNDSVD_Mixture.csv', header=None, names=COMPONENT_COLUMNS)" + "nmf_loadings = pd.read_csv(\n", + " '2018-06-08NC16_NNDSVD_Mixture.csv', header=None, names=COMPONENT_COLUMNS\n", + ")" ] }, { @@ -839,7 +854,9 @@ "!gunzip -d DHS_Index_and_Vocabulary_hg38_WM20190703.txt.gz\n", "\n", "# Loading sequence metadata\n", - "sequence_metadata = pd.read_table('./DHS_Index_and_Vocabulary_hg38_WM20190703.txt', sep='\\t')\n", + "sequence_metadata = pd.read_table(\n", + " './DHS_Index_and_Vocabulary_hg38_WM20190703.txt', sep='\\t'\n", + ")\n", "\n", "# Dropping component column that contains associated tissue rather than component number (We will use the component number from DHS_Index_and_Vocabulary_metadata)\n", "sequence_metadata = sequence_metadata.drop(columns=['component'], axis=1)\n", @@ -1138,11 +1155,19 @@ "\n", "\n", "# Recreating some of the columns from our original dataset\n", - "df['component'] = df[COMPONENT_COLUMNS].idxmax(axis=1).apply(lambda x: int(x[1:]))\n", - "df['proportion'] = df[COMPONENT_COLUMNS].max(axis=1) / df[COMPONENT_COLUMNS].sum(axis=1)\n", + "df['component'] = (\n", + " df[COMPONENT_COLUMNS].idxmax(axis=1).apply(lambda x: int(x[1:]))\n", + ")\n", + "df['proportion'] = df[COMPONENT_COLUMNS].max(axis=1) / df[\n", + " COMPONENT_COLUMNS\n", + "].sum(axis=1)\n", "df['total_signal'] = df['mean_signal'] * df['numsamples']\n", - "df['proportion'] = df[COMPONENT_COLUMNS].max(axis=1) / df[COMPONENT_COLUMNS].sum(axis=1)\n", - "df['dhs_id'] = df[['seqname', 'start', 'end', 'summit']].apply(lambda x: '_'.join(map(str, x)), axis=1)\n", + "df['proportion'] = df[COMPONENT_COLUMNS].max(axis=1) / df[\n", + " COMPONENT_COLUMNS\n", + "].sum(axis=1)\n", + "df['dhs_id'] = df[['seqname', 'start', 'end', 'summit']].apply(\n", + " lambda x: '_'.join(map(str, x)), axis=1\n", + ")\n", "df['DHS_width'] = df['end'] - df['start']\n", "\n", "# Creating sequence column\n", @@ -1195,7 +1220,8 @@ "\n", "# Collecting names of cells into a list with fromat celltype_encodeID\n", "celltype_encodeID = [\n", - " row['Biosample name'] + \"_\" + row['DCC Library ID'] for _, row in DHS_Index_and_Vocabulary_metadata.iterrows()\n", + " row['Biosample name'] + \"_\" + row['DCC Library ID']\n", + " for _, row in DHS_Index_and_Vocabulary_metadata.iterrows()\n", "]\n", "\n", "# Renaming columns using celltype_encodeID list\n", diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..d743bf05 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,7168 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
+ +[[package]] +name = "accelerate" +version = "0.24.1" +description = "Accelerate" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "accelerate-0.24.1-py3-none-any.whl", hash = "sha256:866dec394da60e8da964be212379d8cf6cc0d0e5e28a7c0d7e09507715d21c61"}, + {file = "accelerate-0.24.1.tar.gz", hash = "sha256:85ab2aeb4d06194b75113339f81b7d650523414a82c9e91b2912a655f53dfa8e"}, +] + +[package.dependencies] +huggingface-hub = "*" +numpy = ">=1.17" +packaging = ">=20.0" +psutil = "*" +pyyaml = "*" +torch = ">=1.10.0" + +[package.extras] +dev = ["bitsandbytes", "black (>=23.1,<24.0)", "datasets", "deepspeed", "evaluate", "hf-doc-builder (>=0.3.0)", "parameterized", "pytest", "pytest-subtests", "pytest-xdist", "rich", "ruff (>=0.0.241)", "scikit-learn", "scipy", "timm", "tqdm", "transformers", "urllib3 (<2.0.0)"] +quality = ["black (>=23.1,<24.0)", "hf-doc-builder (>=0.3.0)", "ruff (>=0.0.241)", "urllib3 (<2.0.0)"] +rich = ["rich"] +sagemaker = ["sagemaker"] +test-dev = ["bitsandbytes", "datasets", "deepspeed", "evaluate", "scikit-learn", "scipy", "timm", "tqdm", "transformers"] +test-prod = ["parameterized", "pytest", "pytest-subtests", "pytest-xdist"] +test-trackers = ["comet-ml", "tensorboard", "wandb"] +testing = ["bitsandbytes", "datasets", "deepspeed", "evaluate", "parameterized", "pytest", "pytest-subtests", "pytest-xdist", "scikit-learn", "scipy", "timm", "tqdm", "transformers"] + +[[package]] +name = "adlfs" +version = "2023.10.0" +description = "Access Azure Datalake Gen1 with fsspec and dask" +optional = false +python-versions = ">=3.8" +files = [ + {file = "adlfs-2023.10.0-py3-none-any.whl", hash = "sha256:dfdc8cc782bd78262435fb1bc2a8cfdbdd80342bb1b1ae9dfff968de912b0b09"}, + {file = "adlfs-2023.10.0.tar.gz", hash = "sha256:f5cf06c5b0074d17d43838d4c434791a98420d9e768b36a1a02c7b3930686543"}, +] + +[package.dependencies] +aiohttp = ">=3.7.0" +azure-core = ">=1.23.1,<2.0.0" +azure-datalake-store = ">=0.0.46,<0.1" +azure-identity = "*" +azure-storage-blob = ">=12.12.0" +fsspec = ">=2023.9.0" + +[package.extras] +docs = ["furo", "myst-parser", "numpydoc", "sphinx"] + +[[package]] +name = "aiobotocore" +version = "2.5.4" +description = "Async client for aws services using botocore and aiohttp" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiobotocore-2.5.4-py3-none-any.whl", hash = "sha256:4b32218728ca3d0be83835b604603a0cd6c329066e884bb78149334267f92440"}, + {file = "aiobotocore-2.5.4.tar.gz", hash = "sha256:60341f19eda77e41e1ab11eef171b5a98b5dbdb90804f5334b6f90e560e31fae"}, +] + +[package.dependencies] +aiohttp = ">=3.3.1,<4.0.0" +aioitertools = ">=0.5.1,<1.0.0" +botocore = ">=1.31.17,<1.31.18" +wrapt = ">=1.10.10,<2.0.0" + +[package.extras] +awscli = ["awscli (>=1.29.17,<1.29.18)"] +boto3 = ["boto3 (>=1.28.17,<1.28.18)"] + +[[package]] +name = "aiohttp" +version = "3.9.1" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, + {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, + {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, + {file = 
"aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, + {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, + {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, + {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, + {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, + {file = 
"aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, + {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, + {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, + {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, + {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, + {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aioitertools" +version = "0.11.0" +description = "itertools and builtins for AsyncIO and mixed iterables" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aioitertools-0.11.0-py3-none-any.whl", hash = "sha256:04b95e3dab25b449def24d7df809411c10e62aab0cbe31a50ca4e68748c43394"}, + {file = "aioitertools-0.11.0.tar.gz", hash = "sha256:42c68b8dd3a69c2bf7f2233bf7df4bb58b557bca5252ac02ed5187bbc67d6831"}, +] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "antlr4-python3-runtime" +version = "4.9.3" +description = "ANTLR 4.9.3 runtime for Python 3.7" +optional = false +python-versions = "*" +files = [ + {file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"}, +] + +[[package]] +name = "anyio" +version = "4.1.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.1.0-py3-none-any.whl", hash = "sha256:56a415fbc462291813a94528a779597226619c8e78af7de0507333f700011e5f"}, + {file = "anyio-4.1.0.tar.gz", hash = "sha256:5a0bec7085176715be77df87fc66d6c9d70626bd752fcc85f57cdbee5b3760da"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = "*" +files = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] + +[[package]] +name = "appnope" +version = "0.1.3" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = "*" +files = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] + +[[package]] +name = "argon2-cffi" +version = "23.1.0" +description = "Argon2 for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = 
"argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + +[[package]] +name = "arrow" +version = "1.3.0" +description = "Better dates & times for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, +] + +[package.dependencies] +python-dateutil = ">=2.7.0" +types-python-dateutil = ">=2.8.10" + +[package.extras] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] + +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + +[[package]] +name = "async-lru" +version = "2.0.4" +description = "Simple LRU cache for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, + {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, +] + 
+[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "azure-core" +version = "1.29.5" +description = "Microsoft Azure Core Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "azure-core-1.29.5.tar.gz", hash = "sha256:52983c89d394c6f881a121e5101c5fa67278ca3b1f339c8fb2ef39230c70e9ac"}, + {file = "azure_core-1.29.5-py3-none-any.whl", hash = "sha256:0fa04b7b1f7d44a4fb8468c4093deb2ea01fdf4faddbf802ed9205615f99d68c"}, +] + +[package.dependencies] +requests = ">=2.18.4" +six = ">=1.11.0" +typing-extensions = ">=4.6.0" + +[package.extras] +aio = ["aiohttp (>=3.0)"] + +[[package]] +name = "azure-datalake-store" +version = "0.0.53" +description = "Azure Data Lake Store Filesystem Client Library for Python" +optional = false +python-versions = "*" +files = [ + {file = "azure-datalake-store-0.0.53.tar.gz", hash = "sha256:05b6de62ee3f2a0a6e6941e6933b792b800c3e7f6ffce2fc324bc19875757393"}, + {file = "azure_datalake_store-0.0.53-py2.py3-none-any.whl", hash = "sha256:a30c902a6e360aa47d7f69f086b426729784e71c536f330b691647a51dc42b2b"}, +] + +[package.dependencies] +cffi = "*" +msal = ">=1.16.0,<2" +requests = ">=2.20.0" + +[[package]] +name = "azure-identity" +version = "1.15.0" +description = "Microsoft Azure Identity Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "azure-identity-1.15.0.tar.gz", hash = "sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8"}, + {file = "azure_identity-1.15.0-py3-none-any.whl", hash = "sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912"}, +] + +[package.dependencies] +azure-core = ">=1.23.0,<2.0.0" +cryptography = ">=2.5" +msal = ">=1.24.0,<2.0.0" +msal-extensions = ">=0.3.0,<2.0.0" + +[[package]] +name = "azure-storage-blob" +version = "12.19.0" +description = "Microsoft Azure Blob Storage Client Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "azure-storage-blob-12.19.0.tar.gz", hash = "sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897"}, + {file = "azure_storage_blob-12.19.0-py3-none-any.whl", hash = "sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b"}, +] + 
+[package.dependencies] +azure-core = ">=1.28.0,<2.0.0" +cryptography = ">=2.1.4" +isodate = ">=0.6.1" +typing-extensions = ">=4.3.0" + +[package.extras] +aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"] + +[[package]] +name = "babel" +version = "2.13.1" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.13.1-py3-none-any.whl", hash = "sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"}, + {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "beautifulsoup4" +version = "4.12.2" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "binaryornot" +version = "0.4.4" +description = "Ultra-lightweight pure Python package to check if a file is binary or text." +optional = false +python-versions = "*" +files = [ + {file = "binaryornot-0.4.4-py2.py3-none-any.whl", hash = "sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4"}, + {file = "binaryornot-0.4.4.tar.gz", hash = "sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061"}, +] + +[package.dependencies] +chardet = ">=3.0.2" + +[[package]] +name = "biofluff" +version = "3.0.4" +description = "fluff : exploratory analysis and visualization of high-throughput sequencing data" +optional = false +python-versions = "*" +files = [ + {file = "biofluff-3.0.4.tar.gz", hash = "sha256:ef7b0a54103a830f197f21aa3d1ade8bdcddf613b437ea38c95260bb45324d6b"}, +] + +[package.dependencies] +HTSeq = "*" +matplotlib = "*" +numpy = "*" +palettable = "*" +pybedtools = "*" +pyBigWig = "*" +pysam = "*" +scikit-learn = "*" +scipy = "*" + +[[package]] +name = "biopython" +version = "1.81" +description = "Freely available tools for computational molecular biology." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "biopython-1.81-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef7c79b65b0b3f3c7dc59e20a7f8ae5758d8e852cb8b9cace590dc5617e348ba"}, + {file = "biopython-1.81-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ebfbce0d91796c7aef422ee9dffe8827e07e5abaa94545e006f1f20e965c80b"}, + {file = "biopython-1.81-cp310-cp310-win32.whl", hash = "sha256:919a2c583cabf9c96d2ae4e1245a6b0376932fb342aca302a0fc198b71ab3275"}, + {file = "biopython-1.81-cp310-cp310-win_amd64.whl", hash = "sha256:b37c0d24191e5c96ca02415a5188551980c83a0d518bbc4ffe3c9a5d1fe0ee81"}, + {file = "biopython-1.81-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7a168709694e10b338718c18d967edd5b56c237dc88642c22275796007a70000"}, + {file = "biopython-1.81-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a51d9c1d1b4b634447535da74a644fae59bc234fbbf9001e2dc6b6fbabb98019"}, + {file = "biopython-1.81-cp311-cp311-win32.whl", hash = "sha256:2f9cfaf16d55ab80d514e7aebe5710dabe4e4ff47ede851031202e33b3249da3"}, + {file = "biopython-1.81-cp311-cp311-win_amd64.whl", hash = "sha256:e41b55edcfd448630e77bf4de66a7235324a8a149621499891da6bd1d5085b9a"}, + {file = "biopython-1.81-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:175fcddc9f22a070aa6db54755d60c4b31090cc39f5f5f4b0a9a5d1ae3b45cd7"}, + {file = "biopython-1.81-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ec149487f3d1e0cf2b52b6071641c161ed545b0855ff51a71506152e14fc5bb"}, + {file = "biopython-1.81-cp312-cp312-win32.whl", hash = "sha256:daeab15274bbcc0455cbd378636e14f53bc7c5b1f383e77021d7222e72cc3418"}, + {file = "biopython-1.81-cp312-cp312-win_amd64.whl", hash = "sha256:22f5741aca91af0a76c0d5617e58e554fd3374bbd16e0c0ac1facf45b107313b"}, + {file = "biopython-1.81-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b36ba1bf6395c09a365c53530c9d71f3617763fa2c1d452b3d8948368c0f1de"}, + {file = "biopython-1.81-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c5c07123ff5f44c9e6b5369df854a38afd3c0c50ef58498a0ae8f7eb799f3e8"}, + {file = "biopython-1.81-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97cbdbed01b2512471f36c74b91658d1dfbdcbf39bc038f6ce5a41c3e60a8fc6"}, + {file = "biopython-1.81-cp37-cp37m-win32.whl", hash = "sha256:35506e39822c52d11cf09a3951e82375ca1bb9303960b4286acf02c9a6f6c4cc"}, + {file = "biopython-1.81-cp37-cp37m-win_amd64.whl", hash = "sha256:793c42a376cd63f62f8a088ce39b7dc6b5c55e4e9031d887c434de1595bfa4b8"}, + {file = "biopython-1.81-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:11d673698b3d0d6589292ea951fb62cb24ea27d273eca0d08dbbd956690f97f5"}, + {file = "biopython-1.81-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:655df416936662c0c8a06a549cb25e1560e1fea5067d850f34fb714b8a3fae6c"}, + {file = "biopython-1.81-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:762c6c43a8486b5fcd07f136a3217b87d24755618b9ea9da1f17124ff44c2ad6"}, + {file = "biopython-1.81-cp38-cp38-win32.whl", hash = "sha256:ee51bb1cd7decffd24da6b76d5e01b7e2fd818ab85cf0c180226cbb5793a3abd"}, + {file = "biopython-1.81-cp38-cp38-win_amd64.whl", hash = "sha256:ccd729249fd5f586dd4c2a3507c2ea2456825d7e615e97c07c409c850eaf4594"}, + {file = "biopython-1.81-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9ba33244f0eff830beaa7240065bdb5095d96fded6599b76bbb9ddab45cd2bbd"}, + {file = 
"biopython-1.81-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bb0c690c7368f255ed45236bf0f5464b476b8c083c8f634533921af78278261"}, + {file = "biopython-1.81-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65b93b513ce9dd7b2ce058720eadf42cd03f312db3409356efeb93123d1320aa"}, + {file = "biopython-1.81-cp39-cp39-win32.whl", hash = "sha256:811796f8d222aa3869a50e31e54ce62b69106b47cd8bb06934867c0d843297b5"}, + {file = "biopython-1.81-cp39-cp39-win_amd64.whl", hash = "sha256:b09efcb4733c8770f25eab5fe555a96a08f5ab9e1bc36939e08ebf2ffbf3e0f1"}, + {file = "biopython-1.81.tar.gz", hash = "sha256:2cf38112b6d8415ad39d6a611988cd11fb5f33eb09346666a87263beba9614e0"}, +] + +[package.dependencies] +numpy = "*" + +[[package]] +name = "biothings-client" +version = "0.3.1" +description = "Python Client for BioThings API services." +optional = false +python-versions = ">=2.7" +files = [ + {file = "biothings_client-0.3.1-py2.py3-none-any.whl", hash = "sha256:c08437f652d9282da785e098288ef7cf3aa2a79f5d90c480eadfce96b846013e"}, + {file = "biothings_client-0.3.1.tar.gz", hash = "sha256:c972bf2e02b6f9cc78f7f2fbc5ef02cc56fe4f8a2adcb8801ec902f4ab7011e6"}, +] + +[package.dependencies] +requests = ">=2.3.0" + +[package.extras] +caching = ["requests-cache (>=0.4.13)"] +dataframe = ["pandas (>=0.18.0)"] +jsonld = ["PyLD (>=0.7.2)"] + +[[package]] +name = "bitarray" +version = "2.8.4" +description = "efficient arrays of booleans -- C extension" +optional = false +python-versions = "*" +files = [ + {file = "bitarray-2.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:69be498a39ac29ea4f8e4dce36e64342d4fe813eeffa7bd9ead4ce18309fb903"}, + {file = "bitarray-2.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6acbfa5b85717c91bfa1bc1702c1cc6a3d1500f832f2c3c040f0d4668c75b2b5"}, + {file = "bitarray-2.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:305f1aa2a3aedd033ab2ab1fc930c5f0a987bf993f3ecc83a224db237a95cd18"}, + {file = "bitarray-2.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8f7a1a4793c4dec2dc7c6c1fac5370123a24c6dabc7312fbce8766a0d5c40c8"}, + {file = "bitarray-2.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87acfa228524b8564ba5d5a431ff6c708721ff7755f718992184bb9a81365f0e"}, + {file = "bitarray-2.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45782033c17ea2d1c9967128bc69aee1417210b104fbda35d4da77d907afb3c5"}, + {file = "bitarray-2.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f343ea39f61f899bac145aac260dd456a98df59e4258ad8d395892b6b4759b20"}, + {file = "bitarray-2.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52c8501aa71a353dbe8dd6440bbd3449d8ffcae843bff139f87b9a84149315ce"}, + {file = "bitarray-2.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bb23c806f175a88db60b3894bca4956f6d557ed0571b2fcc7818c1c83f000759"}, + {file = "bitarray-2.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:efbe1d873f916fa31235b8acec6a686e7b7e47e3e95490cbe8b257dabaa14d3b"}, + {file = "bitarray-2.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0690c5483e31d7e4d7f26b045baf7f9d0aa30e91fcf1c5117095652d149b1a96"}, + {file = "bitarray-2.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2cc0c09edd3fa893303513d3fb9a0d335f20b19b3f0276fe752bf88ffd5522c0"}, + {file = "bitarray-2.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:045b813c3567818673f2dcd9c0b41a63214c5f5a9c230ede76ac211fbcf0185a"}, + {file = "bitarray-2.8.4-cp310-cp310-win32.whl", hash = "sha256:ddfd3632e5f04619d780f60e85a5fe082a8eebce33aefb08b6783779ff04d017"}, + {file = "bitarray-2.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:c19c27056cb34b352c064ac0d58ac7f7da29cd225cb3140b8ff69455e6858966"}, + {file = "bitarray-2.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6d83fda6e4d83742d60f522ce3bd61ce0d4690c19b73dc79ee8da2a48f2ef065"}, + {file = "bitarray-2.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de1f491b329e424f7e7b2874624a604c163ea05341f709cd47c1a46f4930ca97"}, + {file = "bitarray-2.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c8e25c5530bd6bb5a96ad11de2dc16bebbbec8b4e2c1600bf1ce78cbf36c96e6"}, + {file = "bitarray-2.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:602b429cedf3631cb3b36a7e08f484972b2e13bb0fc1e240b71935aef32bb9d9"}, + {file = "bitarray-2.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:102b988fdbb0b221bdb71dac7d96475bfa47a767ee0fc1014a9ad5be46ebd20b"}, + {file = "bitarray-2.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:665f88d204d4a8fd0fe63fea66c1a420b331887e72a2b10778d97d22182e8474"}, + {file = "bitarray-2.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965e52d079e8746abe6d15e8b1da7b65d9f1ccb5bceb1aa410072f09a1cdb3fd"}, + {file = "bitarray-2.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b619c691c94f2770373a91144bbbe42056a993fa95aba67f87a7625f71384040"}, + {file = "bitarray-2.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c27b57205a6946de4dedb169d42f63d8f61e51a70e3096ffce18680c8407616c"}, + {file = "bitarray-2.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2f8484bea6cededfacc2921fd5715e3132467d1df50f941635b91c9920dfd66f"}, + {file = "bitarray-2.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7b705a7c3bb5c7a86a2e4bf5d4607e22194d821e050b5f5605a69ded99dc5c3"}, + {file = "bitarray-2.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:afb4e6edfeb6797165a25e5ea221992043c46b3475f7d4d96e2c25271dfea4d8"}, + {file = "bitarray-2.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2bb731fe68c07d5a3aeb9df798008e41999c933ed81786c7688b190f5082e079"}, + {file = "bitarray-2.8.4-cp311-cp311-win32.whl", hash = "sha256:22a0d11bf53553e2de20eb1dbf507bba32a6c28a2b84232ff5f28289ba9ec659"}, + {file = "bitarray-2.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8ace24f1b028dee7168556e0a83c1f608abe63f4b82dc05b26ad43495d8717bf"}, + {file = "bitarray-2.8.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1009f6a4117d50c2e9e4a2d6d5a03d0cb030f649dd410fbbef4d3f3a9aca40c9"}, + {file = "bitarray-2.8.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9a53bf859e4b54ad06dda20aa42a16dd36b03f11626beacc41b570f25cfcb471"}, + {file = "bitarray-2.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4991d8c2b4ccccd1ea9115dae5dc51b60b562bc627784e53c31aae5699a55932"}, + {file = "bitarray-2.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7832ecd8adc5ef9f0af7c376ea4ab8ba66077da45e1d00da9f93366cbfb70dfe"}, + {file = "bitarray-2.8.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:644fc022c5b3973472e39930f43c113865f9ba1b4e918b52f5921d709af0e9e3"}, + {file = "bitarray-2.8.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash 
= "sha256:99cd2f5dd46e886a63bc08dbb44ae63b16eeff94d714be55ce41ff86604bbc97"}, + {file = "bitarray-2.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e938552f8fd83ecdde6f43d7f91854fa2604cc7c7e2929fed78c3779c843ba6"}, + {file = "bitarray-2.8.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6910610f1f54e7b9e5aa5311acff812e5ae2ca5f6c041a40c9201f768c4a6893"}, + {file = "bitarray-2.8.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8becb576748328b2fdf9740a973e62f41de83702a92761e0ff441b65ebe25fce"}, + {file = "bitarray-2.8.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8b1c84803dd57f8b81397dcc24eca73bc44f1c5da36b440f358372b50c7bb7da"}, + {file = "bitarray-2.8.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:2e15d244cb7dab42cb1f31933da3b66d6405b1db969917460c094ba8441ea5a0"}, + {file = "bitarray-2.8.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:55d52dd5af45dfb09e9b107749b4fcad4a3774d5429345faa47ab459ae478de0"}, + {file = "bitarray-2.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ef2dbbb2924c5048bea586ddf204607c8e91fbe70b95a7dce1d5b5403f2ef06f"}, + {file = "bitarray-2.8.4-cp312-cp312-win32.whl", hash = "sha256:7ecd20dfef83d3180d9f851476e5e3d9a76973e24432721f7cc8cac52a646d3a"}, + {file = "bitarray-2.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:e7ac4f3cc1cdbe5b31bce988260ac12ae0e273ec6108bf35de66384599fabc25"}, + {file = "bitarray-2.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ebdaea27ada24e53d673c46a8a4bba8e1904fa8589512bd3146382d877ab4be9"}, + {file = "bitarray-2.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf6e39e296422588c39eaa8bea17c3d5af9335c7273691615e6aa262f3a1c469"}, + {file = "bitarray-2.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bbd70d2a0be93349ee76652992164d89dab54e55cb05d302d4375851b60d173"}, + {file = "bitarray-2.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed4e54d4425c9f5eb766ff8ee4b992fe0011575a7da5daa8bf898675c684808c"}, + {file = "bitarray-2.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f18e53a47619ef092cb28ac1f1f2b457ad68177369a5c02a1da930f5f0a43d78"}, + {file = "bitarray-2.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a1232d989dc37b2b0d760ed3cd040f848a7578417d0bda24e544e73f5d6b02a"}, + {file = "bitarray-2.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3df0ac492e4e795e26710ee20cfd25c7bfd81c3866490078fcc7d97ccc74b01f"}, + {file = "bitarray-2.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:06d9ad81d56547b2b256e70a819eb4eefa4e7e21595b06b4102666a71eb4b961"}, + {file = "bitarray-2.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc9808782e3147fb71b44129f3dfabfbe839bc35954f9f7f3dd8dd4c149413c"}, + {file = "bitarray-2.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:56cc56c382167360a94e36a54a3a14320ecbe9e8ca672574c739703136d0b5e0"}, + {file = "bitarray-2.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:de8b30056fe36947d50597680aa016f5a9a59f2621b496ca0fe8ad037ee63f76"}, + {file = "bitarray-2.8.4-cp36-cp36m-win32.whl", hash = "sha256:d80a356e6123b0910171ab7b2ce4d058146170748f11b7ec3c005da54bfbc059"}, + {file = "bitarray-2.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:00bb1de6d3c68e18fb16c6c7390e68bc656a60dfde4004d5649c792b8871a531"}, + {file = "bitarray-2.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:6db1bc132b3ee46bb79a1d86bfadce71d581943156004e481045ce903f1979db"}, + {file = "bitarray-2.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27b824ae449cd38c8c77349ae7d27dc11662c9c40806729943dd175c91334a4c"}, + {file = "bitarray-2.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7947134bc4b28a00c07616e07013680628954bc93daa4cbab99a6d7aea402809"}, + {file = "bitarray-2.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c392d44cc072255e88efc4335be67ebdfb88ae4b3757bd573c49fae35e23470"}, + {file = "bitarray-2.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c787dbacf218cde121611706e2bb6a64e3fb566a828bab7d608c6c96cfec8a4"}, + {file = "bitarray-2.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c59e589f162dffb8bea47fb8108961891df5d54d3a1c59660f211a53084438cd"}, + {file = "bitarray-2.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d1efcf28571909ea4c12184d51bd953370fd28ec227b1ded7cb88563c17d42a"}, + {file = "bitarray-2.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:afe8bedc9893a358a29430e98164a902816fd7787f92b476193a0de7aae4b985"}, + {file = "bitarray-2.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:64e6e316452d8018d11954698f9898a2ee69fe2f1093333c2650a4b91246c675"}, + {file = "bitarray-2.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:02f8002eac8ba7796e05690f317056c6ddd40ac88f73d1dd3405c5d4df15a61d"}, + {file = "bitarray-2.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d26fdf9d680eb9738e8b58ab7946cb35ed8b076dac823189f2614d732565e89a"}, + {file = "bitarray-2.8.4-cp37-cp37m-win32.whl", hash = "sha256:9e52a186b68b5995c3756f243e286ea701470273ba938b9f83a0ef055edeb95e"}, + {file = "bitarray-2.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:3baf866f2674241b02ab9547acaae2f705e7e9ca5a620484e8b09a25fc625def"}, + {file = "bitarray-2.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2c9d06891a565bdc572dc8a2d76275fc3d51e63ddff51c3e03a9a95b600ca673"}, + {file = "bitarray-2.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:242f9ddfed9e7b70edb2888056af1710dfbf3767342d6ef1c110fe1d3b346ad6"}, + {file = "bitarray-2.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9245d3181346f1f608b56cb45fb21c080558426dac566074a2c4145daa411588"}, + {file = "bitarray-2.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eacc7d5ad4b120131da4c6cecd8ded5e545dab3654de592cf8901a7acfd58c18"}, + {file = "bitarray-2.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:228e20443c841585454e95e17cf66610c9c53c3a1c66f3a9bc90a1ce31218b9d"}, + {file = "bitarray-2.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3449769a8f6c6a39d3d8c8760d265ff754107715c8ad3d66e90961ea463e6284"}, + {file = "bitarray-2.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25e882da07d5735ee089cec12dc75d55b90434e607aae5522515f23132612091"}, + {file = "bitarray-2.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80eca1ef96a3b379026bcf531d7cbfbfad767da37ba4e90bc529e6695f88ba09"}, + {file = "bitarray-2.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6c8ebc5b2cf89b4dd2d407312eeec4ed1f999863a6d29d1d1834696f6db08ac8"}, + {file = "bitarray-2.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9849d06f254fffd45d35ba2b39694dbc839f6c5cca8990a509b3058588f23d77"}, 
+ {file = "bitarray-2.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2ff712ba8259599135d24fcc555dbca2dc32ff5d18e8efb8d47456d2467e630f"}, + {file = "bitarray-2.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e3fc442c3ae66c4f9a0d35f2c2a0e36f6a9c125b94c3db1ee8fa5af4dca51a57"}, + {file = "bitarray-2.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:275f4deacd4cee28311cee611cea64bf5ec197da4a95c23b2af00ecc1dee6e97"}, + {file = "bitarray-2.8.4-cp38-cp38-win32.whl", hash = "sha256:b349092caf10b6b0585e0ff0ed17e5fc8a88c3bdacb37b38778de4a1ae568827"}, + {file = "bitarray-2.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:ed37c74e33c67f805e046c0e4d1af2007d4087d01748fa47a56ee3501c1bb597"}, + {file = "bitarray-2.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3be310edafc506a4f7c405d7d2d97274ab3ec3f2cbd2793705ccdb692559a009"}, + {file = "bitarray-2.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c334ab66657dc0250281d1deaaa0243bb2072da0939fc89cbce4513a79b7ebdc"}, + {file = "bitarray-2.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c6ab07a20fe548f4830bc3d795d4e8193616379abb8715fcf0391ca599cf4f4b"}, + {file = "bitarray-2.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3994b319e6f18040652b769ceb09e28b5546bffa29138019b788bafa8577478f"}, + {file = "bitarray-2.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:803dc8ca520db0db6e14bc61c52666a2344b5ff45c9c4524967f1920779ef64f"}, + {file = "bitarray-2.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a2f6cd2861922bf951451cd19c0f658d93ac313424ec705c59768626eb4b1f0"}, + {file = "bitarray-2.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98df463206a76ef02d8662490bafc6ca2d6dec10cfff3dda90798c0e4f330151"}, + {file = "bitarray-2.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da7086efb3d388078874b2fafd5042a5c6464e08ecb68bf3813c3b9d54d236b4"}, + {file = "bitarray-2.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:343876f1a38d9b2857f18f7d606be49b11344dc3b9c46f072760dec364a35a54"}, + {file = "bitarray-2.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0488c1eaf83c993fc672115176cc6c27070d5abd5e673499ed46eeb87de52169"}, + {file = "bitarray-2.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:aadc4b8ac31ef4ac31f13ab416d5891ff1886b0c3115e88b4be53d3ce08e235f"}, + {file = "bitarray-2.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:bb05962feb649bbb5589eab89b9fa57679ce8285e647195bee76c8c0821fcf22"}, + {file = "bitarray-2.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:67accba68ceb3cb57bae9ed86ddd075043e373c4af6243e20c8f00153c5f374a"}, + {file = "bitarray-2.8.4-cp39-cp39-win32.whl", hash = "sha256:0adf959b63e314ea74c7d67ca6732c2a840769a7bcfe779d52d777ac6877d671"}, + {file = "bitarray-2.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:d0fc43f5f5ae113ad60b502ec1efee42218c21a1e00dd1bd7c82d00b25cf72ad"}, + {file = "bitarray-2.8.4-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:63e1bb1c98d6d3004e44cb1958393c631e79c640877086a7f403c223c18687cb"}, + {file = "bitarray-2.8.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea66a30fb0b9d3109db950b490f6aa211fb15162f097b20141b1aeb5057a670"}, + {file = "bitarray-2.8.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8aaea18c41dacf2bf1a6f81960c196f85e3991c9387c3d9bff97976be2c195a4"}, + {file = 
"bitarray-2.8.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad2b129e43998292f89f138dfda32ec1b9ba31e68b35a61948bc10bf53e94444"}, + {file = "bitarray-2.8.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6647e03def035371ce0ce073912d6594ed197f799aa34641f0acce343a8f7cca"}, + {file = "bitarray-2.8.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31416540af1ad2994a33cf7f2e98e1e8f50722e410afc54ae99bdd6039a4f87"}, + {file = "bitarray-2.8.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c936d73deca901b600fb73c9aaf3630dd358f5ce35c5d5e1ea804b33796ecb5"}, + {file = "bitarray-2.8.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc936c0cea105c7773e6b8cc58ed2a3b168a3da9bbdec7466cee9725198607a9"}, + {file = "bitarray-2.8.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9978b0968acbc2d9160758e9f63af0fbda62f121ae596ad56cb06a8afd3d5aea"}, + {file = "bitarray-2.8.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dcae87cbf2058a33286ce50e627bdd1a4875579103f6b933546ffb1a34ab8c2e"}, + {file = "bitarray-2.8.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b5d1d4300706891d197cf21b39f41b3c8047d081676d82eb8dcfeb8d0073c52b"}, + {file = "bitarray-2.8.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7095d8f486435ffcc42014aebba27c05b2a3b38d5d3630ebe77734db7653b272"}, + {file = "bitarray-2.8.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06bcd5c171ffedb7544ad9e5b77827cd3a3ccb0dd924ef703802743b8abcf303"}, + {file = "bitarray-2.8.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6328f73d4e623d4fff966cbe623f3e2b3378bdbfb6937ec492aba3fd9927862f"}, + {file = "bitarray-2.8.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2d20ee30ea7640df29013021d130bee932d701f01b2f1cbbc1ba14f3954a6b1f"}, + {file = "bitarray-2.8.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:91a570f291a4d7ea4473f37b5e1ce377d771a8567a7a6b5f7b482023bd81b3ef"}, + {file = "bitarray-2.8.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18561539cf8ca5d1970b2b78a44a1b12ae21a18183664a080525c081a44b3997"}, + {file = "bitarray-2.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1abea439874652c3ad6ca6a6e893cfe4f2e2c149294dbe2a5c1cf7e2e1ef200"}, + {file = "bitarray-2.8.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a0f347672c5a8b67c36937872c75baec81e351f2209dc691608d3f76fa9e44e"}, + {file = "bitarray-2.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cdd58e73a2e1bff848067a65afb77a7dcd1884050c22d18a0a7af5cf2428a3ee"}, + {file = "bitarray-2.8.4.tar.gz", hash = "sha256:2c0ba71445ee0932e510f1b0248f53b7a52926f1f78c93b868fcbe6536e61a1d"}, +] + +[[package]] +name = "bitstring" +version = "4.1.4" +description = "Simple construction, analysis and modification of binary data." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "bitstring-4.1.4-py3-none-any.whl", hash = "sha256:da46c4d6f8f3fb75a85566fdd33d5083ba8b8f268ed76f34eefe5a00da426192"}, + {file = "bitstring-4.1.4.tar.gz", hash = "sha256:94f3f1c45383ebe8fd4a359424ffeb75c2f290760ae8fcac421b44f89ac85213"}, +] + +[package.dependencies] +bitarray = ">=2.8.0,<3.0.0" + +[[package]] +name = "black" +version = "23.11.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, + {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, + {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, + {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, + {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, + {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, + {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, + {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, + {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, + {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, + {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, + {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, + {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, + {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, + {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, + {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, + {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, + {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, +] + +[package.dependencies] +click = ">=8.0.0" +ipython = {version = ">=7.8.0", optional = true, markers = "extra == \"jupyter\""} +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tokenize-rt = {version = ">=3.2.0", optional = true, markers = "extra == \"jupyter\""} +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", 
markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "bleach" +version = "6.1.0" +description = "An easy safelist-based HTML-sanitizing tool." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, +] + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.3)"] + +[[package]] +name = "botocore" +version = "1.31.17" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">= 3.7" +files = [ + {file = "botocore-1.31.17-py3-none-any.whl", hash = "sha256:6ac34a1d34aa3750e78b77b8596617e2bab938964694d651939dba2cbde2c12b"}, + {file = "botocore-1.31.17.tar.gz", hash = "sha256:396459065dba4339eb4da4ec8b4e6599728eb89b7caaceea199e26f7d824a41c"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = ">=1.25.4,<1.27" + +[package.extras] +crt = ["awscrt (==0.16.26)"] + +[[package]] +name = "cachetools" +version = "5.3.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, +] + +[[package]] +name = "certifi" +version = "2023.11.17" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, 
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "cloudpickle" +version = "3.0.0" +description = "Pickler class to extend the standard pickle.Pickler functionality" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, + {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "comm" +version = "0.2.0" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +optional = false +python-versions = ">=3.8" +files = [ + {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"}, + {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"}, +] + +[package.dependencies] +traitlets = ">=4" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "configparser" +version = "5.3.0" +description = "Updated configparser from stdlib for earlier Pythons." +optional = false +python-versions = ">=3.7" +files = [ + {file = "configparser-5.3.0-py3-none-any.whl", hash = "sha256:b065779fd93c6bf4cee42202fa4351b4bb842e96a3fb469440e484517a49b9fa"}, + {file = "configparser-5.3.0.tar.gz", hash = "sha256:8be267824b541c09b08db124917f48ab525a6c3e837011f3130781a224c57090"}, +] + +[package.extras] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "types-backports"] + +[[package]] +name = "contourpy" +version = "1.2.0" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0274c1cb63625972c0c007ab14dd9ba9e199c36ae1a231ce45d725cbcbfd10a8"}, + {file = "contourpy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab459a1cbbf18e8698399c595a01f6dcc5c138220ca3ea9e7e6126232d102bb4"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fdd887f17c2f4572ce548461e4f96396681212d858cae7bd52ba3310bc6f00f"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d16edfc3fc09968e09ddffada434b3bf989bf4911535e04eada58469873e28e"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c203f617abc0dde5792beb586f827021069fb6d403d7f4d5c2b543d87edceb9"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b69303ceb2e4d4f146bf82fda78891ef7bcd80c41bf16bfca3d0d7eb545448aa"}, + {file = "contourpy-1.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:884c3f9d42d7218304bc74a8a7693d172685c84bd7ab2bab1ee567b769696df9"}, + {file = "contourpy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4a1b1208102be6e851f20066bf0e7a96b7d48a07c9b0cfe6d0d4545c2f6cadab"}, + {file = "contourpy-1.2.0-cp310-cp310-win32.whl", hash = "sha256:34b9071c040d6fe45d9826cbbe3727d20d83f1b6110d219b83eb0e2a01d79488"}, + {file = "contourpy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:bd2f1ae63998da104f16a8b788f685e55d65760cd1929518fd94cd682bf03e41"}, + {file = "contourpy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd10c26b4eadae44783c45ad6655220426f971c61d9b239e6f7b16d5cdaaa727"}, + {file = 
"contourpy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c6b28956b7b232ae801406e529ad7b350d3f09a4fde958dfdf3c0520cdde0dd"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebeac59e9e1eb4b84940d076d9f9a6cec0064e241818bcb6e32124cc5c3e377a"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:139d8d2e1c1dd52d78682f505e980f592ba53c9f73bd6be102233e358b401063"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e9dc350fb4c58adc64df3e0703ab076f60aac06e67d48b3848c23647ae4310e"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18fc2b4ed8e4a8fe849d18dce4bd3c7ea637758c6343a1f2bae1e9bd4c9f4686"}, + {file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:16a7380e943a6d52472096cb7ad5264ecee36ed60888e2a3d3814991a0107286"}, + {file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8d8faf05be5ec8e02a4d86f616fc2a0322ff4a4ce26c0f09d9f7fb5330a35c95"}, + {file = "contourpy-1.2.0-cp311-cp311-win32.whl", hash = "sha256:67b7f17679fa62ec82b7e3e611c43a016b887bd64fb933b3ae8638583006c6d6"}, + {file = "contourpy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:99ad97258985328b4f207a5e777c1b44a83bfe7cf1f87b99f9c11d4ee477c4de"}, + {file = "contourpy-1.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:575bcaf957a25d1194903a10bc9f316c136c19f24e0985a2b9b5608bdf5dbfe0"}, + {file = "contourpy-1.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9e6c93b5b2dbcedad20a2f18ec22cae47da0d705d454308063421a3b290d9ea4"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:464b423bc2a009088f19bdf1f232299e8b6917963e2b7e1d277da5041f33a779"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68ce4788b7d93e47f84edd3f1f95acdcd142ae60bc0e5493bfd120683d2d4316"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7d1f8871998cdff5d2ff6a087e5e1780139abe2838e85b0b46b7ae6cc25399"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e739530c662a8d6d42c37c2ed52a6f0932c2d4a3e8c1f90692ad0ce1274abe0"}, + {file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:247b9d16535acaa766d03037d8e8fb20866d054d3c7fbf6fd1f993f11fc60ca0"}, + {file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:461e3ae84cd90b30f8d533f07d87c00379644205b1d33a5ea03381edc4b69431"}, + {file = "contourpy-1.2.0-cp312-cp312-win32.whl", hash = "sha256:1c2559d6cffc94890b0529ea7eeecc20d6fadc1539273aa27faf503eb4656d8f"}, + {file = "contourpy-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:491b1917afdd8638a05b611a56d46587d5a632cabead889a5440f7c638bc6ed9"}, + {file = "contourpy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5fd1810973a375ca0e097dee059c407913ba35723b111df75671a1976efa04bc"}, + {file = "contourpy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:999c71939aad2780f003979b25ac5b8f2df651dac7b38fb8ce6c46ba5abe6ae9"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7caf9b241464c404613512d5594a6e2ff0cc9cb5615c9475cc1d9b514218ae8"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:266270c6f6608340f6c9836a0fb9b367be61dde0c9a9a18d5ece97774105ff3e"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbd50d0a0539ae2e96e537553aff6d02c10ed165ef40c65b0e27e744a0f10af8"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11f8d2554e52f459918f7b8e6aa20ec2a3bce35ce95c1f0ef4ba36fbda306df5"}, + {file = "contourpy-1.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ce96dd400486e80ac7d195b2d800b03e3e6a787e2a522bfb83755938465a819e"}, + {file = "contourpy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6d3364b999c62f539cd403f8123ae426da946e142312a514162adb2addd8d808"}, + {file = "contourpy-1.2.0-cp39-cp39-win32.whl", hash = "sha256:1c88dfb9e0c77612febebb6ac69d44a8d81e3dc60f993215425b62c1161353f4"}, + {file = "contourpy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:78e6ad33cf2e2e80c5dfaaa0beec3d61face0fb650557100ee36db808bfa6843"}, + {file = "contourpy-1.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be16975d94c320432657ad2402f6760990cb640c161ae6da1363051805fa8108"}, + {file = "contourpy-1.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b95a225d4948b26a28c08307a60ac00fb8671b14f2047fc5476613252a129776"}, + {file = "contourpy-1.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d7e03c0f9a4f90dc18d4e77e9ef4ec7b7bbb437f7f675be8e530d65ae6ef956"}, + {file = "contourpy-1.2.0.tar.gz", hash = "sha256:171f311cb758de7da13fc53af221ae47a5877be5a0843a9fe150818c51ed276a"}, +] + +[package.dependencies] +numpy = ">=1.20,<2.0" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.6.1)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "cookiecutter" +version = "2.5.0" +description = "A command-line utility that creates projects from project templates, e.g. creating a Python package project from a Python package project template." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cookiecutter-2.5.0-py3-none-any.whl", hash = "sha256:8aa2f12ed11bc05628651e9dc4353a10571dd9908aaaaeec959a2b9ea465a5d2"}, + {file = "cookiecutter-2.5.0.tar.gz", hash = "sha256:e61e9034748e3f41b8bd2c11f00d030784b48711c4d5c42363c50989a65331ec"}, +] + +[package.dependencies] +arrow = "*" +binaryornot = ">=0.4.4" +click = ">=7.0,<9.0.0" +Jinja2 = ">=2.7,<4.0.0" +python-slugify = ">=4.0.0" +pyyaml = ">=5.3.1" +requests = ">=2.23.0" +rich = "*" + +[[package]] +name = "coverage" +version = "7.3.2" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = 
"coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "croniter" +version = "2.0.1" +description = "croniter provides iteration for datetime object with cron like format" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "croniter-2.0.1-py2.py3-none-any.whl", hash = "sha256:4cb064ce2d8f695b3b078be36ff50115cf8ac306c10a7e8653ee2a5b534673d7"}, + {file = "croniter-2.0.1.tar.gz", hash = "sha256:d199b2ec3ea5e82988d1f72022433c5f9302b3b3ea9e6bfd6a1518f6ea5e700a"}, +] + +[package.dependencies] +python-dateutil = "*" +pytz = ">2021.1" + +[[package]] +name = "cryptography" +version = "41.0.7" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = 
"sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "dataclasses-json" +version = "0.5.9" +description = "Easily serialize dataclasses to and from JSON" +optional = false +python-versions = ">=3.6" +files = [ + {file = "dataclasses-json-0.5.9.tar.gz", hash = "sha256:e9ac87b73edc0141aafbce02b44e93553c3123ad574958f0fe52a534b6707e8e"}, + {file = "dataclasses_json-0.5.9-py3-none-any.whl", hash = "sha256:1280542631df1c375b7bc92e5b86d39e06c44760d7e3571a537b3b8acabf2f0c"}, +] + +[package.dependencies] +marshmallow = ">=3.3.0,<4.0.0" +marshmallow-enum = ">=1.5.1,<2.0.0" +typing-inspect = ">=0.4.0" + +[package.extras] +dev = ["flake8", "hypothesis", "ipython", "mypy (>=0.710)", "portray", "pytest (>=7.2.0)", "setuptools", "simplejson", "twine", "types-dataclasses", "wheel"] + +[[package]] +name = "debugpy" +version = "1.8.0" +description = "An implementation of the Debug Adapter Protocol for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, + {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, + {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, + {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, + {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, + {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, + {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, + {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, + {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, + {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, + {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = 
"sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, + {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, + {file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, + {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, + {file = "debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, + {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, + {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, + {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +optional = false +python-versions = "*" +files = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] + +[package.dependencies] +packaging = "*" + +[[package]] +name = "diskcache" +version = "5.6.3" +description = "Disk Cache -- Disk and file backed persistent cache." 
+optional = false +python-versions = ">=3" +files = [ + {file = "diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19"}, + {file = "diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc"}, +] + +[[package]] +name = "docformatter" +version = "1.7.5" +description = "Formats docstrings to follow PEP 257" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "docformatter-1.7.5-py3-none-any.whl", hash = "sha256:a24f5545ed1f30af00d106f5d85dc2fce4959295687c24c8f39f5263afaf9186"}, + {file = "docformatter-1.7.5.tar.gz", hash = "sha256:ffed3da0daffa2e77f80ccba4f0e50bfa2755e1c10e130102571c890a61b246e"}, +] + +[package.dependencies] +charset_normalizer = ">=3.0.0,<4.0.0" +tomli = {version = ">=2.0.0,<3.0.0", optional = true, markers = "python_version < \"3.11\" and extra == \"tomli\""} +untokenize = ">=0.1.1,<0.2.0" + +[package.extras] +tomli = ["tomli (>=2.0.0,<3.0.0)"] + +[[package]] +name = "docker" +version = "6.1.3" +description = "A Python library for the Docker Engine API." +optional = false +python-versions = ">=3.7" +files = [ + {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, + {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, +] + +[package.dependencies] +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" +websocket-client = ">=0.32.0" + +[package.extras] +ssh = ["paramiko (>=2.4.3)"] + +[[package]] +name = "docker-pycreds" +version = "0.4.0" +description = "Python bindings for the docker credentials store API" +optional = false +python-versions = "*" +files = [ + {file = "docker-pycreds-0.4.0.tar.gz", hash = "sha256:6ce3270bcaf404cc4c3e27e4b6c70d3521deae82fb508767870fdbf772d584d4"}, + {file = "docker_pycreds-0.4.0-py2.py3-none-any.whl", hash = "sha256:7266112468627868005106ec19cd0d722702d2b7d5912a28e19b826c3d37af49"}, +] + +[package.dependencies] +six = ">=1.4.0" + +[[package]] +name = "docstring-parser" +version = "0.15" +description = "Parse Python docstrings in reST, Google and Numpydoc format" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "docstring_parser-0.15-py3-none-any.whl", hash = "sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9"}, + {file = "docstring_parser-0.15.tar.gz", hash = "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682"}, +] + +[[package]] +name = "einops" +version = "0.7.0" +description = "A new flavour of deep learning operations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "einops-0.7.0-py3-none-any.whl", hash = "sha256:0f3096f26b914f465f6ff3c66f5478f9a5e380bb367ffc6493a68143fbbf1fd1"}, + {file = "einops-0.7.0.tar.gz", hash = "sha256:b2b04ad6081a3b227080c9bf5e3ace7160357ff03043cd66cc5b2319eb7031d1"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "2.0.1" 
+description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + +[[package]] +name = "fastjsonschema" +version = "2.19.0" +description = "Fastest Python implementation of JSON schema" +optional = false +python-versions = "*" +files = [ + {file = "fastjsonschema-2.19.0-py3-none-any.whl", hash = "sha256:b9fd1a2dd6971dbc7fee280a95bd199ae0dd9ce22beb91cc75e9c1c528a5170e"}, + {file = "fastjsonschema-2.19.0.tar.gz", hash = "sha256:e25df6647e1bc4a26070b700897b07b542ec898dd4f1f6ea013e7f6a88417225"}, +] + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + +[[package]] +name = "feather-format" +version = "0.4.1" +description = "Simple wrapper library to the Apache Arrow-based Feather File Format" +optional = false +python-versions = "*" +files = [ + {file = "feather-format-0.4.1.tar.gz", hash = "sha256:45f67e3745d394d4f160ca6d636bbfd4f8b68d01199dc1649b6e487d3e878903"}, +] + +[package.dependencies] +pyarrow = ">=0.4.0" + +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "flyteidl" +version = "1.10.6" +description = "IDL for Flyte Platform" +optional = false +python-versions = "*" +files = [ + {file = "flyteidl-1.10.6-py3-none-any.whl", hash = "sha256:ec2c4e9726bac302d6f76aed3be662ff713eea31391294f8bd617abd6d134098"}, + {file = "flyteidl-1.10.6.tar.gz", hash = "sha256:00cdcb38b62e92d0180d00e6680baeff00819770f7d23ad82a2d9d6d60ab3619"}, +] + +[package.dependencies] +googleapis-common-protos = "*" +protobuf = ">=4.21.1,<5.0.0" +protoc-gen-swagger = "*" + +[[package]] +name = "flytekit" +version = "0.0.0+develop" +description = "Flyte SDK for Python" +optional = false +python-versions = ">=3.8,<3.12" +files = [] +develop = false + +[package.dependencies] +adlfs = "*" +click = ">=6.6,<9.0" +cloudpickle = ">=2.0.0" +cookiecutter = ">=1.7.3" +croniter = ">=0.3.20,<4.0.0" +dataclasses-json = ">=0.5.2,<0.5.12" +diskcache = ">=5.2.1" +docker = ">=4.0.0,<7.0.0" +docstring-parser = ">=0.9.0" +flyteidl = ">=1.10.0" +fsspec = ">=2023.3.0,<=2023.9.2" +gcsfs = "*" +googleapis-common-protos = ">=1.57" +grpcio = "*" +grpcio-status = "*" +importlib-metadata = "*" +joblib = "*" +jsonpickle = "*" +keyring = ">=18.0.1" +kubernetes = ">=12.0.1" +marshmallow-enum = "*" +marshmallow-jsonschema = ">=0.12.0" +mashumaro = ">=3.9.1" +numpy = "*" +pandas = ">=1.0.0,<3.0.0" +protobuf = 
"<4.25.0" +pyarrow = ">=4.0.0" +python-json-logger = ">=2.0.0" +pytimeparse = ">=1.1.8,<2.0.0" +pyyaml = "<5.4.0 || >5.4.0,<5.4.1 || >5.4.1,<6.0.0 || >6.0.0" +requests = ">=2.18.4,<3.0.0" +rich = "*" +rich_click = "*" +s3fs = ">=0.6.0" +statsd = ">=3.0.0,<4.0.0" +typing_extensions = "*" +urllib3 = ">=1.22,<2.0.0" + +[package.source] +type = "git" +url = "https://github.com/cameronraysmith/flytekit.git" +reference = "3928-pandas-2" +resolved_reference = "b1e64ba20f1ddd83723e455d2bf893d8f3fc84b7" + +[[package]] +name = "fonttools" +version = "4.46.0" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.46.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d4e69e2c7f93b695d2e6f18f709d501d945f65c1d237dafaabdd23cd935a5276"}, + {file = "fonttools-4.46.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:25852f0c63df0af022f698464a4a80f7d1d5bd974bcd22f995f6b4ad198e32dd"}, + {file = "fonttools-4.46.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adab73618d0a328b203a0e242b3eba60a2b5662d9cb2bd16ed9c52af8a7d86af"}, + {file = "fonttools-4.46.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf923a4a556ab4cc4c52f69a4a2db624cf5a2cf360394368b40c5152fe3321e"}, + {file = "fonttools-4.46.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:87c214197712cc14fd2a4621efce2a9c501a77041232b789568149a8a3161517"}, + {file = "fonttools-4.46.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:156ae342a1ed1fe38e180de471e98fbf5b2b6ae280fa3323138569c4ca215844"}, + {file = "fonttools-4.46.0-cp310-cp310-win32.whl", hash = "sha256:c506e3d3a9e898caee4dc094f34b49c5566870d5a2d1ca2125f0a9f35ecc2205"}, + {file = "fonttools-4.46.0-cp310-cp310-win_amd64.whl", hash = "sha256:f8bc3973ed58893c4107993e0a7ae34901cb572b5e798249cbef35d30801ffd4"}, + {file = "fonttools-4.46.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:982f69855ac258260f51048d9e0c53c5f19881138cc7ca06deb38dc4b97404b6"}, + {file = "fonttools-4.46.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c23c59d321d62588620f2255cf951270bf637d88070f38ed8b5e5558775b86c"}, + {file = "fonttools-4.46.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0e94244ec24a940ecfbe5b31c975c8a575d5ed2d80f9a280ce3b21fa5dc9c34"}, + {file = "fonttools-4.46.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a9f9cdd7ef63d1b8ac90db335762451452426b3207abd79f60da510cea62da5"}, + {file = "fonttools-4.46.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ca9eceebe70035b057ce549e2054cad73e95cac3fe91a9d827253d1c14618204"}, + {file = "fonttools-4.46.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8be6adfa4e15977075278dd0a0bae74dec59be7b969b5ceed93fb86af52aa5be"}, + {file = "fonttools-4.46.0-cp311-cp311-win32.whl", hash = "sha256:7b5636f5706d49f13b6d610fe54ee662336cdf56b5a6f6683c0b803e23d826d2"}, + {file = "fonttools-4.46.0-cp311-cp311-win_amd64.whl", hash = "sha256:49ea0983e55fd7586a809787cd4644a7ae471e53ab8ddc016f9093b400e32646"}, + {file = "fonttools-4.46.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7b460720ce81773da1a3e7cc964c48e1e11942b280619582a897fa0117b56a62"}, + {file = "fonttools-4.46.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8bee9f4fc8c99824a424ae45c789ee8c67cb84f8e747afa7f83b7d3cef439c3b"}, + {file = "fonttools-4.46.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d3d7b96aba96e05e8c911ce2dfc5acc6a178b8f44f6aa69371ab91aa587563da"}, + {file = "fonttools-4.46.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e6aeb5c340416d11a3209d75c48d13e72deea9e1517837dd1522c1fd1f17c11"}, + {file = "fonttools-4.46.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c779f8701deedf41908f287aeb775b8a6f59875ad1002b98ac6034ae4ddc1b7b"}, + {file = "fonttools-4.46.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce199227ce7921eaafdd4f96536f16b232d6b580ce74ce337de544bf06cb2752"}, + {file = "fonttools-4.46.0-cp312-cp312-win32.whl", hash = "sha256:1c9937c4dd1061afd22643389445fabda858af5e805860ec3082a4bc07c7a720"}, + {file = "fonttools-4.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:a9fa52ef8fd14d7eb3d813e1451e7ace3e1eebfa9b7237d3f81fee8f3de6a114"}, + {file = "fonttools-4.46.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c94564b1f3b5dd87e73577610d85115b1936edcc596deaf84a31bbe70e17456b"}, + {file = "fonttools-4.46.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4a50a1dfad7f7ba5ca3f99cc73bf5cdac67ceade8e4b355a877521f20ad1b63"}, + {file = "fonttools-4.46.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89c2c520f9492844ecd6316d20c6c7a157b5c0cb73a1411b3db28ee304f30122"}, + {file = "fonttools-4.46.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5b7905fd68eacb7cc56a13139da5c312c45baae6950dd00b02563c54508a041"}, + {file = "fonttools-4.46.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8485cc468288e213f31afdaf1fdda3c79010f542559fbba936a54f4644df2570"}, + {file = "fonttools-4.46.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:87c3299da7da55394fb324349db0ede38114a46aafd0e7dfcabfecd28cdd94c3"}, + {file = "fonttools-4.46.0-cp38-cp38-win32.whl", hash = "sha256:f5f1423a504ccc329efb5aa79738de83d38c072be5308788dde6bd419969d7f5"}, + {file = "fonttools-4.46.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d4a4ebcc76e30898ff3296ea786491c70e183f738319ae2629e0d44f17ece42"}, + {file = "fonttools-4.46.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9a0e422ab79e5cb2b47913be6a4b5fd20c4c7ac34a24f3691a4e099e965e0b8"}, + {file = "fonttools-4.46.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:13ac0cba2fc63fa4b232f2a7971f35f35c6eaf10bd1271fa96d4ce6253a8acfd"}, + {file = "fonttools-4.46.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:795150d5edc595e1a2cfb3d65e8f4f3d027704fc2579f8990d381bef6b188eb6"}, + {file = "fonttools-4.46.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d00fc63131dcac6b25f50a5a129758438317e54e3ce5587163f7058de4b0e933"}, + {file = "fonttools-4.46.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3033b55f401a622de2630b3982234d97219d89b058607b87927eccb0f922313c"}, + {file = "fonttools-4.46.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e26e7fb908ae4f622813e7cb32cd2db6c24e3122bb3b98f25e832a2fe0e7e228"}, + {file = "fonttools-4.46.0-cp39-cp39-win32.whl", hash = "sha256:2d0eba685938c603f2f648dfc0aadbf8c6a4fe1c7ca608c2970a6ef39e00f254"}, + {file = "fonttools-4.46.0-cp39-cp39-win_amd64.whl", hash = "sha256:5200b01f463d97cc2b7ff8a1e3584151f4413e98cb8419da5f17d1dbb84cc214"}, + {file = "fonttools-4.46.0-py3-none-any.whl", hash = "sha256:5b627ed142398ea9202bd752c04311592558964d1a765fb2f78dc441a05633f4"}, + {file = "fonttools-4.46.0.tar.gz", hash = "sha256:2ae45716c27a41807d58a9f3f59983bdc8c0a46cb259e4450ab7e196253a9853"}, +] + +[package.extras] +all = 
["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "scipy"] +lxml = ["lxml (>=4.0,<5)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +files = [ + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] + +[[package]] +name = "frozenlist" +version = "1.4.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, + {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = 
"sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, + {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, + {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, + {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, + {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, + {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, + {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, + {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, + {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, +] + +[[package]] +name = "fsspec" +version = "2023.9.2" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2023.9.2-py3-none-any.whl", hash = "sha256:603dbc52c75b84da501b9b2ec8c11e1f61c25984c4a0dda1f129ef391fbfc9b4"}, + {file = "fsspec-2023.9.2.tar.gz", hash = "sha256:80bfb8c70cc27b2178cc62a935ecf242fc6e8c3fb801f9c571fc01b1e715ba7d"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "gcsfs" +version = "2023.9.2" +description = "Convenient Filesystem interface over GCS" +optional = false +python-versions = ">=3.8" +files = [ + {file = "gcsfs-2023.9.2-py2.py3-none-any.whl", hash = "sha256:b3e61d07b0ecf3e04627b0cc0df30ee728bc49e31d42de180815601041e62c1b"}, + {file = "gcsfs-2023.9.2.tar.gz", hash = "sha256:7ca430816fa99b3df428506b557f08dbafab563a048393747507d0809fa4576b"}, +] + +[package.dependencies] +aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1" +decorator = ">4.1.2" +fsspec = "2023.9.2" +google-auth = ">=1.2" +google-auth-oauthlib = "*" +google-cloud-storage = "*" +requests = "*" + +[package.extras] +crc = ["crcmod"] +gcsfuse = ["fusepy"] + +[[package]] +name = "genomepy" +version = "0.16.1" +description = "Genes and genomes at your fingertips" +optional = false +python-versions = ">=3.7" +files = [ + {file = "genomepy-0.16.1-py3-none-any.whl", hash = "sha256:820d46bce1503f66aa82e795a9a33e53a89e4d4f3f79b5c105ae452164f47635"}, + {file = "genomepy-0.16.1.tar.gz", hash = "sha256:22e81827acfdb4d9e6adda1f8e4cfafbb97f1c1788348e86b930c9daa51088c5"}, +] + +[package.dependencies] +appdirs = "*" +biopython = ">=1.73" +click = "*" +colorama = "*" +diskcache = "*" +filelock = ">=3.5" +loguru = "*" +mygene = "*" +mysql-connector-python = "*" +norns = ">=0.1.6" +numpy = "*" +pandas = "*" +pyfaidx = ">=0.7.2.1" +requests = "*" +tqdm = ">=4.51" + +[[package]] +name = "ghp-import" +version = "2.1.0" +description = "Copy your docs directly to the gh-pages branch." 
+optional = false +python-versions = "*" +files = [ + {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, + {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.1" + +[package.extras] +dev = ["flake8", "markdown", "twine", "wheel"] + +[[package]] +name = "gimmemotifs" +version = "0+untagged.1966.gac6ed7b" +description = "GimmeMotifs is a motif prediction pipeline." +optional = false +python-versions = "*" +files = [] +develop = false + +[package.dependencies] +biofluff = ">=3.0.4" +configparser = "<6.0.0" +diskcache = "*" +feather-format = "*" +genomepy = ">=0.13.0" +iteround = "*" +jinja2 = "*" +logomaker = "*" +loguru = "*" +matplotlib = ">=3.3" +numpy = ">=1.18" +pandas = ">=1.3.0" +pybedtools = ">=0.9.0" +pysam = ">=0.16" +qnorm = ">=0.8.1" +scikit-learn = ">=0.23.2" +scipy = ">=1.5" +seaborn = ">=0.10.1" +setuptools = ">=0.7" +statsmodels = "*" +tqdm = ">=4.46.1" +xdg = "*" +xxhash = "*" + +[package.extras] +ete3 = ["ete3"] +notebook = ["ipywidgets"] +xgboost = ["xgboost (>=1.0.2)"] + +[package.source] +type = "git" +url = "https://github.com/cameronraysmith/gimmemotifs.git" +reference = "bound-configparser-6" +resolved_reference = "72ae59bfa4967807871858b02275bc267bbce6b3" + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.40" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.40-py3-none-any.whl", hash = "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"}, + {file = "GitPython-3.1.40.tar.gz", hash = "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] + +[[package]] +name = "google-api-core" +version = "2.14.0" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.14.0.tar.gz", hash = "sha256:5368a4502b793d9bbf812a5912e13e4e69f9bd87f6efb508460c43f5bbd1ce41"}, + {file = "google_api_core-2.14.0-py3-none-any.whl", hash = "sha256:de2fb50ed34d47ddbb2bd2dcf680ee8fead46279f4ed6b16de362aca23a18952"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] 
+name = "google-auth" +version = "2.25.1" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.25.1.tar.gz", hash = "sha256:d5d66b8f4f6e3273740d7bb73ddefa6c2d1ff691704bd407d51c6b5800e7c97b"}, + {file = "google_auth-2.25.1-py2.py3-none-any.whl", hash = "sha256:dfd7b44935d498e106c08883b2dac0ad36d8aa10402a6412e9a1c9d74b4773f1"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-auth-oauthlib" +version = "1.1.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "google-auth-oauthlib-1.1.0.tar.gz", hash = "sha256:83ea8c3b0881e453790baff4448e8a6112ac8778d1de9da0b68010b843937afb"}, + {file = "google_auth_oauthlib-1.1.0-py2.py3-none-any.whl", hash = "sha256:089c6e587d36f4803ac7e0720c045c6a8b1fd1790088b8424975b90d0ee61c12"}, +] + +[package.dependencies] +google-auth = ">=2.15.0" +requests-oauthlib = ">=0.7.0" + +[package.extras] +tool = ["click (>=6.0.0)"] + +[[package]] +name = "google-cloud-core" +version = "2.3.3" +description = "Google Cloud API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-core-2.3.3.tar.gz", hash = "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"}, + {file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"}, +] + +[package.dependencies] +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=1.25.0,<3.0dev" + +[package.extras] +grpc = ["grpcio (>=1.38.0,<2.0dev)"] + +[[package]] +name = "google-cloud-storage" +version = "2.13.0" +description = "Google Cloud Storage API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-storage-2.13.0.tar.gz", hash = "sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7"}, + {file = "google_cloud_storage-2.13.0-py2.py3-none-any.whl", hash = "sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=2.23.3,<3.0dev" +google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" +google-resumable-media = ">=2.6.0" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +protobuf = ["protobuf (<5.0.0dev)"] + +[[package]] +name = "google-crc32c" +version = "1.5.0" +description = "A python wrapper of the C library 'Google CRC32C'" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, + {file = 
"google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, +] + +[package.extras] +testing = ["pytest"] + +[[package]] +name = "google-resumable-media" +version = "2.6.0" +description = "Utilities for Google Media Downloads and Resumable Uploads" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "google-resumable-media-2.6.0.tar.gz", hash = 
"sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7"}, + {file = "google_resumable_media-2.6.0-py2.py3-none-any.whl", hash = "sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b"}, +] + +[package.dependencies] +google-crc32c = ">=1.0,<2.0dev" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] +requests = ["requests (>=2.18.0,<3.0.0dev)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.61.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.61.0.tar.gz", hash = "sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b"}, + {file = "googleapis_common_protos-1.61.0-py2.py3-none-any.whl", hash = "sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0"}, +] + +[package.dependencies] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "griffe" +version = "0.38.1" +description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." +optional = false +python-versions = ">=3.8" +files = [ + {file = "griffe-0.38.1-py3-none-any.whl", hash = "sha256:334c79d3b5964ade65c05dfcaf53518c576dedd387aaba5c9fd71212f34f1483"}, + {file = "griffe-0.38.1.tar.gz", hash = "sha256:bd68d7da7f3d87bc57eb9962b250db123efd9bbcc06c11c1a91b6e583b2a9361"}, +] + +[package.dependencies] +colorama = ">=0.4" + +[[package]] +name = "grpcio" +version = "1.59.3" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.59.3-cp310-cp310-linux_armv7l.whl", hash = "sha256:aca028a6c7806e5b61e5f9f4232432c52856f7fcb98e330b20b6bc95d657bdcc"}, + {file = "grpcio-1.59.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:19ad26a7967f7999c8960d2b9fe382dae74c55b0c508c613a6c2ba21cddf2354"}, + {file = "grpcio-1.59.3-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:72b71dad2a3d1650e69ad42a5c4edbc59ee017f08c32c95694172bc501def23c"}, + {file = "grpcio-1.59.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0f0a11d82d0253656cc42e04b6a149521e02e755fe2e4edd21123de610fd1d4"}, + {file = "grpcio-1.59.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60cddafb70f9a2c81ba251b53b4007e07cca7389e704f86266e22c4bffd8bf1d"}, + {file = "grpcio-1.59.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6c75a1fa0e677c1d2b6d4196ad395a5c381dfb8385f07ed034ef667cdcdbcc25"}, + {file = "grpcio-1.59.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e1d8e01438d5964a11167eec1edb5f85ed8e475648f36c834ed5db4ffba24ac8"}, + {file = "grpcio-1.59.3-cp310-cp310-win32.whl", hash = "sha256:c4b0076f0bf29ee62335b055a9599f52000b7941f577daa001c7ef961a1fbeab"}, + {file = "grpcio-1.59.3-cp310-cp310-win_amd64.whl", hash = "sha256:b1f00a3e6e0c3dccccffb5579fc76ebfe4eb40405ba308505b41ef92f747746a"}, + {file = "grpcio-1.59.3-cp311-cp311-linux_armv7l.whl", hash = "sha256:3996aaa21231451161dc29df6a43fcaa8b332042b6150482c119a678d007dd86"}, + {file = "grpcio-1.59.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:cb4e9cbd9b7388fcb06412da9f188c7803742d06d6f626304eb838d1707ec7e3"}, + {file = 
"grpcio-1.59.3-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8022ca303d6c694a0d7acfb2b472add920217618d3a99eb4b14edc7c6a7e8fcf"}, + {file = "grpcio-1.59.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b36683fad5664283755a7f4e2e804e243633634e93cd798a46247b8e54e3cb0d"}, + {file = "grpcio-1.59.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8239b853226e4824e769517e1b5232e7c4dda3815b200534500338960fcc6118"}, + {file = "grpcio-1.59.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0511af8653fbda489ff11d542a08505d56023e63cafbda60e6e00d4e0bae86ea"}, + {file = "grpcio-1.59.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e78dc982bda74cef2ddfce1c91d29b96864c4c680c634e279ed204d51e227473"}, + {file = "grpcio-1.59.3-cp311-cp311-win32.whl", hash = "sha256:6a5c3a96405966c023e139c3bcccb2c7c776a6f256ac6d70f8558c9041bdccc3"}, + {file = "grpcio-1.59.3-cp311-cp311-win_amd64.whl", hash = "sha256:ed26826ee423b11477297b187371cdf4fa1eca874eb1156422ef3c9a60590dd9"}, + {file = "grpcio-1.59.3-cp312-cp312-linux_armv7l.whl", hash = "sha256:45dddc5cb5227d30fa43652d8872dc87f086d81ab4b500be99413bad0ae198d7"}, + {file = "grpcio-1.59.3-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:1736496d74682e53dd0907fd515f2694d8e6a96c9a359b4080b2504bf2b2d91b"}, + {file = "grpcio-1.59.3-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ddbd1a16138e52e66229047624de364f88a948a4d92ba20e4e25ad7d22eef025"}, + {file = "grpcio-1.59.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcfa56f8d031ffda902c258c84c4b88707f3a4be4827b4e3ab8ec7c24676320d"}, + {file = "grpcio-1.59.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2eb8f0c7c0c62f7a547ad7a91ba627a5aa32a5ae8d930783f7ee61680d7eb8d"}, + {file = "grpcio-1.59.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8d993399cc65e3a34f8fd48dd9ad7a376734564b822e0160dd18b3d00c1a33f9"}, + {file = "grpcio-1.59.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0bd141f4f41907eb90bda74d969c3cb21c1c62779419782a5b3f5e4b5835718"}, + {file = "grpcio-1.59.3-cp312-cp312-win32.whl", hash = "sha256:33b8fd65d4e97efa62baec6171ce51f9cf68f3a8ba9f866f4abc9d62b5c97b79"}, + {file = "grpcio-1.59.3-cp312-cp312-win_amd64.whl", hash = "sha256:0e735ed002f50d4f3cb9ecfe8ac82403f5d842d274c92d99db64cfc998515e07"}, + {file = "grpcio-1.59.3-cp37-cp37m-linux_armv7l.whl", hash = "sha256:ea40ce4404e7cca0724c91a7404da410f0144148fdd58402a5942971e3469b94"}, + {file = "grpcio-1.59.3-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83113bcc393477b6f7342b9f48e8a054330c895205517edc66789ceea0796b53"}, + {file = "grpcio-1.59.3-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:73afbac602b8f1212a50088193601f869b5073efa9855b3e51aaaec97848fc8a"}, + {file = "grpcio-1.59.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:575d61de1950b0b0699917b686b1ca108690702fcc2df127b8c9c9320f93e069"}, + {file = "grpcio-1.59.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd76057b5c9a4d68814610ef9226925f94c1231bbe533fdf96f6181f7d2ff9e"}, + {file = "grpcio-1.59.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:95d6fd804c81efe4879e38bfd84d2b26e339a0a9b797e7615e884ef4686eb47b"}, + {file = "grpcio-1.59.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0d42048b8a3286ea4134faddf1f9a59cf98192b94aaa10d910a25613c5eb5bfb"}, + {file = "grpcio-1.59.3-cp37-cp37m-win_amd64.whl", hash = 
"sha256:4619fea15c64bcdd9d447cdbdde40e3d5f1da3a2e8ae84103d94a9c1df210d7e"}, + {file = "grpcio-1.59.3-cp38-cp38-linux_armv7l.whl", hash = "sha256:95b5506e70284ac03b2005dd9ffcb6708c9ae660669376f0192a710687a22556"}, + {file = "grpcio-1.59.3-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:9e17660947660ccfce56c7869032910c179a5328a77b73b37305cd1ee9301c2e"}, + {file = "grpcio-1.59.3-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:00912ce19914d038851be5cd380d94a03f9d195643c28e3ad03d355cc02ce7e8"}, + {file = "grpcio-1.59.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e58b3cadaa3c90f1efca26ba33e0d408b35b497307027d3d707e4bcd8de862a6"}, + {file = "grpcio-1.59.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d787ecadea865bdf78f6679f6f5bf4b984f18f659257ba612979df97a298b3c3"}, + {file = "grpcio-1.59.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0814942ba1bba269db4e760a34388640c601dece525c6a01f3b4ff030cc0db69"}, + {file = "grpcio-1.59.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fb111aa99d3180c361a35b5ae1e2c63750220c584a1344229abc139d5c891881"}, + {file = "grpcio-1.59.3-cp38-cp38-win32.whl", hash = "sha256:eb8ba504c726befe40a356ecbe63c6c3c64c9a439b3164f5a718ec53c9874da0"}, + {file = "grpcio-1.59.3-cp38-cp38-win_amd64.whl", hash = "sha256:cdbc6b32fadab9bebc6f49d3e7ec4c70983c71e965497adab7f87de218e84391"}, + {file = "grpcio-1.59.3-cp39-cp39-linux_armv7l.whl", hash = "sha256:c82ca1e4be24a98a253d6dbaa216542e4163f33f38163fc77964b0f0d255b552"}, + {file = "grpcio-1.59.3-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:36636babfda14f9e9687f28d5b66d349cf88c1301154dc71c6513de2b6c88c59"}, + {file = "grpcio-1.59.3-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f9b2e591da751ac7fdd316cc25afafb7a626dededa9b414f90faad7f3ccebdb"}, + {file = "grpcio-1.59.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a93a82876a4926bf451db82ceb725bd87f42292bacc94586045261f501a86994"}, + {file = "grpcio-1.59.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce31fa0bfdd1f2bb15b657c16105c8652186eab304eb512e6ae3b99b2fdd7d13"}, + {file = "grpcio-1.59.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:16da0e40573962dab6cba16bec31f25a4f468e6d05b658e589090fe103b03e3d"}, + {file = "grpcio-1.59.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1d1a17372fd425addd5812049fa7374008ffe689585f27f802d0935522cf4b7"}, + {file = "grpcio-1.59.3-cp39-cp39-win32.whl", hash = "sha256:52cc38a7241b5f7b4a91aaf9000fdd38e26bb00d5e8a71665ce40cfcee716281"}, + {file = "grpcio-1.59.3-cp39-cp39-win_amd64.whl", hash = "sha256:b491e5bbcad3020a96842040421e508780cade35baba30f402df9d321d1c423e"}, + {file = "grpcio-1.59.3.tar.gz", hash = "sha256:7800f99568a74a06ebdccd419dd1b6e639b477dcaf6da77ea702f8fb14ce5f80"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.59.3)"] + +[[package]] +name = "grpcio-status" +version = "1.59.3" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.6" +files = [ + {file = "grpcio-status-1.59.3.tar.gz", hash = "sha256:65c394ba43380d6bdf8c04c61efc493104b5535552aed35817a1b4dc66598a1f"}, + {file = "grpcio_status-1.59.3-py3-none-any.whl", hash = "sha256:2fd2eb39ca4e9afb3c874c0878ff75b258db0b7dcc25570fc521f16ae0ab942a"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.59.3" +protobuf = ">=4.21.6" + +[[package]] +name = "gtfparse" +version = "1.3.0" +description = "GTF Parsing" +optional = false +python-versions = "*" +files = [ + 
{file = "gtfparse-1.3.0.tar.gz", hash = "sha256:d957f18e5f70413f89a28ef83068c461b6407eb38fd30e99b8da3d69143527b1"}, +] + +[package.dependencies] +numpy = ">=1.7" +pandas = ">=0.15" + +[[package]] +name = "htseq" +version = "2.0.5" +description = "A framework to process and analyze data from high-throughput sequencing (HTS) assays" +optional = false +python-versions = "*" +files = [ + {file = "HTSeq-2.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3df9e3fe6be02b5f8c45b4af8f163017aa56291a4c9114be0042db981ac1c908"}, + {file = "HTSeq-2.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0441e9f5cc89828c71d9ecb2c3e8e653b0e9ec967c43958103891dafc4d2df0"}, + {file = "HTSeq-2.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:676a577c48102834f171b5c381265b22c22e69145302f157610c4ed2757cccb8"}, + {file = "HTSeq-2.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9de97c2e0ca9d222d05527803240d28fd921706130ee7f7147412a91c06874c8"}, + {file = "HTSeq-2.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f932ddca7d09e9c6cf5a0d47b36f1309abd080bc9d6c28a85cbdc43e594d44c8"}, + {file = "HTSeq-2.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8e8b130e82049c850f434cf7f143cd2e5c56cdc0bb3bb92cb4412d1496b3c310"}, + {file = "HTSeq-2.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b337bca3e4ade6333b42debca0ffce6644d0384c43ec505c0aa52669792fab"}, + {file = "HTSeq-2.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:43b4ea95bbea9608deab032eee6d07aa33b27fa64f06198ef6a405b30e6515f6"}, + {file = "HTSeq-2.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a857d78fb4b826f8c4250083ac3e0ba7eeb51cfa2e4cecfdc8f675e6f4ce0a95"}, + {file = "HTSeq-2.0.5.tar.gz", hash = "sha256:2519675246dd1639115a76c9aacf19ab5bc5bed2f3598bf89cd97be5c0d066b3"}, +] + +[package.dependencies] +numpy = "*" +pysam = "*" + +[package.extras] +htseq-qa = ["matplotlib (>=1.4)"] +test = ["matplotlib (>=1.4)", "pandas (>=1.1.0)", "pytest (>=6.2.5)", "scipy (>=1.5.0)"] + +[[package]] +name = "huggingface-hub" +version = "0.19.4" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "huggingface_hub-0.19.4-py3-none-any.whl", hash = "sha256:dba013f779da16f14b606492828f3760600a1e1801432d09fe1c33e50b825bb5"}, + {file = "huggingface_hub-0.19.4.tar.gz", hash = "sha256:176a4fc355a851c17550e7619488f383189727eab209534d7cef2114dae77b22"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", 
"types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +docs = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "hf-doc-builder", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)", "watchdog"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + +[[package]] +name = "hydra-core" +version = "1.3.2" +description = "A framework for elegantly configuring complex applications" +optional = false +python-versions = "*" +files = [ + {file = "hydra-core-1.3.2.tar.gz", hash = "sha256:8a878ed67216997c3e9d88a8e72e7b4767e81af37afb4ea3334b269a4390a824"}, + {file = "hydra_core-1.3.2-py3-none-any.whl", hash = "sha256:fa0238a9e31df3373b35b0bfb672c34cc92718d21f81311d8996a16de1141d8b"}, +] + +[package.dependencies] +antlr4-python3-runtime = "==4.9.*" +omegaconf = ">=2.2,<2.4" +packaging = "*" + +[[package]] +name = "hydra-joblib-launcher" +version = "1.2.0" +description = "Joblib Launcher for Hydra apps" +optional = false +python-versions = "*" +files = [ + {file = "hydra-joblib-launcher-1.2.0.tar.gz", hash = "sha256:b4dc8b990221a0df048c094954356c6532585557e708e8198c46a877b379a6d5"}, + {file = "hydra_joblib_launcher-1.2.0-py3-none-any.whl", hash = "sha256:57bfd042b015056157297de93e8ec1c6bc75fd39bd3b300e1599db0c5d992eee"}, +] + +[package.dependencies] +hydra-core = ">=1.1.0.dev7" +joblib = ">=0.14.0" + +[[package]] +name = "hydra-zen" +version = "0.11.0" +description = "Configurable, reproducible, and scalable workflows in Python, via Hydra" +optional = false +python-versions = ">=3.8" +files = [ + {file = "hydra_zen-0.11.0-py3-none-any.whl", hash = "sha256:a99a87ec5ae758f57d43c35ae20e172d849e67c0b916c2f91daa4f90b52dc6e5"}, + {file = "hydra_zen-0.11.0.tar.gz", hash = "sha256:10f11ca0c03d069fe7653d53fcfd95c1e3512609e31c7e436bf1722cb7e7679f"}, +] + +[package.dependencies] +hydra-core = ">=1.2.0" +omegaconf = ">=2.2.1" +typing-extensions = ">=4.1.0,<4.6.0 || >4.6.0" + +[package.extras] +beartype = ["beartype (>=0.8.0)"] +pydantic = ["pydantic (>=1.8.2,<2.0.0)"] +test = ["hypothesis (>=6.28.0)", "pytest (>=3.8)"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.0.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files 
= [ + {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, + {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipykernel" +version = "6.27.1" +description = "IPython Kernel for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.27.1-py3-none-any.whl", hash = "sha256:dab88b47f112f9f7df62236511023c9bdeef67abc73af7c652e4ce4441601686"}, + {file = "ipykernel-6.27.1.tar.gz", hash = "sha256:7d5d594b6690654b4d299edba5e872dc17bb7396a8d0609c97cb7b8a1c605de6"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=20" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "ipython" +version = "8.18.1" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.9" +files = [ + {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, + {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +prompt-toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" + +[package.extras] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", 
"trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] + +[[package]] +name = "ipython-genutils" +version = "0.2.0" +description = "Vestigial utilities from IPython" +optional = false +python-versions = "*" +files = [ + {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, + {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +optional = false +python-versions = ">=3.7" +files = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] + +[package.dependencies] +arrow = ">=0.15.0" + +[[package]] +name = "iteround" +version = "1.0.4" +description = "Rounds iterables (arrays, lists, sets, etc) while maintaining the sum of the initial array." +optional = false +python-versions = "*" +files = [ + {file = "iteround-1.0.4-py3-none-any.whl", hash = "sha256:17947dd5479177e6fb186b0a3d5d594b55eedea14dc722c6da7e84bbed45f5b2"}, +] + +[[package]] +name = "jaraco-classes" +version = "3.3.0" +description = "Utility functions for Python class constructs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.classes-3.3.0-py3-none-any.whl", hash = "sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb"}, + {file = "jaraco.classes-3.3.0.tar.gz", hash = "sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jeepney" +version = "0.8.0" +description = "Low-level, pure Python DBus protocol wrapper." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, +] + +[package.extras] +test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] +trio = ["async_generator", "trio"] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "joblib" +version = "1.3.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"}, + {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"}, +] + +[[package]] +name = "json5" +version = "0.9.14" +description = "A Python implementation of the JSON5 data format." 
+optional = false +python-versions = "*" +files = [ + {file = "json5-0.9.14-py2.py3-none-any.whl", hash = "sha256:740c7f1b9e584a468dbb2939d8d458db3427f2c93ae2139d05f47e453eae964f"}, + {file = "json5-0.9.14.tar.gz", hash = "sha256:9ed66c3a6ca3510a976a9ef9b8c0787de24802724ab1860bc0153c7fdd589b02"}, +] + +[package.extras] +dev = ["hypothesis"] + +[[package]] +name = "jsonpickle" +version = "3.0.2" +description = "Python library for serializing any arbitrary object graph into JSON" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonpickle-3.0.2-py3-none-any.whl", hash = "sha256:4a8442d97ca3f77978afa58068768dba7bff2dbabe79a9647bc3cdafd4ef019f"}, + {file = "jsonpickle-3.0.2.tar.gz", hash = "sha256:e37abba4bfb3ca4a4647d28bb9f4706436f7b46c8a8333b4a718abafa8e46b37"}, +] + +[package.extras] +docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"] +testing-libs = ["simplejson", "ujson"] + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jsonschema" +version = "4.20.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"}, + {file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} +rpds-py = ">=0.7.1" +uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.11.2" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.11.2-py3-none-any.whl", hash = 
"sha256:e74ba7c0a65e8cb49dc26837d6cfe576557084a8b423ed16a420984228104f93"}, + {file = "jsonschema_specifications-2023.11.2.tar.gz", hash = "sha256:9472fc4fea474cd74bea4a2b190daeccb5a9e4db2ea80efcf7a1b582fc9a81b8"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] +name = "jupyter-client" +version = "8.6.0" +description = "Jupyter protocol implementation and client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, + {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, +] + +[package.dependencies] +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-contrib-core" +version = "0.4.2" +description = "Common utilities for jupyter-contrib projects." +optional = false +python-versions = "*" +files = [ + {file = "jupyter_contrib_core-0.4.2.tar.gz", hash = "sha256:1887212f3ca9d4487d624c0705c20dfdf03d5a0b9ea2557d3aaeeb4c38bdcabb"}, +] + +[package.dependencies] +jupyter_core = "*" +notebook = ">=4.0" +setuptools = "*" +tornado = "*" +traitlets = "*" + +[package.extras] +testing-utils = ["mock", "nose"] + +[[package]] +name = "jupyter-contrib-nbextensions" +version = "0.7.0" +description = "A collection of Jupyter nbextensions." +optional = false +python-versions = "*" +files = [ + {file = "jupyter_contrib_nbextensions-0.7.0.tar.gz", hash = "sha256:06e33f005885eb92f89cbe82711e921278201298d08ab0d886d1ba09e8c3e9ca"}, +] + +[package.dependencies] +ipython_genutils = "*" +jupyter_contrib_core = ">=0.3.3" +jupyter_core = "*" +jupyter_highlight_selected_word = ">=0.1.1" +jupyter_nbextensions_configurator = ">=0.4.0" +lxml = "*" +nbconvert = ">=6.0" +notebook = ">=6.0" +tornado = "*" +traitlets = ">=4.1" + +[package.extras] +test = ["mock", "nbformat", "nose", "pip", "requests"] + +[[package]] +name = "jupyter-core" +version = "5.5.0" +description = "Jupyter core package. A base package on which Jupyter projects rely." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.5.0-py3-none-any.whl", hash = "sha256:e11e02cd8ae0a9de5c6c44abf5727df9f2581055afe00b22183f621ba3585805"}, + {file = "jupyter_core-5.5.0.tar.gz", hash = "sha256:880b86053bf298a8724994f95e99b99130659022a4f7f45f563084b6223861d3"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyter-events" +version = "0.9.0" +description = "Jupyter Event System library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_events-0.9.0-py3-none-any.whl", hash = "sha256:d853b3c10273ff9bc8bb8b30076d65e2c9685579db736873de6c2232dde148bf"}, + {file = "jupyter_events-0.9.0.tar.gz", hash = "sha256:81ad2e4bc710881ec274d31c6c50669d71bbaa5dd9d01e600b56faa85700d399"}, +] + +[package.dependencies] +jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} +python-json-logger = ">=2.0.4" +pyyaml = ">=5.3" +referencing = "*" +rfc3339-validator = "*" +rfc3986-validator = ">=0.1.1" +traitlets = ">=5.3" + +[package.extras] +cli = ["click", "rich"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] +test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] + +[[package]] +name = "jupyter-highlight-selected-word" +version = "0.2.0" +description = "Jupyter notebook extension that enables highlighting every instance of the current word in the notebook." +optional = false +python-versions = "*" +files = [ + {file = "jupyter_highlight_selected_word-0.2.0-py2.py3-none-any.whl", hash = "sha256:9545dfa9cb057eebe3a5795604dcd3a5294ea18637e553f61a0b67c1b5903c58"}, + {file = "jupyter_highlight_selected_word-0.2.0.tar.gz", hash = "sha256:9fa740424859a807950ca08d2bfd28a35154cd32dd6d50ac4e0950022adc0e7b"}, +] + +[[package]] +name = "jupyter-lsp" +version = "2.2.1" +description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter-lsp-2.2.1.tar.gz", hash = "sha256:b17fab6d70fe83c8896b0cff59237640038247c196056b43684a0902b6a9e0fb"}, + {file = "jupyter_lsp-2.2.1-py3-none-any.whl", hash = "sha256:17a689910c5e4ae5e7d334b02f31d08ffbe98108f6f658fb05e4304b4345368b"}, +] + +[package.dependencies] +jupyter-server = ">=1.1.2" + +[[package]] +name = "jupyter-nbextensions-configurator" +version = "0.6.3" +description = "jupyter serverextension providing configuration interfaces for nbextensions." +optional = false +python-versions = "*" +files = [ + {file = "jupyter_nbextensions_configurator-0.6.3-py2.py3-none-any.whl", hash = "sha256:cece496f3f62cf80bb0b04867ea463c32ed5db19ff5814fe18a3a7f1bb9da95b"}, +] + +[package.dependencies] +jupyter-contrib-core = ">=0.3.3" +jupyter-core = "*" +notebook = ">=6.0" +pyyaml = "*" +tornado = "*" +traitlets = "*" + +[package.extras] +test = ["jupyter-contrib-core[testing-utils]", "mock", "nose", "requests", "selenium"] + +[[package]] +name = "jupyter-server" +version = "2.12.1" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server-2.12.1-py3-none-any.whl", hash = "sha256:fd030dd7be1ca572e4598203f718df6630c12bd28a599d7f1791c4d7938e1010"}, + {file = "jupyter_server-2.12.1.tar.gz", hash = "sha256:dc77b7dcc5fc0547acba2b2844f01798008667201eea27c6319ff9257d700a6d"}, +] + +[package.dependencies] +anyio = ">=3.1.0" +argon2-cffi = "*" +jinja2 = "*" +jupyter-client = ">=7.4.4" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-events = ">=0.9.0" +jupyter-server-terminals = "*" +nbconvert = ">=6.4.4" +nbformat = ">=5.3.0" +overrides = "*" +packaging = "*" +prometheus-client = "*" +pywinpty = {version = "*", markers = "os_name == \"nt\""} +pyzmq = ">=24" +send2trash = ">=1.8.2" +terminado = ">=0.8.3" +tornado = ">=6.2.0" +traitlets = ">=5.6.0" +websocket-client = "*" + +[package.extras] +docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] + +[[package]] +name = "jupyter-server-terminals" +version = "0.4.4" +description = "A Jupyter Server Extension Providing Terminals." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, + {file = "jupyter_server_terminals-0.4.4.tar.gz", hash = "sha256:57ab779797c25a7ba68e97bcfb5d7740f2b5e8a83b5e8102b10438041a7eac5d"}, +] + +[package.dependencies] +pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} +terminado = ">=0.8.3" + +[package.extras] +docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] + +[[package]] +name = "jupyterlab" +version = "4.0.9" +description = "JupyterLab computational environment" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab-4.0.9-py3-none-any.whl", hash = "sha256:9f6f8e36d543fdbcc3df961a1d6a3f524b4a4001be0327a398f68fa4e534107c"}, + {file = "jupyterlab-4.0.9.tar.gz", hash = "sha256:9ebada41d52651f623c0c9f069ddb8a21d6848e4c887d8e5ddc0613166ed5c0b"}, +] + +[package.dependencies] +async-lru = ">=1.0.0" +ipykernel = "*" +jinja2 = ">=3.0.3" +jupyter-core = "*" +jupyter-lsp = ">=2.0.0" +jupyter-server = ">=2.4.0,<3" +jupyterlab-server = ">=2.19.0,<3" +notebook-shim = ">=0.2" +packaging = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} +tornado = ">=6.2.0" +traitlets = "*" + +[package.extras] +dev = ["black[jupyter] (==23.10.1)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.1.4)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] +docs-screenshots = ["altair (==5.0.1)", "ipython (==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", "matplotlib (==3.7.1)", 
"nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] +test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.3.0" +description = "Pygments theme using JupyterLab CSS variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, + {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, +] + +[[package]] +name = "jupyterlab-server" +version = "2.25.2" +description = "A set of server components for JupyterLab and JupyterLab like applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab_server-2.25.2-py3-none-any.whl", hash = "sha256:5b1798c9cc6a44f65c757de9f97fc06fc3d42535afbf47d2ace5e964ab447aaf"}, + {file = "jupyterlab_server-2.25.2.tar.gz", hash = "sha256:bd0ec7a99ebcedc8bcff939ef86e52c378e44c2707e053fcd81d046ce979ee63"}, +] + +[package.dependencies] +babel = ">=2.10" +jinja2 = ">=3.0.3" +json5 = ">=0.9.0" +jsonschema = ">=4.18.0" +jupyter-server = ">=1.21,<3" +packaging = ">=21.3" +requests = ">=2.31" + +[package.extras] +docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] +openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] +test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] + +[[package]] +name = "jupytext" +version = "1.16.0" +description = "Jupyter notebooks as Markdown documents, Julia, Python or R scripts" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupytext-1.16.0-py3-none-any.whl", hash = "sha256:c2b951ac72871f39cd6cd242b56bc43219b7ed8169598bae5359811fb1f54d28"}, + {file = "jupytext-1.16.0.tar.gz", hash = "sha256:94c7e67775e90e1792c39ab7fca4e0459bf7c35656123e8dc2e9e1b3e953baf8"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0" +mdit-py-plugins = "*" +nbformat = "*" +packaging = "*" +pyyaml = "*" +toml = "*" + +[package.extras] +dev = ["jupytext[test-cov,test-external]"] +docs = ["myst-parser", "sphinx", "sphinx-copybutton", "sphinx-rtd-theme"] +test = ["pytest", "pytest-randomly", "pytest-xdist"] +test-cov = ["jupytext[test-integration]", "pytest-cov (>=2.6.1)"] +test-external = ["autopep8", "black", "flake8", "gitpython", "isort", "jupyter-fs (<0.4.0)", "jupytext[test-integration]", "pre-commit", "sphinx-gallery (<0.8)"] +test-functional = ["jupytext[test]"] +test-integration = ["ipykernel", "jupyter-server (!=2.11)", "jupytext[test-functional]", "nbconvert"] +test-ui = ["calysto-bash"] + +[[package]] +name = "keyring" +version = "24.3.0" +description = "Store and access your passwords safely." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "keyring-24.3.0-py3-none-any.whl", hash = "sha256:4446d35d636e6a10b8bce7caa66913dd9eca5fd222ca03a3d42c38608ac30836"}, + {file = "keyring-24.3.0.tar.gz", hash = "sha256:e730ecffd309658a08ee82535a3b5ec4b4c8669a9be11efb66249d8e0aeb9a25"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} +"jaraco.classes" = "*" +jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} +pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} +SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} + +[package.extras] +completion = ["shtab (>=1.1.0)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = 
"kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = 
"kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", 
hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = 
"kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + +[[package]] +name = "kubernetes" +version = "28.1.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +files = [ + {file = "kubernetes-28.1.0-py2.py3-none-any.whl", hash 
= "sha256:10f56f8160dcb73647f15fafda268e7f60cf7dbc9f8e46d52fcd46d3beb0c18d"}, + {file = "kubernetes-28.1.0.tar.gz", hash = "sha256:1468069a573430fb1cb5ad22876868f57977930f80a6749405da31cd6086a7e9"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +google-auth = ">=1.0.1" +oauthlib = ">=3.2.2" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +six = ">=1.9.0" +urllib3 = ">=1.24.2,<2.0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + +[[package]] +name = "llvmlite" +version = "0.41.1" +description = "lightweight wrapper around basic LLVM functionality" +optional = false +python-versions = ">=3.8" +files = [ + {file = "llvmlite-0.41.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1e1029d47ee66d3a0c4d6088641882f75b93db82bd0e6178f7bd744ebce42b9"}, + {file = "llvmlite-0.41.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:150d0bc275a8ac664a705135e639178883293cf08c1a38de3bbaa2f693a0a867"}, + {file = "llvmlite-0.41.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eee5cf17ec2b4198b509272cf300ee6577229d237c98cc6e63861b08463ddc6"}, + {file = "llvmlite-0.41.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd0338da625346538f1173a17cabf21d1e315cf387ca21b294ff209d176e244"}, + {file = "llvmlite-0.41.1-cp310-cp310-win32.whl", hash = "sha256:fa1469901a2e100c17eb8fe2678e34bd4255a3576d1a543421356e9c14d6e2ae"}, + {file = "llvmlite-0.41.1-cp310-cp310-win_amd64.whl", hash = "sha256:2b76acee82ea0e9304be6be9d4b3840208d050ea0dcad75b1635fa06e949a0ae"}, + {file = "llvmlite-0.41.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:210e458723436b2469d61b54b453474e09e12a94453c97ea3fbb0742ba5a83d8"}, + {file = "llvmlite-0.41.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:855f280e781d49e0640aef4c4af586831ade8f1a6c4df483fb901cbe1a48d127"}, + {file = "llvmlite-0.41.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b67340c62c93a11fae482910dc29163a50dff3dfa88bc874872d28ee604a83be"}, + {file = "llvmlite-0.41.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2181bb63ef3c607e6403813421b46982c3ac6bfc1f11fa16a13eaafb46f578e6"}, + {file = "llvmlite-0.41.1-cp311-cp311-win_amd64.whl", hash = "sha256:9564c19b31a0434f01d2025b06b44c7ed422f51e719ab5d24ff03b7560066c9a"}, + {file = "llvmlite-0.41.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5940bc901fb0325970415dbede82c0b7f3e35c2d5fd1d5e0047134c2c46b3281"}, + {file = "llvmlite-0.41.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8b0a9a47c28f67a269bb62f6256e63cef28d3c5f13cbae4fab587c3ad506778b"}, + {file = "llvmlite-0.41.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8afdfa6da33f0b4226af8e64cfc2b28986e005528fbf944d0a24a72acfc9432"}, + {file = "llvmlite-0.41.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8454c1133ef701e8c050a59edd85d238ee18bb9a0eb95faf2fca8b909ee3c89a"}, + {file = "llvmlite-0.41.1-cp38-cp38-win32.whl", hash = "sha256:2d92c51e6e9394d503033ffe3292f5bef1566ab73029ec853861f60ad5c925d0"}, + {file = "llvmlite-0.41.1-cp38-cp38-win_amd64.whl", hash = "sha256:df75594e5a4702b032684d5481db3af990b69c249ccb1d32687b8501f0689432"}, + {file = "llvmlite-0.41.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:04725975e5b2af416d685ea0769f4ecc33f97be541e301054c9f741003085802"}, + {file = "llvmlite-0.41.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:bf14aa0eb22b58c231243dccf7e7f42f7beec48970f2549b3a6acc737d1a4ba4"}, + {file = "llvmlite-0.41.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92c32356f669e036eb01016e883b22add883c60739bc1ebee3a1cc0249a50828"}, + {file = "llvmlite-0.41.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24091a6b31242bcdd56ae2dbea40007f462260bc9bdf947953acc39dffd54f8f"}, + {file = "llvmlite-0.41.1-cp39-cp39-win32.whl", hash = "sha256:880cb57ca49e862e1cd077104375b9d1dfdc0622596dfa22105f470d7bacb309"}, + {file = "llvmlite-0.41.1-cp39-cp39-win_amd64.whl", hash = "sha256:92f093986ab92e71c9ffe334c002f96defc7986efda18397d0f08534f3ebdc4d"}, + {file = "llvmlite-0.41.1.tar.gz", hash = "sha256:f19f767a018e6ec89608e1f6b13348fa2fcde657151137cb64e56d48598a92db"}, +] + +[[package]] +name = "logomaker" +version = "0.8" +description = "Package for making Sequence Logos" +optional = false +python-versions = "*" +files = [ + {file = "logomaker-0.8-py2.py3-none-any.whl", hash = "sha256:6766a0d83de4990ea859366a661ba72c580a7b73ac3c8b526204a0be7d65a50d"}, + {file = "logomaker-0.8.tar.gz", hash = "sha256:d8c7501a7d6d7961cd68e5a44e939000ebf1b0c4197a0c9198351e1d681d3f6d"}, +] + +[package.dependencies] +matplotlib = "*" +numpy = "*" +pandas = "*" + +[[package]] +name = "loguru" +version = "0.7.2" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = ">=3.5" +files = [ + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] + +[[package]] +name = "lxml" +version = "4.9.3" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +files = [ + {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, + {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, + {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, + {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, + {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, + {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, + {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, + {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, + {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, + {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, + {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, + {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, + {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, + {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, + {file = 
"lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, + {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, + {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, + {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, + {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, + {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, + {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, + {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, + {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, + {file = 
"lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.35)"] + +[[package]] +name = "markdown" +version = "3.5.1" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.5.1-py3-none-any.whl", hash = "sha256:5874b47d4ee3f0b14d764324d2c94c03ea66bee56f2d929da9f2508d65e722dc"}, + {file = "Markdown-3.5.1.tar.gz", hash = "sha256:b65d7beb248dc22f2e8a31fb706d93798093c308dc1aba295aedeb9d41a813bd"}, +] + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = 
"MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "marshmallow" +version = "3.20.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"}, + {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.13)", "autodocsumm (==0.2.11)", "sphinx (==7.0.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "marshmallow-enum" +version = "1.5.1" +description = "Enum field for Marshmallow" +optional = false +python-versions = "*" +files = [ + {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, + {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, +] + +[package.dependencies] +marshmallow = ">=2.0.0" + +[[package]] +name = "marshmallow-jsonschema" +version = "0.13.0" +description = "JSON Schema Draft v7 (http://json-schema.org/) formatting with marshmallow" +optional = false +python-versions = ">=3.6" +files = [ + {file = "marshmallow-jsonschema-0.13.0.tar.gz", hash = "sha256:f8ce19cfc0edd909e81f141d7420c33544b849bc5ebbfae8f6a3deea5a3b1f47"}, + {file = "marshmallow_jsonschema-0.13.0-py3-none-any.whl", hash = "sha256:2814f2afb94a6e01b3c0a5795b3dfb142b628763655f20378400af5c0a2307fb"}, +] + +[package.dependencies] +marshmallow = ">=3.11" + +[package.extras] +enum = ["marshmallow-enum"] +union = ["marshmallow-union"] + +[[package]] +name = "mashumaro" +version = "3.11" +description = "Fast and well tested serialization library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mashumaro-3.11-py3-none-any.whl", hash = "sha256:8f858bdb33790db6d9f3087dce793a26d109aeae38bed3ca9c2d7f16f19db412"}, + {file = "mashumaro-3.11.tar.gz", hash = "sha256:b0b2443be4bdad29bb209d91fe4a2a918fbd7b63cccfeb457c7eeb567db02f5e"}, +] + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[package.extras] +msgpack = ["msgpack (>=0.5.6)"] +orjson = ["orjson"] +toml = ["tomli (>=1.1.0)", "tomli-w (>=1.0)"] +yaml = ["pyyaml (>=3.13)"] + +[[package]] +name = "matplotlib" +version = "3.8.1" +description = "Python plotting package" +optional = false +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.8.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e11ab864323fa73ac1b7849688d9671c47a2665242e899785b4db1a375b547e1"}, + {file = "matplotlib-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:43a9d40feb63c9e31a0b8b069dcbd74a912f59bdc0095d187126694cd26977e4"}, + {file = "matplotlib-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:608ea2951838d391e45dec2e644888db6899c752d3c29e157af9dcefb3d7d8d5"}, + {file = "matplotlib-3.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82ec95b02e894561c21e066bd0c716e4b410df141ce9441aa5af6cd937e4ade2"}, + {file = "matplotlib-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e3ad1759ad4a5245172c6d32b8ada603a6020d03211524c39d78d25c9a7dc0d2"}, + {file = "matplotlib-3.8.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:20a0fdfd3ee836179047f3782be060057b878ad37f5abe29edf006a1ff3ecd73"}, + {file = "matplotlib-3.8.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7658b7073c1d6a2922ecc0ed41602410fae88586cb8a54f7a2063d537b6beaf7"}, + {file = "matplotlib-3.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf6889643d4560fcc56f9f0941f078e4df0d72a6c3e4ca548841fc13c5642664"}, + {file = "matplotlib-3.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff842e27bc6a80de08c40e0bfdce460bd08080e8a94af131162b6a1b8948f2cc"}, + {file = "matplotlib-3.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f99d07c0e753717775be7be39ab383453b4d8b629c9fa174596b970c6555890"}, + {file = "matplotlib-3.8.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f34b46dbb1db1f09bfa937cd5853e5f2af232caeeff509c3ab6e43fd33780eae"}, + {file = "matplotlib-3.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:1fcb49b6baf0375281979cbf26695ec10bd1cada1e311893e89533b3b70143e7"}, + {file = "matplotlib-3.8.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e17674ee127f78f26fea237e7f4d5cf910a8be82beb6260fedf358b88075b823"}, + {file = "matplotlib-3.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d921c0270647ab11c3ef283efaaa3d46fd005ba233bfb3aea75231cdf3656de8"}, + {file = "matplotlib-3.8.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2afe7d2f8c9e35e94fbcfcfd9b28f29cb32f0a9068cba469cf907428379c8db9"}, + {file = "matplotlib-3.8.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5a504ff40f81d6233603475a45497a6dca37a873393fa20ae6f7dd6596ef72b"}, + {file = "matplotlib-3.8.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cd54bbf089953140905768ed4626d7223e1ad1d7e2a138410a9c4d3b865ccd80"}, + {file = "matplotlib-3.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:27502d2452208ae784c19504644f09f83742809143bbeae147617640930aa344"}, + {file = "matplotlib-3.8.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f55fb5ff02d999a100be28bf6ffe826e1867a54c7b465409685332c9dd48ffa5"}, + {file = "matplotlib-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:afb72822ae410d62aa1a2920c6563cb5680de9078358f0e9474396c6c3e06be2"}, + {file = "matplotlib-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43cf368a4a1d8cbc426944806e5e183cead746647a64d2cdb786441546235967"}, + {file = "matplotlib-3.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c54c55457c7f5ea4dfdba0020004fc7667f5c10c8d9b8010d735345acc06c9b8"}, + {file = "matplotlib-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e3bb809b743653b5aab5d72ee45c8c937c28e147b0846b0826a54bece898608c"}, + {file = "matplotlib-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:c1b0ecaa0d1f4fe1e30f625a2347f0034a89a7d17c39efbb502e554d92ee2f61"}, + {file = "matplotlib-3.8.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ca84deaa38cb64b7dd160ca2046b45f7b5dbff2b0179642e1339fadc337446c9"}, + {file = "matplotlib-3.8.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed3b29f54f6bbf3eaca4cbd23bc260155153ace63b7f597c474fa6fc6f386530"}, + {file = "matplotlib-3.8.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d24c47a1bb47e392fbcd26fe322e4ff3431653ac1e8718e4e147d450ae97a44"}, + {file = "matplotlib-3.8.1.tar.gz", hash = "sha256:044df81c1f6f3a8e52d70c4cfcb44e77ea9632a10929932870dfaa90de94365d"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" 
+kiwisolver = ">=1.3.1" +numpy = ">=1.21,<2" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mdit-py-plugins" +version = "0.4.0" +description = "Collection of plugins for markdown-it-py" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = "sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"}, + {file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<4.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["myst-parser", "sphinx-book-theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "memory-efficient-attention-pytorch" +version = "0.1.6" +description = "Memory Efficient Attention - Pytorch" +optional = false +python-versions = "*" +files = [ + {file = "memory-efficient-attention-pytorch-0.1.6.tar.gz", hash = "sha256:8aaaebb095c40b6c3e18359d2f4978c95984eb77b467ac58baa503bdb92914bf"}, + {file = "memory_efficient_attention_pytorch-0.1.6-py3-none-any.whl", hash = "sha256:efbb2676f8695b21a29d96d83f84818be257a35ac4c89f94d7d93f59819d38ed"}, +] + +[package.dependencies] +einops = ">=0.4.1" +torch = ">=1.6" + +[[package]] +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." +optional = false +python-versions = ">=3.6" +files = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] + +[[package]] +name = "mistune" +version = "3.0.2" +description = "A sane and fast Markdown parser with useful plugins and renderers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, +] + +[[package]] +name = "mkdocs" +version = "1.5.3" +description = "Project documentation with Markdown." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"}, + {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} +ghp-import = ">=1.0" +jinja2 = ">=2.11.1" +markdown = ">=3.2.1" +markupsafe = ">=2.0.1" +mergedeep = ">=1.3.4" +packaging = ">=20.5" +pathspec = ">=0.11.1" +platformdirs = ">=2.2.0" +pyyaml = ">=5.1" +pyyaml-env-tag = ">=0.1" +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] + +[[package]] +name = "mkdocs-autorefs" +version = "0.5.0" +description = "Automatically link across pages in MkDocs." +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_autorefs-0.5.0-py3-none-any.whl", hash = "sha256:7930fcb8ac1249f10e683967aeaddc0af49d90702af111a5e390e8b20b3d97ff"}, + {file = "mkdocs_autorefs-0.5.0.tar.gz", hash = "sha256:9a5054a94c08d28855cfab967ada10ed5be76e2bfad642302a610b252c3274c0"}, +] + +[package.dependencies] +Markdown = ">=3.3" +mkdocs = ">=1.1" + +[[package]] +name = "mkdocs-jupyter" +version = "0.24.6" +description = "Use Jupyter in mkdocs websites" +optional = false +python-versions = ">=3.9" +files = [ + {file = "mkdocs_jupyter-0.24.6-py3-none-any.whl", hash = "sha256:56fb7ad796f2414a4143d54a966b805caf315c32413e97f85591623fa87dceca"}, + {file = "mkdocs_jupyter-0.24.6.tar.gz", hash = "sha256:89fcbe8a9523864d5416de1a60711640b6bc2972279d2adf46ed2776c2d9ff7c"}, +] + +[package.dependencies] +ipykernel = ">6.0.0,<7.0.0" +jupytext = ">1.13.8,<2" +mkdocs = ">=1.4.0,<2" +mkdocs-material = ">9.0.0" +nbconvert = ">=7.2.9,<8" +pygments = ">2.12.0" + +[[package]] +name = "mkdocs-material" +version = "9.4.14" +description = "Documentation that simply works" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_material-9.4.14-py3-none-any.whl", hash = "sha256:dbc78a4fea97b74319a6aa9a2f0be575a6028be6958f813ba367188f7b8428f6"}, + {file = "mkdocs_material-9.4.14.tar.gz", hash = "sha256:a511d3ff48fa8718b033e7e37d17abd9cc1de0fdf0244a625ca2ae2387e2416d"}, +] + +[package.dependencies] +babel = ">=2.10,<3.0" +colorama = ">=0.4,<1.0" +jinja2 = ">=3.0,<4.0" +markdown = ">=3.2,<4.0" +mkdocs = ">=1.5.3,<2.0" +mkdocs-material-extensions = ">=1.3,<2.0" +paginate = ">=0.5,<1.0" +pygments = ">=2.16,<3.0" +pymdown-extensions = ">=10.2,<11.0" +regex = ">=2022.4" +requests = ">=2.26,<3.0" + +[package.extras] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2,<2.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=9.4,<10.0)"] +recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] + +[[package]] +name = "mkdocs-material-extensions" +version = "1.3.1" +description = "Extension pack for Python Markdown and MkDocs Material." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, + {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, +] + +[[package]] +name = "mkdocstrings" +version = "0.24.0" +description = "Automatic documentation from sources, for MkDocs." +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocstrings-0.24.0-py3-none-any.whl", hash = "sha256:f4908560c10f587326d8f5165d1908817b2e280bbf707607f601c996366a2264"}, + {file = "mkdocstrings-0.24.0.tar.gz", hash = "sha256:222b1165be41257b494a9d29b14135d2b7ca43f38161d5b10caae03b87bd4f7e"}, +] + +[package.dependencies] +click = ">=7.0" +Jinja2 = ">=2.11.1" +Markdown = ">=3.3" +MarkupSafe = ">=1.1" +mkdocs = ">=1.4" +mkdocs-autorefs = ">=0.3.1" +mkdocstrings-python = {version = ">=0.5.2", optional = true, markers = "extra == \"python\""} +platformdirs = ">=2.2.0" +pymdown-extensions = ">=6.3" + +[package.extras] +crystal = ["mkdocstrings-crystal (>=0.3.4)"] +python = ["mkdocstrings-python (>=0.5.2)"] +python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] + +[[package]] +name = "mkdocstrings-python" +version = "1.7.5" +description = "A Python handler for mkdocstrings." +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocstrings_python-1.7.5-py3-none-any.whl", hash = "sha256:5f6246026353f0c0785135db70c3fe9a5d9318990fc7ceb11d62097b8ffdd704"}, + {file = "mkdocstrings_python-1.7.5.tar.gz", hash = "sha256:c7d143728257dbf1aa550446555a554b760dcd40a763f077189d298502b800be"}, +] + +[package.dependencies] +griffe = ">=0.37" +mkdocstrings = ">=0.20" + +[[package]] +name = "more-itertools" +version = "10.1.0" +description = "More routines for operating on iterables, beyond itertools" +optional = false +python-versions = ">=3.8" +files = [ + {file = "more-itertools-10.1.0.tar.gz", hash = "sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a"}, + {file = "more_itertools-10.1.0-py3-none-any.whl", hash = "sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6"}, +] + +[[package]] +name = "mpmath" +version = "1.3.0" +description = "Python library for arbitrary-precision floating-point arithmetic" +optional = false +python-versions = "*" +files = [ + {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, + {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, +] + +[package.extras] +develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] +docs = ["sphinx"] +gmpy = ["gmpy2 (>=2.1.0a4)"] +tests = ["pytest (>=4.6)"] + +[[package]] +name = "msal" +version = "1.26.0" +description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." 
+optional = false +python-versions = ">=2.7" +files = [ + {file = "msal-1.26.0-py2.py3-none-any.whl", hash = "sha256:be77ba6a8f49c9ff598bbcdc5dfcf1c9842f3044300109af738e8c3e371065b5"}, + {file = "msal-1.26.0.tar.gz", hash = "sha256:224756079fe338be838737682b49f8ebc20a87c1c5eeaf590daae4532b83de15"}, +] + +[package.dependencies] +cryptography = ">=0.6,<44" +PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} +requests = ">=2.0.0,<3" + +[package.extras] +broker = ["pymsalruntime (>=0.13.2,<0.14)"] + +[[package]] +name = "msal-extensions" +version = "1.0.0" +description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." +optional = false +python-versions = "*" +files = [ + {file = "msal-extensions-1.0.0.tar.gz", hash = "sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354"}, + {file = "msal_extensions-1.0.0-py2.py3-none-any.whl", hash = "sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee"}, +] + +[package.dependencies] +msal = ">=0.4.1,<2.0.0" +portalocker = [ + {version = ">=1.6,<3", markers = "python_version >= \"3.5\" and platform_system == \"Windows\""}, + {version = ">=1.0,<3", markers = "python_version >= \"3.5\" and platform_system != \"Windows\""}, +] + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = 
"multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = 
"multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + +[[package]] +name = "mygene" +version = "3.2.2" +description = "Python Client for MyGene.Info services." 
+optional = false +python-versions = "*" +files = [ + {file = "mygene-3.2.2-py2.py3-none-any.whl", hash = "sha256:18d85d1b28ecee2be31d844607fb0c5f7d7c58573278432df819ee2a5e88fe46"}, + {file = "mygene-3.2.2.tar.gz", hash = "sha256:e729cabbc28cf5afb221bca1ab637883b375cb1a3e2f067587ec79f71affdaea"}, +] + +[package.dependencies] +biothings-client = ">=0.2.6" + +[[package]] +name = "mypy" +version = "1.7.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, + {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, + {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, + {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, + {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, + {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, + {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, + {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, + {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, + {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, + {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, + {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, + {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, + {file 
= "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, + {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, + {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, + {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, + {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, + {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "mysql-connector-python" +version = "8.0.23" +description = "MySQL driver written in Python" +optional = false +python-versions = "*" +files = [ + {file = "mysql-connector-python-8.0.23.tar.gz", hash = "sha256:5e84dcc25d4b505118e0c28f07de496a41bc0ab121232a2aba83d0248cd49257"}, + {file = "mysql_connector_python-8.0.23-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:f42516ef6b80ce70322f1fda63a8762f898df60df8ba621b2308d933216a8598"}, + {file = "mysql_connector_python-8.0.23-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:eb5d409df6e54c9a8f47dc901334d5e8b30a7967775d6eb3158494bf0534995d"}, + {file = "mysql_connector_python-8.0.23-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:320a5929a39cae75a1cf0603b3baffdd7f36a0e520bfdd471d456f70973fc0ac"}, + {file = "mysql_connector_python-8.0.23-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:dd7ed6baa35c3a89c6c8c8d6be0ebe1b089e77f41bb874b9dc4bc6db83ad654f"}, + {file = "mysql_connector_python-8.0.23-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:2ed23de6c0a9e5769fc14f2930492b81629cb40f79381d79c679ddd04d5f3ddb"}, + {file = "mysql_connector_python-8.0.23-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:114ab771b6a1a98ea1e2e88c44fcf3ff713c65ac9fa8d3e88c0034e1fc0aed5d"}, + {file = "mysql_connector_python-8.0.23-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7d55a446700bec300ec6a5b928b4700384bc2add43fa3b9ca124b03640db4e19"}, + {file = "mysql_connector_python-8.0.23-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:a37dff9d748d92134ea47ddbaafdfadb7f3bf9c8a66429ae247fb1ccaf6f7337"}, + {file = "mysql_connector_python-8.0.23-cp35-cp35m-win_amd64.whl", hash = "sha256:2184f6b86c3981e5b2ffa82e946f12368a5f1758f022d9a30f43077950136114"}, + {file = "mysql_connector_python-8.0.23-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:76758ee1d57b712bfbdae358506d578b955fda9dd91ae068a59e0fe02138ef0d"}, + {file = "mysql_connector_python-8.0.23-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:0f41b3142fe5325e830a3420915a6cf652b9da1d3a8e0df11bade1c120c0fc2c"}, + {file = "mysql_connector_python-8.0.23-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:451120a964d3c56d983c7664f7c799bd92d5660085e3d8058defdec3e8a3b93a"}, + {file = "mysql_connector_python-8.0.23-cp36-cp36m-win_amd64.whl", hash = "sha256:efe72ce5e9d9051aeb99a4910949b3afb17570106a980a7a4f480d3894b1f426"}, + {file = "mysql_connector_python-8.0.23-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a5bf1979c126f475bc10c840d9464d5a2e4604bdc67b6be8bbaf9dbc4a3d8b89"}, + {file = "mysql_connector_python-8.0.23-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a63658c518502736979dc1a79250c5e54ebbbd5ccfe9ec040cc591fcefbf1974"}, + {file = "mysql_connector_python-8.0.23-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:aa2307ddc30dd20dbca1165ffb1cf41dcf30ae1bcf46e74cb92c06a0eeeb0aac"}, + {file = "mysql_connector_python-8.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:49394f30e792a880ca2bd6e7e9d11eb86df0c316ab629b6d19185c9f5ca0811d"}, + {file = "mysql_connector_python-8.0.23-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:7b7fa9ed17a43164df2e203b25bf71f8679460795c055fb23f64a66f7ecde6be"}, + {file = "mysql_connector_python-8.0.23-cp38-cp38-manylinux1_i686.whl", hash = "sha256:faf2aeb18725bb8bb6aae1e33b358d328cd35ded1b610d4c8608529b3051f634"}, + {file = "mysql_connector_python-8.0.23-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2eabe3f8a264efb3af04cb477943a8c8f04396fda0c5e2ad3450f9c044ddc603"}, + {file = "mysql_connector_python-8.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:2833d47ad04d08c1d3ab64b3b2b9203f84333f3463ca99332961c0ae1bf8101f"}, + {file = "mysql_connector_python-8.0.23-py2.py3-none-any.whl", hash = "sha256:c783e1dc8b78a1b1a9ebbf3ccb12d17e4513d91fafeb5b6c06a29f2d5619e285"}, +] + +[package.dependencies] +protobuf = ">=3.0.0" + +[package.extras] +compression = ["lz4 (>=2.1.6)", "zstandard (>=0.12.0)"] +dns-srv = ["dnspython (>=1.16.0)"] +gssapi = ["gssapi (>=1.6.9)"] + +[[package]] +name = "nbclient" +version = "0.9.0" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "nbclient-0.9.0-py3-none-any.whl", hash = "sha256:a3a1ddfb34d4a9d17fc744d655962714a866639acd30130e9be84191cd97cd15"}, + {file = "nbclient-0.9.0.tar.gz", hash = "sha256:4b28c207877cf33ef3a9838cdc7a54c5ceff981194a82eac59d558f05487295e"}, +] + +[package.dependencies] +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +nbformat = ">=5.1" +traitlets = ">=5.4" + +[package.extras] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] + +[[package]] +name = "nbconvert" +version = "7.12.0" +description = "Converting Jupyter Notebooks" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nbconvert-7.12.0-py3-none-any.whl", hash = "sha256:5b6c848194d270cc55fb691169202620d7b52a12fec259508d142ecbe4219310"}, + {file = "nbconvert-7.12.0.tar.gz", hash = "sha256:b1564bd89f69a74cd6398b0362da94db07aafb991b7857216a766204a71612c0"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +bleach = "!=5.0.0" +defusedxml = "*" +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<4" +nbclient = ">=0.5.0" +nbformat = ">=5.7" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +tinycss2 = "*" +traitlets = ">=5.1" + +[package.extras] +all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] +qtpdf = ["nbconvert[qtpng]"] +qtpng = ["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["flaky", "ipykernel", "ipywidgets (>=7)", "pytest"] +webpdf = ["playwright"] + +[[package]] +name = "nbformat" +version = "5.9.2" +description = "The Jupyter Notebook format" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nbformat-5.9.2-py3-none-any.whl", hash = "sha256:1c5172d786a41b82bcfd0c23f9e6b6f072e8fb49c39250219e4acfff1efe89e9"}, + {file = "nbformat-5.9.2.tar.gz", hash = "sha256:5f98b5ba1997dff175e77e0c17d5c10a96eaed2cbd1de3533d1fc35d5e111192"}, +] + +[package.dependencies] +fastjsonschema = "*" +jsonschema = ">=2.6" +jupyter-core = "*" +traitlets = ">=5.1" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] + +[[package]] +name = "nest-asyncio" +version = "1.5.8" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, + {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, +] + +[[package]] +name = "networkx" +version = "3.2.1" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.9" +files = [ + {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, + {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, +] + +[package.extras] +default 
= ["matplotlib (>=3.5)", "numpy (>=1.22)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] +developer = ["changelist (==0.4)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] +doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "norns" +version = "0.1.6" +description = "Simple yaml-based config module" +optional = false +python-versions = "*" +files = [ + {file = "norns-0.1.6.tar.gz", hash = "sha256:1f3c6ccbe79b2cb3076f66a352cd76462593adbabe9ebb262f879a9d0a6634e4"}, +] + +[package.dependencies] +appdirs = "*" +nose = "*" +pyyaml = "*" + +[[package]] +name = "nose" +version = "1.3.7" +description = "nose extends unittest to make testing easier" +optional = false +python-versions = "*" +files = [ + {file = "nose-1.3.7-py2-none-any.whl", hash = "sha256:dadcddc0aefbf99eea214e0f1232b94f2fa9bd98fa8353711dacb112bfcbbb2a"}, + {file = "nose-1.3.7-py3-none-any.whl", hash = "sha256:9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac"}, + {file = "nose-1.3.7.tar.gz", hash = "sha256:f1bffef9cbc82628f6e7d7b40d7e255aefaa1adb6a1b1d26c69a8b79e6208a98"}, +] + +[[package]] +name = "notebook" +version = "6.4.13" +description = "A web-based notebook environment for interactive computing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "notebook-6.4.13-py3-none-any.whl", hash = "sha256:409f8f257c1841ac2647b1e04611facb64068c7e9a4a77ba0fd129b6d2f58b7e"}, + {file = "notebook-6.4.13.tar.gz", hash = "sha256:08da6c8e2d1748c8acfa6addcfb95501d8a52ef9e7ca238b0e242bfa829cecb7"}, +] + +[package.dependencies] +argon2-cffi = "*" +ipykernel = "*" +ipython-genutils = "*" +jinja2 = "*" +jupyter-client = ">=5.3.4" +jupyter-core = ">=4.6.1" +nbconvert = ">=5" +nbformat = "*" +nest-asyncio = ">=1.5" +prometheus-client = "*" +pyzmq = ">=17" +Send2Trash = ">=1.8.0" +terminado = ">=0.8.3" +tornado = ">=6.1" +traitlets = ">=4.2.1" + +[package.extras] +docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +json-logging = ["json-logging"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium", "testpath"] + +[[package]] +name = "notebook-shim" +version = "0.2.3" +description = "A shim layer for notebook traits and config" +optional = false +python-versions = ">=3.7" +files = [ + {file = "notebook_shim-0.2.3-py3-none-any.whl", hash = "sha256:a83496a43341c1674b093bfcebf0fe8e74cbe7eda5fd2bbc56f8e39e1486c0c7"}, + {file = "notebook_shim-0.2.3.tar.gz", hash = "sha256:f69388ac283ae008cd506dda10d0288b09a017d822d5e8c7129a152cbd3ce7e9"}, +] + +[package.dependencies] +jupyter-server = ">=1.8,<3" + +[package.extras] +test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] + +[[package]] +name = "numba" +version = "0.58.1" 
+description = "compiling Python code using LLVM" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numba-0.58.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:07f2fa7e7144aa6f275f27260e73ce0d808d3c62b30cff8906ad1dec12d87bbe"}, + {file = "numba-0.58.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7bf1ddd4f7b9c2306de0384bf3854cac3edd7b4d8dffae2ec1b925e4c436233f"}, + {file = "numba-0.58.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bc2d904d0319d7a5857bd65062340bed627f5bfe9ae4a495aef342f072880d50"}, + {file = "numba-0.58.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4e79b6cc0d2bf064a955934a2e02bf676bc7995ab2db929dbbc62e4c16551be6"}, + {file = "numba-0.58.1-cp310-cp310-win_amd64.whl", hash = "sha256:81fe5b51532478149b5081311b0fd4206959174e660c372b94ed5364cfb37c82"}, + {file = "numba-0.58.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bcecd3fb9df36554b342140a4d77d938a549be635d64caf8bd9ef6c47a47f8aa"}, + {file = "numba-0.58.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1eaa744f518bbd60e1f7ccddfb8002b3d06bd865b94a5d7eac25028efe0e0ff"}, + {file = "numba-0.58.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bf68df9c307fb0aa81cacd33faccd6e419496fdc621e83f1efce35cdc5e79cac"}, + {file = "numba-0.58.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:55a01e1881120e86d54efdff1be08381886fe9f04fc3006af309c602a72bc44d"}, + {file = "numba-0.58.1-cp311-cp311-win_amd64.whl", hash = "sha256:811305d5dc40ae43c3ace5b192c670c358a89a4d2ae4f86d1665003798ea7a1a"}, + {file = "numba-0.58.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ea5bfcf7d641d351c6a80e8e1826eb4a145d619870016eeaf20bbd71ef5caa22"}, + {file = "numba-0.58.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e63d6aacaae1ba4ef3695f1c2122b30fa3d8ba039c8f517784668075856d79e2"}, + {file = "numba-0.58.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6fe7a9d8e3bd996fbe5eac0683227ccef26cba98dae6e5cee2c1894d4b9f16c1"}, + {file = "numba-0.58.1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:898af055b03f09d33a587e9425500e5be84fc90cd2f80b3fb71c6a4a17a7e354"}, + {file = "numba-0.58.1-cp38-cp38-win_amd64.whl", hash = "sha256:d3e2fe81fe9a59fcd99cc572002101119059d64d31eb6324995ee8b0f144a306"}, + {file = "numba-0.58.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c765aef472a9406a97ea9782116335ad4f9ef5c9f93fc05fd44aab0db486954"}, + {file = "numba-0.58.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e9356e943617f5e35a74bf56ff6e7cc83e6b1865d5e13cee535d79bf2cae954"}, + {file = "numba-0.58.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:240e7a1ae80eb6b14061dc91263b99dc8d6af9ea45d310751b780888097c1aaa"}, + {file = "numba-0.58.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:45698b995914003f890ad839cfc909eeb9c74921849c712a05405d1a79c50f68"}, + {file = "numba-0.58.1-cp39-cp39-win_amd64.whl", hash = "sha256:bd3dda77955be03ff366eebbfdb39919ce7c2620d86c906203bed92124989032"}, + {file = "numba-0.58.1.tar.gz", hash = "sha256:487ded0633efccd9ca3a46364b40006dbdaca0f95e99b8b83e778d1195ebcbaa"}, +] + +[package.dependencies] +llvmlite = "==0.41.*" +numpy = ">=1.22,<1.27" + +[[package]] +name = "numpy" +version = "1.26.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, + {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, + {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, + {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, + {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, + {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, + {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, + {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash 
= "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"}, + {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"}, + {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, + {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "omegaconf" +version = "2.3.0" +description = "A flexible configuration library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "omegaconf-2.3.0-py3-none-any.whl", hash = "sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b"}, + {file = "omegaconf-2.3.0.tar.gz", hash = "sha256:d5d4b6d29955cc50ad50c46dc269bcd92c6e00f5f90d23ab5fee7bfca4ba4cc7"}, +] + +[package.dependencies] +antlr4-python3-runtime = "==4.9.*" +PyYAML = ">=5.1.0" + +[[package]] +name = "overrides" +version = "7.4.0" +description = "A decorator to automatically detect mismatch when overriding a method." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, + {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "paginate" +version = "0.5.6" +description = "Divides large result sets into pages for easier browsing" +optional = false +python-versions = "*" +files = [ + {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, +] + +[[package]] +name = "palettable" +version = "3.3.3" +description = "Color palettes for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "palettable-3.3.3-py2.py3-none-any.whl", hash = "sha256:74e9e7d7fe5a9be065e02397558ed1777b2df0b793a6f4ce1a5ee74f74fb0caa"}, + {file = "palettable-3.3.3.tar.gz", hash = "sha256:094dd7d9a5fc1cca4854773e5c1fc6a315b33bd5b3a8f47064928facaf0490a8"}, +] + +[[package]] +name = "pandas" +version = "2.1.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acf08a73b5022b479c1be155d4988b72f3020f308f7a87c527702c5f8966d34f"}, + {file = "pandas-2.1.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3cc4469ff0cf9aa3a005870cb49ab8969942b7156e0a46cc3f5abd6b11051dfb"}, + {file = "pandas-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35172bff95f598cc5866c047f43c7f4df2c893acd8e10e6653a4b792ed7f19bb"}, + {file = "pandas-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59dfe0e65a2f3988e940224e2a70932edc964df79f3356e5f2997c7d63e758b4"}, + {file = "pandas-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0296a66200dee556850d99b24c54c7dfa53a3264b1ca6f440e42bad424caea03"}, + {file = "pandas-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:465571472267a2d6e00657900afadbe6097c8e1dc43746917db4dfc862e8863e"}, + {file = "pandas-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04d4c58e1f112a74689da707be31cf689db086949c71828ef5da86727cfe3f82"}, + {file = "pandas-2.1.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fa2ad4ff196768ae63a33f8062e6838efed3a319cf938fdf8b95e956c813042"}, + {file = "pandas-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4441ac94a2a2613e3982e502ccec3bdedefe871e8cea54b8775992485c5660ef"}, + {file = "pandas-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5ded6ff28abbf0ea7689f251754d3789e1edb0c4d0d91028f0b980598418a58"}, + {file = "pandas-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca5680368a5139d4920ae3dc993eb5106d49f814ff24018b64d8850a52c6ed2"}, + {file = "pandas-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:de21e12bf1511190fc1e9ebc067f14ca09fccfb189a813b38d63211d54832f5f"}, + {file = "pandas-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:a5d53c725832e5f1645e7674989f4c106e4b7249c1d57549023ed5462d73b140"}, + {file = "pandas-2.1.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7cf4cf26042476e39394f1f86868d25b265ff787c9b2f0d367280f11afbdee6d"}, + {file = "pandas-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72c84ec1b1d8e5efcbff5312abe92bfb9d5b558f11e0cf077f5496c4f4a3c99e"}, + {file = "pandas-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f539e113739a3e0cc15176bf1231a553db0239bfa47a2c870283fd93ba4f683"}, + {file = "pandas-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc77309da3b55732059e484a1efc0897f6149183c522390772d3561f9bf96c00"}, + {file = "pandas-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:08637041279b8981a062899da0ef47828df52a1838204d2b3761fbd3e9fcb549"}, + {file = "pandas-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b99c4e51ef2ed98f69099c72c75ec904dd610eb41a32847c4fcbc1a975f2d2b8"}, + {file = "pandas-2.1.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f7ea8ae8004de0381a2376662c0505bb0a4f679f4c61fbfd122aa3d1b0e5f09d"}, + {file = "pandas-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcd76d67ca2d48f56e2db45833cf9d58f548f97f61eecd3fdc74268417632b8a"}, + {file = "pandas-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1329dbe93a880a3d7893149979caa82d6ba64a25e471682637f846d9dbc10dd2"}, + {file = "pandas-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:321ecdb117bf0f16c339cc6d5c9a06063854f12d4d9bc422a84bb2ed3207380a"}, + {file = "pandas-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:11a771450f36cebf2a4c9dbd3a19dfa8c46c4b905a3ea09dc8e556626060fe71"}, + {file = "pandas-2.1.3.tar.gz", hash = "sha256:22929f84bca106921917eb73c1521317ddd0a4c71b395bcf767a106e3494209f"}, +] + +[package.dependencies] +numpy = {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""} +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] +aws = ["s3fs (>=2022.05.0)"] +clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] +compression = ["zstandard (>=0.17.0)"] +computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2022.05.0)"] +gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] +hdf5 = ["tables (>=3.7.0)"] +html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] +mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] +parquet = ["pyarrow (>=7.0.0)"] 
+performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] +spss = ["pyreadstat (>=1.1.5)"] +sql-other = ["SQLAlchemy (>=1.4.36)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.8.0)"] + +[[package]] +name = "pandocfilters" +version = "1.5.0" +description = "Utilities for writing pandoc filters in python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, +] + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pastel" +version = "0.2.1" +description = "Bring colors to your terminal." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, + {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, +] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "patsy" +version = "0.5.4" +description = "A Python package for describing statistical models and for building design matrices." +optional = false +python-versions = "*" +files = [ + {file = "patsy-0.5.4-py2.py3-none-any.whl", hash = "sha256:0486413077a527db51ddea8fa94a5234d0feb17a4f4dc01b59b6086c58a70f80"}, + {file = "patsy-0.5.4.tar.gz", hash = "sha256:7dabc527597308de0e8f188faa20af7e06a89bdaa306756dfc7783693ea16af4"}, +] + +[package.dependencies] +numpy = ">=1.4" +six = "*" + +[package.extras] +test = ["pytest", "pytest-cov", "scipy"] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." 
+optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pillow" +version = "10.1.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Pillow-10.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1ab05f3db77e98f93964697c8efc49c7954b08dd61cff526b7f2531a22410106"}, + {file = "Pillow-10.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6932a7652464746fcb484f7fc3618e6503d2066d853f68a4bd97193a3996e273"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f63b5a68daedc54c7c3464508d8c12075e56dcfbd42f8c1bf40169061ae666"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0949b55eb607898e28eaccb525ab104b2d86542a85c74baf3a6dc24002edec2"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ae88931f93214777c7a3aa0a8f92a683f83ecde27f65a45f95f22d289a69e593"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b0eb01ca85b2361b09480784a7931fc648ed8b7836f01fb9241141b968feb1db"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d27b5997bdd2eb9fb199982bb7eb6164db0426904020dc38c10203187ae2ff2f"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7df5608bc38bd37ef585ae9c38c9cd46d7c81498f086915b0f97255ea60c2818"}, + {file = "Pillow-10.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:41f67248d92a5e0a2076d3517d8d4b1e41a97e2df10eb8f93106c89107f38b57"}, + {file = "Pillow-10.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7"}, + {file = "Pillow-10.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061"}, + {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262"}, + {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992"}, + {file = "Pillow-10.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d"}, + {file = 
"Pillow-10.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651"}, + {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b"}, + {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f"}, + {file = "Pillow-10.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:fe1e26e1ffc38be097f0ba1d0d07fcade2bcfd1d023cda5b29935ae8052bd793"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7e3daa202beb61821c06d2517428e8e7c1aab08943e92ec9e5755c2fc9ba5e"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fadc71218ad2b8ffe437b54876c9382b4a29e030a05a9879f615091f42ffc2"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1d323703cfdac2036af05191b969b910d8f115cf53093125e4058f62012c9a"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:912e3812a1dbbc834da2b32299b124b5ddcb664ed354916fd1ed6f193f0e2d01"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7dbaa3c7de82ef37e7708521be41db5565004258ca76945ad74a8e998c30af8d"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9d7bc666bd8c5a4225e7ac71f2f9d12466ec555e89092728ea0f5c0c2422ea80"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baada14941c83079bf84c037e2d8b7506ce201e92e3d2fa0d1303507a8538212"}, + {file = "Pillow-10.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2ef6721c97894a7aa77723740a09547197533146fba8355e86d6d9a4a1056b14"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0a026c188be3b443916179f5d04548092e253beb0c3e2ee0a4e2cdad72f66099"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04f6f6149f266a100374ca3cc368b67fb27c4af9f1cc8cb6306d849dcdf12616"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb40c011447712d2e19cc261c82655f75f32cb724788df315ed992a4d65696bb"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c9aeea7b63edb7884b031a35305629a7593272b54f429a9869a4f63a1bf04c34"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd"}, + {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0152565c6aa6ebbfb1e5d8624140a440f2b99bf7afaafbdbf6430426497f28"}, + {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:d921bc90b1defa55c9917ca6b6b71430e4286fc9e44c55ead78ca1a9f9eba5f2"}, + {file = "Pillow-10.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfe96560c6ce2f4c07d6647af2d0f3c54cc33289894ebd88cfbb3bcd5391e256"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f"}, + {file = "Pillow-10.1.0.tar.gz", hash = "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "platformdirs" +version = "4.1.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "plumbum" +version = "1.8.2" +description = "Plumbum: shell combinators library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "plumbum-1.8.2-py3-none-any.whl", hash = "sha256:3ad9e5f56c6ec98f6f7988f7ea8b52159662ea9e915868d369dbccbfca0e367e"}, + {file = "plumbum-1.8.2.tar.gz", hash = "sha256:9e6dc032f4af952665f32f3206567bc23b7858b1413611afe603a3f8ad9bfd75"}, +] + +[package.dependencies] +pywin32 = {version = "*", markers = "platform_system == \"Windows\" and platform_python_implementation != \"PyPy\""} + +[package.extras] +dev = ["paramiko", "psutil", "pytest (>=6.0)", "pytest-cov", "pytest-mock", "pytest-timeout"] +docs = ["sphinx (>=4.0.0)", "sphinx-rtd-theme (>=1.0.0)"] +ssh = ["paramiko"] + +[[package]] +name = "poethepoet" +version = "0.24.4" +description = "A task runner that works well with poetry." +optional = false +python-versions = ">=3.8" +files = [ + {file = "poethepoet-0.24.4-py3-none-any.whl", hash = "sha256:fb4ea35d7f40fe2081ea917d2e4102e2310fda2cde78974050ca83896e229075"}, + {file = "poethepoet-0.24.4.tar.gz", hash = "sha256:ff4220843a87c888cbcb5312c8905214701d0af60ac7271795baa8369b428fef"}, +] + +[package.dependencies] +pastel = ">=0.2.1,<0.3.0" +tomli = ">=1.2.2" + +[package.extras] +poetry-plugin = ["poetry (>=1.0,<2.0)"] + +[[package]] +name = "portalocker" +version = "2.8.2" +description = "Wraps the portalocker recipe for easy usage" +optional = false +python-versions = ">=3.8" +files = [ + {file = "portalocker-2.8.2-py3-none-any.whl", hash = "sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e"}, + {file = "portalocker-2.8.2.tar.gz", hash = "sha256:2b035aa7828e46c58e9b31390ee1f169b98e1066ab10b9a6a861fe7e25ee4f33"}, +] + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[package.extras] +docs = ["sphinx (>=1.7.1)"] +redis = ["redis"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] + +[[package]] +name = "prometheus-client" +version = "0.19.0" +description = "Python client for the Prometheus monitoring system." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "prometheus_client-0.19.0-py3-none-any.whl", hash = "sha256:c88b1e6ecf6b41cd8fb5731c7ae919bf66df6ec6fafa555cd6c0e16ca169ae92"}, + {file = "prometheus_client-0.19.0.tar.gz", hash = "sha256:4585b0d1223148c27a225b10dbec5ae9bc4c81a99a3fa80774fa6209935324e1"}, +] + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.41" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.41-py3-none-any.whl", hash = "sha256:f36fe301fafb7470e86aaf90f036eef600a3210be4decf461a5b1ca8403d3cb2"}, + {file = "prompt_toolkit-3.0.41.tar.gz", hash = "sha256:941367d97fc815548822aa26c2a269fdc4eb21e9ec05fc5d447cf09bad5d75f0"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "protobuf" +version = "4.21.12" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "protobuf-4.21.12-cp310-abi3-win32.whl", hash = "sha256:b135410244ebe777db80298297a97fbb4c862c881b4403b71bac9d4107d61fd1"}, + {file = "protobuf-4.21.12-cp310-abi3-win_amd64.whl", hash = "sha256:89f9149e4a0169cddfc44c74f230d7743002e3aa0b9472d8c28f0388102fc4c2"}, + {file = "protobuf-4.21.12-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:299ea899484ee6f44604deb71f424234f654606b983cb496ea2a53e3c63ab791"}, + {file = "protobuf-4.21.12-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:d1736130bce8cf131ac7957fa26880ca19227d4ad68b4888b3be0dea1f95df97"}, + {file = "protobuf-4.21.12-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:78a28c9fa223998472886c77042e9b9afb6fe4242bd2a2a5aced88e3f4422aa7"}, + {file = "protobuf-4.21.12-cp37-cp37m-win32.whl", hash = "sha256:3d164928ff0727d97022957c2b849250ca0e64777ee31efd7d6de2e07c494717"}, + {file = "protobuf-4.21.12-cp37-cp37m-win_amd64.whl", hash = "sha256:f45460f9ee70a0ec1b6694c6e4e348ad2019275680bd68a1d9314b8c7e01e574"}, + {file = "protobuf-4.21.12-cp38-cp38-win32.whl", hash = "sha256:6ab80df09e3208f742c98443b6166bcb70d65f52cfeb67357d52032ea1ae9bec"}, + {file = "protobuf-4.21.12-cp38-cp38-win_amd64.whl", hash = "sha256:1f22ac0ca65bb70a876060d96d914dae09ac98d114294f77584b0d2644fa9c30"}, + {file = "protobuf-4.21.12-cp39-cp39-win32.whl", hash = "sha256:27f4d15021da6d2b706ddc3860fac0a5ddaba34ab679dc182b60a8bb4e1121cc"}, + {file = "protobuf-4.21.12-cp39-cp39-win_amd64.whl", hash = "sha256:237216c3326d46808a9f7c26fd1bd4b20015fb6867dc5d263a493ef9a539293b"}, + {file = "protobuf-4.21.12-py2.py3-none-any.whl", hash = "sha256:a53fd3f03e578553623272dc46ac2f189de23862e68565e83dde203d41b76fc5"}, + {file = "protobuf-4.21.12-py3-none-any.whl", hash = "sha256:b98d0148f84e3a3c569e19f52103ca1feacdac0d2df8d6533cf983d1fda28462"}, + {file = "protobuf-4.21.12.tar.gz", hash = "sha256:7cd532c4566d0e6feafecc1059d04c7915aec8e182d1cf7adee8b24ef1e2e6ab"}, +] + +[[package]] +name = "protoc-gen-swagger" +version = "0.1.0" +description = "A python package for swagger annotation proto files." +optional = false +python-versions = "*" +files = [ + {file = "protoc_gen_swagger-0.1.0-py2.py3-none-any.whl", hash = "sha256:cdc043da538865f055a7f22b304a35085cef269dc33e2f3408b12d397e8d8b4b"}, +] + +[package.dependencies] +protobuf = ">=3.0.0" + +[[package]] +name = "psutil" +version = "5.9.6" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, + {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, + {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, + {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, + {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, + {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, + {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, + {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, + {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, + {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pyarrow" +version = "14.0.1" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.8" +files = [ + {file 
= "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"}, + {file = "pyarrow-14.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a8ae88c0038d1bc362a682320112ee6774f006134cd5afc291591ee4bc06505"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f6f053cb66dc24091f5511e5920e45c83107f954a21032feadc7b9e3a8e7851"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:906b0dc25f2be12e95975722f1e60e162437023f490dbd80d0deb7375baf3171"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:78d4a77a46a7de9388b653af1c4ce539350726cd9af62e0831e4f2bd0c95a2f4"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93"}, + {file = "pyarrow-14.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:32542164d905002c42dff896efdac79b3bdd7291b1b74aa292fac8450d0e4dcd"}, + {file = "pyarrow-14.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c7331b4ed3401b7ee56f22c980608cf273f0380f77d0f73dd3c185f78f5a6220"}, + {file = "pyarrow-14.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:922e8b49b88da8633d6cac0e1b5a690311b6758d6f5d7c2be71acb0f1e14cd61"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c889851ca33f992ea916b48b8540735055201b177cb0dcf0596a495a667b00"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30d8494870d9916bb53b2a4384948491444741cb9a38253c590e21f836b01222"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:be28e1a07f20391bb0b15ea03dcac3aade29fc773c5eb4bee2838e9b2cdde0cb"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:981670b4ce0110d8dcb3246410a4aabf5714db5d8ea63b15686bce1c914b1f83"}, + {file = "pyarrow-14.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:4756a2b373a28f6166c42711240643fb8bd6322467e9aacabd26b488fa41ec23"}, + {file = "pyarrow-14.0.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:cf87e2cec65dd5cf1aa4aba918d523ef56ef95597b545bbaad01e6433851aa10"}, + {file = "pyarrow-14.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:470ae0194fbfdfbf4a6b65b4f9e0f6e1fa0ea5b90c1ee6b65b38aecee53508c8"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6263cffd0c3721c1e348062997babdf0151301f7353010c9c9a8ed47448f82ab"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8089d7e77d1455d529dbd7cff08898bbb2666ee48bc4085203af1d826a33cc"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fada8396bc739d958d0b81d291cfd201126ed5e7913cb73de6bc606befc30226"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a145dab9ed7849fc1101bf03bcdc69913547f10513fdf70fc3ab6c0a50c7eee"}, + {file = "pyarrow-14.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:05fe7994745b634c5fb16ce5717e39a1ac1fac3e2b0795232841660aa76647cd"}, + {file = "pyarrow-14.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a8eeef015ae69d104c4c3117a6011e7e3ecd1abec79dc87fd2fac6e442f666ee"}, + {file = "pyarrow-14.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c76807540989fe8fcd02285dd15e4f2a3da0b09d27781abec3adc265ddbeba1"}, + {file = 
"pyarrow-14.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:450e4605e3c20e558485f9161a79280a61c55efe585d51513c014de9ae8d393f"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323cbe60210173ffd7db78bfd50b80bdd792c4c9daca8843ef3cd70b186649db"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0140c7e2b740e08c5a459439d87acd26b747fc408bde0a8806096ee0baaa0c15"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:e592e482edd9f1ab32f18cd6a716c45b2c0f2403dc2af782f4e9674952e6dd27"}, + {file = "pyarrow-14.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d264ad13605b61959f2ae7c1d25b1a5b8505b112715c961418c8396433f213ad"}, + {file = "pyarrow-14.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01e44de9749cddc486169cb632f3c99962318e9dacac7778315a110f4bf8a450"}, + {file = "pyarrow-14.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0351fecf0e26e152542bc164c22ea2a8e8c682726fce160ce4d459ea802d69c"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c1f6110c386464fd2e5e4ea3624466055bbe681ff185fd6c9daa98f30a3f9a"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11e045dfa09855b6d3e7705a37c42e2dc2c71d608fab34d3c23df2e02df9aec3"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:097828b55321897db0e1dbfc606e3ff8101ae5725673498cbfa7754ee0da80e4"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1daab52050a1c48506c029e6fa0944a7b2436334d7e44221c16f6f1b2cc9c510"}, + {file = "pyarrow-14.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3f6d5faf4f1b0d5a7f97be987cf9e9f8cd39902611e818fe134588ee99bf0283"}, + {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"}, +] + +[package.dependencies] +numpy = ">=1.16.6" + +[[package]] +name = "pyasn1" +version = "0.5.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.6.0" + +[[package]] +name = "pybedtools" +version = "0.9.1" +description = "Wrapper around BEDTools for bioinformatics work" +optional = false +python-versions = "*" +files = [] +develop = false + +[package.dependencies] +numpy = "*" +pysam = "*" +six = "*" + +[package.source] +type = "git" +url = "https://github.com/cameronraysmith/pybedtools.git" +reference = "cpp17-no-register" +resolved_reference = "af757d539e1020ab4b246e8c0de34139c84b7277" + +[[package]] +name = "pybigwig" +version = "0.3.22" +description = "A package for 
accessing bigWig files using libBigWig" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyBigWig-0.3.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55031f67de6b117d49ba191738ea9707239bdacbd623a046e03917913257ac29"}, + {file = "pyBigWig-0.3.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc8eb9f312b7ba99f90b490bcc0341f317549c2ef0bb65cd94c20241cbe67981"}, + {file = "pyBigWig-0.3.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647ae8b613d6dfc691cadd61703f81fdf3c685177c7e2e3e730c4f9c0d6f93c6"}, + {file = "pyBigWig-0.3.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b653a085d829d02154a68c438df2b5f2b0e560f06dab55cd191e7b3ca7982f2"}, + {file = "pyBigWig-0.3.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85d740f7e9148d21c1150c0c822600751573a62ccc7ea037495e915434ebefa7"}, + {file = "pyBigWig-0.3.22.tar.gz", hash = "sha256:5d4426f754bd7b7f6dc21d6c3f93b58a96a65b6eb2e578ae03b31a71272d2243"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pyfaidx" +version = "0.7.2.2" +description = "pyfaidx: efficient pythonic random access to fasta subsequences" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyfaidx-0.7.2.2-py3-none-any.whl", hash = "sha256:4e689bc09f3c5de1d2a1099d059b3b9914629c1c5c3ad08b49ff05af33392e0e"}, + {file = "pyfaidx-0.7.2.2.tar.gz", hash = "sha256:3b7693c052c82691000fe4a92475db82ffc3b5a721a12b10dfbc87119c4b4d30"}, +] + +[package.dependencies] +importlib-metadata = "*" +setuptools = "*" +six = "*" + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pymdown-extensions" +version = "10.5" +description = "Extension pack for Python Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pymdown_extensions-10.5-py3-none-any.whl", hash = "sha256:1f0ca8bb5beff091315f793ee17683bc1390731f6ac4c5eb01e27464b80fe879"}, + {file = "pymdown_extensions-10.5.tar.gz", hash = "sha256:1b60f1e462adbec5a1ed79dac91f666c9c0d241fa294de1989f29d20096cfd0b"}, +] + +[package.dependencies] +markdown = ">=3.5" +pyyaml = "*" + +[package.extras] +extra = ["pygments (>=2.12)"] + +[[package]] +name = "pyparsing" +version = "3.1.1" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, + {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyperclip" +version = "1.8.2" +description = "A cross-platform clipboard module for Python. 
(Only handles plain text for now.)" +optional = false +python-versions = "*" +files = [ + {file = "pyperclip-1.8.2.tar.gz", hash = "sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57"}, +] + +[[package]] +name = "pyright" +version = "1.1.339" +description = "Command line wrapper for pyright" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyright-1.1.339-py3-none-any.whl", hash = "sha256:662f3df170fdeda76fd21b158ab20c518dad99c2f14b0a7f84c2bfd60d5a8d2a"}, + {file = "pyright-1.1.339.tar.gz", hash = "sha256:581ce4e281575814380dd67a331e75c0ccdca31eb848005ee1ae46e7bfa8b4f9"}, +] + +[package.dependencies] +nodeenv = ">=1.6.0" + +[package.extras] +all = ["twine (>=3.4.1)"] +dev = ["twine (>=3.4.1)"] + +[[package]] +name = "pysam" +version = "0.22.0" +description = "Package for reading, manipulating, and writing genomic data" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pysam-0.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:116278a7caa122b2b8acc56d13b3599be9b1236f27a12488bffc306858ff0d57"}, + {file = "pysam-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:da2f1af461e44d5c2c7210d458ee216f8ab98486adf1eea6c88eea5c1058a62f"}, + {file = "pysam-0.22.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:021fbf6874ad998aba19be33828ad9d23d52273643793488ac4b12917d714c68"}, + {file = "pysam-0.22.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:26199e403855b9da45341d25682e0df27013687d9cb1b4fd328136fbd506292b"}, + {file = "pysam-0.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9bfebf89b1dc2ff6f88d64b5f05d8630deb89562b22764f8ee7f6fa9e677bb91"}, + {file = "pysam-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:942dd4a2263996bc2daa21200886e9fde027f32ce8820e7832b20bbdb97eb393"}, + {file = "pysam-0.22.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:83776ba587eb9575a209efed1cedb49d69c5fa6cc520dd722a0a09d0bb4e9b87"}, + {file = "pysam-0.22.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:4779a99d1ece17a98724d87a5c10c455cf212b3baa3a8399d3d072e4d0ae5ba0"}, + {file = "pysam-0.22.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bb61bf30c15f6767403b423b04c293e96fd7635457b506c849aafcf48fc13242"}, + {file = "pysam-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32042e0bf3c5dd8554769442c2e1f7b6ada902c33ee44c616d0403e7acd12ee3"}, + {file = "pysam-0.22.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f23b2f47528b94e8abe3b700103fb1214c623ae1c1b8125ecf22d4d33d76720f"}, + {file = "pysam-0.22.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:cfd2b858c7405cf38c730cba779ddf9f8cff28b4842c6440e64781650dcb9a52"}, + {file = "pysam-0.22.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:87dbf72f3e61fd6d3f92b1b683d9a9e797b6cc213ffcd971899f24a16f9f6e8f"}, + {file = "pysam-0.22.0-cp36-cp36m-manylinux_2_28_aarch64.whl", hash = "sha256:9af1cd3d07fd4c84e9b3d8a46c65b25f95278185bc6d44c4a48951679d5189ac"}, + {file = "pysam-0.22.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:f73d7923c89618fb7024875ed8eddc5fb0c911f430e3495de482fcee48143e45"}, + {file = "pysam-0.22.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6ffe5c98725fea54b1b2aa8f14a60ee9ceaed32c04460d1b861a62603dcd7153"}, + {file = "pysam-0.22.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:34f5653a82138d28a8e86205785a0398eb6c89f776b4145ff42783168757323c"}, + {file = "pysam-0.22.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:9d3ebb1515c2fd9b11823469e5b211ca3cc89e976c00c284a2190804c9f11726"}, + {file = 
"pysam-0.22.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b8e18520e7a79bad91b44cf9199c7fa42cec5c3020024d7ef9a7161d0099bf8"}, + {file = "pysam-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a98d1ddca64943f3ead507721e52466aea2f7303e549d4960a2eb1d9fff8e3d7"}, + {file = "pysam-0.22.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:6d6aa2346b11ad35e88c65eb0067321318c25c7f35f75c98061173eabefcf8b0"}, + {file = "pysam-0.22.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4f6657a09c81333adb5545cf9a20d4c2ca1686acf8609ad58f13b3ec1b52a9cf"}, + {file = "pysam-0.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:93eb12be3822fb387e5438811f62a0f5e56c1edd5c830aaa316fb50d3d0bc181"}, + {file = "pysam-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9ba53f9b0b2c5cb57908855cdb35a31b34c5211d215aa01bdb3e9b3d05c659cc"}, + {file = "pysam-0.22.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:1b84f99aa04e30bd1cc35c01bd41c2b7680131f56c71a740805aff8086f24b56"}, + {file = "pysam-0.22.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:481e4efbfbc07b6b92194a005cb9a98006c8378024f41c7b66c58b14f6e77f9c"}, + {file = "pysam-0.22.0.tar.gz", hash = "sha256:ab7a46973cf0ab8c6ac327f4c3fb67698d7ccbeef8631a716898c6ba01ef3e45"}, +] + +[[package]] +name = "pytest" +version = "7.4.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.21.1" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, + {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] + +[[package]] +name = "python-slugify" +version = "8.0.1" +description = "A Python slugify application that also handles Unicode" +optional = false +python-versions = ">=3.7" +files = [ + {file = "python-slugify-8.0.1.tar.gz", hash = "sha256:ce0d46ddb668b3be82f4ed5e503dbc33dd815d83e2eb6824211310d3fb172a27"}, + {file = "python_slugify-8.0.1-py2.py3-none-any.whl", hash = "sha256:70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395"}, +] + +[package.dependencies] +text-unidecode = ">=1.3" + +[package.extras] +unidecode = ["Unidecode (>=1.1.1)"] + +[[package]] +name = "pytimeparse" +version = "1.1.8" +description = "Time expression parser" +optional = false +python-versions = "*" +files = [ + {file = "pytimeparse-1.1.8-py2.py3-none-any.whl", hash = "sha256:04b7be6cc8bd9f5647a6325444926c3ac34ee6bc7e69da4367ba282f076036bd"}, + {file = "pytimeparse-1.1.8.tar.gz", hash = "sha256:e86136477be924d7e670646a98561957e8ca7308d44841e21f5ddea757556a0a"}, +] + +[[package]] +name = "pytz" +version = "2023.3.post1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, +] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = 
"pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.2" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, + {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, +] + +[[package]] +name = "pywinpty" +version = "2.0.12" +description = "Pseudo terminal support for Windows from Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"}, + {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"}, + {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = "sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"}, + {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"}, + {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = "sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"}, + {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = 
"PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. " +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] + +[package.dependencies] +pyyaml = "*" + +[[package]] +name = "pyzmq" +version = "25.1.2" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, + {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = 
"sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, + {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, + {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, + {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, + {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, + {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, + {file = 
"pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, + {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, + {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, + {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, + {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, + {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, + {file = "pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "qnorm" +version = "0.8.1" +description = "Quantile normalization" +optional = false +python-versions = ">3.6" +files = [ + {file = "qnorm-0.8.1-py3-none-any.whl", hash = "sha256:9d6ce4e82444155922baf06aa89f9f939b54f53844e340bf2c6d9e7ff8821c41"}, + {file = "qnorm-0.8.1.tar.gz", hash = "sha256:61b2f3ef09a9c552a4f3b83dc438cb13f191fa190164361a3a508c4777eed3c7"}, +] + +[package.dependencies] +numba = "*" +numpy = "*" + +[[package]] +name = "referencing" +version = "0.31.1" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.31.1-py3-none-any.whl", hash = "sha256:c19c4d006f1757e3dd75c4f784d38f8698d87b649c54f9ace14e5e8c9667c01d"}, + {file = "referencing-0.31.1.tar.gz", hash = "sha256:81a1471c68c9d5e3831c30ad1dd9815c45b558e596653db751a2bfdd17b3b9ec"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "regex" +version = "2023.10.3" +description = "Alternative regular expression module, to replace re." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, + {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, + {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, + {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, + {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, + {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, + {file = 
"regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, + {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, + {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, + {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, + {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, + {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, + {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, + {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, + 
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, + {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, + {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, + {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, + {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, + {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, + {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, + {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, + {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, + {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, + {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, + {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "1.3.1" +description = "OAuthlib authentication support for Requests." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "rfc3986-validator" +version = "0.1.1" +description = "Pure python rfc3986 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] + +[[package]] +name = "rich" +version = "13.6.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, + {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rich-click" +version = "1.7.2" +description = "Format click help output nicely with rich" +optional = false +python-versions = ">=3.7" +files = [ + {file = "rich-click-1.7.2.tar.gz", hash = "sha256:22f93439a3d65f4a04e07cd584f4d01d132d96899766af92ed287618156abbe2"}, + {file = "rich_click-1.7.2-py3-none-any.whl", hash = "sha256:a42bcdcb8696c4ca7a3b1a39e1aba3d2cb64ad00690b4c022fdcb2cbccebc3fc"}, +] + +[package.dependencies] +click = ">=7" +rich = ">=10.7.0" +typing-extensions = "*" + +[package.extras] +dev = ["flake8", "flake8-docstrings", "mypy", "packaging", "pre-commit", "pytest", "pytest-cov", "types-setuptools"] + +[[package]] +name = "rpds-py" +version = "0.13.2" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional 
= false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.13.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:1ceebd0ae4f3e9b2b6b553b51971921853ae4eebf3f54086be0565d59291e53d"}, + {file = "rpds_py-0.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:46e1ed994a0920f350a4547a38471217eb86f57377e9314fbaaa329b71b7dfe3"}, + {file = "rpds_py-0.13.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee353bb51f648924926ed05e0122b6a0b1ae709396a80eb583449d5d477fcdf7"}, + {file = "rpds_py-0.13.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:530190eb0cd778363bbb7596612ded0bb9fef662daa98e9d92a0419ab27ae914"}, + {file = "rpds_py-0.13.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d311e44dd16d2434d5506d57ef4d7036544fc3c25c14b6992ef41f541b10fb"}, + {file = "rpds_py-0.13.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e72f750048b32d39e87fc85c225c50b2a6715034848dbb196bf3348aa761fa1"}, + {file = "rpds_py-0.13.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db09b98c7540df69d4b47218da3fbd7cb466db0fb932e971c321f1c76f155266"}, + {file = "rpds_py-0.13.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2ac26f50736324beb0282c819668328d53fc38543fa61eeea2c32ea8ea6eab8d"}, + {file = "rpds_py-0.13.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:12ecf89bd54734c3c2c79898ae2021dca42750c7bcfb67f8fb3315453738ac8f"}, + {file = "rpds_py-0.13.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a44c8440183b43167fd1a0819e8356692bf5db1ad14ce140dbd40a1485f2dea"}, + {file = "rpds_py-0.13.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bcef4f2d3dc603150421de85c916da19471f24d838c3c62a4f04c1eb511642c1"}, + {file = "rpds_py-0.13.2-cp310-none-win32.whl", hash = "sha256:ee6faebb265e28920a6f23a7d4c362414b3f4bb30607141d718b991669e49ddc"}, + {file = "rpds_py-0.13.2-cp310-none-win_amd64.whl", hash = "sha256:ac96d67b37f28e4b6ecf507c3405f52a40658c0a806dffde624a8fcb0314d5fd"}, + {file = "rpds_py-0.13.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:b5f6328e8e2ae8238fc767703ab7b95785521c42bb2b8790984e3477d7fa71ad"}, + {file = "rpds_py-0.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:729408136ef8d45a28ee9a7411917c9e3459cf266c7e23c2f7d4bb8ef9e0da42"}, + {file = "rpds_py-0.13.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65cfed9c807c27dee76407e8bb29e6f4e391e436774bcc769a037ff25ad8646e"}, + {file = "rpds_py-0.13.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aefbdc934115d2f9278f153952003ac52cd2650e7313750390b334518c589568"}, + {file = "rpds_py-0.13.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48db29bd47814671afdd76c7652aefacc25cf96aad6daefa82d738ee87461e2"}, + {file = "rpds_py-0.13.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c55d7f2d817183d43220738270efd3ce4e7a7b7cbdaefa6d551ed3d6ed89190"}, + {file = "rpds_py-0.13.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6aadae3042f8e6db3376d9e91f194c606c9a45273c170621d46128f35aef7cd0"}, + {file = "rpds_py-0.13.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5feae2f9aa7270e2c071f488fab256d768e88e01b958f123a690f1cc3061a09c"}, + {file = "rpds_py-0.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51967a67ea0d7b9b5cd86036878e2d82c0b6183616961c26d825b8c994d4f2c8"}, + {file = 
"rpds_py-0.13.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d0c10d803549427f427085ed7aebc39832f6e818a011dcd8785e9c6a1ba9b3e"}, + {file = "rpds_py-0.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:603d5868f7419081d616dab7ac3cfa285296735e7350f7b1e4f548f6f953ee7d"}, + {file = "rpds_py-0.13.2-cp311-none-win32.whl", hash = "sha256:b8996ffb60c69f677245f5abdbcc623e9442bcc91ed81b6cd6187129ad1fa3e7"}, + {file = "rpds_py-0.13.2-cp311-none-win_amd64.whl", hash = "sha256:5379e49d7e80dca9811b36894493d1c1ecb4c57de05c36f5d0dd09982af20211"}, + {file = "rpds_py-0.13.2-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:8a776a29b77fe0cc28fedfd87277b0d0f7aa930174b7e504d764e0b43a05f381"}, + {file = "rpds_py-0.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2a1472956c5bcc49fb0252b965239bffe801acc9394f8b7c1014ae9258e4572b"}, + {file = "rpds_py-0.13.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f252dfb4852a527987a9156cbcae3022a30f86c9d26f4f17b8c967d7580d65d2"}, + {file = "rpds_py-0.13.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f0d320e70b6b2300ff6029e234e79fe44e9dbbfc7b98597ba28e054bd6606a57"}, + {file = "rpds_py-0.13.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ade2ccb937060c299ab0dfb2dea3d2ddf7e098ed63ee3d651ebfc2c8d1e8632a"}, + {file = "rpds_py-0.13.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9d121be0217787a7d59a5c6195b0842d3f701007333426e5154bf72346aa658"}, + {file = "rpds_py-0.13.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fa6bd071ec6d90f6e7baa66ae25820d57a8ab1b0a3c6d3edf1834d4b26fafa2"}, + {file = "rpds_py-0.13.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c918621ee0a3d1fe61c313f2489464f2ae3d13633e60f520a8002a5e910982ee"}, + {file = "rpds_py-0.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:25b28b3d33ec0a78e944aaaed7e5e2a94ac811bcd68b557ca48a0c30f87497d2"}, + {file = "rpds_py-0.13.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:31e220a040b89a01505128c2f8a59ee74732f666439a03e65ccbf3824cdddae7"}, + {file = "rpds_py-0.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:15253fff410873ebf3cfba1cc686a37711efcd9b8cb30ea21bb14a973e393f60"}, + {file = "rpds_py-0.13.2-cp312-none-win32.whl", hash = "sha256:b981a370f8f41c4024c170b42fbe9e691ae2dbc19d1d99151a69e2c84a0d194d"}, + {file = "rpds_py-0.13.2-cp312-none-win_amd64.whl", hash = "sha256:4c4e314d36d4f31236a545696a480aa04ea170a0b021e9a59ab1ed94d4c3ef27"}, + {file = "rpds_py-0.13.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:80e5acb81cb49fd9f2d5c08f8b74ffff14ee73b10ca88297ab4619e946bcb1e1"}, + {file = "rpds_py-0.13.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:efe093acc43e869348f6f2224df7f452eab63a2c60a6c6cd6b50fd35c4e075ba"}, + {file = "rpds_py-0.13.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c2a61c0e4811012b0ba9f6cdcb4437865df5d29eab5d6018ba13cee1c3064a0"}, + {file = "rpds_py-0.13.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:751758d9dd04d548ec679224cc00e3591f5ebf1ff159ed0d4aba6a0746352452"}, + {file = "rpds_py-0.13.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ba8858933f0c1a979781272a5f65646fca8c18c93c99c6ddb5513ad96fa54b1"}, + {file = "rpds_py-0.13.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bfdfbe6a36bc3059fff845d64c42f2644cf875c65f5005db54f90cdfdf1df815"}, + {file = 
"rpds_py-0.13.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0379c1935c44053c98826bc99ac95f3a5355675a297ac9ce0dfad0ce2d50ca"}, + {file = "rpds_py-0.13.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5593855b5b2b73dd8413c3fdfa5d95b99d657658f947ba2c4318591e745d083"}, + {file = "rpds_py-0.13.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2a7bef6977043673750a88da064fd513f89505111014b4e00fbdd13329cd4e9a"}, + {file = "rpds_py-0.13.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:3ab96754d23372009638a402a1ed12a27711598dd49d8316a22597141962fe66"}, + {file = "rpds_py-0.13.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e06cfea0ece444571d24c18ed465bc93afb8c8d8d74422eb7026662f3d3f779b"}, + {file = "rpds_py-0.13.2-cp38-none-win32.whl", hash = "sha256:5493569f861fb7b05af6d048d00d773c6162415ae521b7010197c98810a14cab"}, + {file = "rpds_py-0.13.2-cp38-none-win_amd64.whl", hash = "sha256:b07501b720cf060c5856f7b5626e75b8e353b5f98b9b354a21eb4bfa47e421b1"}, + {file = "rpds_py-0.13.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:881df98f0a8404d32b6de0fd33e91c1b90ed1516a80d4d6dc69d414b8850474c"}, + {file = "rpds_py-0.13.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d79c159adea0f1f4617f54aa156568ac69968f9ef4d1e5fefffc0a180830308e"}, + {file = "rpds_py-0.13.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38d4f822ee2f338febcc85aaa2547eb5ba31ba6ff68d10b8ec988929d23bb6b4"}, + {file = "rpds_py-0.13.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5d75d6d220d55cdced2f32cc22f599475dbe881229aeddba6c79c2e9df35a2b3"}, + {file = "rpds_py-0.13.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d97e9ae94fb96df1ee3cb09ca376c34e8a122f36927230f4c8a97f469994bff"}, + {file = "rpds_py-0.13.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67a429520e97621a763cf9b3ba27574779c4e96e49a27ff8a1aa99ee70beb28a"}, + {file = "rpds_py-0.13.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:188435794405c7f0573311747c85a96b63c954a5f2111b1df8018979eca0f2f0"}, + {file = "rpds_py-0.13.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:38f9bf2ad754b4a45b8210a6c732fe876b8a14e14d5992a8c4b7c1ef78740f53"}, + {file = "rpds_py-0.13.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a6ba2cb7d676e9415b9e9ac7e2aae401dc1b1e666943d1f7bc66223d3d73467b"}, + {file = "rpds_py-0.13.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:eaffbd8814bb1b5dc3ea156a4c5928081ba50419f9175f4fc95269e040eff8f0"}, + {file = "rpds_py-0.13.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a4c1058cdae6237d97af272b326e5f78ee7ee3bbffa6b24b09db4d828810468"}, + {file = "rpds_py-0.13.2-cp39-none-win32.whl", hash = "sha256:b5267feb19070bef34b8dea27e2b504ebd9d31748e3ecacb3a4101da6fcb255c"}, + {file = "rpds_py-0.13.2-cp39-none-win_amd64.whl", hash = "sha256:ddf23960cb42b69bce13045d5bc66f18c7d53774c66c13f24cf1b9c144ba3141"}, + {file = "rpds_py-0.13.2-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:97163a1ab265a1073a6372eca9f4eeb9f8c6327457a0b22ddfc4a17dcd613e74"}, + {file = "rpds_py-0.13.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:25ea41635d22b2eb6326f58e608550e55d01df51b8a580ea7e75396bafbb28e9"}, + {file = "rpds_py-0.13.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d59d4d451ba77f08cb4cd9268dec07be5bc65f73666302dbb5061989b17198"}, + {file = 
"rpds_py-0.13.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7c564c58cf8f248fe859a4f0fe501b050663f3d7fbc342172f259124fb59933"}, + {file = "rpds_py-0.13.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61dbc1e01dc0c5875da2f7ae36d6e918dc1b8d2ce04e871793976594aad8a57a"}, + {file = "rpds_py-0.13.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdb82eb60d31b0c033a8e8ee9f3fc7dfbaa042211131c29da29aea8531b4f18f"}, + {file = "rpds_py-0.13.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d204957169f0b3511fb95395a9da7d4490fb361763a9f8b32b345a7fe119cb45"}, + {file = "rpds_py-0.13.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c45008ca79bad237cbc03c72bc5205e8c6f66403773929b1b50f7d84ef9e4d07"}, + {file = "rpds_py-0.13.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:79bf58c08f0756adba691d480b5a20e4ad23f33e1ae121584cf3a21717c36dfa"}, + {file = "rpds_py-0.13.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e86593bf8637659e6a6ed58854b6c87ec4e9e45ee8a4adfd936831cef55c2d21"}, + {file = "rpds_py-0.13.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d329896c40d9e1e5c7715c98529e4a188a1f2df51212fd65102b32465612b5dc"}, + {file = "rpds_py-0.13.2-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4a5375c5fff13f209527cd886dc75394f040c7d1ecad0a2cb0627f13ebe78a12"}, + {file = "rpds_py-0.13.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:06d218e4464d31301e943b65b2c6919318ea6f69703a351961e1baaf60347276"}, + {file = "rpds_py-0.13.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1f41d32a2ddc5a94df4b829b395916a4b7f103350fa76ba6de625fcb9e773ac"}, + {file = "rpds_py-0.13.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6bc568b05e02cd612be53900c88aaa55012e744930ba2eeb56279db4c6676eb3"}, + {file = "rpds_py-0.13.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d94d78418203904730585efa71002286ac4c8ac0689d0eb61e3c465f9e608ff"}, + {file = "rpds_py-0.13.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bed0252c85e21cf73d2d033643c945b460d6a02fc4a7d644e3b2d6f5f2956c64"}, + {file = "rpds_py-0.13.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:244e173bb6d8f3b2f0c4d7370a1aa341f35da3e57ffd1798e5b2917b91731fd3"}, + {file = "rpds_py-0.13.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7f55cd9cf1564b7b03f238e4c017ca4794c05b01a783e9291065cb2858d86ce4"}, + {file = "rpds_py-0.13.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:f03a1b3a4c03e3e0161642ac5367f08479ab29972ea0ffcd4fa18f729cd2be0a"}, + {file = "rpds_py-0.13.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:f5f4424cb87a20b016bfdc157ff48757b89d2cc426256961643d443c6c277007"}, + {file = "rpds_py-0.13.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c82bbf7e03748417c3a88c1b0b291288ce3e4887a795a3addaa7a1cfd9e7153e"}, + {file = "rpds_py-0.13.2-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:c0095b8aa3e432e32d372e9a7737e65b58d5ed23b9620fea7cb81f17672f1fa1"}, + {file = "rpds_py-0.13.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4c2d26aa03d877c9730bf005621c92da263523a1e99247590abbbe252ccb7824"}, + {file = "rpds_py-0.13.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:96f2975fb14f39c5fe75203f33dd3010fe37d1c4e33177feef1107b5ced750e3"}, + {file = "rpds_py-0.13.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4dcc5ee1d0275cb78d443fdebd0241e58772a354a6d518b1d7af1580bbd2c4e8"}, + {file = "rpds_py-0.13.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61d42d2b08430854485135504f672c14d4fc644dd243a9c17e7c4e0faf5ed07e"}, + {file = "rpds_py-0.13.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d3a61e928feddc458a55110f42f626a2a20bea942ccedb6fb4cee70b4830ed41"}, + {file = "rpds_py-0.13.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7de12b69d95072394998c622cfd7e8cea8f560db5fca6a62a148f902a1029f8b"}, + {file = "rpds_py-0.13.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:87a90f5545fd61f6964e65eebde4dc3fa8660bb7d87adb01d4cf17e0a2b484ad"}, + {file = "rpds_py-0.13.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:9c95a1a290f9acf7a8f2ebbdd183e99215d491beea52d61aa2a7a7d2c618ddc6"}, + {file = "rpds_py-0.13.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:35f53c76a712e323c779ca39b9a81b13f219a8e3bc15f106ed1e1462d56fcfe9"}, + {file = "rpds_py-0.13.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:96fb0899bb2ab353f42e5374c8f0789f54e0a94ef2f02b9ac7149c56622eaf31"}, + {file = "rpds_py-0.13.2.tar.gz", hash = "sha256:f8eae66a1304de7368932b42d801c67969fd090ddb1a7a24f27b435ed4bed68f"}, +] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "ruff" +version = "0.1.5" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.1.5-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:32d47fc69261c21a4c48916f16ca272bf2f273eb635d91c65d5cd548bf1f3d96"}, + {file = "ruff-0.1.5-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:171276c1df6c07fa0597fb946139ced1c2978f4f0b8254f201281729981f3c17"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ef33cd0bb7316ca65649fc748acc1406dfa4da96a3d0cde6d52f2e866c7b39"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2c205827b3f8c13b4a432e9585750b93fd907986fe1aec62b2a02cf4401eee6"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb408e3a2ad8f6881d0f2e7ad70cddb3ed9f200eb3517a91a245bbe27101d379"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f20dc5e5905ddb407060ca27267c7174f532375c08076d1a953cf7bb016f5a24"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aafb9d2b671ed934998e881e2c0f5845a4295e84e719359c71c39a5363cccc91"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4894dddb476597a0ba4473d72a23151b8b3b0b5f958f2cf4d3f1c572cdb7af7"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00a7ec893f665ed60008c70fe9eeb58d210e6b4d83ec6654a9904871f982a2a"}, + {file = "ruff-0.1.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8c11206b47f283cbda399a654fd0178d7a389e631f19f51da15cbe631480c5b"}, + {file = "ruff-0.1.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fa29e67b3284b9a79b1a85ee66e293a94ac6b7bb068b307a8a373c3d343aa8ec"}, + {file = "ruff-0.1.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9b97fd6da44d6cceb188147b68db69a5741fbc736465b5cea3928fdac0bc1aeb"}, + {file = "ruff-0.1.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:721f4b9d3b4161df8dc9f09aa8562e39d14e55a4dbaa451a8e55bdc9590e20f4"}, + {file = "ruff-0.1.5-py3-none-win32.whl", hash = "sha256:f80c73bba6bc69e4fdc73b3991db0b546ce641bdcd5b07210b8ad6f64c79f1ab"}, + {file = "ruff-0.1.5-py3-none-win_amd64.whl", hash = "sha256:c21fe20ee7d76206d290a76271c1af7a5096bc4c73ab9383ed2ad35f852a0087"}, + {file = "ruff-0.1.5-py3-none-win_arm64.whl", hash = "sha256:82bfcb9927e88c1ed50f49ac6c9728dab3ea451212693fe40d08d314663e412f"}, + {file = "ruff-0.1.5.tar.gz", hash = "sha256:5cbec0ef2ae1748fb194f420fb03fb2c25c3258c86129af7172ff8f198f125ab"}, +] + +[[package]] +name = "s3fs" +version = "2023.9.2" +description = "Convenient Filesystem interface over S3" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "s3fs-2023.9.2-py3-none-any.whl", hash = "sha256:d0e0ad0267820f4e9ff16556e004e6759010e92378aebe2ac5d71419a6ff5387"}, + {file = "s3fs-2023.9.2.tar.gz", hash = "sha256:64cccead32a816422dd9ae1d693c5d6354d99f64ae26c56388f1d8e1c7858321"}, +] + +[package.dependencies] +aiobotocore = ">=2.5.4,<2.6.0" +aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1" +fsspec = "2023.9.2" + +[package.extras] +awscli = ["aiobotocore[awscli] (>=2.5.4,<2.6.0)"] +boto3 = ["aiobotocore[boto3] (>=2.5.4,<2.6.0)"] + +[[package]] +name = "scikit-learn" +version = "1.3.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.8" +files = [ + {file = "scikit-learn-1.3.2.tar.gz", hash = 
"sha256:a2f54c76accc15a34bfb9066e6c7a56c1e7235dda5762b990792330b52ccfb05"}, + {file = "scikit_learn-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e326c0eb5cf4d6ba40f93776a20e9a7a69524c4db0757e7ce24ba222471ee8a1"}, + {file = "scikit_learn-1.3.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:535805c2a01ccb40ca4ab7d081d771aea67e535153e35a1fd99418fcedd1648a"}, + {file = "scikit_learn-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1215e5e58e9880b554b01187b8c9390bf4dc4692eedeaf542d3273f4785e342c"}, + {file = "scikit_learn-1.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ee107923a623b9f517754ea2f69ea3b62fc898a3641766cb7deb2f2ce450161"}, + {file = "scikit_learn-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:35a22e8015048c628ad099da9df5ab3004cdbf81edc75b396fd0cff8699ac58c"}, + {file = "scikit_learn-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6fb6bc98f234fda43163ddbe36df8bcde1d13ee176c6dc9b92bb7d3fc842eb66"}, + {file = "scikit_learn-1.3.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:18424efee518a1cde7b0b53a422cde2f6625197de6af36da0b57ec502f126157"}, + {file = "scikit_learn-1.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3271552a5eb16f208a6f7f617b8cc6d1f137b52c8a1ef8edf547db0259b2c9fb"}, + {file = "scikit_learn-1.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4144a5004a676d5022b798d9e573b05139e77f271253a4703eed295bde0433"}, + {file = "scikit_learn-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:67f37d708f042a9b8d59551cf94d30431e01374e00dc2645fa186059c6c5d78b"}, + {file = "scikit_learn-1.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8db94cd8a2e038b37a80a04df8783e09caac77cbe052146432e67800e430c028"}, + {file = "scikit_learn-1.3.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:61a6efd384258789aa89415a410dcdb39a50e19d3d8410bd29be365bcdd512d5"}, + {file = "scikit_learn-1.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb06f8dce3f5ddc5dee1715a9b9f19f20d295bed8e3cd4fa51e1d050347de525"}, + {file = "scikit_learn-1.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b2de18d86f630d68fe1f87af690d451388bb186480afc719e5f770590c2ef6c"}, + {file = "scikit_learn-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:0402638c9a7c219ee52c94cbebc8fcb5eb9fe9c773717965c1f4185588ad3107"}, + {file = "scikit_learn-1.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a19f90f95ba93c1a7f7924906d0576a84da7f3b2282ac3bfb7a08a32801add93"}, + {file = "scikit_learn-1.3.2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:b8692e395a03a60cd927125eef3a8e3424d86dde9b2370d544f0ea35f78a8073"}, + {file = "scikit_learn-1.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15e1e94cc23d04d39da797ee34236ce2375ddea158b10bee3c343647d615581d"}, + {file = "scikit_learn-1.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:785a2213086b7b1abf037aeadbbd6d67159feb3e30263434139c98425e3dcfcf"}, + {file = "scikit_learn-1.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:64381066f8aa63c2710e6b56edc9f0894cc7bf59bd71b8ce5613a4559b6145e0"}, + {file = "scikit_learn-1.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6c43290337f7a4b969d207e620658372ba3c1ffb611f8bc2b6f031dc5c6d1d03"}, + {file = "scikit_learn-1.3.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:dc9002fc200bed597d5d34e90c752b74df516d592db162f756cc52836b38fe0e"}, + {file = 
"scikit_learn-1.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d08ada33e955c54355d909b9c06a4789a729977f165b8bae6f225ff0a60ec4a"}, + {file = "scikit_learn-1.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763f0ae4b79b0ff9cca0bf3716bcc9915bdacff3cebea15ec79652d1cc4fa5c9"}, + {file = "scikit_learn-1.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:ed932ea780517b00dae7431e031faae6b49b20eb6950918eb83bd043237950e0"}, +] + +[package.dependencies] +joblib = ">=1.1.1" +numpy = ">=1.17.3,<2.0" +scipy = ">=1.5.0" +threadpoolctl = ">=2.0.0" + +[package.extras] +benchmark = ["matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.10.1)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.1.3)", "pandas (>=1.0.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)"] +tests = ["black (>=23.3.0)", "matplotlib (>=3.1.3)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.16.2)"] + +[[package]] +name = "scipy" +version = "1.10.1" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = "<3.12,>=3.8" +files = [ + {file = "scipy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019"}, + {file = "scipy-1.10.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e"}, + {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f"}, + {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2"}, + {file = "scipy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1"}, + {file = "scipy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd"}, + {file = "scipy-1.10.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5"}, + {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35"}, + {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d"}, + {file = "scipy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f"}, + {file = "scipy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5678f88c68ea866ed9ebe3a989091088553ba12c6090244fdae3e467b1139c35"}, + {file = "scipy-1.10.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:39becb03541f9e58243f4197584286e339029e8908c46f7221abeea4b749fa88"}, + {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:bce5869c8d68cf383ce240e44c1d9ae7c06078a9396df68ce88a1230f93a30c1"}, + {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07c3457ce0b3ad5124f98a86533106b643dd811dd61b548e78cf4c8786652f6f"}, + {file = "scipy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:049a8bbf0ad95277ffba9b3b7d23e5369cc39e66406d60422c8cfef40ccc8415"}, + {file = "scipy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9"}, + {file = "scipy-1.10.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6"}, + {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353"}, + {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601"}, + {file = "scipy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea"}, + {file = "scipy-1.10.1.tar.gz", hash = "sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5"}, +] + +[package.dependencies] +numpy = ">=1.19.5,<1.27.0" + +[package.extras] +dev = ["click", "doit (>=0.36.0)", "flake8", "mypy", "pycodestyle", "pydevtool", "rich-click", "typing_extensions"] +doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] +test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "screed" +version = "1.1.2" +description = "a Python library for loading FASTA and FASTQ sequences" +optional = false +python-versions = ">=3.7" +files = [ + {file = "screed-1.1.2-py2.py3-none-any.whl", hash = "sha256:413e9cfce4b4908d0fa1fe69dcd2c523641a02a856eb196f9ce2183657f342dc"}, + {file = "screed-1.1.2.tar.gz", hash = "sha256:734ffa7a8a645286496d895b736f91d6b2988956e2fd42358123d93ec8519b6a"}, +] + +[package.extras] +all = ["importlib-resources", "pycodestyle", "pytest (>=6.2.2)", "pytest-cov"] +test = ["importlib-resources", "pycodestyle", "pytest (>=6.2.2)", "pytest-cov"] + +[[package]] +name = "seaborn" +version = "0.13.0" +description = "Statistical data visualization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "seaborn-0.13.0-py3-none-any.whl", hash = "sha256:70d740828c48de0f402bb17234e475eda687e3c65f4383ea25d0cc4728f7772e"}, + {file = "seaborn-0.13.0.tar.gz", hash = "sha256:0e76abd2ec291c655b516703c6a022f0fd5afed26c8e714e8baef48150f73598"}, +] + +[package.dependencies] +matplotlib = ">=3.3,<3.6.1 || >3.6.1" +numpy = ">=1.20,<1.24.0 || >1.24.0" +pandas = ">=1.2" + +[package.extras] +dev = ["flake8", "flit", "mypy", "pandas-stubs", "pre-commit", "pytest", "pytest-cov", "pytest-xdist"] +docs = ["ipykernel", "nbconvert", "numpydoc", "pydata_sphinx_theme (==0.10.0rc2)", "pyyaml", "sphinx (<6.0.0)", "sphinx-copybutton", "sphinx-design", "sphinx-issues"] +stats = ["scipy (>=1.7)", "statsmodels (>=0.12)"] + +[[package]] +name = "secretstorage" +version = "3.3.3" +description = "Python bindings to FreeDesktop.org Secret Service API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = 
"sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, +] + +[package.dependencies] +cryptography = ">=2.0" +jeepney = ">=0.6" + +[[package]] +name = "send2trash" +version = "1.8.2" +description = "Send file to trash natively under Mac OS X, Windows and Linux" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "Send2Trash-1.8.2-py3-none-any.whl", hash = "sha256:a384719d99c07ce1eefd6905d2decb6f8b7ed054025bb0e618919f945de4f679"}, + {file = "Send2Trash-1.8.2.tar.gz", hash = "sha256:c132d59fa44b9ca2b1699af5c86f57ce9f4c5eb56629d5d55fbb7a35f84e2312"}, +] + +[package.extras] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] +win32 = ["pywin32"] + +[[package]] +name = "sentry-sdk" +version = "1.38.0" +description = "Python client for Sentry (https://sentry.io)" +optional = false +python-versions = "*" +files = [ + {file = "sentry-sdk-1.38.0.tar.gz", hash = "sha256:8feab81de6bbf64f53279b085bd3820e3e737403b0a0d9317f73a2c3374ae359"}, + {file = "sentry_sdk-1.38.0-py2.py3-none-any.whl", hash = "sha256:0017fa73b8ae2d4e57fd2522ee3df30453715b29d2692142793ec5d5f90b94a6"}, +] + +[package.dependencies] +certifi = "*" +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +arq = ["arq (>=0.23)"] +asyncpg = ["asyncpg (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +chalice = ["chalice (>=1.16.0)"] +clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +grpcio = ["grpcio (>=1.21.1)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +loguru = ["loguru (>=0.5)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] +pure-eval = ["asttokens", "executing", "pure-eval"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] +tornado = ["tornado (>=5)"] + +[[package]] +name = "setproctitle" +version = "1.3.3" +description = "A Python module to customize the process title" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:897a73208da48db41e687225f355ce993167079eda1260ba5e13c4e53be7f754"}, + {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c331e91a14ba4076f88c29c777ad6b58639530ed5b24b5564b5ed2fd7a95452"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbbd6c7de0771c84b4aa30e70b409565eb1fc13627a723ca6be774ed6b9d9fa3"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c05ac48ef16ee013b8a326c63e4610e2430dbec037ec5c5b58fcced550382b74"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1342f4fdb37f89d3e3c1c0a59d6ddbedbde838fff5c51178a7982993d238fe4f"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc74e84fdfa96821580fb5e9c0b0777c1c4779434ce16d3d62a9c4d8c710df39"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9617b676b95adb412bb69645d5b077d664b6882bb0d37bfdafbbb1b999568d85"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6a249415f5bb88b5e9e8c4db47f609e0bf0e20a75e8d744ea787f3092ba1f2d0"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:38da436a0aaace9add67b999eb6abe4b84397edf4a78ec28f264e5b4c9d53cd5"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:da0d57edd4c95bf221b2ebbaa061e65b1788f1544977288bdf95831b6e44e44d"}, + {file = "setproctitle-1.3.3-cp310-cp310-win32.whl", hash = "sha256:a1fcac43918b836ace25f69b1dca8c9395253ad8152b625064415b1d2f9be4fb"}, + {file = "setproctitle-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:200620c3b15388d7f3f97e0ae26599c0c378fdf07ae9ac5a13616e933cbd2086"}, + {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:334f7ed39895d692f753a443102dd5fed180c571eb6a48b2a5b7f5b3564908c8"}, + {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:950f6476d56ff7817a8fed4ab207727fc5260af83481b2a4b125f32844df513a"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:195c961f54a09eb2acabbfc90c413955cf16c6e2f8caa2adbf2237d1019c7dd8"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f05e66746bf9fe6a3397ec246fe481096664a9c97eb3fea6004735a4daf867fd"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5901a31012a40ec913265b64e48c2a4059278d9f4e6be628441482dd13fb8b5"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64286f8a995f2cd934082b398fc63fca7d5ffe31f0e27e75b3ca6b4efda4e353"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:184239903bbc6b813b1a8fc86394dc6ca7d20e2ebe6f69f716bec301e4b0199d"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:664698ae0013f986118064b6676d7dcd28fefd0d7d5a5ae9497cbc10cba48fa5"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e5119a211c2e98ff18b9908ba62a3bd0e3fabb02a29277a7232a6fb4b2560aa0"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:417de6b2e214e837827067048f61841f5d7fc27926f2e43954567094051aff18"}, + {file = "setproctitle-1.3.3-cp311-cp311-win32.whl", hash = "sha256:6a143b31d758296dc2f440175f6c8e0b5301ced3b0f477b84ca43cdcf7f2f476"}, + {file = "setproctitle-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a680d62c399fa4b44899094027ec9a1bdaf6f31c650e44183b50d4c4d0ccc085"}, + {file = "setproctitle-1.3.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d4460795a8a7a391e3567b902ec5bdf6c60a47d791c3b1d27080fc203d11c9dc"}, + {file = 
"setproctitle-1.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bdfd7254745bb737ca1384dee57e6523651892f0ea2a7344490e9caefcc35e64"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477d3da48e216d7fc04bddab67b0dcde633e19f484a146fd2a34bb0e9dbb4a1e"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ab2900d111e93aff5df9fddc64cf51ca4ef2c9f98702ce26524f1acc5a786ae7"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088b9efc62d5aa5d6edf6cba1cf0c81f4488b5ce1c0342a8b67ae39d64001120"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6d50252377db62d6a0bb82cc898089916457f2db2041e1d03ce7fadd4a07381"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:87e668f9561fd3a457ba189edfc9e37709261287b52293c115ae3487a24b92f6"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:287490eb90e7a0ddd22e74c89a92cc922389daa95babc833c08cf80c84c4df0a"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe1c49486109f72d502f8be569972e27f385fe632bd8895f4730df3c87d5ac8"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4a6ba2494a6449b1f477bd3e67935c2b7b0274f2f6dcd0f7c6aceae10c6c6ba3"}, + {file = "setproctitle-1.3.3-cp312-cp312-win32.whl", hash = "sha256:2df2b67e4b1d7498632e18c56722851ba4db5d6a0c91aaf0fd395111e51cdcf4"}, + {file = "setproctitle-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:f38d48abc121263f3b62943f84cbaede05749047e428409c2c199664feb6abc7"}, + {file = "setproctitle-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:816330675e3504ae4d9a2185c46b573105d2310c20b19ea2b4596a9460a4f674"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68f960bc22d8d8e4ac886d1e2e21ccbd283adcf3c43136161c1ba0fa509088e0"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e6e7adff74796ef12753ff399491b8827f84f6c77659d71bd0b35870a17d8f"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53bc0d2358507596c22b02db079618451f3bd720755d88e3cccd840bafb4c41c"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6d20f9541f5f6ac63df553b6d7a04f313947f550eab6a61aa758b45f0d5657"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c1c84beab776b0becaa368254801e57692ed749d935469ac10e2b9b825dbdd8e"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:507e8dc2891021350eaea40a44ddd887c9f006e6b599af8d64a505c0f718f170"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b1067647ac7aba0b44b591936118a22847bda3c507b0a42d74272256a7a798e9"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2e71f6365744bf53714e8bd2522b3c9c1d83f52ffa6324bd7cbb4da707312cd8"}, + {file = "setproctitle-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:7f1d36a1e15a46e8ede4e953abb104fdbc0845a266ec0e99cc0492a4364f8c44"}, + {file = "setproctitle-1.3.3-cp37-cp37m-win_amd64.whl", hash = 
"sha256:c9a402881ec269d0cc9c354b149fc29f9ec1a1939a777f1c858cdb09c7a261df"}, + {file = "setproctitle-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ff814dea1e5c492a4980e3e7d094286077054e7ea116cbeda138819db194b2cd"}, + {file = "setproctitle-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:accb66d7b3ccb00d5cd11d8c6e07055a4568a24c95cf86109894dcc0c134cc89"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554eae5a5b28f02705b83a230e9d163d645c9a08914c0ad921df363a07cf39b1"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a911b26264dbe9e8066c7531c0591cfab27b464459c74385b276fe487ca91c12"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2982efe7640c4835f7355fdb4da313ad37fb3b40f5c69069912f8048f77b28c8"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df3f4274b80709d8bcab2f9a862973d453b308b97a0b423a501bcd93582852e3"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:af2c67ae4c795d1674a8d3ac1988676fa306bcfa1e23fddb5e0bd5f5635309ca"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af4061f67fd7ec01624c5e3c21f6b7af2ef0e6bab7fbb43f209e6506c9ce0092"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:37a62cbe16d4c6294e84670b59cf7adcc73faafe6af07f8cb9adaf1f0e775b19"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a83ca086fbb017f0d87f240a8f9bbcf0809f3b754ee01cec928fff926542c450"}, + {file = "setproctitle-1.3.3-cp38-cp38-win32.whl", hash = "sha256:059f4ce86f8cc92e5860abfc43a1dceb21137b26a02373618d88f6b4b86ba9b2"}, + {file = "setproctitle-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ab92e51cd4a218208efee4c6d37db7368fdf182f6e7ff148fb295ecddf264287"}, + {file = "setproctitle-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c7951820b77abe03d88b114b998867c0f99da03859e5ab2623d94690848d3e45"}, + {file = "setproctitle-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc94cf128676e8fac6503b37763adb378e2b6be1249d207630f83fc325d9b11"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f5d9027eeda64d353cf21a3ceb74bb1760bd534526c9214e19f052424b37e42"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e4a8104db15d3462e29d9946f26bed817a5b1d7a47eabca2d9dc2b995991503"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c32c41ace41f344d317399efff4cffb133e709cec2ef09c99e7a13e9f3b9483c"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbf16381c7bf7f963b58fb4daaa65684e10966ee14d26f5cc90f07049bfd8c1e"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e18b7bd0898398cc97ce2dfc83bb192a13a087ef6b2d5a8a36460311cb09e775"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69d565d20efe527bd8a9b92e7f299ae5e73b6c0470f3719bd66f3cd821e0d5bd"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ddedd300cd690a3b06e7eac90ed4452348b1348635777ce23d460d913b5b63c3"}, + {file = 
"setproctitle-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:415bfcfd01d1fbf5cbd75004599ef167a533395955305f42220a585f64036081"}, + {file = "setproctitle-1.3.3-cp39-cp39-win32.whl", hash = "sha256:21112fcd2195d48f25760f0eafa7a76510871bbb3b750219310cf88b04456ae3"}, + {file = "setproctitle-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:5a740f05d0968a5a17da3d676ce6afefebeeeb5ce137510901bf6306ba8ee002"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6b9e62ddb3db4b5205c0321dd69a406d8af9ee1693529d144e86bd43bcb4b6c0"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e3b99b338598de0bd6b2643bf8c343cf5ff70db3627af3ca427a5e1a1a90dd9"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ae9a02766dad331deb06855fb7a6ca15daea333b3967e214de12cfae8f0ef5"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:200ede6fd11233085ba9b764eb055a2a191fb4ffb950c68675ac53c874c22e20"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0d3a953c50776751e80fe755a380a64cb14d61e8762bd43041ab3f8cc436092f"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5e08e232b78ba3ac6bc0d23ce9e2bee8fad2be391b7e2da834fc9a45129eb87"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1da82c3e11284da4fcbf54957dafbf0655d2389cd3d54e4eaba636faf6d117a"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:aeaa71fb9568ebe9b911ddb490c644fbd2006e8c940f21cb9a1e9425bd709574"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:59335d000c6250c35989394661eb6287187854e94ac79ea22315469ee4f4c244"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3ba57029c9c50ecaf0c92bb127224cc2ea9fda057b5d99d3f348c9ec2855ad3"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d876d355c53d975c2ef9c4f2487c8f83dad6aeaaee1b6571453cb0ee992f55f6"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:224602f0939e6fb9d5dd881be1229d485f3257b540f8a900d4271a2c2aa4e5f4"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d7f27e0268af2d7503386e0e6be87fb9b6657afd96f5726b733837121146750d"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5e7266498cd31a4572378c61920af9f6b4676a73c299fce8ba93afd694f8ae7"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33c5609ad51cd99d388e55651b19148ea99727516132fb44680e1f28dd0d1de9"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:eae8988e78192fd1a3245a6f4f382390b61bce6cfcc93f3809726e4c885fa68d"}, + {file = "setproctitle-1.3.3.tar.gz", hash = "sha256:c913e151e7ea01567837ff037a23ca8740192880198b7fbb90b16d181607caae"}, +] + +[package.extras] +test = ["pytest"] + +[[package]] +name = "setuptools" +version = 
"69.0.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, + {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "sourmash" +version = "4.8.4" +description = "tools for comparing biological sequences with k-mer sketches" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sourmash-4.8.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b96f9bc53990e227529da08f0d04255ea7fe09845fae4cb19bb9ff09f6b74265"}, + {file = "sourmash-4.8.4-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:0f69d695b9f95734da3c4bf4f447dbf850f3cdaf2bee529b2ffb30052e575264"}, + {file = "sourmash-4.8.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b86ed68e80f481cefc839ed89d85931813304a288b0147dd455ec7b686b4254"}, + {file = "sourmash-4.8.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:914b1975ea282e64777937d32c6d19ace86d736ca386cf39f6941da6ac3b120b"}, + {file = "sourmash-4.8.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72692cc5c9cf8fd7eed1c86a9011d7a33fcd7897976b5ef57d66c08782755f3e"}, + {file = "sourmash-4.8.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07819c1a0b91bc192d8a0eabd1787cf65c4b93765f55c3fd72475777f3bbb149"}, + {file = "sourmash-4.8.4-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:ca32986b04c8d0e12bd1e901bd7927423b557c504f69cfd44dfaacb69f0bf0f2"}, + {file = "sourmash-4.8.4-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:88360f07d4ef916446d0f2d5cdaaf0b38c2e7d85e5d7e0a8d33a3688136dde21"}, + {file = "sourmash-4.8.4.tar.gz", hash = "sha256:43584c112c3310719771de175b89cba94f1c2d30b1aea46000eaf5a81efbae8a"}, +] + +[package.dependencies] +bitstring = ">=3.1.9,<5" +cachetools = ">=4,<6" +cffi = ">=1.14.0" +deprecation = ">=2.0.6" +matplotlib = "*" +numpy = "*" +scipy = "*" +screed = ">=1.1.2,<2" + +[package.extras] +all = ["sourmash[demo,doc,storage,test]"] +demo = ["ipython", "jupyter", "jupyter_client"] +doc = ["Jinja2 (==3.1.2)", "alabaster", "docutils (>=0.17.1,<0.21)", "ipython", "myst-parser (==2.0.0)", "nbsphinx", "sphinx (>=4.4.0,<8)", "sphinxcontrib-napoleon"] +storage = ["ipfshttpclient (>=0.4.13)", "redis"] +test = ["build", "hypothesis", "pytest (>=6.2.4,<7.5.0)", "pytest-cov (>=2.12,<5.0)", "pytest-xdist", "pyyaml (>=6,<7)", "recommonmark"] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "statsd" +version = "3.3.0" +description = "A simple statsd client." 
+optional = false +python-versions = "*" +files = [ + {file = "statsd-3.3.0-py2.py3-none-any.whl", hash = "sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa"}, + {file = "statsd-3.3.0.tar.gz", hash = "sha256:e3e6db4c246f7c59003e51c9720a51a7f39a396541cb9b147ff4b14d15b5dd1f"}, +] + +[[package]] +name = "statsmodels" +version = "0.14.0" +description = "Statistical computations and models for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "statsmodels-0.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:16bfe0c96a53b20fa19067e3b6bd2f1d39e30d4891ea0d7bc20734a0ae95942d"}, + {file = "statsmodels-0.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a6a0a1a06ff79be8aa89c8494b33903442859add133f0dda1daf37c3c71682e"}, + {file = "statsmodels-0.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77b3cd3a5268ef966a0a08582c591bd29c09c88b4566c892a7c087935234f285"}, + {file = "statsmodels-0.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c64ebe9cf376cba0c31aed138e15ed179a1d128612dd241cdf299d159e5e882"}, + {file = "statsmodels-0.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:229b2f676b4a45cb62d132a105c9c06ca8a09ffba060abe34935391eb5d9ba87"}, + {file = "statsmodels-0.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb471f757fc45102a87e5d86e87dc2c8c78b34ad4f203679a46520f1d863b9da"}, + {file = "statsmodels-0.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:582f9e41092e342aaa04920d17cc3f97240e3ee198672f194719b5a3d08657d6"}, + {file = "statsmodels-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7ebe885ccaa64b4bc5ad49ac781c246e7a594b491f08ab4cfd5aa456c363a6f6"}, + {file = "statsmodels-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b587ee5d23369a0e881da6e37f78371dce4238cf7638a455db4b633a1a1c62d6"}, + {file = "statsmodels-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef7fa4813c7a73b0d8a0c830250f021c102c71c95e9fe0d6877bcfb56d38b8c"}, + {file = "statsmodels-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afe80544ef46730ea1b11cc655da27038bbaa7159dc5af4bc35bbc32982262f2"}, + {file = "statsmodels-0.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:a6ad7b8aadccd4e4dd7f315a07bef1bca41d194eeaf4ec600d20dea02d242fce"}, + {file = "statsmodels-0.14.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0eea4a0b761aebf0c355b726ac5616b9a8b618bd6e81a96b9f998a61f4fd7484"}, + {file = "statsmodels-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4c815ce7a699047727c65a7c179bff4031cff9ae90c78ca730cfd5200eb025dd"}, + {file = "statsmodels-0.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:575f61337c8e406ae5fa074d34bc6eb77b5a57c544b2d4ee9bc3da6a0a084cf1"}, + {file = "statsmodels-0.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8be53cdeb82f49c4cb0fda6d7eeeb2d67dbd50179b3e1033510e061863720d93"}, + {file = "statsmodels-0.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6f7d762df4e04d1dde8127d07e91aff230eae643aa7078543e60e83e7d5b40db"}, + {file = "statsmodels-0.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:fc2c7931008a911e3060c77ea8933f63f7367c0f3af04f82db3a04808ad2cd2c"}, + {file = "statsmodels-0.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3757542c95247e4ab025291a740efa5da91dc11a05990c033d40fce31c450dc9"}, + {file = "statsmodels-0.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:de489e3ed315bdba55c9d1554a2e89faa65d212e365ab81bc323fa52681fc60e"}, + {file = "statsmodels-0.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e290f4718177bffa8823a780f3b882d56dd64ad1c18cfb4bc8b5558f3f5757"}, + {file = "statsmodels-0.14.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71054f9dbcead56def14e3c9db6f66f943110fdfb19713caf0eb0f08c1ec03fd"}, + {file = "statsmodels-0.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:d7fda067837df94e0a614d93d3a38fb6868958d37f7f50afe2a534524f2660cb"}, + {file = "statsmodels-0.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c7724ad573af26139a98393ae64bc318d1b19762b13442d96c7a3e793f495c3"}, + {file = "statsmodels-0.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3b0a135f3bfdeec987e36e3b3b4c53e0bb87a8d91464d2fcc4d169d176f46fdb"}, + {file = "statsmodels-0.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce28eb1c397dba437ec39b9ab18f2101806f388c7a0cf9cdfd8f09294ad1c799"}, + {file = "statsmodels-0.14.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b1c768dd94cc5ba8398121a632b673c625491aa7ed627b82cb4c880a25563f"}, + {file = "statsmodels-0.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d1e3e10dfbfcd58119ba5a4d3c7d519182b970a2aebaf0b6f539f55ae16058d"}, + {file = "statsmodels-0.14.0.tar.gz", hash = "sha256:6875c7d689e966d948f15eb816ab5616f4928706b180cf470fd5907ab6f647a4"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.3", markers = "python_version == \"3.10\" and platform_system == \"Windows\" and platform_python_implementation != \"PyPy\""}, + {version = ">=1.18", markers = "python_version != \"3.10\" or platform_system != \"Windows\" or platform_python_implementation == \"PyPy\""}, +] +packaging = ">=21.3" +pandas = ">=1.0" +patsy = ">=0.5.2" +scipy = ">=1.4,<1.9.2 || >1.9.2" + +[package.extras] +build = ["cython (>=0.29.26)"] +develop = ["colorama", "cython (>=0.29.26)", "cython (>=0.29.28,<3.0.0)", "flake8", "isort", "joblib", "matplotlib (>=3)", "oldest-supported-numpy (>=2022.4.18)", "pytest (>=7.0.1,<7.1.0)", "pytest-randomly", "pytest-xdist", "pywinpty", "setuptools-scm[toml] (>=7.0.0,<7.1.0)"] +docs = ["ipykernel", "jupyter-client", "matplotlib", "nbconvert", "nbformat", "numpydoc", "pandas-datareader", "sphinx"] + +[[package]] +name = "sympy" +version = "1.12" +description = "Computer algebra system (CAS) in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"}, + {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"}, +] + +[package.dependencies] +mpmath = ">=0.19" + +[[package]] +name = "terminado" +version = "0.18.0" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "terminado-0.18.0-py3-none-any.whl", hash = "sha256:87b0d96642d0fe5f5abd7783857b9cab167f221a39ff98e3b9619a788a3c0f2e"}, + {file = "terminado-0.18.0.tar.gz", hash = "sha256:1ea08a89b835dd1b8c0c900d92848147cef2537243361b2e3f4dc15df9b6fded"}, +] + +[package.dependencies] +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} +tornado = ">=6.1.0" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] +typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] + +[[package]] +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" +optional = false +python-versions = "*" +files = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] + +[[package]] +name = "threadpoolctl" +version = "3.2.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.2.0-py3-none-any.whl", hash = "sha256:2b7818516e423bdaebb97c723f86a7c6b0a83d3f3b0970328d66f4d9104dc032"}, + {file = "threadpoolctl-3.2.0.tar.gz", hash = "sha256:c96a0ba3bdddeaca37dc4cc7344aafad41cdb8c313f74fdfe387a867bba93355"}, +] + +[[package]] +name = "tinycss2" +version = "1.2.1" +description = "A tiny CSS parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, + {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, +] + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] + +[[package]] +name = "tokenize-rt" +version = "5.2.0" +description = "A wrapper around the stdlib `tokenize` which roundtrips." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "tokenize_rt-5.2.0-py2.py3-none-any.whl", hash = "sha256:b79d41a65cfec71285433511b50271b05da3584a1da144a0752e9c621a285289"}, + {file = "tokenize_rt-5.2.0.tar.gz", hash = "sha256:9fe80f8a5c1edad2d3ede0f37481cc0cc1538a2f442c9c2f9e4feacd2792d054"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "torch" +version = "2.1.0" +description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "torch-2.1.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:bf57f8184b2c317ef81fb33dc233ce4d850cd98ef3f4a38be59c7c1572d175db"}, + {file = "torch-2.1.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a04a0296d47f28960f51c18c5489a8c3472f624ec3b5bcc8e2096314df8c3342"}, + {file = "torch-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0bd691efea319b14ef239ede16d8a45c246916456fa3ed4f217d8af679433cc6"}, + {file = "torch-2.1.0-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:101c139152959cb20ab370fc192672c50093747906ee4ceace44d8dd703f29af"}, + {file = "torch-2.1.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:a6b7438a90a870e4cdeb15301519ae6c043c883fcd224d303c5b118082814767"}, + {file = "torch-2.1.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:2224622407ca52611cbc5b628106fde22ed8e679031f5a99ce286629fc696128"}, + {file = "torch-2.1.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:8132efb782cd181cc2dcca5e58effbe4217cdb2581206ac71466d535bf778867"}, + {file = "torch-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:5c3bfa91ce25ba10116c224c59d5b64cdcce07161321d978bd5a1f15e1ebce72"}, + {file = "torch-2.1.0-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:601b0a2a9d9233fb4b81f7d47dca9680d4f3a78ca3f781078b6ad1ced8a90523"}, + {file = "torch-2.1.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:3cd1dedff13884d890f18eea620184fb4cd8fd3c68ce3300498f427ae93aa962"}, + {file = "torch-2.1.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fb7bf0cc1a3db484eb5d713942a93172f3bac026fcb377a0cd107093d2eba777"}, + {file = "torch-2.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:761822761fffaa1c18a62c5deb13abaa780862577d3eadc428f1daa632536905"}, + {file = "torch-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:458a6d6d8f7d2ccc348ac4d62ea661b39a3592ad15be385bebd0a31ced7e00f4"}, + {file = "torch-2.1.0-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:c8bf7eaf9514465e5d9101e05195183470a6215bb50295c61b52302a04edb690"}, + {file = "torch-2.1.0-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:05661c32ec14bc3a157193d0f19a7b19d8e61eb787b33353cad30202c295e83b"}, + {file = "torch-2.1.0-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:556d8dd3e0c290ed9d4d7de598a213fb9f7c59135b4fee144364a8a887016a55"}, + {file = "torch-2.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:de7d63c6ecece118684415a3dbd4805af4a4c1ee1490cccf7405d8c240a481b4"}, + {file = "torch-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:2419cf49aaf3b2336c7aa7a54a1b949fa295b1ae36f77e2aecb3a74e3a947255"}, + {file = "torch-2.1.0-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6ad491e70dbe4288d17fdbfc7fbfa766d66cbe219bc4871c7a8096f4a37c98df"}, + {file = "torch-2.1.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:421739685eba5e0beba42cb649740b15d44b0d565c04e6ed667b41148734a75b"}, +] + +[package.dependencies] +filelock = "*" +fsspec = "*" +jinja2 = "*" +networkx = "*" +sympy = "*" +typing-extensions = "*" + +[package.extras] +opt-einsum = ["opt-einsum (>=3.3)"] + +[[package]] +name = "torchvision" +version = "0.16.0" +description = "image and video datasets and models for torch deep learning" +optional = false +python-versions = ">=3.8" +files = [ + {file = "torchvision-0.16.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:16c300fdbbe91469f5e9feef8d24c6acabd8849db502a06160dd76ba68e897a0"}, + {file = "torchvision-0.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef5dec6c48b715353781b83749efcdea03835720a71b377684453ee117aab3c7"}, + {file = "torchvision-0.16.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:9e3a2012e463f498de21f6598cc7a266b9a8c6fe15788472fdc419233ea6f3f2"}, + {file = "torchvision-0.16.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:e4327e082b703921ae52caeee4f7839f7e6c73cfc5eedea468ecb5c1487ecdbf"}, + {file = "torchvision-0.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:62f01513687cce3480df8928fcc6c09b4aa0433d05ac75e82877acc773f6a568"}, + {file = "torchvision-0.16.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:31fdf289bdfb2976f65a14f79f6ddd1ee60113db34622674918e61521c2dc41f"}, + {file = "torchvision-0.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2294a6514a31a6fda562288b28cf6db57877237f4b56ff693262f237a7ed4035"}, + {file = "torchvision-0.16.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:6a24a1e83e4bc7a31b39ef05d2ca4cd2182e95ff10f525edffe1473f7ce16ca1"}, + {file = "torchvision-0.16.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9ed5f21e5a56e466667c6f9f6f93dba2a75e29921108bd70043eaf8e9ba0a7cc"}, + {file = "torchvision-0.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:9ee3d4df7d4a84f883f8ad11fb6510549f40f68dd5469eae601d7e02fb4809b2"}, + {file = "torchvision-0.16.0-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:0c6f36d00b9ce412e367ad6f42e9054cbc890cd9ddd0d200ed9b3b52dd9c225b"}, + {file = "torchvision-0.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:597f60cb03e6f758a00b36b38506f6f38b6c3f1fdfd3921bb9abd60b72d522fd"}, + {file = "torchvision-0.16.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eddd91da4603f1dbb340d9aca82344df64605a0897b17014ac8e0b54dd6e5716"}, + {file = "torchvision-0.16.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:79875f5247337723ec363762c2716bcfc13b78b3045e4e58847c696f03d9ed4d"}, + {file = "torchvision-0.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:550c9793637c5369fbcb4e4b6b0e6d53a4f6cc22389f0563ad60ab90e4f1c8ba"}, + {file = "torchvision-0.16.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:de7c7302fa2f67a2a151e595a8e7dc3865a445d952e99d5c682ba78f312fedc3"}, + {file = "torchvision-0.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f044cffd252fd293b6df46f38d7eeb2fd4fe931e0114c5263735e3b8c9c60a4f"}, + {file = 
"torchvision-0.16.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8cb501061f6654da494dd975acc1fa301c4b8aacf96bdbcf1553f51a53ebfd1f"}, + {file = "torchvision-0.16.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5a47108ae6a8effdf09fe35fd0c4d5414e69ca8d2334e87339de497b7b64b0c9"}, + {file = "torchvision-0.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b8f06e6a2f80576007b88846f74b680a1ad3b59d2e22b075587b430180e9cfa"}, +] + +[package.dependencies] +numpy = "*" +pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" +requests = "*" +torch = "2.1.0" + +[package.extras] +scipy = ["scipy"] + +[[package]] +name = "tornado" +version = "6.4" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">= 3.8" +files = [ + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, +] + +[[package]] +name = "tqdm" +version = "4.66.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, + {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "traitlets" +version = "5.14.0" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"}, + {file = "traitlets-5.14.0.tar.gz", hash = 
"sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "types-python-dateutil" +version = "2.8.19.14" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = "*" +files = [ + {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, + {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, +] + +[[package]] +name = "typing-extensions" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, +] + +[[package]] +name = "untokenize" +version = "0.1.1" +description = "Transforms tokens into original source code (while preserving whitespace)." +optional = false +python-versions = "*" +files = [ + {file = "untokenize-0.1.1.tar.gz", hash = "sha256:3865dbbbb8efb4bb5eaa72f1be7f3e0be00ea8b7f125c69cbd1f5fda926f37a2"}, +] + +[[package]] +name = "uri-template" +version = "1.3.0" +description = "RFC 6570 URI Template Processor" +optional = false +python-versions = ">=3.7" +files = [ + {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, + {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, +] + +[package.extras] +dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] + +[[package]] +name = "urllib3" +version = "1.26.18" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "wandb" +version = "0.16.0" +description = "A CLI and library for interacting with the Weights & Biases API." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wandb-0.16.0-py3-none-any.whl", hash = "sha256:e103142a5ecdb158d29441c2bf9f935ae149ed562377f7cebffd2a6f7c9de949"}, + {file = "wandb-0.16.0.tar.gz", hash = "sha256:8d9875f1d8d75fee32dc51f6727bc277ce4f3869d7319ccf5f36ce596597402a"}, +] + +[package.dependencies] +appdirs = ">=1.4.3" +Click = ">=7.1,<8.0.0 || >8.0.0" +docker-pycreds = ">=0.4.0" +GitPython = ">=1.0.0,<3.1.29 || >3.1.29" +protobuf = {version = ">=3.19.0,<4.21.0 || >4.21.0,<5", markers = "python_version > \"3.9\" or sys_platform != \"linux\""} +psutil = ">=5.0.0" +PyYAML = "*" +requests = ">=2.0.0,<3" +sentry-sdk = ">=1.0.0" +setproctitle = "*" +setuptools = "*" + +[package.extras] +async = ["httpx (>=0.23.0)"] +aws = ["boto3"] +azure = ["azure-identity", "azure-storage-blob"] +gcp = ["google-cloud-storage"] +kubeflow = ["google-cloud-storage", "kubernetes", "minio", "sh"] +launch = ["PyYAML (>=6.0.0)", "awscli", "azure-containerregistry", "azure-identity", "azure-storage-blob", "boto3", "botocore", "chardet", "google-auth", "google-cloud-aiplatform", "google-cloud-artifact-registry", "google-cloud-compute", "google-cloud-storage", "iso8601", "kubernetes", "kubernetes-asyncio", "nbconvert", "nbformat", "optuna", "typing-extensions"] +media = ["bokeh", "moviepy", "numpy", "pillow", "plotly", "rdkit-pypi", "soundfile"] +models = ["cloudpickle"] +nexus = ["wandb-core (>=0.17.0b1)"] +perf = ["orjson"] +sweeps = ["sweeps (>=0.2.0)"] + +[[package]] +name = "watchdog" +version = "3.0.0" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.7" +files = [ + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, + {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, + {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, + {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, + {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, + {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, + {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, + {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, + {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "wcwidth" +version = "0.2.12" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, + {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, +] + +[[package]] +name = "webcolors" +version = "1.13" +description = "A library for working with the color formats defined by HTML and CSS." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, + {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, +] + +[package.extras] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] +tests = ["pytest", "pytest-cov"] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + +[[package]] +name = "websocket-client" +version = "1.7.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, + {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] + +[package.extras] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "xdg" +version = "6.0.0" +description = "Variables defined by the XDG Base Directory Specification" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "xdg-6.0.0-py3-none-any.whl", hash = "sha256:df3510755b4395157fc04fc3b02467c777f3b3ca383257397f09ab0d4c16f936"}, + {file = "xdg-6.0.0.tar.gz", hash = "sha256:24278094f2d45e846d1eb28a2ebb92d7b67fc0cab5249ee3ce88c95f649a1c92"}, +] + +[[package]] +name = "xdoctest" +version = "1.1.2" +description = "A rewrite of the builtin doctest module" +optional = false +python-versions = ">=3.6" +files = [ + {file = "xdoctest-1.1.2-py3-none-any.whl", hash = "sha256:ebe133222534f09597cbe461f97cc5f95ad7b36e5d31f3437caffb9baaddbddb"}, + {file = "xdoctest-1.1.2.tar.gz", hash = "sha256:267d3d4e362547fa917d3deabaf6888232bbf43c8d30298faeb957dbfa7e0ba3"}, +] + +[package.extras] +all = ["IPython (>=7.10.0)", "IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=5.2.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=6.1.5)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "pytest (>=4.6.0)", "pytest (>=4.6.0)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "tomli (>=0.2.0)", "typing (>=3.7.4)"] +all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "tomli (==0.2.0)", "typing (==3.7.4)"] +colors = ["Pygments", "Pygments", "colorama"] +jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "nbconvert"] +optional = ["IPython (>=7.10.0)", "IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=5.2.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=6.1.5)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "tomli (>=0.2.0)"] +optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy 
(==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"] +tests = ["pytest (>=4.6.0)", "pytest (>=4.6.0)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "typing (>=3.7.4)"] +tests-binary = ["cmake", "cmake", "ninja", "ninja", "pybind11", "pybind11", "scikit-build", "scikit-build"] +tests-binary-strict = ["cmake (==3.21.2)", "cmake (==3.25.0)", "ninja (==1.10.2)", "ninja (==1.11.1)", "pybind11 (==2.10.3)", "pybind11 (==2.7.1)", "scikit-build (==0.11.1)", "scikit-build (==0.16.1)"] +tests-strict = ["pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"] + +[[package]] +name = "xxhash" +version = "3.4.1" +description = "Python binding for xxHash" +optional = false +python-versions = ">=3.7" +files = [ + {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"}, + {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"}, + {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"}, + {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"}, + {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"}, + {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"}, + {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"}, + {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"}, + {file = 
"xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"}, + {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"}, + {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"}, + {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"}, + {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"}, + {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"}, + {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"}, + {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"}, + {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"}, + {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"}, + {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"}, + {file 
= "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"}, + {file = "xxhash-3.4.1.tar.gz", hash = 
"sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, +] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = 
"yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = 
"yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = 
"zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[extras] +bioinformatics = ["genomepy", "gimmemotifs", "gtfparse", "htseq", "mysql-connector-python", "pybedtools", "pysam", "sourmash"] +dev = ["black", "jupyter_contrib_nbextensions", "jupytext", "mkdocs-jupyter", "mkdocs-material", "mkdocstrings", "mypy", "notebook", "poethepoet", "pyright", "pytest", "pytest-asyncio", "pytest-cov", "ruff", "xdoctest"] +docs = ["jupyter_contrib_nbextensions", "jupytext", "mkdocs-jupyter", "mkdocs-material", "mkdocstrings", "notebook"] +lint = ["black", "mypy", "pyright", "ruff"] +test = ["pytest", "pytest-asyncio", "pytest-cov", "xdoctest"] +workflows = ["dataclasses-json", "flytekit", "fsspec", "hydra-joblib-launcher", "hydra-zen", "mashumaro", "plumbum", "pyperclip", "python-dotenv", "rich", "scikit-learn"] + +[metadata] +lock-version = "2.0" +python-versions = "3.10.13" +content-hash = "92e9acd0ba1b110a9436af86faaca602626edfad17e66817a00e7a9d27bdea78" diff --git a/poetry.toml b/poetry.toml new file mode 100644 index 00000000..ab1033bd --- /dev/null +++ b/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +in-project = true diff --git a/pyproject.toml b/pyproject.toml index 2e9f4329..fe4f5ae1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,48 +1,299 @@ -[build-system] -requires = ["hatchling", "hatch-regex-commit"] -build-backend = "hatchling.build" - -[project] +[tool.poetry] name = "dnadiffusion" -authors = [ - { name = "dnadiffusion", email = "dnadiffusion@pinellolab.org" } +version = "0.0.0.dev1" +packages = [ + { include = "dnadiffusion", from = "src" }, ] -description = "Library for probabilistic analysis of protein-protein interaction sequencing data." +description = "Generative modeling of regulatory DNA sequences with diffusion probabilistic models." 
+authors = ["dnadiffusion team"] readme = "README.md" -dynamic = ["version"] +homepage = "https://pinellolab.github.io/DNA-Diffusion/" +repository = "https://github.com/pinellolab/DNA-Diffusion" +license = "AGPL-3.0-only" +documentation = "https://pinellolab.github.io/DNA-Diffusion/" classifiers = [ - "Programming Language :: Python :: 3 :: Only", + "Development Status :: 2 - Pre-Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: GNU Affero General Public License v3", + "Natural Language :: English", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", +] + +[tool.poetry.urls] +Changelog = "https://github.com/pinellolab/DNA-Diffusion/releases" + +[tool.poetry.dependencies] +python = "3.10.13" +accelerate = "0.24.1" +click = "8.1.7" +einops = "0.7.0" +jupyterlab = "4.0.9" +matplotlib = "3.8.1" +# not available in a conda channel +memory-efficient-attention-pytorch = {version = "0.1.6", source = "pypi"} +numpy = "1.26.2" +pandas = "2.1.3" +# pybedtools 0.9.1 uses the ISO C++17 eliminated `register` keyword +scipy = "1.10.1" +seaborn = "0.13.0" +torch = "2.1.0" +torchvision = "0.16.0" +# wandb 0.16.0 not yet on conda-forge +wandb = {version = "0.16.0", source = "pypi"} + +#----------------- +# dev dependencies +#----------------- + +# lint dep group +black = { version = "23.11.0", optional = true, extras = ["jupyter"] } +mypy = { version = "1.7.1", optional = true } +pyright = { version = "1.1.339", optional = true } +ruff = { version = "0.1.5", optional = true } + +# test dep group +poethepoet = { version = "0.24.4", optional = true } +pytest = { version = "7.4.3", optional = true } +pytest-asyncio = { version = "0.21.1", optional = true } +pytest-cov = { version = "4.1.0", optional = true } +xdoctest = { version = "1.1.2", optional = true } + +# docs dep group +jupytext = { version = "1.16.0", optional = true } +jupyter_contrib_nbextensions = { version = "0.7.0", optional = true } +mkdocs-jupyter = { version = "0.24.6", optional = true } +mkdocs-material = { version = "9.4.14", optional = true } +mkdocstrings = { version = "0.24.0", optional = true, extras = ["python"] } +notebook = { version = "6.4.13", optional = true } + +#----------------- +# aux dependencies +#----------------- + +# workflows dep group +dataclasses-json = { version = "0.5.9", optional = true } +# TODO: remove fork install after +# https://github.com/flyteorg/flytekit/pull/1818 +# flytekit = { version = "1.10.1", optional = true } +flytekit = { git = "https://github.com/cameronraysmith/flytekit.git", branch = "3928-pandas-2", optional = true } +# This is only utilized for issue reporting to conda-lock +# https://github.com/conda/conda-lock/issues/568 +# flytekit = { git = "https://github.com/cameronraysmith/flytekit.git", rev = "b1e64ba20f1ddd83723e455d2bf893d8f3fc84b7", optional = true } +fsspec = { version = "2023.9.2", optional = true } +hydra-zen = { version = "0.11.0", optional = true } +hydra-joblib-launcher = { version = "1.2.0", optional = true } +mashumaro = { version = "3.11", optional = true } +plumbum = { version = "1.8.2", optional = true } +pyperclip = { version = "1.8.2", optional = true } +python-dotenv = { version = "1.0.0", optional = true } +rich = { version = "13.6.0", optional = true } +scikit-learn = { version = "1.3.2", optional = true } + +# bioinformatics dep group +genomepy = {version = "0.16.1", optional = true} +# genomepy depends on mysql-connector-python which has not been +# a python package since April 20, 2021 when it began a series 
of +# releases starting with 8.0.24 that lacks a setup.py or pyproject.toml +# in the source distribution. Indeed, +# pip download mysql-connector-python==8.0.24 --no-binary :all: +# fails with this notice. +# https://bugs.mysql.com/bug.php?id=113396 +mysql-connector-python = {version = "8.0.23", optional = true} +# gimmemotifs 0.18.0 +# - is not compatible with configparser 6.0.0 but it does not include this bound in its setup.py +# - has minor incompatibilities with modern C compilers +# gimmemotifs = "0.18.0" +gimmemotifs = { git = "https://github.com/cameronraysmith/gimmemotifs.git", branch = "bound-configparser-6" , optional = true} +# gtfparse 1.3.0 never released to bioconda +gtfparse = {version = "1.3.0", source = "pypi", optional = true} +# Comment HTSeq for `poe conda-lock` generation +# as conda-lock forces default branch sha +# and conda-forge/bioconda installs HTSeq indirectly via biofluff <- gimmemotifs +htseq = {version = "2.0.5", optional = true} +pybedtools = { git = "https://github.com/cameronraysmith/pybedtools.git", branch = "cpp17-no-register", optional = true} +pysam = {version = "0.22.0", optional = true} +sourmash = {version = "4.8.4", optional = true} + + +[tool.poetry.group.bioinformatics] +optional = true + +[tool.poetry.group.bioinformatics.dependencies] +genomepy = "0.16.1" +mysql-connector-python = "8.0.23" +# gimmemotifs 0.18.0 +# - is not compatible with configparser 6.0.0 but it does not include this bound in its setup.py +# - has minor incompatibilities with modern C compilers +# gimmemotifs = "0.18.0" +gimmemotifs = { git = "https://github.com/cameronraysmith/gimmemotifs.git", branch = "bound-configparser-6" } +# gtfparse 1.3.0 never released to bioconda +gtfparse = {version = "1.3.0", source = "pypi"} +htseq = "2.0.5" +pybedtools = { git = "https://github.com/cameronraysmith/pybedtools.git", branch = "cpp17-no-register" } +pysam = "0.22.0" +sourmash = "4.8.4" + + +[tool.poetry.group.workflows] +optional = true + +[tool.poetry.group.workflows.dependencies] +dataclasses-json = "0.5.9" +# flytekit = "1.10.1" +flytekit = { git = "https://github.com/cameronraysmith/flytekit.git", branch = "3928-pandas-2" } +# This is only utilized for issue reporting to conda-lock +# https://github.com/conda/conda-lock/issues/568 +# flytekit = { git = "https://github.com/cameronraysmith/flytekit.git", rev = "b1e64ba20f1ddd83723e455d2bf893d8f3fc84b7" } +fsspec = "2023.9.2" +hydra-zen = "0.11.0" +hydra-joblib-launcher = "1.2.0" +mashumaro = "3.11" +plumbum = "1.8.2" +pyperclip = "1.8.2" +python-dotenv = "1.0.0" +rich = "13.6.0" +scikit-learn = "1.3.2" + + +[tool.poetry.group.lint] +optional = true + +[tool.poetry.group.lint.dependencies] +black = { version = "23.11.0", extras = ["jupyter"] } +mypy = "1.7.1" +pyright = "1.1.339" +ruff = "0.1.5" +docformatter = { version = "1.7.5", extras = ["tomli"] } + + +[tool.poetry.group.test] +optional = true + +[tool.poetry.group.test.dependencies] +poethepoet = "0.24.4" +pytest = "7.4.3" +pytest-asyncio = "0.21.1" +pytest-cov = "4.1.0" +xdoctest = "1.1.2" + + +[tool.poetry.group.docs] +optional = true + +[tool.poetry.group.docs.dependencies] +jupytext = "1.16.0" +jupyter_contrib_nbextensions = "0.7.0" +mkdocs-jupyter = "0.24.6" +mkdocs-material = "9.4.14" +mkdocstrings = { version = "0.24.0", extras = ["python"] } +notebook = "6.4.13" + +[tool.poetry.extras] +bioinformatics = [ + "genomepy", + "mysql-connector-python", + "gimmemotifs", + "gtfparse", + "htseq", + "pybedtools", + "pysam", + "sourmash", ] -requires-python = ">=3.10" 
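+# These optional dependencies are installed by enabling the extra, e.g.
+# `poetry install --extras bioinformatics` (illustrative invocation;
+# multiple --extras flags can be combined).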
-requires-python = ">=3.10"
-dependencies = [
-    "accelerate==0.24.1",
-    "click==8.1.7",
-    "einops==0.7.0",
-    "genomepy==0.16.1",
-    "gimmemotifs==0.18.0",
-    "gtfparse==1.3.0",
-    "jupyterlab==4.0.9",
-    "matplotlib==3.8.1",
-    "memory-efficient-attention-pytorch==0.1.6",
-    "pandas==2.1.3",
-    "pybedtools==0.9.1",
-    "seaborn==0.13.0",
-    "sourmash==4.8.4",
-    "torch==2.3.1",
-    "torchvision==0.19.0",
-    "wandb==0.16.0",
+
+workflows = [
+    "dataclasses-json",
+    "flytekit",
+    "fsspec",
+    "hydra-zen",
+    "hydra-joblib-launcher",
+    "mashumaro",
+    "plumbum",
+    "pyperclip",
+    "python-dotenv",
+    "rich",
+    "scikit-learn",
 ]

-[project.scripts]
-dnadiffusion = "dnadiffusion.cli:main"
+docs = [
+    "jupytext",
+    "jupyter_contrib_nbextensions",
+    "mkdocs-jupyter",
+    "mkdocs-material",
+    "mkdocstrings",
+    "notebook",
+    ]
+
+lint = [
+    "black",
+    "mypy",
+    "pyright",
+    "ruff",
+    ]

-[project.urls]
-Documentation = "https://pinellolab.github.io/DNA-Diffusion"
-Source = "https://github.com/pinellolab/DNA-Diffusion"
+test = [
+    "pytest",
+    "pytest-asyncio",
+    "pytest-cov",
+    "xdoctest",
+    ]
+
+dev = [
+    "black",
+    "jupytext",
+    "jupyter_contrib_nbextensions",
+    "mkdocs-jupyter",
+    "mkdocs-material",
+    "mkdocstrings",
+    "mypy",
+    "notebook",
+    "poethepoet",
+    "pyright",
+    "pytest",
+    "pytest-asyncio",
+    "pytest-cov",
+    "ruff",
+    "xdoctest",
+    ]
+
+[tool.poe.tasks.conda-lock]
+cmd = """
+  conda-lock \
+    --conda mamba \
+    --kind lock \
+    --kind env \
+    --no-dev-dependencies \
+    --filter-categories \
+    --category workflows \
+    --category bioinformatics \
+    --virtual-package-spec environments/conda/virtual-packages.yml \
+    --filename-template "environments/conda/conda-{platform}.lock" \
+    --lockfile environments/conda/conda-lock.yml \
+    -f pyproject.toml
+"""
+help = "Runs conda-lock to generate a conda environment lock file derived from package dependencies."
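+# Example invocation of the task above, assuming poethepoet, conda-lock,
+# and mamba are available in the active environment:
+#   poe conda-lock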
+
+[tool.poe.tasks]
+torch-cpu = {cmd="pip install --force-reinstall torch==2.1.0 --index-url https://download.pytorch.org/whl/cpu", help="Installs torch 2.1.0 for CPU"}
+
+[tool.black]
+target-version = ["py37"]
+line-length = 80
+skip-string-normalization = true
+
+[tool.docformatter]
+black = true
+wrap-summaries = 80
+wrap-descriptions = 80
+recursive = true
+in-place = true
+diff = true
+make-summary-multi-line = true
+pre-summary-newline = true

 [tool.ruff]
-target-version = "py310"
-line-length = 120
+target-version = "py37"
+line-length = 80
 select = [
     "A",
     # "ARG",
@@ -53,6 +304,7 @@ select = [
     "EM",
     # "F",
     # "FBT",
+    "F401",
     "I",
     "ICN",
     "ISC",
@@ -79,23 +331,15 @@ ignore = [
     "S105", "S106", "S107",
     # Ignore complexity
     "C901", "PLR0911", "PLR0912", "PLR0913", "PLR0915",
-    # Pairwise error to ignore
-    "RUF007"
 ]
 unfixable = [
-    # Don't touch unused imports
-    "F401",
-]
-exclude = [
-    "src/refactor",
+    # Don't modify unused imports
+    # "F401",
 ]

 [tool.ruff.isort]
 known-first-party = ["dnadiffusion"]

-[tool.ruff.format]
-quote-style = "double"
-
 [tool.ruff.flake8-tidy-imports]
 ban-relative-imports = "all"

@@ -103,19 +347,72 @@ ban-relative-imports = "all"
 # Tests can use magic values, assertions, and relative imports
 "tests/**/*" = ["PLR2004", "S101", "TID252"]

-[tool.pytest.ini_options]
-addopts = "--cov=src/dnadiffusion/ --cov-report=term-missing --ignore=src/refactor"
-asyncio_mode = "strict"
+[tool.pyright]
+include = ["src"]
+exclude = [
+    "**/node_modules",
+    "**/__pycache__",
+    "**/.hypothesis",
+    "**/docs",
+]
+reportUnnecessaryTypeIgnoreComment = true
+reportUnnecessaryIsInstance = false

-[tool.coverage.run]
-source_pkgs = ["dnadiffusion", "tests"]
-branch = true
-parallel = true
-omit = [
-    "src/dnadiffusion/__about__.py",
-    "src/dnadiffusion/__main__.py",
-    "tests/conftest.py",
+[tool.mypy]
+python_version = "3.10"
+warn_unreachable = true
+pretty = true
+show_column_numbers = true
+show_error_codes = true
+show_error_context = true
+
+[[tool.mypy.overrides]]
+module = [
+]
+ignore_missing_imports = true
+
+[tool.conda-lock]
+channels = [
+    'nodefaults',
+    'pytorch',
+    'nvidia',
+    'conda-forge',
+    'bioconda',
+]
+platforms = [
+    'linux-64',
+    # 'linux-aarch64',
+    # 'osx-arm64',
+    # 'osx-amd64'
 ]
+conda_executor = "mamba"
+log_level = "INFO"
+virtual_package_spec = "environments/conda/virtual-packages.yml"
+
+[tool.conda-lock.dependencies]
+dataclasses-json = {source = "pypi"}
+genomepy = {source = "pypi"}
+# transient fork dependency to allow pandas >=2;
+# unfortunately conda-lock takes the default branch sha
+# even if rev is specified explicitly
+flytekit = { git = "https://github.com/cameronraysmith/flytekit.git", branch = "3928-pandas-2" }
+# This is retained only for issue reporting to conda-lock
+# https://github.com/conda/conda-lock/issues/568
+# flytekit = { git = "https://github.com/cameronraysmith/flytekit.git", rev = "b1e64ba20f1ddd83723e455d2bf893d8f3fc84b7" }
+# not available in conda channels
+htseq = {source = "pypi"}
+hydra-zen = {source = "pypi"}
+hydra-joblib-launcher = {source = "pypi"}
+mysql-connector-python = {source = "pypi"}
+pip = ">=23.2"
+
+[tool.poetry.scripts]
+dna = "dnadiffusion.hydra:main"
+dnadiffusion = "dnadiffusion.cli:main"
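+# After `poetry install`, these entry points are exposed as console
+# commands in the project environment, e.g. `poetry run dna` or
+# `poetry run dnadiffusion`; the flags each accepts are defined by the
+# referenced modules, which are not part of this diff.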
-python_version = "3.10" - -[[tool.mypy.overrides]] -module = [ - "accelerate", - "Bio", - "hydra_zen", - "memory_efficient_attention_pytorch", - "matplotlib", - "matplotlib.pyplot", - "pandas", - "torchvision", - "torchvision.transforms", - "seaborn", - "scipy.special", - "sourmash", +[tool.coverage.run] +source_pkgs = ["dnadiffusion", "tests"] +branch = true +parallel = true +omit = [ + "src/dnadiffusion/__init__.py", + "src/dnadiffusion/__main__.py", + "tests/conftest.py", ] -ignore_missing_imports = true -[tool.hatch] - -[tool.hatch.build.targets.wheel] -packages = ['src/dnadiffusion'] - -[tool.hatch.metadata] -allow-direct-references = true - -[tool.hatch.version] -path = "src/dnadiffusion/__about__.py" - -[tool.hatch.envs.default] -python = "3.10" -dependencies = [ - "mkdocs-material==9.4.8", - "mkdocstrings==0.23.0", - "mkdocstrings[python]", - "mypy==1.10.0", - "pytest==7.4.3", - "pytest-asyncio==0.21.1", - "pytest-cov==4.1.0", - "ruff==0.1.6", - "types-tqdm==4.66.0.4", - "xdoctest==1.1.2", -] +[tool.jupytext] +formats = "ipynb,text//py:percent,text//md" -[tool.hatch.envs.default.scripts] -test = "pytest -rA" -test-cov-xml = "pytest -rA --cov-report=xml" -lint = [ - "ruff format .", - "ruff --fix .", - # "mypy src/dnadiffusion/", -] -lint-check = [ - "ruff format --check .", - "ruff .", - # "mypy src/dnadiffusion/", -] -docs-serve = "mkdocs serve" -docs-build = "mkdocs build" +[build-system] +requires = ["poetry-core>=1.7.1"] +build-backend = "poetry.core.masonry.api" diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..ca8f1770 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,294 @@ +accelerate==0.24.1 ; python_full_version == "3.10.13" +adlfs==2023.10.0 ; python_full_version == "3.10.13" +aiobotocore==2.5.4 ; python_full_version == "3.10.13" +aiohttp==3.9.1 ; python_full_version == "3.10.13" +aioitertools==0.11.0 ; python_full_version == "3.10.13" +aiosignal==1.3.1 ; python_full_version == "3.10.13" +antlr4-python3-runtime==4.9.3 ; python_full_version == "3.10.13" +anyio==4.1.0 ; python_full_version == "3.10.13" +appdirs==1.4.4 ; python_full_version == "3.10.13" +appnope==0.1.3 ; platform_system == "Darwin" and python_full_version == "3.10.13" +argon2-cffi-bindings==21.2.0 ; python_full_version == "3.10.13" +argon2-cffi==23.1.0 ; python_full_version == "3.10.13" +arrow==1.3.0 ; python_full_version == "3.10.13" +asttokens==2.4.1 ; python_full_version == "3.10.13" +async-lru==2.0.4 ; python_full_version == "3.10.13" +async-timeout==4.0.3 ; python_full_version == "3.10.13" +attrs==23.1.0 ; python_full_version == "3.10.13" +azure-core==1.29.5 ; python_full_version == "3.10.13" +azure-datalake-store==0.0.53 ; python_full_version == "3.10.13" +azure-identity==1.15.0 ; python_full_version == "3.10.13" +azure-storage-blob==12.19.0 ; python_full_version == "3.10.13" +babel==2.13.1 ; python_full_version == "3.10.13" +beautifulsoup4==4.12.2 ; python_full_version == "3.10.13" +binaryornot==0.4.4 ; python_full_version == "3.10.13" +biofluff==3.0.4 ; python_full_version == "3.10.13" +biopython==1.81 ; python_full_version == "3.10.13" +biothings-client==0.3.1 ; python_full_version == "3.10.13" +bitarray==2.8.4 ; python_full_version == "3.10.13" +bitstring==4.1.4 ; python_full_version == "3.10.13" +black[jupyter]==23.11.0 ; python_full_version == "3.10.13" +bleach==6.1.0 ; python_full_version == "3.10.13" +botocore==1.31.17 ; python_full_version == "3.10.13" +cachetools==5.3.2 ; python_full_version == "3.10.13" +certifi==2023.11.17 ; python_full_version == 
"3.10.13" +cffi==1.16.0 ; python_full_version == "3.10.13" +chardet==5.2.0 ; python_full_version == "3.10.13" +charset-normalizer==3.3.2 ; python_full_version == "3.10.13" +click==8.1.7 ; python_full_version == "3.10.13" +cloudpickle==3.0.0 ; python_full_version == "3.10.13" +colorama==0.4.6 ; python_full_version == "3.10.13" +comm==0.2.0 ; python_full_version == "3.10.13" +configparser==5.3.0 ; python_full_version == "3.10.13" +contourpy==1.2.0 ; python_full_version == "3.10.13" +cookiecutter==2.5.0 ; python_full_version == "3.10.13" +coverage[toml]==7.3.2 ; python_full_version == "3.10.13" +croniter==2.0.1 ; python_full_version == "3.10.13" +cryptography==41.0.7 ; python_full_version == "3.10.13" +cycler==0.12.1 ; python_full_version == "3.10.13" +dataclasses-json==0.5.9 ; python_full_version == "3.10.13" +debugpy==1.8.0 ; python_full_version == "3.10.13" +decorator==5.1.1 ; python_full_version == "3.10.13" +defusedxml==0.7.1 ; python_full_version == "3.10.13" +deprecation==2.1.0 ; python_full_version == "3.10.13" +diskcache==5.6.3 ; python_full_version == "3.10.13" +docker-pycreds==0.4.0 ; python_full_version == "3.10.13" +docker==6.1.3 ; python_full_version == "3.10.13" +docstring-parser==0.15 ; python_full_version == "3.10.13" +einops==0.7.0 ; python_full_version == "3.10.13" +exceptiongroup==1.2.0 ; python_full_version == "3.10.13" +executing==2.0.1 ; python_full_version == "3.10.13" +fastjsonschema==2.19.0 ; python_full_version == "3.10.13" +feather-format==0.4.1 ; python_full_version == "3.10.13" +filelock==3.13.1 ; python_full_version == "3.10.13" +flyteidl==1.10.6 ; python_full_version == "3.10.13" +flytekit @ git+https://github.com/cameronraysmith/flytekit.git@b1e64ba20f1ddd83723e455d2bf893d8f3fc84b7 ; python_full_version == "3.10.13" +fonttools==4.46.0 ; python_full_version == "3.10.13" +fqdn==1.5.1 ; python_full_version == "3.10.13" +frozenlist==1.4.0 ; python_full_version == "3.10.13" +fsspec==2023.9.2 ; python_full_version == "3.10.13" +gcsfs==2023.9.2 ; python_full_version == "3.10.13" +genomepy==0.16.1 ; python_full_version == "3.10.13" +ghp-import==2.1.0 ; python_full_version == "3.10.13" +gimmemotifs @ git+https://github.com/cameronraysmith/gimmemotifs.git@72ae59bfa4967807871858b02275bc267bbce6b3 ; python_full_version == "3.10.13" +gitdb==4.0.11 ; python_full_version == "3.10.13" +gitpython==3.1.40 ; python_full_version == "3.10.13" +google-api-core==2.14.0 ; python_full_version == "3.10.13" +google-auth-oauthlib==1.1.0 ; python_full_version == "3.10.13" +google-auth==2.25.1 ; python_full_version == "3.10.13" +google-cloud-core==2.3.3 ; python_full_version == "3.10.13" +google-cloud-storage==2.13.0 ; python_full_version == "3.10.13" +google-crc32c==1.5.0 ; python_full_version == "3.10.13" +google-resumable-media==2.6.0 ; python_full_version == "3.10.13" +googleapis-common-protos==1.61.0 ; python_full_version == "3.10.13" +griffe==0.38.1 ; python_full_version == "3.10.13" +grpcio-status==1.59.3 ; python_full_version == "3.10.13" +grpcio==1.59.3 ; python_full_version == "3.10.13" +gtfparse==1.3.0 ; python_full_version == "3.10.13" +htseq==2.0.5 ; python_full_version == "3.10.13" +huggingface-hub==0.19.4 ; python_full_version == "3.10.13" +hydra-core==1.3.2 ; python_full_version == "3.10.13" +hydra-joblib-launcher==1.2.0 ; python_full_version == "3.10.13" +hydra-zen==0.11.0 ; python_full_version == "3.10.13" +idna==3.6 ; python_full_version == "3.10.13" +importlib-metadata==7.0.0 ; python_full_version == "3.10.13" +iniconfig==2.0.0 ; python_full_version == "3.10.13" 
+ipykernel==6.27.1 ; python_full_version == "3.10.13" +ipython-genutils==0.2.0 ; python_full_version == "3.10.13" +ipython==8.18.1 ; python_full_version == "3.10.13" +isodate==0.6.1 ; python_full_version == "3.10.13" +isoduration==20.11.0 ; python_full_version == "3.10.13" +iteround==1.0.4 ; python_full_version == "3.10.13" +jaraco-classes==3.3.0 ; python_full_version == "3.10.13" +jedi==0.19.1 ; python_full_version == "3.10.13" +jeepney==0.8.0 ; sys_platform == "linux" and python_full_version == "3.10.13" +jinja2==3.1.2 ; python_full_version == "3.10.13" +jmespath==1.0.1 ; python_full_version == "3.10.13" +joblib==1.3.2 ; python_full_version == "3.10.13" +json5==0.9.14 ; python_full_version == "3.10.13" +jsonpickle==3.0.2 ; python_full_version == "3.10.13" +jsonpointer==2.4 ; python_full_version == "3.10.13" +jsonschema-specifications==2023.11.2 ; python_full_version == "3.10.13" +jsonschema==4.20.0 ; python_full_version == "3.10.13" +jsonschema[format-nongpl]==4.20.0 ; python_full_version == "3.10.13" +jupyter-client==8.6.0 ; python_full_version == "3.10.13" +jupyter-contrib-core==0.4.2 ; python_full_version == "3.10.13" +jupyter-contrib-nbextensions==0.7.0 ; python_full_version == "3.10.13" +jupyter-core==5.5.0 ; python_full_version == "3.10.13" +jupyter-events==0.9.0 ; python_full_version == "3.10.13" +jupyter-highlight-selected-word==0.2.0 ; python_full_version == "3.10.13" +jupyter-lsp==2.2.1 ; python_full_version == "3.10.13" +jupyter-nbextensions-configurator==0.6.3 ; python_full_version == "3.10.13" +jupyter-server-terminals==0.4.4 ; python_full_version == "3.10.13" +jupyter-server==2.12.1 ; python_full_version == "3.10.13" +jupyterlab-pygments==0.3.0 ; python_full_version == "3.10.13" +jupyterlab-server==2.25.2 ; python_full_version == "3.10.13" +jupyterlab==4.0.9 ; python_full_version == "3.10.13" +jupytext==1.16.0 ; python_full_version == "3.10.13" +keyring==24.3.0 ; python_full_version == "3.10.13" +kiwisolver==1.4.5 ; python_full_version == "3.10.13" +kubernetes==28.1.0 ; python_full_version == "3.10.13" +llvmlite==0.41.1 ; python_full_version == "3.10.13" +logomaker==0.8 ; python_full_version == "3.10.13" +loguru==0.7.2 ; python_full_version == "3.10.13" +lxml==4.9.3 ; python_full_version == "3.10.13" +markdown-it-py==3.0.0 ; python_full_version == "3.10.13" +markdown==3.5.1 ; python_full_version == "3.10.13" +markupsafe==2.1.3 ; python_full_version == "3.10.13" +marshmallow-enum==1.5.1 ; python_full_version == "3.10.13" +marshmallow-jsonschema==0.13.0 ; python_full_version == "3.10.13" +marshmallow==3.20.1 ; python_full_version == "3.10.13" +mashumaro==3.11 ; python_full_version == "3.10.13" +matplotlib-inline==0.1.6 ; python_full_version == "3.10.13" +matplotlib==3.8.1 ; python_full_version == "3.10.13" +mdit-py-plugins==0.4.0 ; python_full_version == "3.10.13" +mdurl==0.1.2 ; python_full_version == "3.10.13" +memory-efficient-attention-pytorch==0.1.6 ; python_full_version == "3.10.13" +mergedeep==1.3.4 ; python_full_version == "3.10.13" +mistune==3.0.2 ; python_full_version == "3.10.13" +mkdocs-autorefs==0.5.0 ; python_full_version == "3.10.13" +mkdocs-jupyter==0.24.6 ; python_full_version == "3.10.13" +mkdocs-material-extensions==1.3.1 ; python_full_version == "3.10.13" +mkdocs-material==9.4.14 ; python_full_version == "3.10.13" +mkdocs==1.5.3 ; python_full_version == "3.10.13" +mkdocstrings-python==1.7.5 ; python_full_version == "3.10.13" +mkdocstrings==0.24.0 ; python_full_version == "3.10.13" +mkdocstrings[python]==0.24.0 ; python_full_version == "3.10.13" 
+more-itertools==10.1.0 ; python_full_version == "3.10.13" +mpmath==1.3.0 ; python_full_version == "3.10.13" +msal-extensions==1.0.0 ; python_full_version == "3.10.13" +msal==1.26.0 ; python_full_version == "3.10.13" +multidict==6.0.4 ; python_full_version == "3.10.13" +mygene==3.2.2 ; python_full_version == "3.10.13" +mypy-extensions==1.0.0 ; python_full_version == "3.10.13" +mypy==1.7.1 ; python_full_version == "3.10.13" +mysql-connector-python==8.0.23 ; python_full_version == "3.10.13" +nbclient==0.9.0 ; python_full_version == "3.10.13" +nbconvert==7.12.0 ; python_full_version == "3.10.13" +nbformat==5.9.2 ; python_full_version == "3.10.13" +nest-asyncio==1.5.8 ; python_full_version == "3.10.13" +networkx==3.2.1 ; python_full_version == "3.10.13" +nodeenv==1.8.0 ; python_full_version == "3.10.13" +norns==0.1.6 ; python_full_version == "3.10.13" +nose==1.3.7 ; python_full_version == "3.10.13" +notebook-shim==0.2.3 ; python_full_version == "3.10.13" +notebook==6.4.13 ; python_full_version == "3.10.13" +numba==0.58.1 ; python_full_version == "3.10.13" +numpy==1.26.2 ; python_full_version == "3.10.13" +oauthlib==3.2.2 ; python_full_version == "3.10.13" +omegaconf==2.3.0 ; python_full_version == "3.10.13" +overrides==7.4.0 ; python_full_version == "3.10.13" +packaging==23.2 ; python_full_version == "3.10.13" +paginate==0.5.6 ; python_full_version == "3.10.13" +palettable==3.3.3 ; python_full_version == "3.10.13" +pandas==2.1.3 ; python_full_version == "3.10.13" +pandocfilters==1.5.0 ; python_full_version == "3.10.13" +parso==0.8.3 ; python_full_version == "3.10.13" +pastel==0.2.1 ; python_full_version == "3.10.13" +pathspec==0.11.2 ; python_full_version == "3.10.13" +patsy==0.5.4 ; python_full_version == "3.10.13" +pexpect==4.9.0 ; sys_platform != "win32" and python_full_version == "3.10.13" +pillow==10.1.0 ; python_full_version == "3.10.13" +platformdirs==4.1.0 ; python_full_version == "3.10.13" +pluggy==1.3.0 ; python_full_version == "3.10.13" +plumbum==1.8.2 ; python_full_version == "3.10.13" +poethepoet==0.24.4 ; python_full_version == "3.10.13" +portalocker==2.8.2 ; python_full_version == "3.10.13" +prometheus-client==0.19.0 ; python_full_version == "3.10.13" +prompt-toolkit==3.0.41 ; python_full_version == "3.10.13" +protobuf==4.21.12 ; python_full_version == "3.10.13" +protoc-gen-swagger==0.1.0 ; python_full_version == "3.10.13" +psutil==5.9.6 ; python_full_version == "3.10.13" +ptyprocess==0.7.0 ; (os_name != "nt" or sys_platform != "win32") and python_full_version == "3.10.13" +pure-eval==0.2.2 ; python_full_version == "3.10.13" +pyarrow==14.0.1 ; python_full_version == "3.10.13" +pyasn1-modules==0.3.0 ; python_full_version == "3.10.13" +pyasn1==0.5.1 ; python_full_version == "3.10.13" +pybedtools @ git+https://github.com/cameronraysmith/pybedtools.git@af757d539e1020ab4b246e8c0de34139c84b7277 ; python_full_version == "3.10.13" +pybigwig==0.3.22 ; python_full_version == "3.10.13" +pycparser==2.21 ; python_full_version == "3.10.13" +pyfaidx==0.7.2.2 ; python_full_version == "3.10.13" +pygments==2.17.2 ; python_full_version == "3.10.13" +pyjwt[crypto]==2.8.0 ; python_full_version == "3.10.13" +pymdown-extensions==10.5 ; python_full_version == "3.10.13" +pyparsing==3.1.1 ; python_full_version == "3.10.13" +pyperclip==1.8.2 ; python_full_version == "3.10.13" +pyright==1.1.339 ; python_full_version == "3.10.13" +pysam==0.22.0 ; python_full_version == "3.10.13" +pytest-asyncio==0.21.1 ; python_full_version == "3.10.13" +pytest-cov==4.1.0 ; python_full_version == "3.10.13" +pytest==7.4.3 ; 
python_full_version == "3.10.13" +python-dateutil==2.8.2 ; python_full_version == "3.10.13" +python-dotenv==1.0.0 ; python_full_version == "3.10.13" +python-json-logger==2.0.7 ; python_full_version == "3.10.13" +python-slugify==8.0.1 ; python_full_version == "3.10.13" +pytimeparse==1.1.8 ; python_full_version == "3.10.13" +pytz==2023.3.post1 ; python_full_version == "3.10.13" +pywin32-ctypes==0.2.2 ; sys_platform == "win32" and python_full_version == "3.10.13" +pywin32==306 ; (platform_system == "Windows" or sys_platform == "win32") and python_full_version == "3.10.13" +pywinpty==2.0.12 ; os_name == "nt" and python_full_version == "3.10.13" +pyyaml-env-tag==0.1 ; python_full_version == "3.10.13" +pyyaml==6.0.1 ; python_full_version == "3.10.13" +pyzmq==25.1.2 ; python_full_version == "3.10.13" +qnorm==0.8.1 ; python_full_version == "3.10.13" +referencing==0.31.1 ; python_full_version == "3.10.13" +regex==2023.10.3 ; python_full_version == "3.10.13" +requests-oauthlib==1.3.1 ; python_full_version == "3.10.13" +requests==2.31.0 ; python_full_version == "3.10.13" +rfc3339-validator==0.1.4 ; python_full_version == "3.10.13" +rfc3986-validator==0.1.1 ; python_full_version == "3.10.13" +rich-click==1.7.2 ; python_full_version == "3.10.13" +rich==13.6.0 ; python_full_version == "3.10.13" +rpds-py==0.13.2 ; python_full_version == "3.10.13" +rsa==4.9 ; python_full_version == "3.10.13" +ruff==0.1.5 ; python_full_version == "3.10.13" +s3fs==2023.9.2 ; python_full_version == "3.10.13" +scikit-learn==1.3.2 ; python_full_version == "3.10.13" +scipy==1.10.1 ; python_full_version == "3.10.13" +screed==1.1.2 ; python_full_version == "3.10.13" +seaborn==0.13.0 ; python_full_version == "3.10.13" +secretstorage==3.3.3 ; sys_platform == "linux" and python_full_version == "3.10.13" +send2trash==1.8.2 ; python_full_version == "3.10.13" +sentry-sdk==1.38.0 ; python_full_version == "3.10.13" +setproctitle==1.3.3 ; python_full_version == "3.10.13" +setuptools==69.0.2 ; python_full_version == "3.10.13" +six==1.16.0 ; python_full_version == "3.10.13" +smmap==5.0.1 ; python_full_version == "3.10.13" +sniffio==1.3.0 ; python_full_version == "3.10.13" +soupsieve==2.5 ; python_full_version == "3.10.13" +sourmash==4.8.4 ; python_full_version == "3.10.13" +stack-data==0.6.3 ; python_full_version == "3.10.13" +statsd==3.3.0 ; python_full_version == "3.10.13" +statsmodels==0.14.0 ; python_full_version == "3.10.13" +sympy==1.12 ; python_full_version == "3.10.13" +terminado==0.18.0 ; python_full_version == "3.10.13" +text-unidecode==1.3 ; python_full_version == "3.10.13" +threadpoolctl==3.2.0 ; python_full_version == "3.10.13" +tinycss2==1.2.1 ; python_full_version == "3.10.13" +tokenize-rt==5.2.0 ; python_full_version == "3.10.13" +toml==0.10.2 ; python_full_version == "3.10.13" +tomli==2.0.1 ; python_full_version == "3.10.13" +torch==2.1.0 ; python_full_version == "3.10.13" +torchvision==0.16.0 ; python_full_version == "3.10.13" +tornado==6.4 ; python_full_version == "3.10.13" +tqdm==4.66.1 ; python_full_version == "3.10.13" +traitlets==5.14.0 ; python_full_version == "3.10.13" +types-python-dateutil==2.8.19.14 ; python_full_version == "3.10.13" +typing-extensions==4.8.0 ; python_full_version == "3.10.13" +typing-inspect==0.9.0 ; python_full_version == "3.10.13" +tzdata==2023.3 ; python_full_version == "3.10.13" +uri-template==1.3.0 ; python_full_version == "3.10.13" +urllib3==1.26.18 ; python_full_version == "3.10.13" +wandb==0.16.0 ; python_full_version == "3.10.13" +watchdog==3.0.0 ; python_full_version == "3.10.13" 
+wcwidth==0.2.12 ; python_full_version == "3.10.13"
+webcolors==1.13 ; python_full_version == "3.10.13"
+webencodings==0.5.1 ; python_full_version == "3.10.13"
+websocket-client==1.7.0 ; python_full_version == "3.10.13"
+win32-setctime==1.1.0 ; sys_platform == "win32" and python_full_version == "3.10.13"
+wrapt==1.16.0 ; python_full_version == "3.10.13"
+xdg==6.0.0 ; python_full_version == "3.10.13"
+xdoctest==1.1.2 ; python_full_version == "3.10.13"
+xxhash==3.4.1 ; python_full_version == "3.10.13"
+yarl==1.9.4 ; python_full_version == "3.10.13"
+zipp==3.17.0 ; python_full_version == "3.10.13"
diff --git a/sample.py b/sample.py
index 9d98c89d..4e2871f4 100644
--- a/sample.py
+++ b/sample.py
@@ -52,7 +52,9 @@ def sample(model_path: str, num_samples: int = 1000, heatmap: bool = False):
     cell_list = list(encode_data["tag_to_numeric"].keys())

     for i in cell_num_list:
-        print(f"Generating {num_samples} samples for cell {encode_data['numeric_to_tag'][i]}")
+        print(
+            f"Generating {num_samples} samples for cell {encode_data['numeric_to_tag'][i]}"
+        )
         create_sample(
             diffusion,
             conditional_numeric_to_tag=encode_data["numeric_to_tag"],
diff --git a/scripts/flake b/scripts/flake
new file mode 100755
index 00000000..fd6aacbc
--- /dev/null
+++ b/scripts/flake
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+MANUAL_DIRENV=${1:-.dirman}
+
+mkdir -p "$MANUAL_DIRENV"
+
+(source <(nix print-dev-env \
+    --profile "${MANUAL_DIRENV}"/flake-profile . \
+    --impure \
+    --accept-flake-config);\
+    zsh)
+
+# The above is equivalent to `use flake . --impure --accept-flake-config` in a
+# direnv .envrc; see the direnv stdlib for details of the underlying function
+# definitions. From a bash shell, it is also possible to manually execute:
+#
+# $ eval "$(nix print-dev-env --profile .dirman/flake-profile . --impure --accept-flake-config)"
+# $ zsh
+#
+# or, equivalently:
+#
+# $ nix print-dev-env --profile "${MANUAL_DIRENV}"/flake-profile . --impure --accept-flake-config > envsetup
+# $ source envsetup
+# $ zsh
diff --git a/src/refactor/models/encoders/vqvae.py b/src/.gitkeep
similarity index 100%
rename from src/refactor/models/encoders/vqvae.py
rename to src/.gitkeep
diff --git a/src/dnadiffusion/__about__.py b/src/dnadiffusion/__about__.py
deleted file mode 100644
index 95f3d0ac..00000000
--- a/src/dnadiffusion/__about__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# SPDX-FileCopyrightText: 2023-present DNA Diffusion Team
-#
-# SPDX-License-Identifier: MIT
-__version__ = "0.0.0.dev1"
diff --git a/src/dnadiffusion/__init__.py b/src/dnadiffusion/__init__.py
index e316a6f4..15950444 100644
--- a/src/dnadiffusion/__init__.py
+++ b/src/dnadiffusion/__init__.py
@@ -1,3 +1,17 @@
+"""
+dnadiffusion.
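+
+`__version__` is resolved at import time from the installed distribution
+metadata via `importlib.metadata`, falling back to a placeholder string when
+the package is not installed.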
+""" + +from importlib import metadata + +try: + __version__ = metadata.version(__package__) +except metadata.PackageNotFoundError: + __version__ = "dnadiffusion package may not be installed" + +del metadata + + import pathlib # Get path to the data directory in the same level as the src directory diff --git a/src/dnadiffusion/cli/__init__.py b/src/dnadiffusion/cli/__init__.py index 7366162c..a8cabc17 100644 --- a/src/dnadiffusion/cli/__init__.py +++ b/src/dnadiffusion/cli/__init__.py @@ -3,10 +3,13 @@ # SPDX-License-Identifier: MIT import click -from dnadiffusion.__about__ import __version__ +from dnadiffusion import __version__ -@click.group(context_settings={"help_option_names": ["-h", "--help"]}, invoke_without_command=True) +@click.group( + context_settings={"help_option_names": ["-h", "--help"]}, + invoke_without_command=True, +) @click.version_option(version=__version__, prog_name="dnadiffusion") def main(): click.echo("DNA diffusion!") diff --git a/src/dnadiffusion/configuration.py b/src/dnadiffusion/configuration.py new file mode 100644 index 00000000..41f884f2 --- /dev/null +++ b/src/dnadiffusion/configuration.py @@ -0,0 +1,171 @@ +import dataclasses +import inspect +from typing import ( + Any, + Callable, + Dict, + List, + Optional, + Tuple, + Type, + get_type_hints, +) + +from mashumaro.mixins.json import DataClassJSONMixin +from sklearn.linear_model import LogisticRegression + + +def infer_type_from_default(value: Any) -> Type: + """ + Infers or imputes a type from the default value of a parameter. + Args: + value: The default value of the parameter. + Returns: + The inferred type. + """ + if value is None: + return Optional[Any] + elif value is inspect.Parameter.empty: + return Any + else: + return type(value) + + +def create_dataclass_from_callable( + callable_obj: Callable, + overrides: Optional[Dict[str, Tuple[Type, Any]]] = None, +) -> List[Tuple[str, Type, Any]]: + """ + Creates the fields of a dataclass from a `Callable` that includes all + parameters of the callable as typed fields with default values inferred or + taken from type hints. The function also accepts a dictionary containing + parameter names together with a tuple of a type and default to allow + specification of or override (un)typed defaults from the target callable. + + Args: + callable_obj (Callable): The callable object to create a dataclass from. + overrides (Optional[Dict[str, Tuple[Type, Any]]]): Dictionary to + override inferred types and default values. Each dict value is a tuple + (Type, default_value). + + Returns: + Fields that can be used to construct a new dataclass type that + represents the interface of the callable. + + Examples: + >>> from pprint import pprint + >>> custom_types_defaults: Dict[str, Tuple[Type, Any]] = { + ... "penalty": (str, "l2"), + ... "class_weight": (Optional[dict], None), + ... "random_state": (Optional[int], None), + ... "max_iter": (int, 2000), + ... "n_jobs": (Optional[int], None), + ... "l1_ratio": (Optional[float], None), + ... } + >>> fields = create_dataclass_from_callable(LogisticRegression, custom_types_defaults) + >>> LogisticRegressionInterface = dataclasses.make_dataclass( + ... "LogisticRegressionInterface", fields, bases=(DataClassJSONMixin,) + ... 
) + >>> lr_instance = LogisticRegressionInterface() + >>> isinstance(lr_instance, DataClassJSONMixin) + True + >>> pprint(lr_instance) + LogisticRegressionInterface(penalty='l2', + dual=False, + tol=0.0001, + C=1.0, + fit_intercept=True, + intercept_scaling=1, + class_weight=None, + random_state=None, + solver='lbfgs', + max_iter=2000, + multi_class='auto', + verbose=0, + warm_start=False, + n_jobs=None, + l1_ratio=None) + """ + if inspect.isclass(callable_obj): + func = callable_obj.__init__ + else: + func = callable_obj + + signature = inspect.signature(func) + type_hints = get_type_hints(func) + + fields = [] + for name, param in signature.parameters.items(): + if name == "self": + continue + + if overrides and name in overrides: + field_type, default_value = overrides[name] + else: + inferred_type = infer_type_from_default(param.default) + field_type = type_hints.get(name, inferred_type) + default_value = ( + param.default + if param.default is not inspect.Parameter.empty + else dataclasses.field(default_factory=lambda: None) + ) + + fields.append((name, field_type, default_value)) + + return fields + + +if __name__ == "__main__": + # Commented code here is primarily to support CLI or IDE debugger execution. + # Otherwise, prefer to integrate tests and checks into the docstrings and + # run pytest with `--xdoc` (default in this project). + + import pprint + + custom_types_defaults: Dict[str, Tuple[Type, Any]] = { + # "penalty": (str, "l2"), + # "dual": (bool, False), + # "tol": (float, 1e-4), + # "C": (float, 1.0), + # "fit_intercept": (bool, True), + # "intercept_scaling": (int, 1), + # "class_weight": (Optional[dict], None), + # "random_state": (Optional[int], None), + # "solver": (str, "lbfgs"), + "max_iter": (int, 2000), + # "multi_class": (str, "auto"), + # "verbose": (int, 0), + # "warm_start": (bool, False), + # "n_jobs": (Optional[int], None), + # "l1_ratio": (Optional[float], None), + } + + fields = create_dataclass_from_callable( + LogisticRegression, + custom_types_defaults, + # {}, + ) + LogisticRegressionInterface = dataclasses.make_dataclass( + "LogisticRegressionInterface", fields, bases=(DataClassJSONMixin,) + ) + pprint.pprint(LogisticRegressionInterface()) + + # from dataclasses import dataclass + # from dataclasses_json import dataclass_json + # from sklearn.linear_model import LogisticRegression + # logistic_regression_custom_types = { + # "penalty": Optional[str], + # "class_weight": Optional[dict], + # "random_state": Optional[int], + # "n_jobs": Optional[int], + # "l1_ratio": Optional[float], + # } + # LogisticRegressionInterface = dataclass_json( + # dataclass( + # create_dataclass_from_callable_json( + # LogisticRegression, logistic_regression_custom_types + # ) + # ) + # ) + # print("Annotations:", LogisticRegressionInterface.__annotations__) + # print("Schema:", LogisticRegressionInterface().schema()) diff --git a/src/dnadiffusion/constants.py b/src/dnadiffusion/constants.py new file mode 100644 index 00000000..a30a6af7 --- /dev/null +++ b/src/dnadiffusion/constants.py @@ -0,0 +1,63 @@ +import os +import subprocess + +from dnadiffusion.logging import configure_logging + +logger = configure_logging("dnadiffusion.constants") + + +def get_git_repo_root(path="."): + try: + git_root = subprocess.check_output( + ["git", "rev-parse", "--show-toplevel"], cwd=path + ) + return git_root.decode("utf-8").strip() + except subprocess.CalledProcessError: + git_repo_not_found = ( + "Not inside a Git repository or Git is not installed." 
+ ) + raise OSError(git_repo_not_found) + + +repo_root = get_git_repo_root() + +if repo_root: + REMOTE_CLUSTER_CONFIG_FILE_PATH = os.path.join( + repo_root, ".flyte", "config.yaml" + ) + LOCAL_CLUSTER_CONFIG_FILE_PATH = os.path.join( + repo_root, ".flyte", "config-local.yaml" + ) + + if not os.path.isfile(REMOTE_CLUSTER_CONFIG_FILE_PATH): + remote_cluster_config_file_not_found_message = ( + f"Remote cluster config file not found at path:\n\n" + f"{REMOTE_CLUSTER_CONFIG_FILE_PATH}\n\n" + "Verify you have run `make update_config` in the root of the repository,\n" + "or manually create the file at the path above.\n\n" + ) + raise FileNotFoundError(remote_cluster_config_file_not_found_message) + + if not os.path.isfile(LOCAL_CLUSTER_CONFIG_FILE_PATH): + local_cluster_config_file_not_found_message = ( + f"Local cluster config file not found at path:\n\n" + f"{LOCAL_CLUSTER_CONFIG_FILE_PATH}\n\n" + f"Check that you have not deleted this file from the repository.\n\n" + ) + raise FileNotFoundError(local_cluster_config_file_not_found_message) + + logger.debug( + f"Remote cluster config file path: {REMOTE_CLUSTER_CONFIG_FILE_PATH}" + ) + logger.debug( + f"Local cluster config file path: {LOCAL_CLUSTER_CONFIG_FILE_PATH}" + ) +else: + git_repo_not_found = "Not inside a Git repository or Git is not installed." + raise OSError(git_repo_not_found) + +if __name__ == "__main__": + from pprint import pprint + + pprint(REMOTE_CLUSTER_CONFIG_FILE_PATH) + pprint(LOCAL_CLUSTER_CONFIG_FILE_PATH) diff --git a/src/dnadiffusion/data/dataloader.py b/src/dnadiffusion/data/dataloader.py index 618767a1..94426541 100644 --- a/src/dnadiffusion/data/dataloader.py +++ b/src/dnadiffusion/data/dataloader.py @@ -3,12 +3,11 @@ import random from typing import Any -import matplotlib.pyplot as plt import numpy as np import pandas as pd import torch import torchvision.transforms as T -from torch.utils.data import DataLoader, Dataset +from torch.utils.data import Dataset from dnadiffusion.utils.utils import one_hot_encode @@ -47,12 +46,20 @@ def load_data( test_motifs_cell_specific = encode_data["test"]["final_subset_motifs"] shuffle_motifs = encode_data["train_shuffled"]["motifs"] - shuffle_motifs_cell_specific = encode_data["train_shuffled"]["final_subset_motifs"] + shuffle_motifs_cell_specific = encode_data["train_shuffled"][ + "final_subset_motifs" + ] # Creating sequence dataset df = encode_data["train"]["df"] nucleotides = ["A", "C", "G", "T"] - x_train_seq = np.array([one_hot_encode(x, nucleotides, 200) for x in df["sequence"] if "N" not in x]) + x_train_seq = np.array( + [ + one_hot_encode(x, nucleotides, 200) + for x in df["sequence"] + if "N" not in x + ] + ) X_train = np.array([x.T.tolist() for x in x_train_seq]) X_train[X_train == 0] = -1 @@ -82,16 +89,31 @@ def load_data( def motifs_from_fasta(fasta: str): print("Computing Motifs....") - os.system(f"gimme scan {fasta} -p JASPAR2020_vertebrates -g hg38 -n 20> train_results_motifs.bed") - df_results_seq_guime = pd.read_csv("train_results_motifs.bed", sep="\t", skiprows=5, header=None) - df_results_seq_guime["motifs"] = df_results_seq_guime[8].apply(lambda x: x.split('motif_name "')[1].split('"')[0]) + os.system( + f"gimme scan {fasta} -p JASPAR2020_vertebrates -g hg38 -n 20> train_results_motifs.bed" + ) + df_results_seq_guime = pd.read_csv( + "train_results_motifs.bed", sep="\t", skiprows=5, header=None + ) + df_results_seq_guime["motifs"] = df_results_seq_guime[8].apply( + lambda x: x.split('motif_name "')[1].split('"')[0] + ) - df_results_seq_guime[0] = 
df_results_seq_guime[0].apply(lambda x: "_".join(x.split("_")[:-1])) - df_results_seq_guime_count_out = df_results_seq_guime[[0, "motifs"]].groupby("motifs").count() + df_results_seq_guime[0] = df_results_seq_guime[0].apply( + lambda x: "_".join(x.split("_")[:-1]) + ) + df_results_seq_guime_count_out = ( + df_results_seq_guime[[0, "motifs"]].groupby("motifs").count() + ) return df_results_seq_guime_count_out -def save_fasta(df: pd.DataFrame, name: str, num_sequences: int, seq_to_subset_comp: bool = False) -> str: +def save_fasta( + df: pd.DataFrame, + name: str, + num_sequences: int, + seq_to_subset_comp: bool = False, +) -> str: fasta_path = f"{name}.fasta" save_fasta_file = open(fasta_path, "w") num_to_sample = df.shape[0] @@ -115,14 +137,21 @@ def save_fasta(df: pd.DataFrame, name: str, num_sequences: int, seq_to_subset_co def generate_motifs_and_fastas( - df: pd.DataFrame, name: str, num_sequences: int, subset_list: list | None = None + df: pd.DataFrame, + name: str, + num_sequences: int, + subset_list: list | None = None, ) -> dict[str, Any]: print("Generating Motifs and Fastas...", name) print("---" * 10) # Saving fasta if subset_list: - fasta_path = save_fasta(df, f"{name}_{'_'.join([str(c) for c in subset_list])}", num_sequences) + fasta_path = save_fasta( + df, + f"{name}_{'_'.join([str(c) for c in subset_list])}", + num_sequences, + ) else: fasta_path = save_fasta(df, name, num_sequences) @@ -133,7 +162,9 @@ def generate_motifs_and_fastas( final_subset_motifs = {} for comp, v_comp in df.groupby("TAG"): print(comp) - c_fasta = save_fasta(v_comp, f"{name}_{comp}", num_sequences, seq_to_subset_comp=True) + c_fasta = save_fasta( + v_comp, f"{name}_{comp}", num_sequences, seq_to_subset_comp=True + ) final_subset_motifs[comp] = motifs_from_fasta(c_fasta) return { @@ -168,15 +199,21 @@ def preprocess_data( # Creating train/test/shuffle groups df_test = df[df["chr"] == "chr1"].reset_index(drop=True) df_train_shuffled = df[df["chr"] == "chr2"].reset_index(drop=True) - df_train = df_train = df[(df["chr"] != "chr1") & (df["chr"] != "chr2")].reset_index(drop=True) + df_train = df[(df["chr"] != "chr1") & (df["chr"] != "chr2")].reset_index( + drop=True + ) df_train_shuffled["sequence"] = df_train_shuffled["sequence"].apply( lambda x: "".join(random.sample(list(x), len(x))) ) # Getting motif information from the sequences - train = generate_motifs_and_fastas(df_train, "train", number_of_sequences_to_motif_creation, subset_list) - test = generate_motifs_and_fastas(df_test, "test", number_of_sequences_to_motif_creation, subset_list) + train = generate_motifs_and_fastas( + df_train, "train", number_of_sequences_to_motif_creation, subset_list + ) + test = generate_motifs_and_fastas( + df_test, "test", number_of_sequences_to_motif_creation, subset_list + ) train_shuffled = generate_motifs_and_fastas( df_train_shuffled, "train_shuffled", @@ -184,7 +221,11 @@ def preprocess_data( subset_list, ) - combined_dict = {"train": train, "test": test, "train_shuffled": train_shuffled} + combined_dict = { + "train": train, + "test": test, + "train_shuffled": train_shuffled, + } # Writing to pickle if save_output: diff --git a/src/dnadiffusion/data/preprocessing.py b/src/dnadiffusion/data/preprocessing.py index 1cd9cdc5..617de843 100644 --- a/src/dnadiffusion/data/preprocessing.py +++ b/src/dnadiffusion/data/preprocessing.py @@ -12,7 +12,12 @@ def preprocess_data( data_path: Path = DATA_DIR, df_path: str = "/master_dataset.ftr", - cell_list: list = ["K562_ENCLB843GMH", "hESCT0_ENCLB449ZZZ", "HepG2_ENCLB029COU", 
"GM12878_ENCLB441ZZZ"], + cell_list: list = [ + "K562_ENCLB843GMH", + "hESCT0_ENCLB449ZZZ", + "HepG2_ENCLB029COU", + "GM12878_ENCLB441ZZZ", + ], download_data_bool: bool = True, create_master_dataset_bool: bool = True, filter_data_bool: bool = True, @@ -27,7 +32,9 @@ def preprocess_data( if create_master_dataset_bool: create_master_dataset(data_path) if filter_data_bool: - FilteringData(data_path, df_path, cell_list).filter_exclusive_replicates( + FilteringData( + data_path, df_path, cell_list + ).filter_exclusive_replicates( sort=sort_replicates, balance=balance_replicates ) @@ -44,7 +51,9 @@ def download_data(data_path: str) -> None: tmp_dir = data_path + "/tmp" # Downloading the reference genome - os.system(f"wget 'https://hgdownload.soe.ucsc.edu/goldenPath/hg38/bigZips/hg38.fa.gz' -O {tmp_dir}/hg38.fa.gz") + os.system( + f"wget 'https://hgdownload.soe.ucsc.edu/goldenPath/hg38/bigZips/hg38.fa.gz' -O {tmp_dir}/hg38.fa.gz" + ) os.system(f"gunzip {tmp_dir}/hg38.fa.gz") # Download DHS metadata and load into dataframe @@ -60,10 +69,14 @@ def download_data(data_path: str) -> None: # Converting npy file to csv basis_array = np.load(f"{tmp_dir}/2018-06-08NC16_NNDSVD_Basis.npy") - np.savetxt(f"{tmp_dir}/2018-06-08NC16_NNDSVD_Basis.csv", basis_array, delimiter=",") + np.savetxt( + f"{tmp_dir}/2018-06-08NC16_NNDSVD_Basis.csv", basis_array, delimiter="," + ) # Creating nmf_loadings matrix from csv - nmf_loadings = pd.read_csv(f"{tmp_dir}/2018-06-08NC16_NNDSVD_Basis.csv", header=None) + nmf_loadings = pd.read_csv( + f"{tmp_dir}/2018-06-08NC16_NNDSVD_Basis.csv", header=None + ) nmf_loadings.columns = ["C" + str(i) for i in range(1, 17)] # Downloading mixture array that contains 3.5M x 16 matrix of peak presence/absence decomposed into 16 components @@ -75,14 +88,20 @@ def download_data(data_path: str) -> None: # Turning npy file into csv mixture_array = np.load(f"{tmp_dir}/2018-06-08NC16_NNDSVD_Mixture.npy").T - np.savetxt(f"{tmp_dir}/2018-06-08NC16_NNDSVD_Mixture.csv", mixture_array, delimiter=",") + np.savetxt( + f"{tmp_dir}/2018-06-08NC16_NNDSVD_Mixture.csv", + mixture_array, + delimiter=",", + ) # Loading in DHS_Index_and_Vocabulary_metadata that contains the following information: # seqname, start, end, identifier, mean_signal, numsaples, summit, core_start, core_end, component os.system( f"wget 'https://www.meuleman.org/DHS_Index_and_Vocabulary_hg38_WM20190703.txt.gz' -O {tmp_dir}/DHS_Index_and_Vocabulary_hg38_WM20190703.txt.gz" ) - os.system(f"gunzip -d {tmp_dir}/DHS_Index_and_Vocabulary_hg38_WM20190703.txt.gz") + os.system( + f"gunzip -d {tmp_dir}/DHS_Index_and_Vocabulary_hg38_WM20190703.txt.gz" + ) # Downloading binary peak presence/absence matrix os.system( @@ -103,40 +122,62 @@ def create_master_dataset( genome = ReferenceGenome.from_path(f"{tmp_dir}/hg38.fa") # Redefine component columns component_columns = ["C" + str(i) for i in range(1, 17)] - DHS_Index_and_Vocabulary_metadata = pd.read_table(f"{tmp_dir}/DHS_Index_and_Vocabulary_metadata.tsv").iloc[:-1] + DHS_Index_and_Vocabulary_metadata = pd.read_table( + f"{tmp_dir}/DHS_Index_and_Vocabulary_metadata.tsv" + ).iloc[:-1] # Component columns names component_columns = ["C" + str(i) for i in range(1, 17)] # Creating nmf_loadings matrix from csv - basis_nmf_loadings = pd.read_csv(f"{tmp_dir}/2018-06-08NC16_NNDSVD_Basis.csv", header=None) + basis_nmf_loadings = pd.read_csv( + f"{tmp_dir}/2018-06-08NC16_NNDSVD_Basis.csv", header=None + ) basis_nmf_loadings.columns = component_columns # Joining metadata with component presence matrix - 
DHS_Index_and_Vocabulary_metadata = pd.concat([DHS_Index_and_Vocabulary_metadata, basis_nmf_loadings], axis=1) + DHS_Index_and_Vocabulary_metadata = pd.concat( + [DHS_Index_and_Vocabulary_metadata, basis_nmf_loadings], axis=1 + ) DHS_Index_and_Vocabulary_metadata["component"] = ( - DHS_Index_and_Vocabulary_metadata[component_columns].idxmax(axis=1).apply(lambda x: int(x[1:])) + DHS_Index_and_Vocabulary_metadata[component_columns] + .idxmax(axis=1) + .apply(lambda x: int(x[1:])) ) # Loading sequence metadata - sequence_metadata = pd.read_table(f"{tmp_dir}/DHS_Index_and_Vocabulary_hg38_WM20190703.txt", sep="\t") + sequence_metadata = pd.read_table( + f"{tmp_dir}/DHS_Index_and_Vocabulary_hg38_WM20190703.txt", sep="\t" + ) # Dropping component column that contains associated tissue rather than component number (We will use the component number from DHS_Index_and_Vocabulary_metadata) sequence_metadata = sequence_metadata.drop(columns=["component"], axis=1) # Creating nmf_loadings matrix from csv and renaming columns mixture_nmf_loadings = pd.read_csv( - f"{tmp_dir}/2018-06-08NC16_NNDSVD_Mixture.csv", header=None, names=component_columns + f"{tmp_dir}/2018-06-08NC16_NNDSVD_Mixture.csv", + header=None, + names=component_columns, ) # Join metadata with component presence matrix - df = pd.concat([sequence_metadata, mixture_nmf_loadings], axis=1, sort=False) + df = pd.concat( + [sequence_metadata, mixture_nmf_loadings], axis=1, sort=False + ) # Recreating some of the columns from our original dataset - df["component"] = df[component_columns].idxmax(axis=1).apply(lambda x: int(x[1:])) - df["proportion"] = df[component_columns].max(axis=1) / df[component_columns].sum(axis=1) + df["component"] = ( + df[component_columns].idxmax(axis=1).apply(lambda x: int(x[1:])) + ) + df["proportion"] = df[component_columns].max(axis=1) / df[ + component_columns + ].sum(axis=1) df["total_signal"] = df["mean_signal"] * df["numsamples"] - df["proportion"] = df[component_columns].max(axis=1) / df[component_columns].sum(axis=1) - df["dhs_id"] = df[["seqname", "start", "end", "summit"]].apply(lambda x: "_".join(map(str, x)), axis=1) + df["proportion"] = df[component_columns].max(axis=1) / df[ + component_columns + ].sum(axis=1) + df["dhs_id"] = df[["seqname", "start", "end", "summit"]].apply( + lambda x: "_".join(map(str, x)), axis=1 + ) df["DHS_width"] = df["end"] - df["start"] # Creating sequence column @@ -163,11 +204,14 @@ def create_master_dataset( ] # Opening file - binary_matrix = pd.read_table(f"{tmp_dir}/dat_bin_FDR01_hg38.txt", header=None) + binary_matrix = pd.read_table( + f"{tmp_dir}/dat_bin_FDR01_hg38.txt", header=None + ) # Collecting names of cells into a list with fromat celltype_encodeID celltype_encodeID = [ - row["Biosample name"] + "_" + row["DCC Library ID"] for _, row in DHS_Index_and_Vocabulary_metadata.iterrows() + row["Biosample name"] + "_" + row["DCC Library ID"] + for _, row in DHS_Index_and_Vocabulary_metadata.iterrows() ] # Renaming columns using celltype_encodeID list @@ -189,14 +233,20 @@ class FilteringData: def __init__(self, data_path: str, df_path: str, cell_list: list): self.df = pd.read_feather(data_path + df_path) self.cell_list = cell_list - self.output_path = data_path + "/K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt" + self.output_path = ( + data_path + "/K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt" + ) self._test_data_structure() def _test_data_structure(self): # Ensures all columns after the 11th are named cell names - assert all("_ENCL" in x for x in 
self.df.columns[11:]), "_ENCL not in all columns after 11th" + assert all( + "_ENCL" in x for x in self.df.columns[11:] + ), "_ENCL not in all columns after 11th" - def filter_exclusive_replicates(self, sort: bool = False, balance: bool = True): + def filter_exclusive_replicates( + self, sort: bool = False, balance: bool = True + ): """Given a specific set of samples (one per cell type), capture the exclusive peaks of each samples (the ones matching just one sample for the whole set) and then filter the dataset to keep only these peaks. @@ -210,7 +260,9 @@ def filter_exclusive_replicates(self, sort: bool = False, balance: bool = True): # Creating a new dataframe with only the columns corresponding to the cell types df_subset = self.df[subset_cols] # Creating a new column for each cell type with the exclusive peaks or 'NO_TAG' if not exclusive - df_subset["TAG"] = df_subset[self.cell_list].apply(lambda x: "NO_TAG" if x.sum() != 1 else x.idxmax(), axis=1) + df_subset["TAG"] = df_subset[self.cell_list].apply( + lambda x: "NO_TAG" if x.sum() != 1 else x.idxmax(), axis=1 + ) # Creating a new dataframe with only the rows with exclusive peaks new_df_list = [] @@ -218,10 +270,15 @@ def filter_exclusive_replicates(self, sort: bool = False, balance: bool = True): if k != "NO_TAG": cell, replicate = "_".join(k.split("_")[:-1]), k.split("_")[-1] v["additional_replicates_with_peak"] = ( - self.df[self.df.filter(like=cell).columns].apply(lambda x: x.sum(), axis=1) - 1 + self.df[self.df.filter(like=cell).columns].apply( + lambda x: x.sum(), axis=1 + ) + - 1 ) temp_df = self.df.filter(like=cell) - print(f"Cell type: {cell}, Replicate: {replicate}, Number of exclusive peaks: {v.shape[0]}") + print( + f"Cell type: {cell}, Replicate: {replicate}, Number of exclusive peaks: {v.shape[0]}" + ) else: v["additional_replicates_with_peak"] = 0 new_df_list.append(v) @@ -235,7 +292,10 @@ def filter_exclusive_replicates(self, sort: bool = False, balance: bool = True): new_df = pd.concat( [ x_v.sort_values( - by=["additional_replicates_with_peak", "other_samples_with_peak_not_considering_reps"], + by=[ + "additional_replicates_with_peak", + "other_samples_with_peak_not_considering_reps", + ], ascending=[False, True], ) for x_k, x_v in new_df.groupby("TAG") @@ -247,7 +307,11 @@ def filter_exclusive_replicates(self, sort: bool = False, balance: bool = True): if balance: lowest_peak_count = new_df.groupby("TAG").count()["sequence"].min() new_df = pd.concat( - [v_bal.head(lowest_peak_count) for k_bal, v_bal in new_df.groupby("TAG") if k_bal != "NO_TAG"] + [ + v_bal.head(lowest_peak_count) + for k_bal, v_bal in new_df.groupby("TAG") + if k_bal != "NO_TAG" + ] ) print("Saving filtered dataset") diff --git a/src/dnadiffusion/data/validation_preprocessing.py b/src/dnadiffusion/data/validation_preprocessing.py index e0d71875..a5c39399 100644 --- a/src/dnadiffusion/data/validation_preprocessing.py +++ b/src/dnadiffusion/data/validation_preprocessing.py @@ -2,13 +2,14 @@ import genomepy import pandas as pd -import pybedtools from pybedtools import BedTool from dnadiffusion.utils.data_util import GTFProcessing -def combine_all_seqs(cell_list: list, training_data_path: str, save_output: bool = False) -> pd.DataFrame: +def combine_all_seqs( + cell_list: list, training_data_path: str, save_output: bool = False +) -> pd.DataFrame: """A function to take the generated sequences from sample loop and combine them with the training dataset Args: @@ -40,7 +41,10 @@ def combine_all_seqs(cell_list: list, training_data_path: str, save_output: 
bool df["CELL_TYPE"] = file_name.upper() df["index_number"] = [str(i) for i in df.index] df["TAG"] = "GENERATED" - df["ID"] = df.apply(lambda x: "_".join([x["index_number"], x["TAG"], x["CELL_TYPE"]]), axis=1) + df["ID"] = df.apply( + lambda x: "_".join([x["index_number"], x["TAG"], x["CELL_TYPE"]]), + axis=1, + ) # Saving the modified dataframe dfs_to_save["DF_" + file_name] = df # Remove index column @@ -59,9 +63,14 @@ def combine_all_seqs(cell_list: list, training_data_path: str, save_output: bool df_slice.columns = ["SEQUENCE", "CELL_TYPE", "TAG"] df_slice["TAG"] = df_slice["TAG"].apply(lambda x: x.upper()) df_slice["CELL_TYPE"] = df_slice["CELL_TYPE"].apply(lambda x: x.upper()) - df_slice["CELL_TYPE"] = df_slice["CELL_TYPE"].apply(lambda x: x.split("_")[0]) + df_slice["CELL_TYPE"] = df_slice["CELL_TYPE"].apply( + lambda x: x.split("_")[0] + ) df_slice["index_number"] = [str(i) for i in df_slice.index] - df_slice["ID"] = df_slice.apply(lambda x: "_".join([x["index_number"], x["TAG"], x["CELL_TYPE"]]), axis=1) + df_slice["ID"] = df_slice.apply( + lambda x: "_".join([x["index_number"], x["TAG"], x["CELL_TYPE"]]), + axis=1, + ) dfs_to_save[data_in.upper() + "_" + tag_in.upper()] = df_slice # Remove index column del df_slice["index_number"] @@ -122,25 +131,35 @@ def validation_table( # Filtering chromosomes chromosomes = ["chr" + str(i) for i in range(1, 22)] + ["X", "Y"] random_seqs = random_seqs[random_seqs["chrom"].isin(chromosomes)] - filtered_random_seqs = BedTool.from_dataframe(random_seqs).sequence(f"{genome_path}/hg38/hg38.fa") + filtered_random_seqs = BedTool.from_dataframe(random_seqs).sequence( + f"{genome_path}/hg38/hg38.fa" + ) # Cleaning up the dataframe and renaming columns - random_seqs["SEQUENCE"] = [x.upper() for x in open(filtered_random_seqs.seqfn).read().split("\n") if ">" not in x][ - :-1 + random_seqs["SEQUENCE"] = [ + x.upper() + for x in open(filtered_random_seqs.seqfn).read().split("\n") + if ">" not in x + ][:-1] + random_seqs = random_seqs[ + random_seqs["SEQUENCE"].apply(lambda x: "N" not in x) ] - random_seqs = random_seqs[random_seqs["SEQUENCE"].apply(lambda x: "N" not in x)] random_seqs = random_seqs.head(num_filter_sequences) - random_seqs["ID"] = random_seqs.apply(lambda x: f"{x['chrom']}_{x['start']!s}_{x['end']!s}_random", axis=1) + random_seqs["ID"] = random_seqs.apply( + lambda x: f"{x['chrom']}_{x['start']!s}_{x['end']!s}_random", axis=1 + ) random_seqs["CELL_TYPE"] = "NO" random_seqs["TAG"] = "RANDOM_GENOME_REGIONS" - random_seqs = random_seqs[["chrom", "start", "end", "ID", "CELL_TYPE", "SEQUENCE", "TAG"]] + random_seqs = random_seqs[ + ["chrom", "start", "end", "ID", "CELL_TYPE", "SEQUENCE", "TAG"] + ] # Promoter sequences print("Generating promoter sequences dataframe") gtf = GTFProcessing(promoter_path) df_gtf = gtf.get_gtf_df() - df_gtf_filtered = df_gtf.query("feature == 'transcript' and gene_type == 'protein_coding' ").drop_duplicates( - "gene_name" - ) + df_gtf_filtered = df_gtf.query( + "feature == 'transcript' and gene_type == 'protein_coding' " + ).drop_duplicates("gene_name") df_gtf_filtered["tss_position"] = df_gtf_filtered.apply( lambda x: x["start"] if x["strand"] == "+" else x["end"], axis=1 ) @@ -154,9 +173,15 @@ def validation_table( ) df_gtf_filtered["CELL_TYPE"] = "NO" df_gtf_filtered["TAG"] = "PROMOTERS" - p_seqs = BedTool.from_dataframe(df_gtf_filtered).sequence(f"{genome_path}/hg38/hg38.fa") - df_gtf_filtered["SEQUENCE"] = [x.upper() for x in open(p_seqs.seqfn).read().split("\n") if ">" not in x][:-1] - df_gtf_filtered = 
df_gtf_filtered[["chrom", "start", "end", "ID", "CELL_TYPE", "SEQUENCE", "TAG"]] + p_seqs = BedTool.from_dataframe(df_gtf_filtered).sequence( + f"{genome_path}/hg38/hg38.fa" + ) + df_gtf_filtered["SEQUENCE"] = [ + x.upper() for x in open(p_seqs.seqfn).read().split("\n") if ">" not in x + ][:-1] + df_gtf_filtered = df_gtf_filtered[ + ["chrom", "start", "end", "ID", "CELL_TYPE", "SEQUENCE", "TAG"] + ] # Reading the training dataset used to train the model print("Generating training dataset dataframe") @@ -169,24 +194,60 @@ def validation_table( df_train_balanced["start"] = df_train_balanced["coord_center"] - 100 df_train_balanced["end"] = df_train_balanced["coord_center"] + 100 # Selecting only the columns we need - df_train_balanced = df_train_balanced[["chr", "start", "end", "dhs_id", "TAG", "sequence", "data_label"]] - df_train_balanced.columns = ["chrom", "start", "end", "ID", "CELL_TYPE", "SEQUENCE", "TAG"] + df_train_balanced = df_train_balanced[ + ["chr", "start", "end", "dhs_id", "TAG", "sequence", "data_label"] + ] + df_train_balanced.columns = [ + "chrom", + "start", + "end", + "ID", + "CELL_TYPE", + "SEQUENCE", + "TAG", + ] # Reading the generated sequences print("Generating synthetic sequences dataframe") - df_generated = pd.read_csv(generated_data_path, sep="\t").query("TAG == 'GENERATED'") - df_generated_balanced = pd.concat([v for k, v in df_generated.groupby("CELL_TYPE")]) + df_generated = pd.read_csv(generated_data_path, sep="\t").query( + "TAG == 'GENERATED'" + ) + df_generated_balanced = pd.concat( + [v for k, v in df_generated.groupby("CELL_TYPE")] + ) # Adding some metadata columns df_generated_balanced["chrom"] = "NO" df_generated_balanced["start"] = "NO" df_generated_balanced["end"] = "NO" - df_generated_balanced = df_generated_balanced[["chrom", "start", "end", "ID", "CELL_TYPE", "SEQUENCE", "TAG"]] - df_generated_balanced.columns = ["chrom", "start", "end", "ID", "CELL_TYPE", "SEQUENCE", "TAG"] - df_generated_balanced["CELL_TYPE"] = df_generated_balanced["CELL_TYPE"].apply(lambda x: x.split("_")[0]) + df_generated_balanced = df_generated_balanced[ + ["chrom", "start", "end", "ID", "CELL_TYPE", "SEQUENCE", "TAG"] + ] + df_generated_balanced.columns = [ + "chrom", + "start", + "end", + "ID", + "CELL_TYPE", + "SEQUENCE", + "TAG", + ] + df_generated_balanced["CELL_TYPE"] = df_generated_balanced[ + "CELL_TYPE" + ].apply(lambda x: x.split("_")[0]) # Combining all the dataframes print("Combining all the dataframes") - df_final = pd.concat([df_train_balanced, df_generated_balanced, df_gtf_filtered, random_seqs], ignore_index=True) + df_final = pd.concat( + [ + df_train_balanced, + df_generated_balanced, + df_gtf_filtered, + random_seqs, + ], + ignore_index=True, + ) if save_output: - df_final.to_csv("DNA_DIFFUSION_VALIDATION_TABLE.txt", sep="\t", index=None) + df_final.to_csv( + "DNA_DIFFUSION_VALIDATION_TABLE.txt", sep="\t", index=None + ) return df_final diff --git a/src/dnadiffusion/hydra/__init__.py b/src/dnadiffusion/hydra/__init__.py new file mode 100644 index 00000000..0014b6cd --- /dev/null +++ b/src/dnadiffusion/hydra/__init__.py @@ -0,0 +1,5 @@ +from dnadiffusion.hydra.execute import main as execute_main + + +def main(): + execute_main() diff --git a/src/dnadiffusion/hydra/execute.py b/src/dnadiffusion/hydra/execute.py new file mode 100644 index 00000000..b50de068 --- /dev/null +++ b/src/dnadiffusion/hydra/execute.py @@ -0,0 +1,751 @@ +import importlib +import os +import pathlib +import sys +import tempfile +from dataclasses import dataclass, field + +import 
pyperclip
+import rich.syntax
+import rich.tree
+from dataclasses_json import dataclass_json
+from dotenv import load_dotenv
+from flytekit.configuration import Config as FlyteConfig
+from flytekit.configuration import (
+    FastSerializationSettings,
+    ImageConfig,
+    SerializationSettings,
+)
+from flytekit.core.base_task import PythonTask
+from flytekit.core.workflow import WorkflowBase
+from flytekit.remote import FlyteRemote
+from hydra_zen import ZenStore, make_config, make_custom_builds_fn, to_yaml, zen
+from omegaconf import DictConfig
+
+from dnadiffusion.constants import (
+    LOCAL_CLUSTER_CONFIG_FILE_PATH,
+    REMOTE_CLUSTER_CONFIG_FILE_PATH,
+)
+from dnadiffusion.hydra.execution_config import (
+    ClusterMode,
+    ExecutionLocation,
+    ExecutionMode,
+    LocalMode,
+    local_cluster_dev_config,
+    local_cluster_prod_config,
+    local_shell_config,
+    remote_dev_config,
+    remote_prod_config,
+)
+from dnadiffusion.hydra.execution_utils import (
+    EntityConfig,
+    generate_entity_configs,
+    generate_hydra_config,
+    git_info_to_workflow_version,
+    random_alphanumeric_suffix,
+    wait_for_workflow_completion,
+)
+from dnadiffusion.logging import configure_logging
+
+logger = configure_logging("dnadiffusion.hydra.execute")
+builds = make_custom_builds_fn(populate_full_signature=True)
+
+
+@dataclass_json
+@dataclass
+class ExecutionContext:
+    """
+    Represents the execution configuration for a workflow.
+
+    This dataclass encapsulates settings related to the execution environment,
+    including the mode of execution, container image details, and workflow
+    versioning information.
+
+    Attributes:
+        mode (ExecutionMode): The execution mode, which dictates how and where
+            the workflow is executed.
+        image (str): The full name of the container image to be used in the
+            execution, including the registry path.
+        tag (str): The tag appended to the container image, usually the git
+            branch (DEV) or commit hash (PROD).
+        version (str): A string representing the version of the workflow,
+            typically including a commit hash or other identifiers.
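+        package_path (str): Root directory of the local package source that
+            is uploaded during fast registration (e.g. "src").
+        import_path (str): Dotted module path searched for workflow entities.
+        project (str): Default Flyte project used by the remote client.
+        domain (str): Default Flyte domain used by the remote client.
+        wait (bool): Whether to block until the remote execution completes.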
+ """ + + mode: ExecutionMode = field(default_factory=ExecutionMode) + image: str = "ghcr.io/pinellolab/dnadiffusion" + tag: str = "main" + version: str = f"dnadiffusion-main-{random_alphanumeric_suffix()}" + package_path: str = "src" + import_path: str = "dnadiffusion.workflows" + project: str = "dnadiffusion" + domain: str = "development" + wait: bool = True + + +def handle_local_execution(exec_mode, execution_context, entity, entity_config): + if exec_mode.local_config.mode == LocalMode.shell: + # https://github.com/flyteorg/flytekit/blob/dc9d26bfd29d7a3482d1d56d66a806e8fbcba036/flytekit/clis/sdk_in_container/run.py#L477 + output = entity(**entity_config.inputs) + logger.info(f"Output:\n\n{output}\n") + return True + + elif exec_mode.local_config.mode == LocalMode.cluster: + config_file_path = ( + LOCAL_CLUSTER_CONFIG_FILE_PATH + if exec_mode.local_config.cluster_config.mode == ClusterMode.dev + else REMOTE_CLUSTER_CONFIG_FILE_PATH + ) + return handle_cluster_execution( + exec_mode.local_config.cluster_config.mode, + execution_context, + entity, + entity_config, + config_file_path, + ) + + return False + + +def handle_remote_execution( + exec_mode, execution_context, entity, entity_config +): + config_file_path = REMOTE_CLUSTER_CONFIG_FILE_PATH + return handle_cluster_execution( + exec_mode.remote_config.mode, + execution_context, + entity, + entity_config, + config_file_path, + ) + + +def handle_cluster_execution( + cluster_mode, execution_context, entity, entity_config, config_file_path +): + remote = FlyteRemote( + config=FlyteConfig.auto(config_file=config_file_path), + default_project=execution_context.project, + default_domain=execution_context.domain, + ) + logger.debug(f"Remote context:\n\n{remote.context}\n") + image_config = ImageConfig.from_images( + default_image=f"{execution_context.image}:{execution_context.tag}", + m={ + "gpu": f"{execution_context.image}-gpu:{execution_context.tag}", + }, + ) + + serialization_settings = get_serialization_settings( + cluster_mode, execution_context, entity_config, remote, image_config + ) + register_and_execute_workflow( + remote, entity, entity_config, execution_context, serialization_settings + ) + return True + + +def get_serialization_settings( + cluster_mode, execution_context, entity_config, remote, image_config +): + if cluster_mode == ClusterMode.dev: + logger.warning( + "Development mode. Use 'prod' mode for production or CI environments." 
+ ) + with tempfile.TemporaryDirectory() as tmp_dir: + _, upload_url = remote.fast_package( + pathlib.Path(execution_context.package_path), output=tmp_dir + ) + logger.info(f"Workflow package uploaded to:\n\n{upload_url}\n") + return SerializationSettings( + image_config=image_config, + fast_serialization_settings=FastSerializationSettings( + enabled=True, + destination_dir="/root", + distribution_location=upload_url, + ), + ) + elif cluster_mode == ClusterMode.prod: + logger.info( + f"Registering workflow: {entity_config.module_name}.{entity_config.entity_name}" + ) + return SerializationSettings(image_config=image_config) + else: + raise_invalid_mode_error(cluster_mode, ClusterMode) + + +def register_and_execute_workflow( + remote, entity, entity_config, execution_context, serialization_settings +): + if isinstance(entity, WorkflowBase): + remote.register_workflow( + entity=entity, + serialization_settings=serialization_settings, + version=execution_context.version, + ) + elif isinstance(entity, PythonTask): + remote.register_task( + entity=entity, + serialization_settings=serialization_settings, + version=execution_context.version, + ) + execution = remote.execute( + entity=entity, + inputs=entity_config.inputs, + version=execution_context.version, + execution_name_prefix=execution_context.version, + wait=False, + ) + execution_url = remote.generate_console_url(execution) + + try: + pyperclip.copy(execution_url) + except Exception as e: + logger.warning(f"Failed to copy execution URL to clipboard: {e}") + logger.info( + f"Execution submitted: {execution}\nExecution url:\n\n{execution_url}\n" + ) + + if execution_context.wait: + wait_for_workflow_completion(execution, remote, logger) + + +def raise_invalid_mode_error(mode, valid_modes): + logger.error( + f"Invalid mode: {mode}. Please set to one of the following: {', '.join([e.value for e in valid_modes])}." + ) + sys.exit(1) + + +def execute_workflow( + zen_cfg: DictConfig, + execution_context: ExecutionContext, + entity_config: EntityConfig, +) -> None: + """ + Executes the given workflow based on the Hydra configuration. The execution + mode is controlled by the 'mode' parameter, which is an instance of the + ExecutionContext dataclass. This dataclass encapsulates execution configuration + details including the execution environment name (local, dev, prod), + container image details, and versioning information. + + The 'execution_context.mode' parameter allows for the following execution environments: + - LOCAL: Attempts to execute the workflow locally without registering it on + the remote. + - DEV: Executes a copy of the local workflow on the remote for development + purposes. This mode allows for testing changes to the workflow code + remotely without needing to rebuild and push the container image. However, + rebuilding and pushing the image may be required for significant + dependency changes. The workflow version is appended with a random + alphanumeric string. This mode is intended for development purposes only + and should not be used in production or CI environments. + - PROD: Registers the workflow on the remote and then executes it, intended + for production or CI environments. This mode executes the workflow against + a container image that has been built and pushed to the registry specified + in the ExecutionContext image. The image used is tagged with the git short + SHA. + + In all modes, the workflow is registered with Flyte and executed. 
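+    (For local shell execution, the entity is instead invoked directly in
+    the current process without registration.)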
The
+    function logs various informational messages, including the execution URL,
+    and optionally waits for workflow completion based on the `wait` flag in the
+    workflow configuration.
+
+    Args:
+        zen_cfg (DictConfig): Configuration for the execution.
+        execution_context (ExecutionContext): An instance of ExecutionContext
+            specifying the execution settings.
+        entity_config (EntityConfig): Configuration for the workflow entity,
+            including the workflow function and its inputs.
+
+    Additional dynamic inputs for the workflow are generated based on the
+    entity configuration. These inputs are determined by inspecting the
+    signature of the workflow entity and are configured to be compatible
+    with dataclass_json and hydra_zen, ensuring proper instantiation and
+    configuration of custom types.
+
+    Raises:
+        SystemExit: Exits with status 1 if an invalid execution mode is
+            specified.
+    """
+    config_yaml = to_yaml(zen_cfg)
+    tree = rich.tree.Tree("execute_workflow", style="dim", guide_style="dim")
+    tree.add(rich.syntax.Syntax(config_yaml, "yaml", theme="monokai"))
+    rich.print(tree)
+
+    module = importlib.import_module(
+        f"{execution_context.import_path}.{entity_config.module_name}"
+    )
+    entity = getattr(module, entity_config.entity_name)
+
+    exec_mode = execution_context.mode
+
+    if exec_mode.location == ExecutionLocation.local:
+        if not handle_local_execution(
+            exec_mode, execution_context, entity, entity_config
+        ):
+            raise_invalid_mode_error(exec_mode.local_config.mode, LocalMode)
+
+    elif exec_mode.location == ExecutionLocation.remote:
+        if not handle_remote_execution(
+            exec_mode, execution_context, entity, entity_config
+        ):
+            raise_invalid_mode_error(exec_mode.remote_config.mode, ClusterMode)
+
+    else:
+        raise_invalid_mode_error(exec_mode.location, ExecutionLocation)
+
+
+def main() -> None:
+    """
+    Main function that executes the workflow in one of the three modes
+    determined by the config group mode (local, dev, prod):
+
+    - In 'local' mode, it executes the workflow locally without a remote.
+    - In 'dev' mode, it uses the container `execution_context.image` with
+      `execution_context.tag` set to the current branch tag for execution.
+      This allows executing a copy of the updated local workflow on the
+      remote prior to building a new image.
+    - In 'prod' mode, it uses the container image with the git short SHA tag
+      just after building an image. This is primarily for CI execution.
+
+    See the `execute_workflow` function for more details.
+
+    Note this logic regarding the image tag is independent of setting domain to
+    "development", "staging", "production", etc.
+
+    The workflow version is also separately determined based on the current git
+    repo name, branch, and commit SHA.
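+
+    The container image used for remote execution defaults to a local
+    registry (localhost:30000/<repo name>) and can be overridden with the
+    WORKFLOW_IMAGE environment variable; similarly, the parent module
+    inspected for workflow entities can be overridden with
+    WORKFLOW_PARENT_MODULE_PATH.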
+ """ + + load_dotenv() + + # equivalent to + # hydra_zen.wrapper._implementations.store + # except in name + store = ZenStore( + name="dnadiffusion", + deferred_to_config=True, + deferred_hydra_store=True, + ) + + store(generate_hydra_config()) + + repo_name, git_branch, git_short_sha = git_info_to_workflow_version(logger) + + workflow_image = os.environ.get( + "WORKFLOW_IMAGE", + f"localhost:30000/{repo_name}", + ) + + ExecutionContextConf = builds(ExecutionContext) + + # Local Shell + local_shell_execution_context = ExecutionContextConf( + mode=local_shell_config, + image="", + tag="", + version=f"{repo_name}-{git_branch}-{git_short_sha}-local-{random_alphanumeric_suffix()}", + ) + + # Local Cluster Dev + local_cluster_dev_execution_context = ExecutionContextConf( + mode=local_cluster_dev_config, + image=f"localhost:30000/{repo_name}", + tag=git_branch, + version=f"{repo_name}-{git_branch}-{git_short_sha}-local-{random_alphanumeric_suffix()}", + ) + + # Local Cluster Prod + local_cluster_prod_execution_context = ExecutionContextConf( + mode=local_cluster_prod_config, + image=f"localhost:30000/{repo_name}", + tag=git_short_sha, + version=f"{repo_name}-{git_branch}-{git_short_sha}", + ) + + # Remote Dev + remote_dev_execution_context = ExecutionContextConf( + mode=remote_dev_config, + image=workflow_image, + tag=git_branch, + version=f"{repo_name}-{git_branch}-{git_short_sha}-dev-{random_alphanumeric_suffix()}", + ) + + # Remote Prod + remote_prod_execution_context = ExecutionContextConf( + mode=remote_prod_config, + image=workflow_image, + tag=git_short_sha, + version=f"{repo_name}-{git_branch}-{git_short_sha}", + ) + + # define the execution_context store + execution_context_store = store(group="execution_context") + + execution_context_store(local_shell_execution_context, name="local_shell") + execution_context_store( + local_cluster_dev_execution_context, name="local_cluster_dev" + ) + execution_context_store( + local_cluster_prod_execution_context, name="local_cluster_prod" + ) + execution_context_store(remote_dev_execution_context, name="remote_dev") + execution_context_store(remote_prod_execution_context, name="remote_prod") + + # define the entity_config store + entity_config_store = store(group="entity_config") + + # specify the parent module whose submodules will be inspected for workflows + parent_module_path = os.environ.get( + "WORKFLOW_PARENT_MODULE_PATH", "dnadiffusion.workflows" + ) + generate_entity_configs(parent_module_path, entity_config_store, logger) + + hydra_defaults = [ + "_self_", + # test remote workflow execution + {"execution_context": "remote_dev"}, + {"entity_config": "lrwine_training_workflow"}, + # # test local cluster task execution + # # {"execution_context": "local_cluster_dev"}, + # {"execution_context": "local_shell"}, + # {"entity_config": "lrwine_process_data"}, + ] + logger.debug(f"hydra_defaults: {hydra_defaults}") + + ExecuteWorkflowConf = make_config( + hydra_defaults=hydra_defaults, + execution_context=None, + entity_config=None, + ) + + store( + ExecuteWorkflowConf, + name="execute_workflow", + ) + + store.add_to_hydra_store(overwrite_ok=True) + + zen(execute_workflow).hydra_main( + config_path=None, + config_name="execute_workflow", + version_base="1.3", + ) + + +if __name__ == "__main__": + """ + This script executes a Flyte workflow configured with hydra-zen. + > dnadiffusion --help. 
+
+    == Configuration groups ==
+    First override default group values (group=option)
+
+    entity_config: example_wf, lrwine_training_workflow
+    execution_context: local_cluster_dev, local_cluster_prod, local_shell,
+    remote_dev, remote_prod
+
+
+    == Config ==
+    Then override any element in the config (foo.bar=value)
+
+    execution_context:
+      _target_: dnadiffusion.hydra.execute.ExecutionContext
+      mode:
+        _target_: dnadiffusion.hydra.execution_config.ExecutionMode
+        location: remote
+        local_config: null
+        remote_config:
+          _target_: dnadiffusion.hydra.execution_config.ClusterConfig
+          mode: dev
+      image: localhost:30000/dnadiffusion
+      tag: main
+      version: dnadiffusion-main-16323b3-dev-a8x
+      name: training_workflow
+      package_path: src
+      import_path: dnadiffusion.workflows
+      project: dnadiffusion
+      domain: development
+      wait: true
+    entity_config:
+      _target_: dnadiffusion.hydra.execution_utils.EntityConfig
+      inputs:
+        _target_: builtins.dict
+        _convert_: all
+        _args_:
+        - logistic_regression:
+            _target_: dnadiffusion.workflows.lrwine.LogisticRegressionInterface
+            penalty: l2
+            dual: false
+            tol: 0.0001
+            C: 1.0
+            fit_intercept: true
+            intercept_scaling: 1
+            class_weight: null
+            random_state: null
+            solver: lbfgs
+            max_iter: 100
+            multi_class: auto
+            verbose: 0
+            warm_start: false
+            n_jobs: null
+            l1_ratio: null
+      module_name: lrwine
+      entity_name: training_workflow
+      entity_type: PythonFunctionWorkflow
+
+    Example usage:
+    > dnadiffusion -h
+    > dnadiffusion -c job
+    > dnadiffusion
+    > dnadiffusion \
+        execution_context=remote_dev \
+        entity_config=lrwine_training_workflow
+    > dnadiffusion \
+        entity_config.inputs._args_.0.logistic_regression.C=0.4 \
+        entity_config.inputs._args_.0.logistic_regression.max_iter=1200
+    # The _args_=[] override only works for local_shell execution of tasks
+    > dnadiffusion \
+        execution_context=local_shell \
+        entity_config=lrwine_process_data \
+        entity_config.inputs._args_=[]
+    # For remote execution of tasks, stub inputs must be provided.
+    # This is only meant for testing purposes.
+    > dnadiffusion execution_context=local_cluster_dev \
+        entity_config=lrwine_process_data \
+        entity_config.inputs._args_.0.data.data=[[12.0, 0],[13.0, 1],[9.5, 2]] \
+        entity_config.inputs._args_.0.data.columns="[ash, target]"
+    # TODO: update to use joblib hydra execution backend
+    > dnadiffusion \
+        --multirun entity_config.inputs._args_.0.logistic_regression.C=0.2,0.5
+
+    See the hydra config output in the git-ignored `./outputs` or
+    `./multirun` directories. These are also stored as an artifact of
+    the CI actions workflow in the `Upload config artifact` step.
+
+    Warning:
+        Hydra command-line overrides are only intended to be supported for
+        inputs. Do not override workflow-level parameters. This will lead to
+        unexpected behavior. You can modify workflow parameters with `.env` or
+        environment variables. Note that `version` and `tag` are determined
+        automatically in Python based on `mode`. The workflow execution
+        parameters are stored in the hydra config output for reference.
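+
+        For example, workflow-level parameters such as the image can be
+        supplied via the environment instead of the CLI (the image value
+        below is illustrative):
+        > WORKFLOW_IMAGE=ghcr.io/example-org/dnadiffusion \
+            dnadiffusion execution_context=remote_prod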
+    """
+    main()
diff --git a/src/dnadiffusion/hydra/execution_config.py b/src/dnadiffusion/hydra/execution_config.py
new file mode 100644
index 00000000..f04ccd3c
--- /dev/null
+++ b/src/dnadiffusion/hydra/execution_config.py
@@ -0,0 +1,148 @@
+from dataclasses import dataclass, field
+from enum import Enum
+
+from hydra_zen import make_custom_builds_fn
+
+
+class ClusterMode(Enum):
+    dev = "DEV"
+    prod = "PROD"
+
+
+class LocalMode(Enum):
+    shell = "SHELL"
+    cluster = "CLUSTER"
+
+
+class ExecutionLocation(Enum):
+    local = "LOCAL"
+    remote = "REMOTE"
+
+
+@dataclass
+class ClusterConfig:
+    mode: ClusterMode = field(default_factory=lambda: ClusterMode.dev)
+
+
+@dataclass
+class LocalConfig:
+    mode: LocalMode = field(default_factory=lambda: LocalMode.shell)
+    cluster_config: ClusterConfig = field(default_factory=ClusterConfig)
+
+
+@dataclass
+class ExecutionMode:
+    """
+    Constructs configurations for each leaf node marked with a `#` in the supported
+    execution config tree:
+
+    execution_config = {
+        "LOCAL": {
+            "SHELL": "LOCAL_SHELL",  #
+            "CLUSTER": {
+                "DEV": "LOCAL_CLUSTER_DEV",  #
+                "PROD": "LOCAL_CLUSTER_PROD"  #
+            }
+        },
+        "REMOTE": {
+            "DEV": "REMOTE_DEV",  #
+            "PROD": "REMOTE_PROD"  #
+        }
+    }
+    """
+
+    location: ExecutionLocation = field(
+        default_factory=lambda: ExecutionLocation.remote
+    )
+    local_config: LocalConfig = field(default_factory=LocalConfig)
+    remote_config: ClusterConfig = field(default_factory=ClusterConfig)
+
+
+fbuilds = make_custom_builds_fn(populate_full_signature=True)
+ClusterConfigConf = fbuilds(ClusterConfig)
+LocalConfigConf = fbuilds(LocalConfig)
+ExecutionModeConf = fbuilds(ExecutionMode)
+
+
+# Default Execution Configuration
+default_execution_config = ExecutionModeConf()
+
+# Local Shell Configuration
+local_shell_config = ExecutionModeConf(
+    location=ExecutionLocation.local,
+    local_config=LocalConfigConf(
+        mode=LocalMode.shell,
+        cluster_config=None,
+    ),
+    remote_config=None,
+)
+
+# Local Cluster Dev Configuration
+local_cluster_dev_config = ExecutionModeConf(
+    location=ExecutionLocation.local,
+    local_config=LocalConfigConf(
+        mode=LocalMode.cluster,
+        cluster_config=ClusterConfigConf(mode=ClusterMode.dev),
+    ),
+    remote_config=None,
+)
+
+# Local Cluster Prod Configuration
+local_cluster_prod_config = ExecutionModeConf(
+    location=ExecutionLocation.local,
+    local_config=LocalConfigConf(
+        mode=LocalMode.cluster,
+        cluster_config=ClusterConfigConf(mode=ClusterMode.prod),
+    ),
+    remote_config=None,
+)
+
+# Remote Dev Configuration
+remote_dev_config = ExecutionModeConf(
+    location=ExecutionLocation.remote,
+    local_config=None,
+    remote_config=ClusterConfigConf(mode=ClusterMode.dev),
+)
+
+# Remote Prod Configuration
+remote_prod_config = ExecutionModeConf(
+    location=ExecutionLocation.remote,
+    local_config=None,
+    remote_config=ClusterConfigConf(mode=ClusterMode.prod),
+)
+
+if __name__ == "__main__":
+    from pprint import pprint
+
+    from hydra_zen import instantiate
+
+    def ipprint(x):
+        pprint(instantiate(x))
+
+    ipprint(default_execution_config)
+    ipprint(local_shell_config)
+    ipprint(local_cluster_dev_config)
+    ipprint(local_cluster_prod_config)
+    ipprint(remote_dev_config)
+    ipprint(remote_prod_config)
+
+"""
+Permissively typed version of the dataclasses above for debugging purposes
+
+@dataclass
+class ClusterConfig:
+    mode: Any
+
+
+@dataclass
+class LocalConfig:
+    mode: Any
+    cluster_config: Any = MISSING
+
+
+@dataclass
+class ExecutionMode:
+    location: Any
+    local_config: Any = MISSING
+    remote_config: Any = MISSING
+"""
diff --git 
a/src/dnadiffusion/hydra/execution_utils.py b/src/dnadiffusion/hydra/execution_utils.py new file mode 100644 index 00000000..367c9edc --- /dev/null +++ b/src/dnadiffusion/hydra/execution_utils.py @@ -0,0 +1,485 @@ +import importlib +import inspect +import logging +import os +import pkgutil +import queue +import re +import secrets +import sys +import threading +import time +from dataclasses import dataclass +from datetime import timedelta +from textwrap import dedent +from typing import Any, Dict, List, Tuple, Union + +import plumbum +from dataclasses_json import dataclass_json +from flytekit import WorkflowExecutionPhase +from flytekit.core.base_task import PythonTask +from flytekit.core.workflow import WorkflowBase +from flytekit.exceptions.system import FlyteSystemException +from flytekit.exceptions.user import FlyteTimeout +from flytekit.remote import FlyteRemote +from flytekit.remote.executions import FlyteWorkflowExecution +from hydra.conf import HelpConf, HydraConf, JobConf +from hydra_zen import ZenStore, builds, make_custom_builds_fn + + +@dataclass_json +@dataclass +class EntityConfig: + inputs: Dict[str, Any] + module_name: str = "lrwine" + entity_name: str = "training_workflow" + entity_type: str = "WorkflowBase" + + +# @dataclass_json +# @dataclass +# class EntityConfigs: +# entities: Dict[str, Any] = MISSING + + +fbuilds = make_custom_builds_fn(populate_full_signature=True) + + +def generate_entity_configs( + parent_module_path: str, entity_store: ZenStore, logger: logging.Logger +) -> None: + """ + Generates and stores configurations for entities found in a specified + module. + + This function iterates over all submodules in the parent module defined by + `parent_module_path`. For each submodule, it looks for entities that are + instances of either `WorkflowBase` or `PythonTask` and generates + configurations for them using `EntityConfig`. These configurations are then + stored in `entity_store`. + + Args: + parent_module_path (str): The import path of the parent module to search + for entities. + entity_store (ZenStore): The store where generated entity configurations + will be kept. + logger (logging.Logger): Logger for debugging and logging information. + + Returns: + None: The function does not return anything. It populates the + `entity_store` with configurations for each entity of type EntityTypes. + """ + parent_module = importlib.import_module(parent_module_path) + EntityTypes = (WorkflowBase, PythonTask) + + # iterate over submodules in the parent module + for submodule_info in pkgutil.iter_modules( + parent_module.__path__, parent_module.__name__ + "." 
+    ):
+        # import the submodule
+        submodule = importlib.import_module(submodule_info.name)
+        logger.debug(f"Checking submodule: {submodule_info.name}")
+
+        # import entities that are instances of EntityTypes
+        entities = inspect.getmembers(
+            submodule,
+            # TODO: validate that PythonTasks function as expected
+            lambda member: isinstance(member, EntityTypes),
+            # lambda member: isinstance(member, WorkflowBase),
+        )
+
+        for entity_name, entity in entities:
+            logger.debug(f"Found entity: {entity_name}")
+
+            # construct an instance (or a configuration) of the entity
+            module_name = submodule_info.name.split(".")[-1]
+            entity_inputs = generate_entity_inputs(entity)
+            entity_instance = fbuilds(
+                EntityConfig,
+                inputs=builds(dict, entity_inputs, hydra_convert="all"),
+                module_name=module_name,
+                entity_name=entity_name,
+                entity_type=type(entity).__name__,
+            )
+
+            # store the entity instance in the entity_store
+            composed_name = module_name + "_" + entity_name
+            entity_store(entity_instance, name=composed_name)
+            logger.debug(f"Stored entity: {composed_name} in entity_store")
+
+
+def generate_entity_inputs(
+    entity: Union[WorkflowBase, PythonTask],
+) -> Dict[str, Any]:
+    """
+    Generates a dictionary of inputs for a given entity.
+
+    This function inspects the signature of the provided `entity`, which can be
+    either a `WorkflowBase` or a `PythonTask`. For each parameter in the
+    signature, it determines the type and default value (if any). If the type is
+    a built-in type, it directly uses the default value. For custom types, it
+    dynamically imports and constructs a configuration object using `fbuilds`.
+
+    Args:
+        entity (Union[WorkflowBase, PythonTask]): The entity for which to
+            generate input configurations.
+
+    Returns:
+        Dict[str, Any]: A dictionary whose keys are the names of the entity
+        inputs and whose values are hydra-zen configurations that will build
+        their respective default values.
+    """
+    inputs = {}
+
+    for name, param in inspect.signature(entity).parameters.items():
+        param_type = param.annotation
+        default = param.default
+
+        # check if the type is a built-in type
+        if isinstance(param_type, type) and param_type.__module__ == "builtins":
+            inputs[name] = default
+        else:
+            # dynamically import the type if it's not a built-in type
+            type_module = importlib.import_module(param_type.__module__)
+            custom_type = getattr(type_module, param_type.__name__)
+
+            inputs[name] = fbuilds(custom_type)
+
+    return inputs
+
+
+def random_alphanumeric_suffix(input_string: str = "", length: int = 3) -> str:
+    # append `length` random lowercase alphanumeric characters to input_string
+    # (using str.join here would interleave input_string between characters)
+    return input_string + "".join(
+        secrets.choice("abcdefghijklmnopqrstuvwxyz0123456789")
+        for _ in range(length)
+    )
+
+
+def check_required_env_vars(
+    required_vars: List[str], logger: logging.Logger
+) -> bool:
+    """
+    Checks that the required environment variables for workflow configuration
+    are set. Returns True if all are present, False otherwise.
+    """
+
+    missing_vars = [var for var in required_vars if os.environ.get(var) is None]
+    if missing_vars:
+        missing_vars_str = ", ".join(missing_vars)
+        logger.error(
+            f"Missing required environment variables: {missing_vars_str}"
+        )
+        return False
+    return True
+
+
+def git_info_to_workflow_version(
+    logger: logging.Logger,
+) -> Tuple[str, str, str]:
+    """
+    Retrieves git information for workflow versioning using plumbum.
+
+    This function extracts the repository name, current branch name, and short
+    commit SHA. It handles the case where the Git repository is in a detached
+    HEAD state, common in CI environments like GitHub Actions for pull
+    requests.
+
+    Args:
+        logger (logging.Logger): Logger object for logging messages.
+
+    Returns:
+        Tuple[str, str, str]: A tuple containing the repository name,
+        branch name, and short commit SHA.
+
+    Raises:
+        ValueError: If unable to extract the source commit SHA from the
+            commit message.
+        ProcessExecutionError: If a git command fails.
+
+    Example:
+        >>> import logging
+        >>> logger = logging.getLogger()
+        >>> # Assuming this test is run in a Git repository
+        >>> repo_name, branch, short_sha = git_info_to_workflow_version(logger)
+        >>> print(isinstance(repo_name, str), isinstance(branch, str), isinstance(short_sha, str))
+        True True True
+    """
+    try:
+        git = plumbum.local["git"]
+        git_branch = git("rev-parse", "--abbrev-ref", "HEAD").strip()
+
+        if git_branch.lower() == "head":
+            git("fetch", "origin", "+refs/heads/*:refs/remotes/origin/*")
+
+            commit_message = git("log", "-1", "--pretty=%B")
+            match = re.search(r"Merge ([0-9a-f]{40}) into", commit_message)
+            if match:
+                source_commit_sha = match.group(1)
+
+                git_branch_list = (
+                    git("branch", "-r", "--contains", source_commit_sha)
+                    .strip()
+                    .split("\n")
+                )
+                git_branch = (
+                    next(
+                        (
+                            branch
+                            for branch in git_branch_list
+                            if "HEAD" not in branch
+                        ),
+                        "",
+                    )
+                    .replace("origin/", "")
+                    .strip()
+                )
+            else:
+                git_branch_from_detached_head_failure = (
+                    "Unable to extract source commit SHA from commit message."
+                )
+                raise ValueError(git_branch_from_detached_head_failure)
+
+        git_short_sha = git("rev-parse", "--short", "HEAD").strip()
+        remote_url = git("config", "--get", "remote.origin.url").strip()
+        # removesuffix rather than rstrip: rstrip strips characters, not a
+        # suffix, and would mangle names ending in 'g', 'i', or 't'
+        repo_name = remote_url.split("/")[-1].removesuffix(".git")
+
+        for string in [repo_name, git_branch, git_short_sha]:
+            if any(char.isupper() for char in string):
+                logger.warning(
+                    f"String '{string}' contains capitalized characters. Converting to lowercase."
+                )
+
+        return repo_name.lower(), git_branch.lower(), git_short_sha.lower()
+
+    except plumbum.commands.processes.ProcessExecutionError as e:
+        logger.error(f"Error obtaining git information: {e}")
+        raise
+
+
+def generate_hydra_config() -> HydraConf:
+    return HydraConf(
+        defaults=[
+            {"output": "default"},
+            {"launcher": "basic"},  # joblib
+            {"sweeper": "basic"},
+            {"help": "default"},
+            {"hydra_help": "default"},
+            {"hydra_logging": "none"},  # default
+            {"job_logging": "none"},  # default
+            {"callbacks": None},
+            {"env": "default"},
+        ],
+        help=HelpConf(
+            header=dedent(
+                """
+                This is the ${hydra.help.app_name} help accessible via `${hydra.help.app_name} -h`.
+
+                Use `${hydra.help.app_name} -c job` to view the ${hydra.help.app_name} configuration alone.
+                See the end of this help page for instructions on how to install shell tab completion for
+                configuration overrides.
+
+                ${hydra.help.app_name} is the CLI of a template designed to illustrate the integration of:
+
+                * hydra-zen (https://mit-ll-responsible-ai.github.io/hydra-zen/),
+                * hydra (https://hydra.cc/), and
+                * omegaconf (https://omegaconf.readthedocs.io/),
+
+                which provide configuration management, with
+
+                * flyte(kit) (https://flyte.org/),
+
+                which manages the registration and execution of Flyte workflows.
+                ${hydra.help.app_name} can be adapted as an auxiliary component of any Python package,
+                enhancing its capabilities in managing complex workflow configuration
+                and execution.
+
+                Running `${hydra.help.app_name} -c job` displays the current configuration of ${hydra.help.app_name}.
+                This reflects what will be executed if `dnadiffusion` is run without arguments.
+
+                Use `${hydra.help.app_name} -c hydra` to view the associated hydra configuration.
+
+                """
+            ),
+            footer=dedent(
+                """
+                You can test CLI configuration overrides after `-c job`, e.g.:
+
+                * `${hydra.help.app_name} -c job execution_context=remote_prod`
+                * `${hydra.help.app_name} -c job entity_config=example_wf`
+                # The inputs arguments in the following example must correspond
+                # to those supported by the specified entity_config.
+                * `${hydra.help.app_name} -c job \\
+                   entity_config=lrwine_training_workflow \\
+                   entity_config.inputs._args_.0.logistic_regression.max_iter=1200`
+                # The following two examples are only meant for task testing.
+                # This example only works in the local_shell execution context.
+                # See: https://github.com/flyteorg/flyte/issues/4275
+                #      https://github.com/flyteorg/flyte/issues/1312
+                * `${hydra.help.app_name} -c job \\
+                   execution_context=local_shell \\
+                   entity_config=lrwine_process_data \\
+                   entity_config.inputs._args_=[]`
+                # This will fail without specifying the inputs as it does not
+                # automatically instantiate the default arguments of the task.
+                * `${hydra.help.app_name} -c job \\
+                   execution_context=local_cluster_dev \\
+                   entity_config=lrwine_process_data \\
+                   entity_config.inputs._args_.0.data.data="[[12.0, 0],[13.0, 1],[9.5, 2]]" \\
+                   entity_config.inputs._args_.0.data.columns="[ash, target]"`
+
+
+                This regenerates the `== Config ==` section above, resolved in
+                the context of the command-line overrides. Removing the `-c job`
+                flag will execute the workflow with the specified configuration.
+                The resolved configuration will be stored in the `outputs` or
+                `multirun` directories.
+
+                Use `${hydra.help.app_name} --hydra-help` to view the hydra
+                help. This contains, for example, the commands to install shell
+                tab completion. For example in bash or zsh, if the active
+                configuration has path
+                `entity_config.inputs._args_.0.logistic_regression`
+                representing the parameters of a
+                sklearn.linear_model.LogisticRegression instance:
+
+                > eval "$$(dnadiffusion -sc install=bash)"
+                > dnadiffusion entity_config.inputs._args_.0.logistic_regression.[TAB]
+                entity_config.inputs._args_.0.logistic_regression.C=
+                entity_config.inputs._args_.0.logistic_regression._target_=
+                entity_config.inputs._args_.0.logistic_regression.class_weight=
+                entity_config.inputs._args_.0.logistic_regression.dual=
+                ..."""
+            ),
+            template=dedent(
+                """
+                ${hydra.help.header}
+                == Configuration groups ==
+                First override default group values (group=option)
+
+                $APP_CONFIG_GROUPS
+
+                == Config ==
+                Then override any element in the config (foo.bar=value)
+                that is not set exclusively by an environment variable [see doc(strings)]
+
+                $CONFIG
+                ${hydra.help.footer}
+                """
+            ),
+        ),
+        job=JobConf(name="dnadiffusion"),
+    )
+
+
+def get_user_input(input_queue):
+    """
+    Gets user input and puts it in the queue.
+    """
+    user_input = input("Terminate workflow execution? (y/N after 1 min.): ")
+    input_queue.put(user_input)
+
+
+def wait_for_workflow_completion(
+    execution: FlyteWorkflowExecution,
+    remote: FlyteRemote,
+    logger: logging.Logger,
+) -> None:
+    """
+    Waits for the execution to complete, checking status at regular intervals.
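+
+    On KeyboardInterrupt, the last synced status is logged and the user is
+    prompted (with a one-minute timeout, defaulting to "n") to decide whether
+    the remote execution should be terminated; otherwise the script exits,
+    leaving the execution running.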
+ """ + timeout_duration = timedelta(seconds=3.0) + synced_execution = None + try: + while True: + try: + completed_execution = remote.wait( + execution, timeout=timeout_duration + ) + logger.info(f"Execution completed:\n\n{completed_execution}\n") + if completed_execution.error is None: + break + else: + logger.error( + f"Execution failed with error:\n\n{completed_execution.error}\n" + ) + sys.exit(1) + except FlyteTimeout: + synced_execution = remote.sync(execution) + logger.info(f"Current status:\n\n{synced_execution}\n") + time.sleep(timeout_duration.total_seconds()) + except KeyboardInterrupt: + if synced_execution is not None: + logger.info(f"Status at KeyboardInterrupt:\n\n{synced_execution}\n") + else: + logger.info( + "KeyboardInterrupt caught before execution status sync." + ) + + input_queue = queue.Queue() + input_thread = threading.Thread( + target=get_user_input, args=(input_queue,) + ) + input_thread.daemon = True + input_thread.start() + + try: + response = input_queue.get(timeout=60) + response = response.strip().lower() + except queue.Empty: + response = "n" + + synced_execution = remote.sync(execution) + if synced_execution.closure.phase in [WorkflowExecutionPhase.RUNNING]: + try: + if response in ["y", "yes"]: + remote.terminate( + execution, "KeyboardInterrupt confirmed termination" + ) + logger.info("Workflow execution terminated.") + else: + logger.warning( + f"\nExiting script without terminating workflow execution:\n\n{execution}\n" + ) + except FlyteSystemException as e: + logger.error( + f"Error while trying to terminate the execution: {e}" + ) + else: + logger.info( + f"Workflow execution already in terminal state: {synced_execution.closure.phase}" + ) + + sys.exit() + + +# ----------- +# DEPRECATED +# ----------- + + +def generate_workflow_inputs( + workflow_import_path: str = "dnadiffusion.workflows.lrwine", + workflow_name: str = "training_workflow", +) -> Dict[str, Any]: + """ + Deprecated in favor of `generate_entity_inputs`. + """ + module = importlib.import_module(workflow_import_path) + workflow = getattr(module, workflow_name) + + if not callable(workflow): + value_error_message = f"Workflow '{workflow_name}' is not callable" + raise ValueError(value_error_message) + + inputs = {} + + for name, param in inspect.signature(workflow).parameters.items(): + param_type = param.annotation + default = param.default + + # check if the type is a built-in type (like int, str, etc.) + if isinstance(param_type, type) and param_type.__module__ == "builtins": + inputs[name] = default + else: + # dynamically import the type if it's not a built-in type + type_module = importlib.import_module(param_type.__module__) + custom_type = getattr(type_module, param_type.__name__) + + inputs[name] = builds(custom_type) + + return inputs diff --git a/src/dnadiffusion/logging.py b/src/dnadiffusion/logging.py new file mode 100644 index 00000000..a70bc9f8 --- /dev/null +++ b/src/dnadiffusion/logging.py @@ -0,0 +1,46 @@ +import logging +import os + +from rich.console import Console +from rich.logging import RichHandler +from rich.theme import Theme + + +def configure_logging(logger_name: str = "dnadiffusion") -> logging.Logger: + """ + Configures logging with rich handler and checks for valid log level from + environment. + + Defaults to `INFO` if no valid log level is found. 
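+
+    A minimal usage sketch (assuming this module is importable as
+    `dnadiffusion.logging`):
+
+        >>> from dnadiffusion.logging import configure_logging
+        >>> logger = configure_logging()
+        >>> logger.info("logging configured")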
+ """ + console_theme = Theme( + { + "logging.level.info": "dim cyan", + "logging.level.warning": "magenta", + "logging.level.error": "bold red", + "logging.level.debug": "green", + } + ) + console = Console(theme=console_theme) + rich_handler = RichHandler( + console=console, + rich_tracebacks=True, + show_time=True, + show_level=True, + show_path=False, + markup=True, + log_time_format="[%X]", + ) + valid_log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] + log_level = os.getenv("LOG_LEVEL", "INFO").upper() + + if log_level not in valid_log_levels: + log_level = "INFO" + + logging.basicConfig( + level=log_level, + format="%(name)s %(message)s", + datefmt="[%X]", + handlers=[rich_handler], + ) + return logging.getLogger(logger_name) diff --git a/src/dnadiffusion/metrics/metrics.py b/src/dnadiffusion/metrics/metrics.py index 2c4b527c..4180e0a5 100644 --- a/src/dnadiffusion/metrics/metrics.py +++ b/src/dnadiffusion/metrics/metrics.py @@ -7,13 +7,18 @@ from scipy.special import rel_entr from tqdm import tqdm -from dnadiffusion.utils.sample_util import convert_sample_to_fasta, create_sample, extract_motifs +from dnadiffusion.utils.sample_util import ( + convert_sample_to_fasta, + extract_motifs, +) from dnadiffusion.utils.utils import one_hot_encode def compare_motif_list(df_motifs_a: pd.DataFrame, df_motifs_b: pd.DataFrame): # Using KL divergence to compare motifs lists distribution - set_all_mot = set(df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist()) + set_all_mot = set( + df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist() + ) create_new_matrix = [] for x in set_all_mot: list_in = [] @@ -30,11 +35,19 @@ def compare_motif_list(df_motifs_a: pd.DataFrame, df_motifs_b: pd.DataFrame): create_new_matrix.append(list_in) - df_motifs = pd.DataFrame(create_new_matrix, columns=["motif", "motif_a", "motif_b"]) - - df_motifs["Diffusion_seqs"] = df_motifs["motif_a"] / df_motifs["motif_a"].sum() - df_motifs["Training_seqs"] = df_motifs["motif_b"] / df_motifs["motif_b"].sum() - kl_pq = rel_entr(df_motifs["Diffusion_seqs"].values, df_motifs["Training_seqs"].values) + df_motifs = pd.DataFrame( + create_new_matrix, columns=["motif", "motif_a", "motif_b"] + ) + + df_motifs["Diffusion_seqs"] = ( + df_motifs["motif_a"] / df_motifs["motif_a"].sum() + ) + df_motifs["Training_seqs"] = ( + df_motifs["motif_b"] / df_motifs["motif_b"].sum() + ) + kl_pq = rel_entr( + df_motifs["Diffusion_seqs"].values, df_motifs["Training_seqs"].values + ) return np.sum(kl_pq) @@ -66,7 +79,9 @@ def kl_heatmap( return final_comp_kl -def generate_heatmap(df_heat: pd.DataFrame, x_label: str, y_label: str, cell_list: list): +def generate_heatmap( + df_heat: pd.DataFrame, x_label: str, y_label: str, cell_list: list +): plt.clf() plt.rcdefaults() plt.rcParams["figure.figsize"] = (10, 10) @@ -74,7 +89,9 @@ def generate_heatmap(df_heat: pd.DataFrame, x_label: str, y_label: str, cell_lis df_plot.columns = [x.split("_")[0] for x in cell_list] df_plot.index = df_plot.columns sns.heatmap(df_plot, cmap="Blues_r", annot=True, lw=0.1, vmax=1, vmin=0) - plt.title(f"Kl divergence \n {x_label} sequences x {y_label} sequences \n MOTIFS probabilities") + plt.title( + f"Kl divergence \n {x_label} sequences x {y_label} sequences \n MOTIFS probabilities" + ) plt.xlabel(f"{x_label} Sequences \n(motifs dist)") plt.ylabel(f"{y_label} \n (motifs dist)") plt.grid(False) @@ -85,7 +102,11 @@ def generate_similarity_metric(): """Capture the syn_motifs.fasta and compare with the dataset motifs""" nucleotides = ["A", 
"C", "G", "T"] seqs_file = open("synthetic_motifs.fasta").readlines() - seqs_to_hotencoder = [one_hot_encode(s.replace("\n", ""), nucleotides, 200).T for s in seqs_file if ">" not in s] + seqs_to_hotencoder = [ + one_hot_encode(s.replace("\n", ""), nucleotides, 200).T + for s in seqs_file + if ">" not in s + ] return seqs_to_hotencoder @@ -95,7 +116,9 @@ def get_best_match(db, x_seq): # transforming in a function def calculate_mean_similarity(database, input_query_seqs, seq_len=200): - final_base_max_match = np.mean([get_best_match(database, x) for x in tqdm(input_query_seqs)]) + final_base_max_match = np.mean( + [get_best_match(database, x) for x in tqdm(input_query_seqs)] + ) return final_base_max_match / seq_len @@ -103,4 +126,6 @@ def generate_similarity_using_train(X_train_in): convert_X_train = X_train_in.copy() convert_X_train[convert_X_train == -1] = 0 generated_seqs_to_similarity = generate_similarity_metric() - return calculate_mean_similarity(convert_X_train, generated_seqs_to_similarity) + return calculate_mean_similarity( + convert_X_train, generated_seqs_to_similarity + ) diff --git a/src/dnadiffusion/metrics/motif_composition.py b/src/dnadiffusion/metrics/motif_composition.py index 4b08e4d8..0b49985e 100644 --- a/src/dnadiffusion/metrics/motif_composition.py +++ b/src/dnadiffusion/metrics/motif_composition.py @@ -32,9 +32,13 @@ def motif_composition_matrix( # Extract motifs from sequence file df_motifs = motif_composition_helper(main_df) # Parsing motifs from JASPAR2020_vertebrates.pfm - motifs_dict = parse_motif_file(file_path=motif_pfm_path, download_data=download_data) + motifs_dict = parse_motif_file( + file_path=motif_pfm_path, download_data=download_data + ) - df_motifs["motifs_id_number"] = df_motifs["motifs"].apply(lambda x: motifs_dict[x]) + df_motifs["motifs_id_number"] = df_motifs["motifs"].apply( + lambda x: motifs_dict[x] + ) motif_count = [] full_motif_list = df_motifs[0].unique().tolist() for k, v_df in df_motifs.groupby([0]): @@ -55,12 +59,19 @@ def motif_composition_matrix( main_df.index = main_df["ID"].values df_captured_motifs.index = df_captured_motifs["ID"].values output_df = pd.concat( - [main_df[[x for x in main_df.columns if x != "ID"]], df_captured_motifs.loc[main_df["ID"].values]], axis=1 + [ + main_df[[x for x in main_df.columns if x != "ID"]], + df_captured_motifs.loc[main_df["ID"].values], + ], + axis=1, ) return output_df -def parse_motif_file(file_path: str = f"{DATA_DIR}/JASPAR2020_vertebrates.pfm", download_data: bool = False) -> dict: +def parse_motif_file( + file_path: str = f"{DATA_DIR}/JASPAR2020_vertebrates.pfm", + download_data: bool = False, +) -> dict: """Given a file path to the motif pfm file, return a sorted dictionary of motifs.""" if download_data: # Download JASPAR2020_vertebrates.pfm diff --git a/src/dnadiffusion/models/diffusion.py b/src/dnadiffusion/models/diffusion.py index a4d0f4ae..6b4fa032 100644 --- a/src/dnadiffusion/models/diffusion.py +++ b/src/dnadiffusion/models/diffusion.py @@ -29,7 +29,9 @@ def __init__( self.register_buffer("alphas_cumprod_prev", alphas_cumprod_prev) self.register_buffer("sqrt_recip_alphas", torch.sqrt(1.0 / alphas)) self.register_buffer("sqrt_alphas_cumprod", torch.sqrt(alphas_cumprod)) - self.register_buffer("sqrt_one_minus_alphas_cumprod", torch.sqrt(1.0 - alphas_cumprod)) + self.register_buffer( + "sqrt_one_minus_alphas_cumprod", torch.sqrt(1.0 - alphas_cumprod) + ) self.register_buffer( "posterior_variance", betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod), @@ -57,7 +59,9 @@ def 
sample_cross(self, classes, shape, cond_weight): ) @torch.no_grad() - def p_sample_loop(self, classes, image_size, cond_weight, get_cross_map=False): + def p_sample_loop( + self, classes, image_size, cond_weight, get_cross_map=False + ): b = image_size[0] device = self.device @@ -84,7 +88,11 @@ def p_sample_loop(self, classes, image_size, cond_weight, get_cross_map=False): sampling_fn = partial(self.p_sample) for i in reversed(range(0, self.timesteps)): - img, cross_matrix = sampling_fn(x=img, t=torch.full((b,), i, device=device, dtype=torch.long), t_index=i) + img, cross_matrix = sampling_fn( + x=img, + t=torch.full((b,), i, device=device, dtype=torch.long), + t_index=i, + ) imgs.append(img.cpu().numpy()) cross_images_final.append(cross_matrix.cpu().numpy()) @@ -96,12 +104,17 @@ def p_sample_loop(self, classes, image_size, cond_weight, get_cross_map=False): @torch.no_grad() def p_sample(self, x, t, t_index): betas_t = extract(self.betas, t, x.shape) - sqrt_one_minus_alphas_cumprod_t = extract(self.sqrt_one_minus_alphas_cumprod, t, x.shape) + sqrt_one_minus_alphas_cumprod_t = extract( + self.sqrt_one_minus_alphas_cumprod, t, x.shape + ) sqrt_recip_alphas_t = extract(self.sqrt_recip_alphas, t, x.shape) # Equation 11 in the paper # Use our model (noise predictor) to predict the mean - model_mean = sqrt_recip_alphas_t * (x - betas_t * self.model(x, time=t) / sqrt_one_minus_alphas_cumprod_t) + model_mean = sqrt_recip_alphas_t * ( + x + - betas_t * self.model(x, time=t) / sqrt_one_minus_alphas_cumprod_t + ) if t_index == 0: return model_mean @@ -112,7 +125,9 @@ def p_sample(self, x, t, t_index): return model_mean + torch.sqrt(posterior_variance_t) * noise @torch.no_grad() - def p_sample_guided(self, x, classes, t, t_index, context_mask, cond_weight): + def p_sample_guided( + self, x, classes, t, t_index, context_mask, cond_weight + ): # adapted from: https://openreview.net/pdf?id=qw8AKxfYbI batch_size = x.shape[0] device = self.device @@ -120,15 +135,21 @@ def p_sample_guided(self, x, classes, t, t_index, context_mask, cond_weight): t_double = t.repeat(2).to(device) x_double = x.repeat(2, 1, 1, 1).to(device) betas_t = extract(self.betas, t_double, x_double.shape, device) - sqrt_one_minus_alphas_cumprod_t = extract(self.sqrt_one_minus_alphas_cumprod, t_double, x_double.shape, device) - sqrt_recip_alphas_t = extract(self.sqrt_recip_alphas, t_double, x_double.shape, device) + sqrt_one_minus_alphas_cumprod_t = extract( + self.sqrt_one_minus_alphas_cumprod, t_double, x_double.shape, device + ) + sqrt_recip_alphas_t = extract( + self.sqrt_recip_alphas, t_double, x_double.shape, device + ) # classifier free sampling interpolates between guided and non guided using `cond_weight` classes_masked = classes * context_mask classes_masked = classes_masked.type(torch.long) # model = self.accelerator.unwrap_model(self.model) self.model.output_attention = True - preds, cross_map_full = self.model(x_double, time=t_double, classes=classes_masked) + preds, cross_map_full = self.model( + x_double, time=t_double, classes=classes_masked + ) self.model.output_attention = False cross_map = cross_map_full[:batch_size] eps1 = (1 + cond_weight) * preds[:batch_size] @@ -138,33 +159,52 @@ def p_sample_guided(self, x, classes, t, t_index, context_mask, cond_weight): # Equation 11 in the paper # Use our model (noise predictor) to predict the mean model_mean = sqrt_recip_alphas_t[:batch_size] * ( - x - betas_t[:batch_size] * x_t / sqrt_one_minus_alphas_cumprod_t[:batch_size] + x + - betas_t[:batch_size] + * x_t + / 
sqrt_one_minus_alphas_cumprod_t[:batch_size] ) if t_index == 0: return model_mean, cross_map else: - posterior_variance_t = extract(self.posterior_variance, t, x.shape, device) + posterior_variance_t = extract( + self.posterior_variance, t, x.shape, device + ) noise = torch.randn_like(x) # Algorithm 2 line 4: - return model_mean + torch.sqrt(posterior_variance_t) * noise, cross_map + return ( + model_mean + torch.sqrt(posterior_variance_t) * noise, + cross_map, + ) def q_sample(self, x_start, t, noise=None): noise = default(noise, torch.randn_like(x_start)) device = self.device - sqrt_alphas_cumprod_t = extract(self.sqrt_alphas_cumprod, t, x_start.shape, device) - sqrt_one_minus_alphas_cumprod_t = extract(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape, device) + sqrt_alphas_cumprod_t = extract( + self.sqrt_alphas_cumprod, t, x_start.shape, device + ) + sqrt_one_minus_alphas_cumprod_t = extract( + self.sqrt_one_minus_alphas_cumprod, t, x_start.shape, device + ) - return sqrt_alphas_cumprod_t * x_start + sqrt_one_minus_alphas_cumprod_t * noise + return ( + sqrt_alphas_cumprod_t * x_start + + sqrt_one_minus_alphas_cumprod_t * noise + ) - def p_losses(self, x_start, t, classes, noise=None, loss_type="huber", p_uncond=0.1): + def p_losses( + self, x_start, t, classes, noise=None, loss_type="huber", p_uncond=0.1 + ): device = self.device noise = default(noise, torch.randn_like(x_start)) x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) - context_mask = torch.bernoulli(torch.zeros(classes.shape[0]) + (1 - p_uncond)).to(device) + context_mask = torch.bernoulli( + torch.zeros(classes.shape[0]) + (1 - p_uncond) + ).to(device) # Mask for unconditional guidance classes = classes * context_mask diff --git a/src/dnadiffusion/models/layers.py b/src/dnadiffusion/models/layers.py index 249453e6..8fa75304 100644 --- a/src/dnadiffusion/models/layers.py +++ b/src/dnadiffusion/models/layers.py @@ -1,5 +1,4 @@ import math -from typing import Optional import torch import torch.nn.functional as F @@ -91,7 +90,11 @@ def __init__(self, input_dim: int, emb_dim: int) -> None: generic one layer FC NN for embedding things """ self.input_dim = input_dim - layers = [nn.Linear(input_dim, emb_dim), nn.GELU(), nn.Linear(emb_dim, emb_dim)] + layers = [ + nn.Linear(input_dim, emb_dim), + nn.GELU(), + nn.Linear(emb_dim, emb_dim), + ] self.model = nn.Sequential(*layers) def forward(self, x: torch.Tensor): @@ -186,13 +189,26 @@ def forward(self, x: torch.Tensor, scale_shift: torch.Tensor | None = None): class ResnetBlock(nn.Module): - def __init__(self, dim: int, dim_out: int, *, time_emb_dim: int | None, groups: int = 8) -> None: + def __init__( + self, + dim: int, + dim_out: int, + *, + time_emb_dim: int | None, + groups: int = 8, + ) -> None: super().__init__() - self.mlp = (nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2))) if exists(time_emb_dim) else None + self.mlp = ( + (nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2))) + if exists(time_emb_dim) + else None + ) self.block1 = Block(dim, dim_out, groups=groups) self.block2 = Block(dim_out, dim_out, groups=groups) - self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity() + self.res_conv = ( + nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity() + ) def forward(self, x: torch.Tensor, time_emb: torch.Tensor | None = None): scale_shift = None @@ -215,12 +231,17 @@ def __init__(self, dim: int, heads: int = 4, dim_head: int = 32) -> None: self.heads = heads hidden_dim = dim_head * heads self.to_qkv = 
nn.Conv2d(dim, hidden_dim * 3, 1, bias=False) - self.to_out = nn.Sequential(nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim)) + self.to_out = nn.Sequential( + nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim) + ) def forward(self, x: torch.Tensor): b, c, h, w = x.shape qkv = self.to_qkv(x).chunk(3, dim=1) - q, k, v = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv) + q, k, v = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv + ) q = q.softmax(dim=-2) k = k.softmax(dim=-1) @@ -231,12 +252,16 @@ def forward(self, x: torch.Tensor): context = torch.einsum("b h d n, b h e n -> b h d e", k, v) out = torch.einsum("b h d e, b h d n -> b h e n", context, q) - out = rearrange(out, "b h c (x y) -> b (h c) x y", h=self.heads, x=h, y=w) + out = rearrange( + out, "b h c (x y) -> b (h c) x y", h=self.heads, x=h, y=w + ) return self.to_out(out) class Attention(nn.Module): - def __init__(self, dim: int, heads: int = 4, dim_head: int = 32, scale: int = 10) -> None: + def __init__( + self, dim: int, heads: int = 4, dim_head: int = 32, scale: int = 10 + ) -> None: super().__init__() self.scale = scale self.heads = heads @@ -247,7 +272,10 @@ def __init__(self, dim: int, heads: int = 4, dim_head: int = 32, scale: int = 10 def forward(self, x: torch.Tensor): b, c, h, w = x.shape qkv = self.to_qkv(x).chunk(3, dim=1) - q, k, v = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv) + q, k, v = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv + ) q, k = map(l2norm, (q, k)) @@ -259,7 +287,9 @@ def forward(self, x: torch.Tensor): class CrossAttention_lucas(nn.Module): - def __init__(self, dim: int, heads: int = 1, dim_head: int = 32, scale: int = 10) -> None: + def __init__( + self, dim: int, heads: int = 1, dim_head: int = 32, scale: int = 10 + ) -> None: super().__init__() self.scale = scale self.heads = heads @@ -274,9 +304,15 @@ def forward(self, x: torch.Tensor, y: torch.Tensor): qkv_x = self.to_qkv(x).chunk(3, dim=1) qkv_y = self.to_qkv(y).chunk(3, dim=1) - q_x, k_x, v_x = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv_x) + q_x, k_x, v_x = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv_x + ) - q_y, k_y, v_y = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv_y) + q_y, k_y, v_y = ( + rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) + for t in qkv_y + ) q, k = map(l2norm, (q_x, k_y)) diff --git a/src/dnadiffusion/models/unet.py b/src/dnadiffusion/models/unet.py index 64557867..962f0e02 100644 --- a/src/dnadiffusion/models/unet.py +++ b/src/dnadiffusion/models/unet.py @@ -1,6 +1,5 @@ import itertools from functools import partial -from typing import Optional from memory_efficient_attention_pytorch import Attention as EfficientAttention @@ -66,7 +65,9 @@ def __init__( block_klass(dim_in, dim_in, time_emb_dim=time_dim), block_klass(dim_in, dim_in, time_emb_dim=time_dim), Residual(PreNorm(dim_in, LinearAttention(dim_in))), - Downsample(dim_in, dim_out) if not is_last else nn.Conv2d(dim_in, dim_out, 3, padding=1), + Downsample(dim_in, dim_out) + if not is_last + else nn.Conv2d(dim_in, dim_out, 3, padding=1), ] ) ) @@ -81,10 +82,16 @@ def __init__( self.ups.append( nn.ModuleList( [ - block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim), - block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim), + block_klass( + dim_out + dim_in, dim_out, time_emb_dim=time_dim + ), + block_klass( + dim_out + dim_in, dim_out, time_emb_dim=time_dim + ), 
Residual(PreNorm(dim_out, LinearAttention(dim_out))), - Upsample(dim_out, dim_in) if not is_last else nn.Conv2d(dim_out, dim_in, 3, padding=1), + Upsample(dim_out, dim_in) + if not is_last + else nn.Conv2d(dim_out, dim_in, 3, padding=1), ] ) ) @@ -101,7 +108,9 @@ def __init__( ) self.norm_to_cross = nn.LayerNorm(dim * 4) - def forward(self, x: torch.Tensor, time: torch.Tensor, classes: torch.Tensor): + def forward( + self, x: torch.Tensor, time: torch.Tensor, classes: torch.Tensor + ): x = self.init_conv(x) r = x.clone() diff --git a/src/dnadiffusion/utils/data_util.py b/src/dnadiffusion/utils/data_util.py index 2f828d0c..55a335c2 100644 --- a/src/dnadiffusion/utils/data_util.py +++ b/src/dnadiffusion/utils/data_util.py @@ -4,11 +4,8 @@ import matplotlib.pyplot as plt import pandas as pd from Bio import SeqIO -from IPython.display import HTML, display from tqdm import tqdm -from dnadiffusion import DATA_DIR - class DataSource: # Sourced from https://github.com/meuleman/SynthSeqs/blob/main/make_data/source.py @@ -27,7 +24,10 @@ class ReferenceGenome(DataSource): @classmethod def from_path(cls, path): - genome_dict = {record.id: str(record.seq).upper() for record in SeqIO.parse(path, "fasta")} + genome_dict = { + record.id: str(record.seq).upper() + for record in SeqIO.parse(path, "fasta") + } return cls(genome_dict, path) @classmethod @@ -95,7 +95,9 @@ def __init__(self, data): self.data = pd.read_csv(data, sep="\t") def extract_seq(self, tag, cell_type): - return self.data.query(f'TAG == "{tag}" and CELL_TYPE == "{cell_type}" ').copy() + return self.data.query( + f'TAG == "{tag}" and CELL_TYPE == "{cell_type}" ' + ).copy() def seq_extract(data_path: str, tag: str, cell_type: str): @@ -150,7 +152,9 @@ def geneid2genename(self, gene_list): list[str] """ gtf_df = self.get_gtf_df() - dict_conversion = dict(zip(gtf_df["gene_id"].values, gtf_df["gene_name"].values)) + dict_conversion = dict( + zip(gtf_df["gene_id"].values, gtf_df["gene_name"].values) + ) return [dict_conversion[g] for g in gene_list] def __add_interval_lenght(self): @@ -160,7 +164,9 @@ def __add_interval_lenght(self): def get_first_exon_df(gtf_df): """Group genes by transcript id and returns a df with first exont relative to strand""" out_new_df = [] - for k, v in gtf_df.query("feature == 'exon' and exon_number == '1' ").groupby("transcript_id"): + for k, v in gtf_df.query( + "feature == 'exon' and exon_number == '1' " + ).groupby("transcript_id"): out_new_df.append(v) return pd.concat(out_new_df) @@ -170,45 +176,106 @@ def get_last_exon_df(gtf_df): """Group genes by transcript id and returns a df with last exont relative to strand""" out_new_df = [] - for k, v in tqdm(gtf_df.query("feature == 'exon' ").groupby("transcript_id")): + for k, v in tqdm( + gtf_df.query("feature == 'exon' ").groupby("transcript_id") + ): if v.iloc[0].strand == "+": - out_new_df.append(v.sort_values("end", ascending=True).iloc[-1].values) + out_new_df.append( + v.sort_values("end", ascending=True).iloc[-1].values + ) # print v.sort_values('exon_number').iloc[0] if v.iloc[0].strand == "-": - out_new_df.append(v.sort_values("start", ascending=True).iloc[0].values) + out_new_df.append( + v.sort_values("start", ascending=True).iloc[0].values + ) return pd.DataFrame(out_new_df, columns=gtf_df.columns) @staticmethod - def df_to_bed(gtf_df, bed_file_name, fourth_position_feature="gene_name", fifth_position_feature="transcript_id"): + def df_to_bed( + gtf_df, + bed_file_name, + fourth_position_feature="gene_name", + fifth_position_feature="transcript_id", + ): 
"""Save a bed_file using a gtf as reference and returns the bed_file_name string""" - print(gtf_df[["chr", "start", "end", fourth_position_feature, fifth_position_feature, "strand"]].head()) - - gtf_df[["chr", "start", "end", fourth_position_feature, fifth_position_feature, "strand"]].to_csv( - bed_file_name, sep="\t", header=None, index=None + print( + gtf_df[ + [ + "chr", + "start", + "end", + fourth_position_feature, + fifth_position_feature, + "strand", + ] + ].head() ) + + gtf_df[ + [ + "chr", + "start", + "end", + fourth_position_feature, + fifth_position_feature, + "strand", + ] + ].to_csv(bed_file_name, sep="\t", header=None, index=None) return bed_file_name @staticmethod - def df_to_df_bed(gtf_df, fourth_position_feature="gene_name", fifth_position_feature="transcript_id"): + def df_to_df_bed( + gtf_df, + fourth_position_feature="gene_name", + fifth_position_feature="transcript_id", + ): """Save a bed_file using a gtf as reference and returns df with a bed6 format""" - print(gtf_df[["chr", "start", "end", fourth_position_feature, fifth_position_feature, "strand"]].head()) + print( + gtf_df[ + [ + "chr", + "start", + "end", + fourth_position_feature, + fifth_position_feature, + "strand", + ] + ].head() + ) - return gtf_df[["chr", "start", "end", fourth_position_feature, fifth_position_feature, "strand"]] + return gtf_df[ + [ + "chr", + "start", + "end", + fourth_position_feature, + fifth_position_feature, + "strand", + ] + ] @staticmethod def hist_generate(gtf_df, feature="transcript_biotype"): """ ex: GTFProcessing.hist_generate(gtf_to_test.head(1600), 'transcript_biotype') """ - x_axis_feature = GTFProcessing.get_first_exon_df(gtf_df).groupby(feature).count()["start"] + x_axis_feature = ( + GTFProcessing.get_first_exon_df(gtf_df) + .groupby(feature) + .count()["start"] + ) plt.bar(range(0, x_axis_feature.values.shape[0]), x_axis_feature.values) print(x_axis_feature.keys()) print(x_axis_feature.values) - plt.xticks(range(0, x_axis_feature.values.shape[0]), (x_axis_feature.keys().values), rotation="vertical") + plt.xticks( + range(0, x_axis_feature.values.shape[0]), + (x_axis_feature.keys().values), + rotation="vertical", + ) plt.title(feature) plt.show() @@ -222,11 +289,21 @@ def capture_distal_unique_tes(gtf_df): last_exon_df = GTFProcessing.get_last_exon_df(gtf_df) for k, v in tqdm(last_exon_df.groupby("gene_id")): if v.iloc[0]["strand"] == "+": - return_distal_exon.append(v.sort_values("end", ascending=False).iloc[0].values.tolist()) + return_distal_exon.append( + v.sort_values("end", ascending=False) + .iloc[0] + .values.tolist() + ) if v.iloc[0]["strand"] == "-": - return_distal_exon.append(v.sort_values("start", ascending=True).iloc[0].values.tolist()) - - df_distal_exon_by_gene_id = pd.DataFrame(return_distal_exon, columns=last_exon_df.columns.values.tolist()) + return_distal_exon.append( + v.sort_values("start", ascending=True) + .iloc[0] + .values.tolist() + ) + + df_distal_exon_by_gene_id = pd.DataFrame( + return_distal_exon, columns=last_exon_df.columns.values.tolist() + ) return df_distal_exon_by_gene_id @staticmethod @@ -235,19 +312,41 @@ def capture_distal_unique_tss(gtf_df): first_exon_df = GTFProcessing.get_first_exon_df(gtf_df) for k, v in tqdm(first_exon_df.groupby("gene_id")): if v.iloc[0]["strand"] == "+": - return_distal_tss.append(v.sort_values("start", ascending=True).iloc[0].values.tolist()) + return_distal_tss.append( + v.sort_values("start", ascending=True) + .iloc[0] + .values.tolist() + ) if v.iloc[0]["strand"] == "-": - 
return_distal_tss.append(v.sort_values("end", ascending=False).iloc[0].values.tolist()) - - df_distal_exon_by_gene_id = pd.DataFrame(return_distal_tss, columns=first_exon_df.columns.values.tolist()) + return_distal_tss.append( + v.sort_values("end", ascending=False) + .iloc[0] + .values.tolist() + ) + + df_distal_exon_by_gene_id = pd.DataFrame( + return_distal_tss, columns=first_exon_df.columns.values.tolist() + ) return df_distal_exon_by_gene_id def motif_composition_helper(df: pd.DataFrame): fasta_file = open(f"synthetic_motifs.fasta", "w") - fasta_file.write("\n".join(df[["SEQUENCE", "ID"]].apply(lambda x: f">{x['ID']}\n{x['SEQUENCE']}", axis=1).tolist())) + fasta_file.write( + "\n".join( + df[["SEQUENCE", "ID"]] + .apply(lambda x: f">{x['ID']}\n{x['SEQUENCE']}", axis=1) + .tolist() + ) + ) fasta_file.close() - os.system(f"gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 -n 20 > syn_results_motifs.bed") - df_motifs = pd.read_csv(f"syn_results_motifs.bed", sep="\t", skiprows=5, header=None) - df_motifs["motifs"] = df_motifs[8].apply(lambda x: x.split('motif_name "')[1].split('"')[0]) + os.system( + f"gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 -n 20 > syn_results_motifs.bed" + ) + df_motifs = pd.read_csv( + f"syn_results_motifs.bed", sep="\t", skiprows=5, header=None + ) + df_motifs["motifs"] = df_motifs[8].apply( + lambda x: x.split('motif_name "')[1].split('"')[0] + ) return df_motifs diff --git a/src/dnadiffusion/utils/sample_util.py b/src/dnadiffusion/utils/sample_util.py index e7b8e47c..27261ea4 100644 --- a/src/dnadiffusion/utils/sample_util.py +++ b/src/dnadiffusion/utils/sample_util.py @@ -1,5 +1,4 @@ import os -from typing import Optional import numpy as np import pandas as pd @@ -36,10 +35,15 @@ def create_sample( classes, (sample_bs, 1, 4, 200), cond_weight_to_metric ) # save cross attention maps in a numpy array - np.save(f"cross_att_values_{conditional_numeric_to_tag[group_number]}.npy", cross_att_values) + np.save( + f"cross_att_values_{conditional_numeric_to_tag[group_number]}.npy", + cross_att_values, + ) else: - sampled_images = diffusion_model.sample(classes, (sample_bs, 1, 4, 200), cond_weight_to_metric) + sampled_images = diffusion_model.sample( + classes, (sample_bs, 1, 4, 200), cond_weight_to_metric + ) if save_timesteps: seqs_to_df = {} @@ -54,7 +58,10 @@ def create_sample( else: for n_b, x in enumerate(sampled_images[-1]): seq_final = f">seq_test_{n_a}_{n_b}\n" + "".join( - [nucleotides[s] for s in np.argmax(x.reshape(4, 200), axis=0)] + [ + nucleotides[s] + for s in np.argmax(x.reshape(4, 200), axis=0) + ] ) final_sequences.append(seq_final) @@ -70,7 +77,9 @@ def create_sample( if save_dataframe: # Saving list of sequences to txt file - with open(f"final_{conditional_numeric_to_tag[group_number]}.txt", "w") as f: + with open( + f"final_{conditional_numeric_to_tag[group_number]}.txt", "w" + ) as f: f.write("\n".join(final_sequences)) return @@ -83,12 +92,22 @@ def extract_motifs(sequence_list: list): motifs = open("synthetic_motifs.fasta", "w") motifs.write("\n".join(sequence_list)) motifs.close() - os.system("gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 -n 20> syn_results_motifs.bed") - df_results_syn = pd.read_csv("syn_results_motifs.bed", sep="\t", skiprows=5, header=None) - - df_results_syn["motifs"] = df_results_syn[8].apply(lambda x: x.split('motif_name "')[1].split('"')[0]) - df_results_syn[0] = df_results_syn[0].apply(lambda x: "_".join(x.split("_")[:-1])) - df_motifs_count_syn = 
df_results_syn[[0, "motifs"]].groupby("motifs").count() + os.system( + "gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 -n 20> syn_results_motifs.bed" + ) + df_results_syn = pd.read_csv( + "syn_results_motifs.bed", sep="\t", skiprows=5, header=None + ) + + df_results_syn["motifs"] = df_results_syn[8].apply( + lambda x: x.split('motif_name "')[1].split('"')[0] + ) + df_results_syn[0] = df_results_syn[0].apply( + lambda x: "_".join(x.split("_")[:-1]) + ) + df_motifs_count_syn = ( + df_results_syn[[0, "motifs"]].groupby("motifs").count() + ) return df_motifs_count_syn diff --git a/src/dnadiffusion/utils/train_util.py b/src/dnadiffusion/utils/train_util.py index f8ce33de..17893b46 100644 --- a/src/dnadiffusion/utils/train_util.py +++ b/src/dnadiffusion/utils/train_util.py @@ -2,14 +2,16 @@ from typing import Any import torch -import torchvision.transforms as T from accelerate import Accelerator from torch.optim import Adam from torch.utils.data import DataLoader from tqdm import tqdm from dnadiffusion.data.dataloader import SequenceDataset -from dnadiffusion.metrics.metrics import compare_motif_list, generate_similarity_using_train +from dnadiffusion.metrics.metrics import ( + compare_motif_list, + generate_similarity_using_train, +) from dnadiffusion.utils.sample_util import create_sample from dnadiffusion.utils.utils import EMA @@ -43,7 +45,9 @@ def __init__( if self.accelerator.is_main_process: self.ema = EMA(0.995) - self.ema_model = copy.deepcopy(self.model).eval().requires_grad_(False) + self.ema_model = ( + copy.deepcopy(self.model).eval().requires_grad_(False) + ) # Metrics self.train_kl, self.test_kl, self.shuffle_kl = 1, 1, 1 @@ -52,18 +56,31 @@ def __init__( self.start_epoch = 1 # Dataloader - seq_dataset = SequenceDataset(seqs=self.encode_data["X_train"], c=self.encode_data["x_train_cell_type"]) - self.train_dl = DataLoader(seq_dataset, batch_size=batch_size, shuffle=True, num_workers=8, pin_memory=True) + seq_dataset = SequenceDataset( + seqs=self.encode_data["X_train"], + c=self.encode_data["x_train_cell_type"], + ) + self.train_dl = DataLoader( + seq_dataset, + batch_size=batch_size, + shuffle=True, + num_workers=8, + pin_memory=True, + ) def train_loop(self): # Prepare for training - self.model, self.optimizer, self.train_dl = self.accelerator.prepare(self.model, self.optimizer, self.train_dl) + self.model, self.optimizer, self.train_dl = self.accelerator.prepare( + self.model, self.optimizer, self.train_dl + ) # Initialize wandb if self.accelerator.is_main_process: self.accelerator.init_trackers( "dnadiffusion", - init_kwargs={"wandb": {"notes": "testing wandb accelerate script"}}, + init_kwargs={ + "wandb": {"notes": "testing wandb accelerate script"} + }, ) for epoch in tqdm(range(self.start_epoch, self.epochs + 1)): @@ -76,15 +93,24 @@ def train_loop(self): loss = self.train_step(batch) # Logging loss - if self.global_step % self.log_step_show == 0 and self.accelerator.is_main_process: + if ( + self.global_step % self.log_step_show == 0 + and self.accelerator.is_main_process + ): self.log_step(loss, epoch) # Sampling - if epoch % self.sample_epoch == 0 and self.accelerator.is_main_process: + if ( + epoch % self.sample_epoch == 0 + and self.accelerator.is_main_process + ): self.sample() # Saving model - if epoch % self.save_epoch == 0 and self.accelerator.is_main_process: + if ( + epoch % self.save_epoch == 0 + and self.accelerator.is_main_process + ): self.save_model(epoch) def train_step(self, batch): @@ -100,7 +126,9 @@ def train_step(self, batch): 
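+        # Synchronize all ranks before the main process applies the EMA update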
self.accelerator.wait_for_everyone() if self.accelerator.is_main_process: - self.ema.step_ema(self.ema_model, self.accelerator.unwrap_model(self.model)) + self.ema.step_ema( + self.ema_model, self.accelerator.unwrap_model(self.model) + ) self.accelerator.wait_for_everyone() return loss @@ -130,10 +158,18 @@ def sample(self): cell_types=self.encode_data["cell_types"], number_of_samples=int(self.num_sampling_to_compare_cells / 10), ) - self.seq_similarity = generate_similarity_using_train(self.encode_data["X_train"]) - self.train_kl = compare_motif_list(synt_df, self.encode_data["train_motifs"]) - self.test_kl = compare_motif_list(synt_df, self.encode_data["test_motifs"]) - self.shuffle_kl = compare_motif_list(synt_df, self.encode_data["shuffle_motifs"]) + self.seq_similarity = generate_similarity_using_train( + self.encode_data["X_train"] + ) + self.train_kl = compare_motif_list( + synt_df, self.encode_data["train_motifs"] + ) + self.test_kl = compare_motif_list( + synt_df, self.encode_data["test_motifs"] + ) + self.shuffle_kl = compare_motif_list( + synt_df, self.encode_data["shuffle_motifs"] + ) print("Similarity", self.seq_similarity, "Similarity") print("KL_TRAIN", self.train_kl, "KL") print("KL_TEST", self.test_kl, "KL") diff --git a/src/dnadiffusion/utils/utils.py b/src/dnadiffusion/utils/utils.py index d9db165d..b65a327e 100644 --- a/src/dnadiffusion/utils/utils.py +++ b/src/dnadiffusion/utils/utils.py @@ -83,19 +83,27 @@ def __init__(self, beta: float = 0.995) -> None: self.beta = beta self.step = 0 - def update_model_average(self, ma_model: nn.Module, current_model: nn.Module) -> None: - for current_params, ma_params in zip(current_model.parameters(), ma_model.parameters()): + def update_model_average( + self, ma_model: nn.Module, current_model: nn.Module + ) -> None: + for current_params, ma_params in zip( + current_model.parameters(), ma_model.parameters() + ): old_weight, up_weight = ma_params.data, current_params.data ma_params.data = self.update_average(old_weight, up_weight) - def update_average(self, old: torch.Tensor, new: torch.Tensor) -> torch.Tensor: + def update_average( + self, old: torch.Tensor, new: torch.Tensor + ) -> torch.Tensor: if old is None: return new device = new.device old = old.to(device) return old * self.beta + (1 - self.beta) * new - def step_ema(self, ema_model: nn.Module, model: nn.Module, step_start_ema: int = 500) -> None: + def step_ema( + self, ema_model: nn.Module, model: nn.Module, step_start_ema: int = 500 + ) -> None: if self.step < step_start_ema: self.reset_parameters(ema_model, model) self.step += 1 @@ -119,7 +127,9 @@ def cosine_beta_schedule(timesteps: int, s: float = 0.008): """ steps = timesteps + 1 x = torch.linspace(0, timesteps, steps) - alphas_cumprod = torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2 + alphas_cumprod = ( + torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2 + ) alphas_cumprod = alphas_cumprod / alphas_cumprod[0] betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1]) return torch.clip(betas, 0.0001, 0.9999) diff --git a/src/refactor/tests/__init__.py b/src/dnadiffusion/workflows/__init__.py similarity index 100% rename from src/refactor/tests/__init__.py rename to src/dnadiffusion/workflows/__init__.py diff --git a/src/dnadiffusion/workflows/example.py b/src/dnadiffusion/workflows/example.py new file mode 100644 index 00000000..056672fd --- /dev/null +++ b/src/dnadiffusion/workflows/example.py @@ -0,0 +1,53 @@ +""" +A simple Flyte example. 
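+
+The module can be executed directly via its __main__ block, which runs the
+workflow locally without a Flyte cluster.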
+"""
+
+import typing
+
+from flytekit import task, workflow
+
+
+@task
+def say_hello(name: str = "testing say_hello") -> str:
+    """
+    A simple Flyte task to say "hello".
+
+    The @task decorator allows Flyte to use this function as a Flyte task, which
+    is executed as an isolated, containerized unit of compute.
+    """
+    return f"hello {name}!"
+
+
+@task
+def greeting_length(greeting: str = "ninechars") -> int:
+    """
+    A task that counts the length of a greeting.
+    """
+    return len(greeting)
+
+
+@workflow
+def wf(name: str = "union") -> typing.Tuple[str, int]:
+    """
+    Declare a workflow called `wf`.
+
+    The @workflow decorator defines an execution graph that is composed of tasks
+    and potentially sub-workflows. In this simple example, the workflow is
+    composed of two chained tasks.
+
+    There are a few important things to note about workflows:
+    - Workflows are a domain-specific language (DSL) for creating execution
+      graphs and therefore only support a subset of Python's behavior.
+    - Tasks must be invoked with keyword arguments.
+    - The output variables of tasks are Promises, which are placeholders for
+      values that are yet to be materialized, not the actual values.
+    """
+    greeting = say_hello(name=name)
+    greeting_len = greeting_length(greeting=greeting)
+    return greeting, greeting_len
+
+
+if __name__ == "__main__":
+    # Execute the workflow, simply by invoking it like a function and passing in
+    # the necessary parameters
+    print(f"Running wf() { wf(name='passengers') }")
diff --git a/src/dnadiffusion/workflows/lrwine.py b/src/dnadiffusion/workflows/lrwine.py
new file mode 100644
index 00000000..a5f15e01
--- /dev/null
+++ b/src/dnadiffusion/workflows/lrwine.py
@@ -0,0 +1,201 @@
+from dataclasses import asdict, make_dataclass
+from datetime import timedelta
+from pprint import pformat
+from typing import Any, Dict, Optional, Tuple, Type
+
+import joblib
+import pandas as pd
+
+# from dataclasses_json import DataClassJsonMixin as DataClassJSONMixin
+from flytekit import Resources, task, workflow
+from flytekit.extras.accelerators import T4
+from flytekit.types.file import JoblibSerializedFile
+from mashumaro.mixins.json import DataClassJSONMixin
+from sklearn.datasets import load_wine
+from sklearn.linear_model import LogisticRegression
+
+from dnadiffusion.configuration import create_dataclass_from_callable
+from dnadiffusion.logging import configure_logging
+
+logger = configure_logging("dnadiffusion.workflows.lrwine")
+
+# This is an optional dictionary that can be used to override the
+# default types and values inferred from the callable when necessary.
+# For this example, we provide commented-out defaults to illustrate
+# the types that are inferred from the callable and the ability to
+# override them.
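+# Entries with a default of None (class_weight, random_state, n_jobs,
+# l1_ratio) are given explicit Optional[...] types below, since a None
+# default alone does not reveal the intended parameter type. Any further
+# override follows the same (type, default) tuple convention; a
+# hypothetical entry forcing a custom tolerance would look like:
+#     "tol": (float, 1e-3),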
+custom_types_defaults: Dict[str, Tuple[Type, Any]] = { + # "penalty": (str, "l2"), + # "dual": (bool, False), + # "tol": (float, 1e-4), + # "C": (float, 1.0), + # "fit_intercept": (bool, True), + # "intercept_scaling": (int, 1), + "class_weight": (Optional[dict], None), + "random_state": (Optional[int], None), + # "solver": (str, "lbfgs"), + "max_iter": (int, 1200), + # "multi_class": (str, "auto"), + # "verbose": (int, 0), + # "warm_start": (bool, False), + "n_jobs": (Optional[int], None), + "l1_ratio": (Optional[float], None), +} + +logistic_regression_fields = create_dataclass_from_callable( + LogisticRegression, custom_types_defaults +) + +LogisticRegressionInterface = make_dataclass( + "LogisticRegressionInterface", + logistic_regression_fields, + bases=(DataClassJSONMixin,), + # TODO: Python 3.12, https://github.com/python/cpython/pull/102104 + # module=__name__, +) +LogisticRegressionInterface.__module__ = __name__ + + +# The following can be used to test dynamic dataclass construction +# with multiple dataclasses of distinct types. +# from sklearn.linear_model import LinearRegression + +# linear_regression_custom_types: Dict[str, Tuple[Type,Any]] = { +# "n_jobs": (Optional[int], None), +# } +# linear_regression_fields = create_dataclass_from_callable( +# LinearRegression, linear_regression_custom_types +# ) + +# LinearRegressionInterface = make_dataclass( +# "LinearRegressionInterface", +# linear_regression_fields, +# bases=(DataClassJSONMixin,), +# ) +# LinearRegressionInterface.__module__ = __name__ + + +sample_columns = [ + "alcohol", + "target", +] + +sample_data = [ + [13.0, 0], + [14.0, 1], + [12.5, 2], +] + + +@task( + cache=True, + cache_version="0.1.0", + retries=3, + interruptible=False, + timeout=timedelta(minutes=20), + container_image="{{.image.gpu.fqn}}:{{.image.gpu.version}}", + requests=Resources( + cpu="200m", mem="400Mi", ephemeral_storage="1Gi", gpu="1" + ), + accelerator=T4, +) +def get_data() -> pd.DataFrame: + """ + Get the wine dataset. + """ + # import time + + # time.sleep(7200) + return load_wine(as_frame=True).frame + + +@task( + cache=False, + cache_version="0.1.0", + retries=3, + interruptible=True, + timeout=timedelta(minutes=10), + requests=Resources(cpu="200m", mem="400Mi", ephemeral_storage="1Gi"), +) +def process_data( + data: pd.DataFrame = pd.DataFrame(data=sample_data, columns=sample_columns), +) -> pd.DataFrame: + """ + Simplify the task from a 3-class to a binary classification problem. + """ + return data.assign(target=lambda x: x["target"].where(x["target"] == 0, 1)) + + +@task( + cache=True, + cache_version="0.1.0", + retries=3, + interruptible=True, + timeout=timedelta(minutes=10), + requests=Resources(cpu="200m", mem="400Mi", ephemeral_storage="1Gi"), +) +def train_model( + data: pd.DataFrame = pd.DataFrame(data=sample_data, columns=sample_columns), + logistic_regression: LogisticRegressionInterface = LogisticRegressionInterface( + max_iter=1200 + ), +) -> JoblibSerializedFile: + """ + Train a model on the wine dataset. 
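+
+    Hyperparameters arrive as an instance of the dynamically generated
+    LogisticRegressionInterface dataclass and are unpacked into
+    scikit-learn's LogisticRegression via asdict().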
+    """
+    features = data.drop("target", axis="columns")
+    target = data["target"]
+    logger.info(f"{pformat(logistic_regression)}\n\n")
+    model = LogisticRegression(**asdict(logistic_regression))
+    # Fit the classifier on the wine features before serializing it
+    model.fit(features, target)
+    model_path = "logistic_regression_model.joblib"
+    joblib.dump(model, model_path)
+    model_file = JoblibSerializedFile(model_path)
+    return model_file
+
+
+@workflow
+def training_workflow(
+    logistic_regression: LogisticRegressionInterface = LogisticRegressionInterface(
+        max_iter=2000
+    ),
+    # linear_regression: LinearRegressionInterface = LinearRegressionInterface(),
+) -> JoblibSerializedFile:
+    """
+    Put all of the steps together into a single workflow.
+    """
+    data = get_data()
+    processed_data = process_data(data=data)
+    return train_model(
+        data=processed_data,
+        logistic_regression=logistic_regression,
+    )
+
+
+if __name__ == "__main__":
+    # Execute the workflow, simply by invoking it like a function and passing in
+    # the necessary parameters
+    print(f"Running process_data() { process_data() }")
+    print(f"Running training_workflow() { training_workflow() }")
+
+# sample_columns = [
+#     "alcohol",
+#     "malic_acid",
+#     "ash",
+#     "alcalinity_of_ash",
+#     "magnesium",
+#     "total_phenols",
+#     "flavanoids",
+#     "nonflavanoid_phenols",
+#     "proanthocyanins",
+#     "color_intensity",
+#     "hue",
+#     "od280/od315_of_diluted_wines",
+#     "proline",
+#     "target",
+# ]

+# sample_data = [
+#     [13.0, 1.5, 2.3, 15.0, 110, 2.5, 3.0, 0.3, 1.5, 4.0, 1.0, 3.0, 1000, 0],
+#     [14.0, 1.6, 2.4, 16.0, 120, 2.6, 3.1, 0.4, 1.6, 5.0, 1.1, 3.1, 1100, 1],
+#     [12.5, 1.4, 2.2, 14.0, 100, 2.4, 2.9, 0.2, 1.4, 3.5, 0.9, 2.9, 900, 2],
+# ]
diff --git a/src/refactor/README.md b/src/refactor/README.md
deleted file mode 100644
index 43eb39f1..00000000
--- a/src/refactor/README.md
+++ /dev/null
@@ -1,96 +0,0 @@
-## Config Structure
-
-Current hypothetical config folder structure is as follows:
-
-```
-├── configs
-    ├── callbacks
-        ├── default.yaml
-    ├── dataset
-        ├── sequence.yaml
-    ├── logger
-        ├── wandb.yaml
-    ├── model
-        ├── unet.yaml
-        ├── unet_conditional.yaml
-        ├── unet_bitdiffusion.yaml
-    ├── paths
-        ├── default.yaml
-    ├── train.yaml
-```
-
-As new items (models, datasets, etc.) are added, a corresponding config file can be included so that minimal parameter altering is needed across various experiments
-
-## How to Run
-
-Below contains the main training config file that can be altered to fit any training alterations that are desired.
-Every parameter listed under defaults is defined within a config listed above.
-
-
-Training config - -```yaml -defaults: - - model: unet_conditional - - dataset: sequence - - logger: wandb - - callbacks: default - -ckpt: null # path to checkpoint -seed: 42 -batch_size: 32 -devices: gpu -benchmark: True -ckpt_dir: # path still to be defined -accelerator: gpu -strategy: ddp -min_epochs: 5 -max_epochs: 100000 -gradient_clip_val: 1.0 -accumulate_grad_batches: 1 -log_every_n_steps: 1 -check_val_every_n_epoch: 1 #for debug purposes -save_last: True -precision: 32 -``` - -
- -### Using hydra config in a Jupyter Notebook - -Including the following at the beginning of a jupyter notebook will initialize hydra, load defined training config, and then print it. - -```python -from hydra import compose, initialize -from omegaconf import OmegaConf - -initialize(version_base=None, config_path="./src/configs") -cfg = compose(config_name="train") -print(OmegaConf.to_yaml(cfg)) -``` - -When initializing hydra it is possible to override any of the default assignments. -Here is an example of overriding batch_size and seed while initializing hydra: - -```python -from hydra import compose, initialize -from omegaconf import OmegaConf - -initialize(version_base=None, config_path="./src/configs") -cfg = compose(overrides=["batch_size=64", "seed=1"]) -print(OmegaConf.to_yaml(cfg)) -``` - -The following link to hydra documentation provides more information on override syntax:
-https://hydra.cc/docs/advanced/override_grammar/basic/
- -For more information regarding hydra initialization in jupyter see the following link: -https://github.com/facebookresearch/hydra/blob/main/examples/jupyter_notebooks/compose_configs_in_notebook.ipynb - -## Still To Do: - -- Alter training script to accommodate all logs we wish to track using wandb -- Decide on default hyperparameters in train.yaml -- Further alter config folder structure to best suit our training and testing practices -- Define default paths for dataset within path config file so that directory can be referenced across various other configs -- Hydra config logs currently output in src directory, creating the following folder structure ./src/outputs/YYYY-MM-DD/MM-HH-SS. If we wish to alter this it can be done in a hydra config file. diff --git a/src/refactor/config.py b/src/refactor/config.py deleted file mode 100644 index 88044774..00000000 --- a/src/refactor/config.py +++ /dev/null @@ -1,25 +0,0 @@ -### file to include dataclass definition -from dataclasses import dataclass - -from hydra.core.config_store import ConfigStore - -### needs overhaul with new folder structure -### ignore for now -""" -@dataclass -class DNADiffusionConfig: - defaults: - - _self_ - - optimizer: adam - - lr_scheduler: MultiStepLR - - unet: unet_conditional - - _target_: str = "__main__.trgt" # dotpath describing location of callable - timesteps: 200 - use_fp16: True - criterion: torch.nn.MSELoss #utils.metrics.MetricName - use_ema: True - ema_decay: float = 0.999 - lr_warmup: 5000 - image_size: 200 -""" diff --git a/src/refactor/configs/callbacks/default.yaml b/src/refactor/configs/callbacks/default.yaml deleted file mode 100644 index 88e0eac1..00000000 --- a/src/refactor/configs/callbacks/default.yaml +++ /dev/null @@ -1,13 +0,0 @@ -save_checkpoint: - target: pytorch_lightning.callbacks.ModelCheckpoint - params: - path: # to be entered - monitor: val_loss - mode: min - save_top_k: 10 - save_last: True - -learning_rate: - target: pytorch_lightning.callbacks.LearningRateMonitor - params: - logging_interval: epoch diff --git a/src/refactor/configs/data/sequence.yaml b/src/refactor/configs/data/sequence.yaml deleted file mode 100644 index 71ead66c..00000000 --- a/src/refactor/configs/data/sequence.yaml +++ /dev/null @@ -1,15 +0,0 @@ -train_dl: - _target_: data.sequence_dataloader.SequenceDataModule - train_path: # add training dataset path - batch_size: 32 - num_workers: 4 - # transform: - # _target_: # directly reference transforms -> torchvision.transforms.ToTensor - -val_dl: - _target_: data.sequence_dataloader.SequenceDataModule - val_path: # add validation dataset path - batch_size: 32 - num_workers: 4 - #transform: - # _target_: # directly reference transforms -> torchvision.transforms.ToTensor diff --git a/src/refactor/configs/data/vanilla_sequences.yaml b/src/refactor/configs/data/vanilla_sequences.yaml deleted file mode 100644 index f8c46fa8..00000000 --- a/src/refactor/configs/data/vanilla_sequences.yaml +++ /dev/null @@ -1,8 +0,0 @@ -_target_: "src.data.sequence_datamodule.SequenceDataModule" -data_dir: ${paths.data_dir} -sequence_length: 200 -sequence_encoding: polar -cell_type_transform: None -batch_size: 128 -num_workers: 0 -#pin_memory: False diff --git a/src/refactor/configs/logger/wandb.yaml b/src/refactor/configs/logger/wandb.yaml deleted file mode 100644 index d9f53ddb..00000000 --- a/src/refactor/configs/logger/wandb.yaml +++ /dev/null @@ -1,5 +0,0 @@ -wandb: - _target_: pytorch_lightning.loggers.wandb.WandbLogger - save_dir: "" - project: "" - log_model: False diff --git 
a/src/refactor/configs/main.yaml b/src/refactor/configs/main.yaml deleted file mode 100644 index 3d1ea933..00000000 --- a/src/refactor/configs/main.yaml +++ /dev/null @@ -1,13 +0,0 @@ -defaults: - - _self_ - - data: vanilla_sequences - - model: dnadiffusion - - logger: wandb - - trainer: ddp - - callbacks: default - - paths: default - -seed: 42 -train: True -test: True -ckpt_path: null diff --git a/src/refactor/configs/model/dnaddpmdiffusion.yaml b/src/refactor/configs/model/dnaddpmdiffusion.yaml deleted file mode 100644 index 4ebfbbba..00000000 --- a/src/refactor/configs/model/dnaddpmdiffusion.yaml +++ /dev/null @@ -1,16 +0,0 @@ -defaults: - - _self_ - - optimizer: adam - - lr_scheduler: MultiStepLR - - unet: unet_conditional - -_target_: models.diffusion.ddpm.DDPM -timesteps: 200 -use_fp16: True -criterion: torch.nn.MSELoss #utils.metrics.MetricName -use_ema: True -ema_decay: 0.999 -lr_warmup: 5000 -image_size: -beta_end: 0.02 -schedule: linear diff --git a/src/refactor/configs/model/dnadiffusion.yaml b/src/refactor/configs/model/dnadiffusion.yaml deleted file mode 100644 index 31feaa61..00000000 --- a/src/refactor/configs/model/dnadiffusion.yaml +++ /dev/null @@ -1,14 +0,0 @@ -defaults: - - _self_ - - optimizer: adam - - lr_scheduler: MultiStepLR - - unet: unet_conditional - -_target_: models.diffusion.diffusion.DiffusionModel -timesteps: 200 -use_fp16: True -criterion: torch.nn.MSELoss #utils.metrics.MetricName -use_ema: True -ema_decay: 0.999 -lr_warmup: 5000 -image_size: 200 diff --git a/src/refactor/configs/model/lr_scheduler/MultiStepLR.yaml b/src/refactor/configs/model/lr_scheduler/MultiStepLR.yaml deleted file mode 100644 index d1c879b7..00000000 --- a/src/refactor/configs/model/lr_scheduler/MultiStepLR.yaml +++ /dev/null @@ -1,4 +0,0 @@ -_target_: torch.optim.lr_scheduler.MultiStepLR -_partial_: True -milestones: [5, 10, 20] -gamma: 0.1 diff --git a/src/refactor/configs/model/optimizer/adam.yaml b/src/refactor/configs/model/optimizer/adam.yaml deleted file mode 100644 index b40e9884..00000000 --- a/src/refactor/configs/model/optimizer/adam.yaml +++ /dev/null @@ -1,3 +0,0 @@ -_target_: torch.optim.Adam -_partial_: True -lr: 0.02 diff --git a/src/refactor/configs/model/unet/unet.yaml b/src/refactor/configs/model/unet/unet.yaml deleted file mode 100644 index 07db7de2..00000000 --- a/src/refactor/configs/model/unet/unet.yaml +++ /dev/null @@ -1,9 +0,0 @@ -_target_: models.networks.unet_lucas.UNetLucas -dim: 200 -init_dim: 200 -dim_mults: [1, 2, 4] -channels: 1 -resnet_block_groups: 8 -learned_sinusoidal_dim: 16 -num_classes: 10 -self_conditioned: False diff --git a/src/refactor/configs/model/unet/unet_conditional.yaml b/src/refactor/configs/model/unet/unet_conditional.yaml deleted file mode 100644 index f18a242a..00000000 --- a/src/refactor/configs/model/unet/unet_conditional.yaml +++ /dev/null @@ -1,9 +0,0 @@ -_target_: models.networks.unet_lucas_cond.UNet -dim: 200 -init_dim: 200 -dim_mults: [1, 2, 4] -channels: 1 -resnet_block_groups: 8 -learned_sinusoidal_dim: 16 -num_classes: 10 -class_embed_dim: 3 diff --git a/src/refactor/configs/paths/default.yaml b/src/refactor/configs/paths/default.yaml deleted file mode 100644 index 74da19c5..00000000 --- a/src/refactor/configs/paths/default.yaml +++ /dev/null @@ -1 +0,0 @@ -root: ${hydra:runtime.cwd}/ diff --git a/src/refactor/configs/trainer/ddp.yaml b/src/refactor/configs/trainer/ddp.yaml deleted file mode 100644 index 6d81d27e..00000000 --- a/src/refactor/configs/trainer/ddp.yaml +++ /dev/null @@ -1,4 +0,0 @@ -defaults: - - 
default.yaml - -strategy: ddp diff --git a/src/refactor/configs/trainer/default.yaml b/src/refactor/configs/trainer/default.yaml deleted file mode 100644 index 7496ce78..00000000 --- a/src/refactor/configs/trainer/default.yaml +++ /dev/null @@ -1,13 +0,0 @@ -_target_: pytorch_lightning.Trainer - -min_epochs: 1 -max_epochs: 100000 - -accelerator: gpu -devices: 1 -gradient_clip_val: 1.0 -accumulate_grad_batches: 1 -log_every_n_steps: 1 -save_last: True -precision: 32 -check_val_every_n_epoch: 1 #for debug purposes diff --git a/src/refactor/data/__pycache__/sequence_datamodule.cpython-39.pyc b/src/refactor/data/__pycache__/sequence_datamodule.cpython-39.pyc deleted file mode 100644 index d57e7ded..00000000 Binary files a/src/refactor/data/__pycache__/sequence_datamodule.cpython-39.pyc and /dev/null differ diff --git a/src/refactor/data/sequence_dataloader.py b/src/refactor/data/sequence_dataloader.py deleted file mode 100644 index 66a34465..00000000 --- a/src/refactor/data/sequence_dataloader.py +++ /dev/null @@ -1,194 +0,0 @@ -import numpy as np -import pandas as pd -import pytorch_lightning as pl -import torch -import torch.nn.functional as F -import torchvision.transforms as T -from torch.utils.data import DataLoader, Dataset - - -class SequenceDatasetBase(Dataset): - def __init__( - self, - data_path, - sequence_length: int = 200, - sequence_encoding: str = "polar", - sequence_transform=None, - cell_type_transform=None, - ) -> None: - super().__init__() - self.data = pd.read_csv(data_path, sep="\t") - self.sequence_length = sequence_length - self.sequence_encoding = sequence_encoding - self.sequence_transform = sequence_transform - self.cell_type_transform = cell_type_transform - self.alphabet = ["A", "C", "T", "G"] - self.check_data_validity() - - def __len__(self) -> int: - return len(self.data) - - def __getitem__(self, index): - # Iterating through DNA sequences from dataset and one-hot encoding all nucleotides - current_seq = self.data["raw_sequence"][index] - if "N" not in current_seq: - X_seq = self.encode_sequence(current_seq, encoding=self.sequence_encoding) - - # Reading cell component at current index - X_cell_type = self.data["component"][index] - - if self.sequence_transform is not None: - X_seq = self.sequence_transform(X_seq) - if self.cell_type_transform is not None: - X_cell_type = self.cell_type_transform(X_cell_type) - - return X_seq, X_cell_type - - def check_data_validity(self) -> None: - """ - Checks if the data is valid. - """ - if not set("".join(self.data["raw_sequence"])).issubset(set(self.alphabet)): - raise ValueError(f"Sequence contains invalid characters.") - - uniq_raw_seq_len = self.data["raw_sequence"].str.len().unique() - if len(uniq_raw_seq_len) != 1 or uniq_raw_seq_len[0] != self.sequence_length: - raise ValueError(f"The sequence length does not match the data.") - - def encode_sequence(self, seq, encoding): - """ - Encodes a sequence using the given encoding scheme ("polar", "onehot", "ordinal"). 
- """ - if encoding == "polar": - seq = self.one_hot_encode(seq).T - seq[seq == 0] = -1 - elif encoding == "onehot": - seq = self.one_hot_encode(seq).T - elif encoding == "ordinal": - seq = np.array([self.alphabet.index(n) for n in seq]) - else: - raise ValueError(f"Unknown encoding scheme: {encoding}") - return seq - - # Function for one hot encoding each line of the sequence dataset - def one_hot_encode(self, seq) -> np.ndarray: - """ - One-hot encoding a sequence - """ - seq_len = len(seq) - seq_array = np.zeros((self.sequence_length, len(self.alphabet))) - for i in range(seq_len): - seq_array[i, self.alphabet.index(seq[i])] = 1 - return seq_array - - -class SequenceDatasetTrain(SequenceDatasetBase): - def __init__(self, data_path="", **kwargs) -> None: - super().__init__(data_path=data_path, **kwargs) - - -class SequenceDatasetValidation(SequenceDatasetBase): - def __init__(self, data_path="", **kwargs) -> None: - super().__init__(data_path=data_path, **kwargs) - - -class SequenceDatasetTest(SequenceDatasetBase): - def __init__(self, data_path="", **kwargs) -> None: - super().__init__(data_path=data_path, **kwargs) - - -class SequenceDataModule(pl.LightningDataModule): - def __init__( - self, - train_path=None, - val_path=None, - test_path=None, - sequence_length: int = 200, - sequence_encoding: str = "polar", - sequence_transform=None, - cell_type_transform=None, - batch_size=None, - num_workers: int = 1, - ) -> None: - super().__init__() - self.datasets = {} - self.train_dataloader, self.val_dataloader, self.test_dataloader = ( - None, - None, - None, - ) - - if train_path: - self.datasets["train"] = train_path - self.train_dataloader = self._train_dataloader - - if val_path: - self.datasets["validation"] = val_path - self.val_dataloader = self._val_dataloader - - if test_path: - self.datasets["test"] = test_path - self.test_dataloader = self._test_dataloader - - self.sequence_length = sequence_length - self.sequence_encoding = sequence_encoding - self.sequence_transform = sequence_transform - self.cell_type_transform = cell_type_transform - self.batch_size = batch_size - self.num_workers = num_workers - - def prepare_data(self): - return - - def setup(self): - if "train" in self.datasets: - self.train_data = SequenceDatasetTrain( - data_path=self.datasets["train"], - sequence_length=self.sequence_length, - sequence_encoding=self.sequence_encoding, - sequence_transform=self.sequence_transform, - cell_type_transform=self.cell_type_transform, - ) - if "validation" in self.datasets: - self.val_data = SequenceDatasetValidation( - data_path=self.datasets["validation"], - sequence_length=self.sequence_length, - sequence_encoding=self.sequence_encoding, - sequence_transform=self.sequence_transform, - cell_type_transform=self.cell_type_transform, - ) - if "test" in self.datasets: - self.test_data = SequenceDatasetTest( - data_path=self.datasets["test"], - sequence_length=self.sequence_length, - sequence_encoding=self.sequence_encoding, - sequence_transform=self.sequence_transform, - cell_type_transform=self.cell_type_transform, - ) - - def _train_dataloader(self): - return DataLoader( - self.train_data, - self.batch_size, - shuffle=True, - num_workers=self.num_workers, - pin_memory=True, - ) - - def _val_dataloader(self): - return DataLoader( - self.val_data, - self.batch_size, - shuffle=True, - num_workers=self.num_workers, - pin_memory=True, - ) - - def _test_dataloader(self): - return DataLoader( - self.test_data, - self.batch_size, - shuffle=True, - num_workers=self.num_workers, - 
pin_memory=True, - ) diff --git a/src/refactor/data/sequence_datamodule.py b/src/refactor/data/sequence_datamodule.py deleted file mode 100644 index 53032f00..00000000 --- a/src/refactor/data/sequence_datamodule.py +++ /dev/null @@ -1,275 +0,0 @@ -import pickle -import random -from typing import Any, Dict, List, Optional, Tuple -import os -from pathlib import Path - -import pandas as pd -import numpy as np -import pytorch_lightning as pl -import torch -from pytorch_lightning import LightningDataModule -from pytorch_lightning.utilities import rank_zero_only -from torch.utils.data import ConcatDataset, DataLoader, Dataset, random_split - -import torchvision.transforms as T - -from refactor.utils.data import get_motif, read_master_dataset, subset_by_experiment -from refactor.utils.misc import one_hot_encode - -DEFAULT_BASE_PATH = Path('.') -DEFAULT_DATA_DIR_PATH = DEFAULT_BASE_PATH / Path("data") -DEFAULT_DATA_ENCODE_FILENAME = "encode_data.pkl" -DEFAULT_DATA_ENCODE_PATH = DEFAULT_DATA_DIR_PATH / DEFAULT_DATA_ENCODE_FILENAME -DEFAULT_SEQUENCES_PER_GROUP_FILENAME = "K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt" -DEFAULT_SEQUENCES_PER_GROUP_PATH = DEFAULT_DATA_DIR_PATH / DEFAULT_SEQUENCES_PER_GROUP_FILENAME - -DEFAULT_SUBSET_COMPONENTS = [ - "GM12878_ENCLB441ZZZ", - "hESCT0_ENCLB449ZZZ", - "K562_ENCLB843GMH", - "HepG2_ENCLB029COU", -] - - -class SequenceDataset(Dataset): - def __init__( - self, - seqs: str, - c: str, - sequence_transform: Optional[T.Compose] = T.Compose([T.ToTensor()]), - cell_type_transform: Optional[T.Compose] = T.Compose([T.ToTensor()]), - ): - "Initialization" - self.seqs = seqs - self.c = c - self.sequence_transform = sequence_transform - self.cell_type_transform = cell_type_transform - - def __len__(self): - "Denotes the total number of samples" - return len(self.seqs) - - def __getitem__(self, index): - "Generates one sample of data" - x = self.seqs[index] - if self.sequence_transform: - x = self.transform(x) - - y = self.c[index] - if self.cell_type_transform: - y = self.cell_type_transform(y) - - return x, y - - -class SequenceDataModule(pl.LightningDataModule): - """ - PyTorch Lightning data module for sequence datasets. 
- - Args: - data_path (str): Path to the data - sequence_length (int): Length of the sequence - sequence_encoding (str): Encoding scheme for the sequence ("polar", "onehot", "ordinal") - sequence_transform (callable): Transformation for the sequence - cell_type_transform (callable): Transformation for the cell type - batch_size (int): Batch size - num_workers (int): Number of workers - """ - - df_train = None - df_validation = None - df_test = None - - train_chr: List[str] = None - val_chr: List[str] = ['chr1'] - test_chr: List[str] = ['chr2'] - - encode_data = None - train_dataset: Dataset = None - val_dataset: Dataset = None - test_dataset: Dataset = None - - datasets_per_split: Dict[str, Dataset] = dict() - motifs_per_split: Dict[str, Any] = dict() - motifs_per_components_dict_per_split: Dict[str, Dict[str, Any]] = dict() - - def __init__( - self, - data_dir: str, - encoded_filename: str = DEFAULT_DATA_ENCODE_FILENAME, - sequences_per_group_filename: str = DEFAULT_SEQUENCES_PER_GROUP_FILENAME, - sequence_length: int = 200, - sequence_encoding: str = "polar", - sequence_transform=None, - cell_type_transform=None, - batch_size=None, - num_workers: int = 0, - load_saved_data: bool = True, - train_chr: List[str] = None, - val_chr: List[str] = None, - test_chr: List[str] = None, - subset_components: List[str] = DEFAULT_SUBSET_COMPONENTS, - number_of_sequences_to_motif_creation: int = 1000, - ) -> None: - super().__init__() - self.save_hyperparameters(logger=False) - # self.df_train, self.df_validation, self.df_test = Optional[Dataset] = None - self.number_of_sequences_to_motif_creation = number_of_sequences_to_motif_creation - self.sequence_length = sequence_length - self.sequence_encoding = sequence_encoding - self.sequence_transform = sequence_transform - self.cell_type_transform = cell_type_transform - self.data_dir = data_dir # 'data' - self.data_path = Path(self.data_dir) - self.batch_size = batch_size - self.num_workers = num_workers - self.load_saved_data = load_saved_data - self.subset_components = subset_components - - self.encoded_filename = encoded_filename - self.sequences_per_group_filename = sequences_per_group_filename - - self.train_chr = train_chr - - if val_chr: - self.val_chr = val_chr - - if test_chr: - self.test_chr = test_chr - - def prepare_data(self) -> None: - if self.load_saved_data: - return - - print("Preparing data...") - data_path = self.data_path / self.sequences_per_group_filename - df = read_master_dataset(data_path) - if len(self.subset_components) < 4: - df = subset_by_experiment(df, subset_components=self.subset_components) - - if not self.df_train and not self.df_validation and not self.df_test: - self.df_train, self.df_validation, self.df_test = self.create_train_groups(df) - - self.df_train, self.df_validation, self.df_test = get_motif( - self.df_train, - self.df_validation, - self.df_test, - self.subset_components, - self.number_of_sequences_to_motif_creation, - ) - - combined_dict = { - "train": self.df_train, - "val": self.df_validation, - "test": self.df_test, - } - - for split, data in combined_dict.items(): - split_data_path = self.data_path / f"{split}_{self.encoded_filename}" # src/refactor/data/encode_data.pkl - with open(split_data_path, "wb") as f: - pickle.dump(data, f) - - print("Preparing data DONE!") - - def setup(self, stage: str): - # TODO: incorporate some extra information after the split (experiement -> split -> motif -> train/test assignment) - # WARNING: have to be able to call loading_data on the main process of accelerate/fabric 
bc of gimme_motifs caching dependecies - # Creating sequence datasets unless they exist already - if stage == 'fit' or stage is None: # then load train and val splits - self._setup_split('train') - self._setup_split('val') - - if stage in ('test', 'predict') or stage is None: - self._setup_split('test') - - def _setup_split(self, split: str): - print(f"Loading {split}...") - stage_data_path = self.data_path / f"{split}_{self.encoded_filename}" - with open(stage_data_path, "rb") as f: - encode_data = pickle.load(f) - - self.motifs_per_split[split] = encode_data['motifs'] - self.motifs_per_components_dict_per_split[split] = encode_data["motifs_per_components_dict"] - self.datasets_per_split[split] = self.create_sequence_dataset(encode_data) - print(f"Loading {split} split  DONE!") - - def create_sequence_dataset(self, data): - df = data["dataset"] - self.cell_components = df.sort_values("TAG")["TAG"].unique().tolist() - self.tag_to_numeric = {x: n + 1 for n, x in enumerate(df.TAG.unique())} - self.numeric_to_tag = {n + 1: x for n, x in enumerate(df.TAG.unique())} - - self.cell_types = sorted(self.numeric_to_tag.keys()) - X_cell_types = torch.from_numpy(df["TAG"].apply(lambda x: self.tag_to_numeric[x]).to_numpy()) - - nucleotides = ["A", "C", "G", "T"] - X_sequences = np.array([one_hot_encode(x, nucleotides, 200) for x in (df["sequence"]) if "N" not in x]) - X_sequences = np.array([x.T.tolist() for x in X_sequences]) - X_sequences[X_sequences == 0] = -1 - - return SequenceDataset( - X_sequences, - X_cell_types, - sequence_transform=self.sequence_transform, - cell_type_transform=self.cell_type_transform, - ) - - def create_train_groups(self, df: pd.DataFrame) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]: - if self.train_chr is None: - val_test_chr = self.val_chr + self.test_chr - df_train = df[~df.chr.isin(val_test_chr)].reset_index(drop=True) - else: - df_train = df[df.chr.isin(self.train_chr)].reset_index(drop=True) - - df_validation = df[df.chr.isin(self.val_chr)].reset_index(drop=True) - df_test = df[df.chr.isin(self.test_chr)].reset_index(drop=True) - - df_validation["sequence"] = df_validation["sequence"].apply(lambda x: "".join(random.sample(list(x), len(x)))) - return df_train, df_validation, df_test - - def train_dataloader(self): - train_dataset = self.datasets_per_split['train'] - return DataLoader( - dataset=train_dataset, - batch_size=self.hparams.batch_size, - num_workers=self.hparams.num_workers, - # pin_memory=self.hparams.pin_memory, - shuffle=True, - ) - - def val_dataloader(self): - val_dataset = self.datasets_per_split['val'] - return DataLoader( - dataset=val_dataset, - batch_size=self.hparams.batch_size, - num_workers=self.hparams.num_workers, - # pin_memory=self.hparams.pin_memory, - shuffle=False, - ) - - def test_dataloader(self): - test_dataset = self.datasets_per_split['test'] - return DataLoader( - dataset=test_dataset, - batch_size=self.hparams.batch_size, - num_workers=self.hparams.num_workers, - # pin_memory=self.hparams.pin_memory, - shuffle=False, - ) - - def teardown(self, stage: Optional[str] = None): - """Clean up after fit or test.""" - pass - - def state_dict(self): - """Extra things to save to checkpoint.""" - return {} - - def load_state_dict(self, state_dict: Dict[str, Any]): - """Things to do when loading checkpoint.""" - pass - - -if __name__ == "__main__": - _ = SequenceDataModule() diff --git a/src/refactor/main.py b/src/refactor/main.py deleted file mode 100644 index a4eceec2..00000000 --- a/src/refactor/main.py +++ /dev/null @@ -1,81 +0,0 @@ 
-import logging -import os -import sys -from dataclasses import dataclass - -import hydra -import pyrootutils -import pytorch_lightning as pl -import wandb -from hydra.core.config_store import ConfigStore -from hydra.utils import get_original_cwd, instantiate, to_absolute_path -from omegaconf import DictConfig, OmegaConf -from pytorch_lightning.callbacks import LearningRateMonitor, ModelCheckpoint - -root = pyrootutils.setup_root(__file__, indicator=".project-root", pythonpath=True) - - -@dataclass -class DNADiffusionConfig: - data: str = "vanilla_sequences" - model: str = "dnadiffusion" - logger: str = "wandb" - trainer: str = "ddp" - callbacks: str = "default" - paths: str = "default" - seed: int = 42 - train: bool = True - test: bool = False - # ckpt_path: None - - -cs = ConfigStore.instance() -cs.store(name="dnadiffusion_config", node=DNADiffusionConfig) - - -@hydra.main(version_base="1.3", config_path="configs", config_name="main") -def main(cfg: DNADiffusionConfig): - # print(HydraConfig.get().job.name) - - # run = wandb.init( - # name=parser.logdir, - # save_dir=parser.logdir, - # project=cfg.logger.wandb.project, - # config=cfg, - # ) - - # Placeholder for what loss or metric values we plan to track with wandb - # wandb.log({"loss": cfg.model.criterion}) - print(f"Current working directory : {os.getcwd()}") - print(f"Orig working directory : {get_original_cwd()}") - - pl.seed_everything(cfg.seed) - # Check if this works - model = instantiate(cfg.model) - train_dl = instantiate(cfg.data) - print(train_dl) - return - val_dl = instantiate(cfg.data) - if cfg.ckpt_path: - model.load_from_checkpoint(cfg.ckpt_path) - - model_checkpoint_callback = ModelCheckpoint( - dirpath="checkpoints", - monitor="val_loss", - mode="min", - save_top_k=10, - save_last=True, - ) - lr_monitor_callback = LearningRateMonitor(logging_interval="epoch") - - trainer = pl.Trainer( - callbacks=[model_checkpoint_callback, lr_monitor_callback], - accelerator=cfg.trainer.accelerator, - devices=cfg.trainer.devices, - logger=cfg.logger.wandb, - ) - trainer.fit(model, train_dl, val_dl) - - -if __name__ == "__main__": - main() diff --git a/src/refactor/models/diffusion/__pycache__/ddpm.cpython-39.pyc b/src/refactor/models/diffusion/__pycache__/ddpm.cpython-39.pyc deleted file mode 100644 index 9eb008df..00000000 Binary files a/src/refactor/models/diffusion/__pycache__/ddpm.cpython-39.pyc and /dev/null differ diff --git a/src/refactor/models/diffusion/__pycache__/diffusion.cpython-39.pyc b/src/refactor/models/diffusion/__pycache__/diffusion.cpython-39.pyc deleted file mode 100644 index 6522be7a..00000000 Binary files a/src/refactor/models/diffusion/__pycache__/diffusion.cpython-39.pyc and /dev/null differ diff --git a/src/refactor/models/diffusion/ddpm.py b/src/refactor/models/diffusion/ddpm.py deleted file mode 100644 index 7fa63162..00000000 --- a/src/refactor/models/diffusion/ddpm.py +++ /dev/null @@ -1,277 +0,0 @@ -from functools import partial - -import torch -import torch.nn.functional as F -import tqdm -from models.diffusion.diffusion import DiffusionModel -from torch import nn -from utils.misc import extract, extract_data_from_batch, mean_flat -from utils.schedules import ( - alpha_cosine_log_snr, - beta_linear_log_snr, - linear_beta_schedule, -) - - -class DDPM(DiffusionModel): - def __init__( - self, - *, - image_size, - timesteps=50, - noise_schedule="cosine", - time_difference=0.0, - unet: nn.Module, - is_conditional: bool, - p_uncond: float = 0.1, - use_fp16: bool, - logdir: str, - optimizer: 
torch.optim.Optimizer, - lr_scheduler: torch.optim.lr_scheduler._LRScheduler, - criterion: nn.Module, - use_ema: bool = True, - ema_decay: float = 0.9999, - lr_warmup=0, - use_p2_weigthing: bool = False, - p2_gamma: float = 0.5, - p2_k: float = 1, - ): - super().__init__( - unet, - is_conditional, - use_fp16, - logdir, - optimizer, - lr_scheduler, - criterion, - use_ema, - ema_decay, - lr_warmup, - ) - print("saludos del matei") - print("\n") - self.image_size = image_size - - if noise_schedule == "linear": - self.log_snr = beta_linear_log_snr - elif noise_schedule == "cosine": - self.log_snr = alpha_cosine_log_snr - else: - raise ValueError(f"invalid noise schedule {noise_schedule}") - - self.timesteps = timesteps - self.p_uncond = p_uncond - - # self.betas = cosine_beta_schedule(timesteps=timesteps, s=0.0001) - self.set_noise_schedule(self.betas, self.timesteps) - - # proposed in the paper, summed to time_next - # as a way to fix a deficiency in self-conditioning and lower FID when the number of sampling timesteps is < 400 - - self.time_difference = time_difference - - def set_noise_schedule(self, betas, timesteps): - # define beta schedule - self.betas = linear_beta_schedule(timesteps=timesteps, beta_end=0.05) - - # define alphas - alphas = 1.0 - self.betas - alphas_cumprod = torch.cumprod(alphas, axis=0) - alphas_cumprod_prev = F.pad(alphas_cumprod[:-1], (1, 0), value=1.0) - - self.sqrt_recip_alphas = torch.sqrt(1.0 / alphas) - - # calculations for diffusion q(x_t | x_{t-1}) and others - self.sqrt_alphas_cumprod = torch.sqrt(alphas_cumprod) - - # sqrt_one_minus_alphas_cumprod = torch.sqrt(1. - alphas_cumprod) - self.sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod) - - # calculations for posterior q(x_{t-1} | x_t, x_0) - self.posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod) - - def q_sample(self, x_start, t, noise=None): - """ - Forward pass with noise. 
- """ - if noise is None: - noise = torch.randn_like(x_start) - - sqrt_alphas_cumprod_t = extract(self.sqrt_alphas_cumprod, t, x_start.shape) - sqrt_one_minus_alphas_cumprod_t = extract(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) - - return sqrt_alphas_cumprod_t * x_start + sqrt_one_minus_alphas_cumprod_t * noise - - @torch.no_grad() - def p_sample(self, x, t, t_index): - betas_t = extract(self.betas, t, x.shape) - sqrt_one_minus_alphas_cumprod_t = extract(self.sqrt_one_minus_alphas_cumprod, t, x.shape) - # print (x.shape, 'x_shape') - sqrt_recip_alphas_t = extract(self.sqrt_recip_alphas, t, x.shape) - - # Equation 11 in the paper - # Use our model (noise predictor) to predict the mean - model_mean = sqrt_recip_alphas_t * (x - betas_t * self.model(x, time=t) / sqrt_one_minus_alphas_cumprod_t) - - if t_index == 0: - return model_mean - else: - posterior_variance_t = extract(self.posterior_variance, t, x.shape) - noise = torch.randn_like(x) - # Algorithm 2 line 4: - return model_mean + torch.sqrt(posterior_variance_t) * noise - - @torch.no_grad() - def p_sample_guided(self, x, classes, t, t_index, context_mask, cond_weight=0.0): - # adapted from: https://openreview.net/pdf?id=qw8AKxfYbI - # print (classes[0]) - batch_size = x.shape[0] - # double to do guidance with - t_double = t.repeat(2) - x_double = x.repeat(2, 1, 1, 1) - betas_t = extract(self.betas, t_double, x_double.shape) - sqrt_one_minus_alphas_cumprod_t = extract(self.sqrt_one_minus_alphas_cumprod, t_double, x_double.shape) - sqrt_recip_alphas_t = extract(self.sqrt_recip_alphas, t_double, x_double.shape) - - # classifier free sampling interpolates between guided and non guided using `cond_weight` - classes_masked = classes * context_mask - classes_masked = classes_masked.type(torch.long) - # print ('class masked', classes_masked) - preds = self.model(x_double, time=t_double, classes=classes_masked) - eps1 = (1 + cond_weight) * preds[:batch_size] - eps2 = cond_weight * preds[batch_size:] - x_t = eps1 - eps2 - - # Equation 11 in the paper - # Use our model (noise predictor) to predict the mean - model_mean = sqrt_recip_alphas_t[:batch_size] * ( - x - betas_t[:batch_size] * x_t / sqrt_one_minus_alphas_cumprod_t[:batch_size] - ) - - if t_index == 0: - return model_mean - else: - posterior_variance_t = extract(self.posterior_variance, t, x.shape) - noise = torch.randn_like(x) - # Algorithm 2 line 4: - return model_mean + torch.sqrt(posterior_variance_t) * noise - - # Algorithm 2 but save all images: - @torch.no_grad() - def p_sample_loop(self, classes, shape, cond_weight): - device = next(self.model.parameters()).device - - b = shape[0] - # start from pure noise (for each example in the batch) - image = torch.randn(shape, device=device) - images = [] - - if classes is not None: - n_sample = classes.shape[0] - context_mask = torch.ones_like(classes).to(device) - # make 0 index unconditional - # double the batch - classes = classes.repeat(2) - context_mask = context_mask.repeat(2) - context_mask[n_sample:] = 0.0 # makes second half of batch context free - sampling_fn = partial( - self.p_sample_guided, - classes=classes, - cond_weight=cond_weight, - context_mask=context_mask, - ) - else: - sampling_fn = partial(self.p_sample) - - for i in tqdm( - reversed(range(0, self.timesteps)), - desc="sampling loop time step", - total=self.timesteps, - ): - image = sampling_fn( - self.model, - x=image, - t=torch.full((b,), i, device=device, dtype=torch.long), - t_index=i, - ) - images.append(image.cpu().numpy()) - return images - - 
@torch.no_grad() - def sample(self, image_size, classes=None, batch_size=16, channels=3, cond_weight=0): - return self.p_sample_loop( - self.model, - classes=classes, - shape=(batch_size, channels, 4, image_size), - cond_weight=cond_weight, - ) - - def training_step(self, batch: torch.Tensor, batch_idx: int): - x_start, condition = extract_data_from_batch(batch) - - if noise is None: - noise = torch.randn_like(x_start) - x_noisy = self.q_sample(x_start=x_start, t=self.timesteps, noise=noise) - - # calculating generic loss function, we'll add it to the class constructor once we have the code - # we should log more metrics at train and validation e.g. l1, l2 and other suggestions - if self.use_fp16: - with torch.cuda.amp.autocast(): - if self.is_conditional: - predicted_noise = self.model(x_noisy, self.timesteps, condition) - else: - predicted_noise = self.model(x_noisy, self.timesteps) - else: - if self.is_conditional: - predicted_noise = self.model(x_noisy, self.timesteps, condition) - else: - predicted_noise = self.model(x_noisy, self.timesteps) - - loss = self.criterion(predicted_noise, noise) - self.log("train", loss, batch_size=batch.shape[0]) - - return loss - - def validation_step(self, batch: torch.Tensor, batch_idx: int): - return self.inference_step(batch, batch_idx, "validation") - - def test_step(self, batch: torch.Tensor, batch_idx: int): - return self.inference_step(batch, batch_idx, "test") - - def inference_step(self, batch: torch.Tensor, batch_idx: int, phase="validation", noise=None): - x_start, condition = extract_data_from_batch(batch) - device = x_start.device - batch_size = batch.shape[0] - - t = torch.randint(0, self.timesteps, (batch_size,), device=device).long() # sampling a t to generate t and t+1 - - if noise is None: - noise = torch.randn_like(x_start) # gauss noise - x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) # this is the auto generated noise given t and Noise - - context_mask = torch.bernoulli(torch.zeros(classes.shape[0]) + (1 - self.p_uncond)).to(device) - - # mask for unconditinal guidance - classes = classes * context_mask - classes = classes.type(torch.long) - - predictions = self.model(x_noisy, t, condition) - - loss = self.criterion(predictions, batch) - - self.log("validation_loss", loss) if phase == "validation" else self.log("test_loss", loss) - - """ - Log multiple losses at validation/test time according to internal discussions. - """ - - return predictions - - def p2_weighting(self, x_t, ts, target, prediction): - """ - From Perception Prioritized Training of Diffusion Models: https://arxiv.org/abs/2204.00227. 
- """ - weight = (1 / (self.p2_k + self.snr) ** self.p2_gamma, ts, x_t.shape) - loss_batch = mean_flat(weight * (target - prediction) ** 2) - loss = torch.mean(loss_batch) - return loss diff --git a/src/refactor/models/diffusion/diffusion.py b/src/refactor/models/diffusion/diffusion.py deleted file mode 100644 index 029b769b..00000000 --- a/src/refactor/models/diffusion/diffusion.py +++ /dev/null @@ -1,91 +0,0 @@ -import pytorch_lightning as pl -import torch -from hydra.utils import instantiate -from torch import nn -from utils.ema import EMA - - -class DiffusionModel(pl.LightningModule): - def __init__( - self, - unet: nn.Module, - timesteps: int, - use_fp16: bool, - image_size: int, - optimizer: torch.optim.Optimizer, - lr_scheduler: torch.optim.lr_scheduler._LRScheduler, - criterion: nn.Module, - use_ema: bool = True, - ema_decay: float = 0.9999, - lr_warmup=0, - ) -> None: - super().__init__() - self.save_hyperparameters(ignore=["criterion"]) - - # create Unet - # attempt using hydra.utils.instantiate to instantiate both unet, lr scheduler and optimizer - self.model = unet - self.optimizer = optimizer - self.lr_scheduler = lr_scheduler - self.timesteps = timesteps - # training parameters - self.use_ema = use_ema - if self.use_ema: - self.eps_model_ema = EMA(self.model, beta=ema_decay) - self.use_fp16 = use_fp16 - self.image_size = image_size - self.optimizer = optimizer - self.lr_warmup = lr_warmup - self.criterion = criterion - - def training_step(self, batch: torch.Tensor, batch_idx: int): - loss = 0 - self.log("train_loss", loss) - return loss - - def validation_step(self, batch: torch.Tensor, batch_idx: int): - preds = self.inference_step(batch) - return preds - - def test_step(self, batch: torch.Tensor, batch_idx: int): - preds = self.inference_step(batch) - return preds - - def inference_step(self, batch: torch.Tensor): - return - - def sample( - self, n_sample: int, condition=None, timesteps=None, *args, **kwargs # number of samples - ) -> torch.Tensor: - return - - def optimizer_step( - self, - epoch, - batch_idx, - optimizer, - optimizer_idx, - optimizer_closure, - on_tpu=False, - using_native_amp=False, - using_lbfgs=False, - ): - if self.trainer.global_step < self.lr_warmup: - lr_scale = min(1.0, float(self.trainer.global_step + 1) / self.lr_warmup) - for pg in optimizer.param_groups: - pg["learning_rate"] = lr_scale * self.optimizer_config.params.lr - - optimizer.step(closure=optimizer_closure) - - def on_before_zero_grad(self, *args, **kwargs) -> None: - if self.use_ema: - self.eps_model_ema.update(self.model) - - def configure_optimizers(self): - # optimizer = instantiate( - # self.optimizer) - # if self.lr_scheduler is not None: - # scheduler = instantiate( - # self.lr_scheduler, optimizer=optimizer) - # return {"optimizer": optimizer, "lr_scheduler": scheduler} - return {"optimizer": self.optimizer, "lr_scheduler": self.scheduler} diff --git a/src/refactor/models/networks/__pycache__/unet_lucas.cpython-39.pyc b/src/refactor/models/networks/__pycache__/unet_lucas.cpython-39.pyc deleted file mode 100644 index 930d1434..00000000 Binary files a/src/refactor/models/networks/__pycache__/unet_lucas.cpython-39.pyc and /dev/null differ diff --git a/src/refactor/models/networks/__pycache__/unet_lucas_cond.cpython-39.pyc b/src/refactor/models/networks/__pycache__/unet_lucas_cond.cpython-39.pyc deleted file mode 100644 index c2739ccd..00000000 Binary files a/src/refactor/models/networks/__pycache__/unet_lucas_cond.cpython-39.pyc and /dev/null differ diff --git 
a/src/refactor/models/networks/unet_lucas.py b/src/refactor/models/networks/unet_lucas.py deleted file mode 100644 index c8a483a2..00000000 --- a/src/refactor/models/networks/unet_lucas.py +++ /dev/null @@ -1,297 +0,0 @@ -import math -from functools import partial -from typing import Callable, List, Optional - -import torch -from einops import rearrange -from torch import einsum, nn -from utils.misc import default, exists -from utils.network import l2norm - - -class Residual(nn.Module): - def __init__(self, fn: Callable) -> None: - super().__init__() - self.fn = fn - - def forward(self, x: torch.Tensor, *args, **kwargs) -> torch.Tensor: - return self.fn(x, *args, **kwargs) + x - - -def Upsample(dim: int, dim_out: Optional[int] = None): - return nn.Sequential( - nn.Upsample(scale_factor=2, mode="nearest"), - nn.Conv2d(dim, default(dim_out, dim), 3, padding=1), - ) - - -def Downsample(dim: int, dim_out: Optional[int] = None): - return nn.Conv2d(dim, default(dim_out, dim), 4, 2, 1) - - -class LayerNorm(nn.Module): - def __init__(self, dim: int) -> None: - super().__init__() - self.g = nn.Parameter(torch.ones(1, dim, 1, 1)) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - eps = 1e-5 if x.dtype == torch.float32 else 1e-3 - var = torch.var(x, dim=1, unbiased=False, keepdim=True) - mean = torch.mean(x, dim=1, keepdim=True) - return (x - mean) * (var + eps).rsqrt() * self.g - - -class PreNorm(nn.Module): - def __init__(self, dim: int, fn) -> None: - super().__init__() - self.fn = fn - self.norm = LayerNorm(dim) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - x = self.norm(x) - return self.fn(x) - - -# positional embeds -class LearnedSinusoidalPositionalEmbedding(nn.Module): - """following @crowsonkb 's lead with learned sinusoidal pos emb""" - - """ https://github.com/crowsonkb/v-diffusion-jax/blob/master/diffusion/models/danbooru_128.py#L8 """ - - def __init__(self, dim: int) -> None: - super().__init__() - assert (dim % 2) == 0 - half_dim = dim // 2 - self.weights = nn.Parameter(torch.randn(half_dim)) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - x = rearrange(x, "b -> b 1") - freqs = x * rearrange(self.weights, "d -> 1 d") * 2 * math.pi - fouriered = torch.cat((freqs.sin(), freqs.cos()), dim=-1) - fouriered = torch.cat((x, fouriered), dim=-1) - return fouriered - - -# building block modules -class Block(nn.Module): - def __init__(self, dim: int, dim_out: int, groups: int = 8) -> None: - super().__init__() - self.proj = nn.Conv2d(dim, dim_out, 3, padding=1) - self.norm = nn.GroupNorm(groups, dim_out) - self.act = nn.SiLU() - - def forward(self, x: torch.Tensor, scale_shift=None) -> torch.Tensor: - x = self.proj(x) - x = self.norm(x) - - if exists(scale_shift): - scale, shift = scale_shift - x = x * (scale + 1) + shift - - x = self.act(x) - return x - - -class ResnetBlock(nn.Module): - def __init__(self, dim: int, dim_out: int, *, time_emb_dim: Optional[int] = None, groups: int = 8) -> None: - super().__init__() - self.mlp = nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) if exists(time_emb_dim) else None - - self.block1 = Block(dim, dim_out, groups=groups) - self.block2 = Block(dim_out, dim_out, groups=groups) - self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity() - - def forward(self, x: torch.Tensor, time_emb=None) -> torch.Tensor: - scale_shift = None - if exists(self.mlp) and exists(time_emb): - time_emb = self.mlp(time_emb) - time_emb = rearrange(time_emb, "b c -> b c 1 1") - scale_shift = time_emb.chunk(2, dim=1) - - 
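-        # FiLM-style conditioning: the time embedding was projected to 2 * dim_out
-        # channels and chunked into a (scale, shift) pair per channel; Block.forward
-        # applies it as x * (scale + 1) + shift right after the group norm.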
h = self.block1(x, scale_shift=scale_shift) - - h = self.block2(h) - - return h + self.res_conv(x) - - -class LinearAttention(nn.Module): - def __init__(self, dim: int, heads: int = 4, dim_head: int = 32) -> None: - super().__init__() - self.scale = dim_head**-0.5 - self.heads = heads - hidden_dim = dim_head * heads - self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False) - self.to_out = nn.Sequential(nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim)) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - b, c, h, w = x.shape - qkv = self.to_qkv(x).chunk(3, dim=1) - q, k, v = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv) - - q = q.softmax(dim=-2) - k = k.softmax(dim=-1) - - q = q * self.scale - v = v / (h * w) - - context = torch.einsum("b h d n, b h e n -> b h d e", k, v) - - out = torch.einsum("b h d e, b h d n -> b h e n", context, q) - out = rearrange(out, "b h c (x y) -> b (h c) x y", h=self.heads, x=h, y=w) - return self.to_out(out) - - -class Attention(nn.Module): - def __init__(self, dim: int, heads: int = 4, dim_head: int = 32, scale: int = 10) -> None: - super().__init__() - self.scale = scale - self.heads = heads - hidden_dim = dim_head * heads - self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False) - self.to_out = nn.Conv2d(hidden_dim, dim, 1) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - b, c, h, w = x.shape - qkv = self.to_qkv(x).chunk(3, dim=1) - q, k, v = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv) - - q, k = map(l2norm, (q, k)) - - sim = einsum("b h d i, b h d j -> b h i j", q, k) * self.scale - attn = sim.softmax(dim=-1) - out = einsum("b h i j, b h d j -> b h i d", attn, v) - out = rearrange(out, "b h (x y) d -> b (h d) x y", x=h, y=w) - return self.to_out(out) - - -class UNetLucas(nn.Module): - def __init__( - self, - dim: int, - init_dim: int = None, - dim_mults: Optional[list] = (1, 2, 4), - channels: int = 1, - resnet_block_groups: int = 8, - learned_sinusoidal_dim: int = 18, - num_classes: int = 10, - self_conditioned: bool = False, - ) -> None: - super().__init__() - - channels = 1 - self.channels = channels - - input_channels = channels - if self_conditioned: - input_channels = channels * 2 - - init_dim = default(init_dim, dim) - self.init_conv = nn.Conv2d(input_channels, init_dim, (7, 7), padding=3) - - dims = [init_dim, *(dim * m for m in dim_mults)] - - in_out = list(zip(dims[:-1], dims[1:])) - - resnet_block = partial(ResnetBlock, groups=resnet_block_groups) - - time_dim = dim * 4 - - sinu_pos_emb = LearnedSinusoidalPositionalEmbedding(learned_sinusoidal_dim) - fourier_dim = learned_sinusoidal_dim + 1 - - self.time_mlp = nn.Sequential( - sinu_pos_emb, - nn.Linear(fourier_dim, time_dim), - nn.GELU(), - nn.Linear(time_dim, time_dim), - ) - - if num_classes is not None: - self.label_emb = nn.Embedding(num_classes, time_dim) - - self.downs = nn.ModuleList([]) - self.ups = nn.ModuleList([]) - num_resolutions = len(in_out) - - for index, (dim_in, dim_out) in enumerate(in_out): - is_last = index >= (num_resolutions - 1) - - self.downs.append( - nn.ModuleList( - [ - resnet_block(dim_in, dim_in, time_emb_dim=time_dim), - resnet_block(dim_in, dim_in, time_emb_dim=time_dim), - (PreNorm(dim_in, LinearAttention(dim_in))), - Downsample(dim_in, dim_out) if not is_last else nn.Conv2d(dim_in, dim_out, 3, padding=1), - ] - ) - ) - - mid_dim = dims[-1] - self.mid_block1 = resnet_block(mid_dim, mid_dim, time_emb_dim=time_dim) - self.mid_attn = Residual(PreNorm(mid_dim, Attention(mid_dim))) - 
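-        # full (quadratic) attention only runs here at the bottleneck, where the
-        # spatial resolution is smallest; the down/up paths use LinearAttention,
-        # whose kernelized k.v contraction keeps the cost linear in sequence length.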
self.mid_block2 = resnet_block(mid_dim, mid_dim, time_emb_dim=time_dim) - - for index, (dim_in, dim_out) in enumerate(reversed(in_out)): - is_last = index == (len(in_out) - 1) - - self.ups.append( - nn.ModuleList( - [ - resnet_block(dim_out + dim_in, dim_out, time_emb_dim=time_dim), - resnet_block(dim_out + dim_in, dim_out, time_emb_dim=time_dim), - Residual(PreNorm(dim_out, LinearAttention(dim_out))), - Upsample(dim_out, dim_in) if not is_last else nn.Conv2d(dim_out, dim_in, 3, padding=1), - ] - ) - ) - - self.final_res_block = resnet_block(dim * 2, dim, time_emb_dim=time_dim) - self.final_conv = nn.Conv2d(dim, 1, 1) - - def forward(self, x: torch.Tensor, time, classes, x_self_cond=None) -> torch.Tensor: - x = self.init_conv(x) - r = x.clone() - - t_start = self.time_mlp(time) - t_mid = t_start.clone() - t_end = t_start.clone() - - if classes is not None: - t_start += self.label_emb(classes) - t_mid += self.label_emb(classes) - t_end += self.label_emb(classes) - - h = [] - - for block1, block2, attn, downsample in self.downs: - x = block1(x, t_start) - h.append(x) - - x = block2(x, t_start) - x = attn(x) - h.append(x) - - x = downsample(x) - - x = self.mid_block1(x, t_mid) - x = self.mid_attn(x) - x = self.mid_block2(x, t_mid) - - for block1, block2, attn, upsample in self.ups: - x = torch.cat((x, h.pop()), dim=1) - x = block1(x, t_mid) - - x = torch.cat((x, h.pop()), dim=1) - x = block2(x, t_mid) - x = attn(x) - - x = upsample(x) - - x = torch.cat((x, r), dim=1) - - x = self.final_res_block(x, t_end) - x = self.final_conv(x) - - return x diff --git a/src/refactor/models/networks/unet_lucas_cond.py b/src/refactor/models/networks/unet_lucas_cond.py deleted file mode 100644 index fd7bc12c..00000000 --- a/src/refactor/models/networks/unet_lucas_cond.py +++ /dev/null @@ -1,353 +0,0 @@ -import math -from functools import partial -from typing import Callable, List, Optional - -import torch -from einops import rearrange -from torch import einsum, nn -from utils.misc import default, exists -from utils.network import Downsample, Upsample, l2norm - -# Building blocks of UNET - - -class Residual(nn.Module): - def __init__(self, fn: Callable) -> None: - super().__init__() - self.fn = fn - - def forward(self, x: torch.Tensor, *args, **kwargs) -> torch.Tensor: - return self.fn(x, *args, **kwargs) + x - - -def Upsample(dim: int, dim_out: Optional[int] = None): - return nn.Sequential( - nn.Upsample(scale_factor=2, mode="nearest"), - nn.Conv2d(dim, default(dim_out, dim), 3, padding=1), - ) - - -def Downsample(dim: int, dim_out: Optional[int] = None): - return nn.Conv2d(dim, default(dim_out, dim), 4, 2, 1) - - -class LayerNorm(nn.Module): - def __init__(self, dim: int) -> None: - super().__init__() - self.g = nn.Parameter(torch.ones(1, dim, 1, 1)) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - eps = 1e-5 if x.dtype == torch.float32 else 1e-3 - var = torch.var(x, dim=1, unbiased=False, keepdim=True) - mean = torch.mean(x, dim=1, keepdim=True) - return (x - mean) * (var + eps).rsqrt() * self.g - - -class PreNorm(nn.Module): - def __init__(self, dim: int, fn: Callable) -> None: - super().__init__() - self.fn = fn - self.norm = LayerNorm(dim) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - x = self.norm(x) - return self.fn(x) - - -# Building blocks of UNET, positional embeddings - - -class LearnedSinusoidalPosEmb(nn.Module): - """following @crowsonkb 's lead with learned sinusoidal pos emb""" - - """ 
https://github.com/crowsonkb/v-diffusion-jax/blob/master/diffusion/models/danbooru_128.py#L8 """ - - def __init__(self, dim: int) -> None: - super().__init__() - assert (dim % 2) == 0 - half_dim = dim // 2 - self.weights = nn.Parameter(torch.randn(half_dim)) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - x = rearrange(x, "b -> b 1") - freqs = x * rearrange(self.weights, "d -> 1 d") * 2 * math.pi - fouriered = torch.cat((freqs.sin(), freqs.cos()), dim=-1) - fouriered = torch.cat((x, fouriered), dim=-1) - return fouriered - - -class EmbedFC(nn.Module): - def __init__(self, input_dim: int, emb_dim: int) -> None: - super().__init__() - """ - generic one layer FC NN for embedding things - """ - self.input_dim = input_dim - layers = [nn.Linear(input_dim, emb_dim), nn.GELU(), nn.Linear(emb_dim, emb_dim)] - self.model = nn.Sequential(*layers) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - return self.model(x) - - -# Building blocks of UNET, convolution + group norm blocks - - -class Block(nn.Module): - def __init__(self, dim: int, dim_out: int, groups: int = 8) -> None: - super().__init__() - self.proj = nn.Conv2d(dim, dim_out, 3, padding=1) - self.norm = nn.GroupNorm(groups, dim_out) - self.act = nn.SiLU() - - def forward(self, x: torch.Tensor, scale_shift=None) -> torch.Tensor: - x = self.proj(x) - x = self.norm(x) - - if exists(scale_shift): - scale, shift = scale_shift - x = x * (scale + 1) + shift - - x = self.act(x) - return x - - -# Building blocks of UNET, residual blocks - - -class ResnetBlock(nn.Module): - def __init__(self, dim: int, dim_out: int, *, time_emb_dim=None, groups: int = 8) -> None: - super().__init__() - self.mlp = nn.Sequential(nn.SiLU(), nn.Linear(time_emb_dim, dim_out * 2)) if exists(time_emb_dim) else None - - self.block1 = Block(dim, dim_out, groups=groups) - self.block2 = Block(dim_out, dim_out, groups=groups) - self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity() - - def forward(self, x: torch.Tensor, time_emb=None) -> torch.Tensor: - scale_shift = None - if exists(self.mlp) and exists(time_emb): - time_emb = self.mlp(time_emb) - time_emb = rearrange(time_emb, "b c -> b c 1 1") - scale_shift = time_emb.chunk(2, dim=1) - - h = self.block1(x, scale_shift=scale_shift) - - h = self.block2(h) - - return h + self.res_conv(x) - - -# Additional code to the https://github.com/lucidrains/bit-diffusion/blob/main/bit_diffusion/bit_diffusion.py - - -class ResnetBlockClassConditioned(ResnetBlock): - def __init__( - self, dim: int, dim_out: int, *, num_classes: int, class_embed_dim: int, time_emb_dim=None, groups: int = 8 - ) -> None: - super().__init__( - dim=dim + class_embed_dim, - dim_out=dim_out, - time_emb_dim=time_emb_dim, - groups=groups, - ) - self.class_mlp = EmbedFC(num_classes, class_embed_dim) - - def forward(self, x: torch.Tensor, time_emb=None, c=None) -> torch.Tensor: - emb_c = self.class_mlp(c) - emb_c = emb_c.view(*emb_c.shape, 1, 1) - emb_c = emb_c.expand(-1, -1, x.shape[-2], x.shape[-1]) - x = torch.cat([x, emb_c], axis=1) - - return super().forward(x, time_emb) - - -# Building blocks of UNET, attention modules - - -class LinearAttention(nn.Module): - def __init__(self, dim: int, heads: int = 4, dim_head: int = 32) -> None: - super().__init__() - self.scale = dim_head**-0.5 - self.heads = heads - hidden_dim = dim_head * heads - self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False) - self.to_out = nn.Sequential(nn.Conv2d(hidden_dim, dim, 1), LayerNorm(dim)) - - def forward(self, x: torch.Tensor) -> 
torch.Tensor: - b, c, h, w = x.shape - qkv = self.to_qkv(x).chunk(3, dim=1) - q, k, v = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv) - - q = q.softmax(dim=-2) - k = k.softmax(dim=-1) - - q = q * self.scale - v = v / (h * w) - - context = torch.einsum("b h d n, b h e n -> b h d e", k, v) - - out = torch.einsum("b h d e, b h d n -> b h e n", context, q) - out = rearrange(out, "b h c (x y) -> b (h c) x y", h=self.heads, x=h, y=w) - return self.to_out(out) - - -class Attention(nn.Module): - def __init__(self, dim: int, heads: int = 4, dim_head: int = 32, scale: int = 10) -> None: - super().__init__() - self.scale = scale - self.heads = heads - hidden_dim = dim_head * heads - self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False) - self.to_out = nn.Conv2d(hidden_dim, dim, 1) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - b, c, h, w = x.shape - qkv = self.to_qkv(x).chunk(3, dim=1) - q, k, v = (rearrange(t, "b (h c) x y -> b h c (x y)", h=self.heads) for t in qkv) - - q, k = map(l2norm, (q, k)) - - sim = einsum("b h d i, b h d j -> b h i j", q, k) * self.scale - attn = sim.softmax(dim=-1) - out = einsum("b h i j, b h d j -> b h i d", attn, v) - out = rearrange(out, "b h (x y) d -> b (h d) x y", x=h, y=w) - return self.to_out(out) - - -# Core part of UNET - - -class UNet(nn.Module): - """ - Refer to the main paper for the architecture details https://arxiv.org/pdf/2208.04202.pdf - """ - - def __init__( - self, - dim: int, - init_dim: int = 200, - dim_mults: Optional[list] = [1, 2, 4], - channels=1, - resnet_block_groups: int = 8, - learned_sinusoidal_dim: int = 18, - num_classes: int = 10, - class_embed_dim: bool = 3, - ) -> None: - super().__init__() - - self.channels = channels - # if you want to do self conditioning uncomment this - # input_channels = channels * 2 - input_channels = channels - - init_dim = default(init_dim, dim) - self.init_conv = nn.Conv2d(input_channels, init_dim, (7, 7), padding=3) - dims = [init_dim, *(dim * m for m in dim_mults)] - in_out = list(zip(dims[:-1], dims[1:])) - - block_klass = partial(ResnetBlock, groups=resnet_block_groups) - - time_dim = dim * 4 - - sinu_pos_emb = LearnedSinusoidalPosEmb(learned_sinusoidal_dim) - fourier_dim = learned_sinusoidal_dim + 1 - - self.time_mlp = nn.Sequential( - sinu_pos_emb, - nn.Linear(fourier_dim, time_dim), - nn.GELU(), - nn.Linear(time_dim, time_dim), - ) - - if num_classes is not None: - self.label_emb = nn.Embedding(num_classes, time_dim) - - # layers - self.downs = nn.ModuleList([]) - self.ups = nn.ModuleList([]) - num_resolutions = len(in_out) - - for ind, (dim_in, dim_out) in enumerate(in_out): - is_last = ind >= (num_resolutions - 1) - - self.downs.append( - nn.ModuleList( - [ - block_klass(dim_in, dim_in, time_emb_dim=time_dim), - block_klass(dim_in, dim_in, time_emb_dim=time_dim), - Residual(PreNorm(dim_in, LinearAttention(dim_in))), - Downsample(dim_in, dim_out) if not is_last else nn.Conv2d(dim_in, dim_out, 3, padding=1), - ] - ) - ) - - mid_dim = dims[-1] - self.mid_block1 = block_klass(mid_dim, mid_dim, time_emb_dim=time_dim) - self.mid_attn = Residual(PreNorm(mid_dim, Attention(mid_dim))) - self.mid_block2 = block_klass(mid_dim, mid_dim, time_emb_dim=time_dim) - - for ind, (dim_in, dim_out) in enumerate(reversed(in_out)): - is_last = ind == (len(in_out) - 1) - - self.ups.append( - nn.ModuleList( - [ - block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim), - block_klass(dim_out + dim_in, dim_out, time_emb_dim=time_dim), - Residual(PreNorm(dim_out, 
LinearAttention(dim_out))), - Upsample(dim_out, dim_in) if not is_last else nn.Conv2d(dim_out, dim_in, 3, padding=1), - ] - ) - ) - - self.final_res_block = block_klass(dim * 2, dim, time_emb_dim=time_dim) - self.final_conv = nn.Conv2d(dim, 1, 1) - print("final", dim, channels, self.final_conv) - - # Additional code to the https://github.com/lucidrains/bit-diffusion/blob/main/bit_diffusion/bit_diffusion.py mostly in forward method. - - def forward(self, x: torch.Tensor, time, classes, x_self_cond=None) -> torch.Tensor: - x = self.init_conv(x) - r = x.clone() - - t_start = self.time_mlp(time) - t_mid = t_start.clone() - t_end = t_start.clone() - - if classes is not None: - t_start += self.label_emb(classes) - t_mid += self.label_emb(classes) - t_end += self.label_emb(classes) - - h = [] - - for block1, block2, attn, downsample in self.downs: - x = block1(x, t_start) - h.append(x) - - x = block2(x, t_start) - x = attn(x) - h.append(x) - - x = downsample(x) - - x = self.mid_block1(x, t_mid) - x = self.mid_attn(x) - x = self.mid_block2(x, t_mid) - - for block1, block2, attn, upsample in self.ups: - x = torch.cat((x, h.pop()), dim=1) - x = block1(x, t_mid) - - x = torch.cat((x, h.pop()), dim=1) - x = block2(x, t_mid) - x = attn(x) - - x = upsample(x) - - x = torch.cat((x, r), dim=1) - x = self.final_res_block(x, t_end) - - x = self.final_conv(x) - return x diff --git a/src/refactor/sample.py b/src/refactor/sample.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/refactor/tests/data/test_sequence_dataloader.py b/src/refactor/tests/data/test_sequence_dataloader.py deleted file mode 100644 index 5882970f..00000000 --- a/src/refactor/tests/data/test_sequence_dataloader.py +++ /dev/null @@ -1,510 +0,0 @@ -import os - -import pandas as pd -import torch -from torchvision import transforms - -from src.data.sequence_dataloader import SequenceDataModule - - -def prepare_default_data(path): - """Prepares dummy data for testing.""" - pd.DataFrame( - { - "raw_sequence": [ - "ATCGATCGATCG", - "GGTGAACGATTA", - "AATCGTATCGCG", - "CTTATCGATCCG", - ], - "component": [1, 2, 1, 10], - } - ).to_csv(path, index=False, sep="\t") - - -def prepare_high_diversity_datasets(train_data_path, val_data_path, test_data_path): - pd.DataFrame( - { - "raw_sequence": ["AAAAAAAAAA", "AAAAAAAAAA", "AAAAAAAAAA", "AAAAAAAAAA"], - "component": [0, 0, 0, 0], - } - ).to_csv(train_data_path, index=False, sep="\t") - pd.DataFrame( - { - "raw_sequence": ["CCCCCCCCCC", "CCCCCCCCCC", "CCCCCCCCCC", "CCCCCCCCCC"], - "component": [1, 1, 1, 1], - } - ).to_csv(val_data_path, index=False, sep="\t") - pd.DataFrame( - { - "raw_sequence": ["TTTTTTTTTT", "TTTTTTTTTT", "TTTTTTTTTT", "TTTTTTTTTT"], - "component": [2, 2, 2, 2], - } - ).to_csv(test_data_path, index=False, sep="\t") - - -def test_invalid_sequence_letters(): - # prepare invalid data - dummy_data_path = "_tmp_seq_dataloader_data.csv" - pd.DataFrame( - { - "raw_sequence": [ - "ZCCCACTGACTG", - "ACTGACTGACTG", - "AAAACCCCTTTT", - "ABCDEFGHIJKL", - ], - "component": [1, 2, 1, 10], - } - ).to_csv(dummy_data_path, index=False, sep="\t") - - datamodule = SequenceDataModule( - train_path=dummy_data_path, - val_path=dummy_data_path, - test_path=dummy_data_path, - sequence_length=12, - sequence_encoding="polar", - batch_size=2, - num_workers=1, - ) - # check that invalid data is detected - try: - datamodule.setup() - assert False, "Invalid sequence letters should have been detected." 
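-        # setup() is expected to raise ValueError for letters outside the ACGT
-        # alphabet; reaching the assert above means bad input was silently accepted.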
- except ValueError: - pass - - # remove dummy data - os.remove(dummy_data_path) - - -def test_invalid_sequence_lengths(): - # prepare dummy data - dummy_data_path = "_tmp_seq_dataloader_data.csv" - # second sequence too short - pd.DataFrame( - { - "raw_sequence": ["ATCG", "GGT", "AATC", "CTTA"], - "component": [1, 2, 1, 10], - } - ).to_csv(dummy_data_path, index=False, sep="\t") - - # prepare data module - datamodule = SequenceDataModule( - train_path=dummy_data_path, - val_path=None, - test_path=None, - sequence_length=4, - sequence_encoding="polar", - batch_size=2, - num_workers=1, - ) - try: - datamodule.setup() - assert False, "Invalid sequence length should have been detected." - except ValueError: - pass - - # remove dummy data - os.remove(dummy_data_path) - - # fourth sequence too long - pd.DataFrame( - { - "raw_sequence": ["ATCG", "GGT", "AATC", "CTTAT"], - "component": [1, 2, 1, 10], - } - ).to_csv(dummy_data_path, index=False, sep="\t") - - # prepare data module - datamodule = SequenceDataModule( - train_path=dummy_data_path, - val_path=None, - test_path=None, - sequence_length=4, - sequence_encoding="polar", - batch_size=2, - num_workers=1, - ) - try: - datamodule.setup() - assert False, "Invalid sequence length should have been detected." - except ValueError: - pass - - # remove dummy data - os.remove(dummy_data_path) - - -def test_train_val_test_data_split(): - # prepare dummy data - dummy_train_data_path = "_tmp_seq_dataloader_train_data.csv" - dummy_val_data_path = "_tmp_seq_dataloader_val_data.csv" - dummy_test_data_path = "_tmp_seq_dataloader_test_data.csv" - prepare_high_diversity_datasets(dummy_train_data_path, dummy_val_data_path, dummy_test_data_path) - - # check loading of only a single data set - datamodule = SequenceDataModule( - train_path=None, - val_path=dummy_val_data_path, - test_path=None, - sequence_length=10, - ) - datamodule.setup() - assert datamodule.train_dataloader is None - assert len(datamodule.val_data) == 4 - assert datamodule.test_dataloader is None - - # check differences between train, val, and test data - datamodule = SequenceDataModule( - train_path=dummy_train_data_path, - val_path=dummy_val_data_path, - test_path=dummy_test_data_path, - sequence_length=10, - sequence_encoding="polar", - batch_size=3, - num_workers=1, - ) - datamodule.setup() - - assert len(datamodule.train_dataloader()) == 2 - assert len(datamodule.val_dataloader()) == 2 - assert len(datamodule.test_dataloader()) == 2 - seen_nucleotide_idxs = set() - for dl_idx, dataloader in enumerate( - [ - datamodule.train_dataloader(), - datamodule.val_dataloader(), - datamodule.test_dataloader(), - ] - ): - dataloader_iter = iter(dataloader) - - # first batch - batch = next(dataloader_iter) - assert len(batch) == 2 - assert isinstance(batch[0], torch.Tensor) - assert isinstance(batch[1], torch.Tensor) - assert batch[0].shape == (3, 4, 10) - assert batch[1].shape == (3,) - uniq_nucleotides = batch[0].max(dim=1).indices.unique().tolist() - seen_nucleotide_idxs.update(uniq_nucleotides) - assert len(uniq_nucleotides) == 1 - assert (batch[1] == dl_idx).all() - - # second batch - batch = next(dataloader_iter) - assert len(batch) == 2 - assert isinstance(batch[0], torch.Tensor) - assert isinstance(batch[1], torch.Tensor) - assert batch[0].shape == (1, 4, 10) - assert batch[1].shape == (1,) - assert batch[0].max(dim=1).indices.unique().tolist() == uniq_nucleotides - assert (batch[1] == dl_idx).all() - - # remove dummy data - for path in [dummy_train_data_path, dummy_val_data_path, 
dummy_test_data_path]: - os.remove(path) - - -def test_polar_encoding(): - # prepare dummy data - dummy_data_path = "_tmp_seq_dataloader_data.csv" - prepare_default_data(dummy_data_path) - - # prepare data module - datamodule = SequenceDataModule( - train_path=dummy_data_path, - val_path=dummy_data_path, - test_path=dummy_data_path, - sequence_length=12, - sequence_encoding="polar", - batch_size=2, - num_workers=1, - ) - datamodule.setup() - - # data checks - assert len(datamodule.train_data) == 4 - assert len(datamodule.val_data) == 4 - assert len(datamodule.test_data) == 4 - for dataloader in [ - datamodule.train_dataloader(), - datamodule.val_dataloader(), - datamodule.test_dataloader(), - ]: - for batch in dataloader: - assert len(batch) == 2 - assert isinstance(batch[0], torch.Tensor) - assert isinstance(batch[1], torch.Tensor) - assert batch[0].shape == (2, 4, 12) - assert batch[1].shape == (2,) - assert (batch[0].max(dim=1).values == 1).all() - assert (batch[0].min(dim=1).values == -1).all() - assert (batch[0].prod(dim=1) == -1).all() - - # remove dummy data - os.remove(dummy_data_path) - - -def test_onehot_encoding(): - # prepare dummy data - dummy_data_path = "_tmp_seq_dataloader_data.csv" - prepare_default_data(dummy_data_path) - - # prepare data module - datamodule = SequenceDataModule( - train_path=dummy_data_path, - val_path=dummy_data_path, - test_path=dummy_data_path, - sequence_length=12, - sequence_encoding="onehot", - batch_size=2, - num_workers=1, - ) - datamodule.setup() - - # data checks - assert len(datamodule.train_data) == 4 - assert len(datamodule.val_data) == 4 - assert len(datamodule.test_data) == 4 - for dataloader in [ - datamodule.train_dataloader(), - datamodule.val_dataloader(), - datamodule.test_dataloader(), - ]: - for batch in dataloader: - assert len(batch) == 2 - assert isinstance(batch[0], torch.Tensor) - assert isinstance(batch[1], torch.Tensor) - assert batch[0].shape == (2, 4, 12) - assert batch[1].shape == (2,) - assert (batch[0].max(dim=1).values == 1).all() - assert (batch[0].min(dim=1).values == 0).all() - assert (batch[0].sum(dim=1) == 1).all() - - # remove dummy data - os.remove(dummy_data_path) - - -def test_ordinal_encoding(): - # prepare dummy data - dummy_data_path = "_tmp_seq_dataloader_data.csv" - prepare_default_data(dummy_data_path) - - # prepare data module - datamodule = SequenceDataModule( - train_path=dummy_data_path, - val_path=dummy_data_path, - test_path=dummy_data_path, - sequence_length=12, - sequence_encoding="ordinal", - batch_size=2, - num_workers=1, - ) - datamodule.setup() - - # data checks - assert len(datamodule.train_data) == 4 - assert len(datamodule.val_data) == 4 - assert len(datamodule.test_data) == 4 - for dataloader in [ - datamodule.train_dataloader(), - datamodule.val_dataloader(), - datamodule.test_dataloader(), - ]: - for batch in dataloader: - assert len(batch) == 2 - assert isinstance(batch[0], torch.Tensor) - assert isinstance(batch[1], torch.Tensor) - assert batch[0].shape == (2, 12) - assert batch[1].shape == (2,) - assert (batch[0].max(dim=1).values == 3).all() - assert (batch[0].min(dim=1).values == 0).all() - assert set(batch[0].tolist()[0]) == {0, 1, 2, 3} - - # remove dummy data - os.remove(dummy_data_path) - - -def test_polar_transforms(): - # prepare dummy data - dummy_data_path = "_tmp_seq_dataloader_data.csv" - prepare_default_data(dummy_data_path) - - # prepare data module - def seg_transform(seq): - return seq + 1 - - def cell_type_transform(cell_type): - return cell_type + 20 - - datamodule = 
SequenceDataModule( - train_path=dummy_data_path, - val_path=dummy_data_path, - test_path=dummy_data_path, - sequence_length=12, - sequence_encoding="polar", - sequence_transform=transforms.Compose( - [ - transforms.Lambda(seg_transform), - ] - ), - cell_type_transform=transforms.Compose( - [ - transforms.Lambda(cell_type_transform), - ] - ), - batch_size=2, - num_workers=0, - ) - datamodule.setup() - - # data checks - assert len(datamodule.train_data) == 4 - assert len(datamodule.val_data) == 4 - assert len(datamodule.test_data) == 4 - for dataloader in [ - datamodule.train_dataloader, - datamodule.val_dataloader, - datamodule.test_dataloader, - ]: - seen_cell_type_ids = set() - for batch in dataloader(): - assert len(batch) == 2 - assert isinstance(batch[0], torch.Tensor) - assert isinstance(batch[1], torch.Tensor) - assert batch[0].shape == (2, 4, 12) - assert batch[1].shape == (2,) - assert (batch[0].max(dim=1).values == 2).all() - assert (batch[0].min(dim=1).values == 0).all() - assert (batch[0].sum(dim=1) == 2).all() - cell_type_ids = set(batch[1].tolist()) - assert cell_type_ids.difference([21, 22, 30]) == set() - seen_cell_type_ids.update(cell_type_ids) - assert seen_cell_type_ids == {21, 22, 30} - - # remove dummy data - os.remove(dummy_data_path) - - -def test_onehot_transforms(): - # prepare dummy data - dummy_data_path = "_tmp_seq_dataloader_data.csv" - prepare_default_data(dummy_data_path) - - # prepare data module - def seg_transform(seq): - return seq + 1 - - def cell_type_transform(cell_type): - return cell_type + 20 - - datamodule = SequenceDataModule( - train_path=dummy_data_path, - val_path=dummy_data_path, - test_path=dummy_data_path, - sequence_length=12, - sequence_encoding="onehot", - sequence_transform=transforms.Compose( - [ - transforms.Lambda(seg_transform), - ] - ), - cell_type_transform=transforms.Compose( - [ - transforms.Lambda(cell_type_transform), - ] - ), - batch_size=2, - num_workers=0, - ) - datamodule.setup() - - # data checks - assert len(datamodule.train_data) == 4 - assert len(datamodule.val_data) == 4 - assert len(datamodule.test_data) == 4 - for dataloader in [ - datamodule.train_dataloader, - datamodule.val_dataloader, - datamodule.test_dataloader, - ]: - seen_cell_type_ids = set() - for batch in dataloader(): - assert len(batch) == 2 - assert isinstance(batch[0], torch.Tensor) - assert isinstance(batch[1], torch.Tensor) - assert batch[0].shape == (2, 4, 12) - assert batch[1].shape == (2,) - assert (batch[0].max(dim=1).values == 2).all() - assert (batch[0].min(dim=1).values == 1).all() - assert (batch[0].sum(dim=1) == 5).all() - cell_type_ids = set(batch[1].tolist()) - assert cell_type_ids.difference([21, 22, 30]) == set() - seen_cell_type_ids.update(cell_type_ids) - assert seen_cell_type_ids == {21, 22, 30} - - # remove dummy data - os.remove(dummy_data_path) - - -def test_ordinal_transforms(): - # prepare dummy data - dummy_data_path = "_tmp_seq_dataloader_data.csv" - prepare_default_data(dummy_data_path) - - # prepare data module - def seg_transform(seq): - return seq + 1 - - def cell_type_transform(cell_type): - return cell_type + 20 - - datamodule = SequenceDataModule( - train_path=dummy_data_path, - val_path=dummy_data_path, - test_path=dummy_data_path, - sequence_length=12, - sequence_encoding="ordinal", - sequence_transform=transforms.Compose( - [ - transforms.Lambda(seg_transform), - ] - ), - cell_type_transform=transforms.Compose( - [ - transforms.Lambda(cell_type_transform), - ] - ), - batch_size=2, - num_workers=0, - ) - 
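For reference, the three encodings these transform tests exercise produce the shapes and values asserted below; a small sketch assuming A, C, G, T channel order (the dataloader's actual nucleotide ordering is not part of this diff):

```python
import torch
import torch.nn.functional as F

NUCLEOTIDES = ["A", "C", "G", "T"]  # assumed ordering

def encode(seq: str, mode: str) -> torch.Tensor:
    idx = torch.tensor([NUCLEOTIDES.index(base) for base in seq])
    if mode == "ordinal":          # (len,) integer codes, as asserted in the ordinal tests
        return idx
    onehot = F.one_hot(idx, num_classes=4).T.float()  # (4, len), 0/1 channels
    if mode == "onehot":
        return onehot
    if mode == "polar":            # (4, len), -1/+1 channels
        return onehot * 2 - 1
    raise ValueError(f"unknown encoding: {mode}")

print(encode("ACGT", "ordinal").tolist())    # [0, 1, 2, 3]
print(encode("AC", "polar")[:, 0].tolist())  # [1.0, -1.0, -1.0, -1.0]
```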
datamodule.setup() - - # data checks - assert len(datamodule.train_data) == 4 - assert len(datamodule.val_data) == 4 - assert len(datamodule.test_data) == 4 - for dataloader in [ - datamodule.train_dataloader, - datamodule.val_dataloader, - datamodule.test_dataloader, - ]: - seen_cell_type_ids = set() - for batch in dataloader(): - assert len(batch) == 2 - assert isinstance(batch[0], torch.Tensor) - assert isinstance(batch[1], torch.Tensor) - assert batch[0].shape == (2, 12) - assert batch[1].shape == (2,) - assert (batch[0].max(dim=1).values == 4).all() - assert (batch[0].min(dim=1).values == 1).all() - assert set(batch[0].tolist()[0]) == {1, 2, 3, 4} - cell_type_ids = set(batch[1].tolist()) - assert cell_type_ids.difference([21, 22, 30]) == set() - seen_cell_type_ids.update(cell_type_ids) - assert seen_cell_type_ids == {21, 22, 30} - - # remove dummy data - os.remove(dummy_data_path) diff --git a/src/refactor/tests/models/diffusion/test_ddim.py b/src/refactor/tests/models/diffusion/test_ddim.py deleted file mode 100644 index 20665c0a..00000000 --- a/src/refactor/tests/models/diffusion/test_ddim.py +++ /dev/null @@ -1 +0,0 @@ -import path diff --git a/src/refactor/tests/models/diffusion/test_ddpm.py b/src/refactor/tests/models/diffusion/test_ddpm.py deleted file mode 100644 index 20665c0a..00000000 --- a/src/refactor/tests/models/diffusion/test_ddpm.py +++ /dev/null @@ -1 +0,0 @@ -import path diff --git a/src/refactor/tests/models/encoders/test_vqvae.py b/src/refactor/tests/models/encoders/test_vqvae.py deleted file mode 100644 index 20665c0a..00000000 --- a/src/refactor/tests/models/encoders/test_vqvae.py +++ /dev/null @@ -1 +0,0 @@ -import path diff --git a/src/refactor/tests/models/networks/test_unet_bitdiffusion.py b/src/refactor/tests/models/networks/test_unet_bitdiffusion.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/refactor/tests/models/networks/test_unet_lucas.py b/src/refactor/tests/models/networks/test_unet_lucas.py deleted file mode 100644 index 115875a0..00000000 --- a/src/refactor/tests/models/networks/test_unet_lucas.py +++ /dev/null @@ -1,165 +0,0 @@ -import numpy as np -import pytest -import torch - -import src.models.networks.unet_lucas as unet - - -def test_layernorm_zero_dim(): - dim = 0 - with pytest.raises(ValueError): - unet.LayerNorm(dim) - - -def test_layernorm_forward_negative_values_float64(): - test_layer = unet.LayerNorm(2) - all_negs = torch.tensor([[-2, -8, -1], [-1, -6, -3], [-3, -9, -7]], dtype=torch.float64) - # row_wise_mu = np.array([-11/3,-10/3,-19/3]) - # row_wise_var = np.array([9.555555555555557,4.222222222222222, 6.222222222222222]) - result = torch.tensor( - [ - [ - [ - [0.5391356561685612, -1.4017527060382593, 0.8626170498696979], - [1.1354154987771312, -1.2976177128881499, 0.16220221411101882], - [1.3361988407547418, -1.0689590726037936, -0.26723976815094846], - ], - [ - [0.5391356561685612, -1.4017527060382593, 0.8626170498696979], - [1.1354154987771312, -1.2976177128881499, 0.16220221411101882], - [1.3361988407547418, -1.0689590726037936, -0.26723976815094846], - ], - ] - ], - dtype=torch.float64, - ) - assert torch.eq(test_layer.forward(all_negs), result).all() - - -def test_layernorm_forward_negative_values_float32(): - test_layer = unet.LayerNorm(2) - all_negs = torch.tensor([[-2, -8, -1], [-1, -6, -3], [-3, -9, -7]], dtype=torch.float32) - # row_wise_mu = np.array([-11/3,-10/3,-19/3]) - # row_wise_var = np.array([9.555555555555557,4.222222222222222, 6.222222222222222]) - result = torch.tensor( - [ - [ - [ - 
[0.53916365, -1.4018253, 0.8626618], - [1.1355485, -1.2977698, 0.16222118], - [1.3363053, -1.069044, -0.26726097], - ], - [ - [0.53916365, -1.4018253, 0.8626618], - [1.1355485, -1.2977698, 0.16222118], - [1.3363053, -1.069044, -0.26726097], - ], - ] - ], - dtype=torch.float32, - ) - assert torch.eq(test_layer.forward(all_negs), result).all() - - -def test_layernorm_forward_positive_values_float64(): - test_layer = unet.LayerNorm(2) - all_pos = torch.tensor([[2, 8, 1], [1, 6, 3], [3, 9, 7]], dtype=torch.float64) - # row_wise_mu = np.array([-11/3,-10/3,-19/3]) - # row_wise_var = np.array([9.555555555555557,4.222222222222222, 6.222222222222222]) - result = torch.tensor( - [ - [ - [ - [-0.5391356561685612, 1.4017527060382593, -0.8626170498696979], - [-1.1354154987771312, 1.2976177128881499, -0.16220221411101882], - [-1.3361988407547418, 1.0689590726037936, 0.26723976815094846], - ], - [ - [-0.5391356561685612, 1.4017527060382593, -0.8626170498696979], - [-1.1354154987771312, 1.2976177128881499, -0.16220221411101882], - [-1.3361988407547418, 1.0689590726037936, 0.26723976815094846], - ], - ] - ], - dtype=torch.float64, - ) - assert torch.eq(test_layer.forward(all_pos), result).all() - - -def test_layernorm_forward_positive_values_float32(): - test_layer = unet.LayerNorm(2) - all_pos = torch.tensor([[2, 8, 1], [1, 6, 3], [3, 9, 7]], dtype=torch.float32) - # row_wise_mu = np.array([11/3,10/3,19/3]) - # row_wise_var = np.array([9.555555555555557,4.222222222222222, 6.222222222222222]) - result = torch.tensor( - [ - [ - [ - [-0.53916365, 1.4018253, -0.8626618], - [-1.1355485, 1.2977698, -0.16222118], - [-1.3363053, 1.069044, 0.26726097], - ], - [ - [-0.53916365, 1.4018253, -0.8626618], - [-1.1355485, 1.2977698, -0.16222118], - [-1.3363053, 1.069044, 0.26726097], - ], - ] - ], - dtype=torch.float32, - ) - assert torch.eq(test_layer.forward(all_pos), result).all() - - -def test_layernorm_forward_float64(): - test_layer = unet.LayerNorm(2) - mixed_values = torch.tensor([[-2, 8, -1], [1, -6, 3], [-3, 9, -7]], dtype=torch.float64) - # row_wise_mu = np.array([5/3,-2/3,-1/3]) - # row_wise_var = np.array([20.222222222222225,14.888888888888891, 46.22222222222222]) - result = torch.tensor( - [ - [ - [ - [-0.8153540887225921, 1.4083388805208406, -0.5929847917982488], - [0.43191970826789955, -1.3821430664572785, 0.950223358189379], - [-0.39222802744805746, 1.3727980960682014, -0.9805700686201437], - ], - [ - [-0.8153540887225921, 1.4083388805208406, -0.5929847917982488], - [0.43191970826789955, -1.3821430664572785, 0.950223358189379], - [-0.39222802744805746, 1.3727980960682014, -0.9805700686201437], - ], - ] - ], - dtype=torch.float64, - ) - assert torch.eq(test_layer.forward(mixed_values), result).all() - - -def test_layernorm_forward__float32(): - test_layer = unet.LayerNorm(2) - mixed_values = torch.tensor([[-2, 8, -1], [1, -6, 3], [-3, 9, -7]], dtype=torch.float32) - # row_wise_mu = np.array([5/3,-2/3,-1/3]) - # row_wise_var = np.array([20.222222222222225,14.888888888888891, 46.22222222222222]) - result = torch.tensor( - [ - [ - [ - [-0.8153741, 1.4083735, -0.5929993], - [0.43193412, -1.3821892, 0.95025504], - [-0.39223224, 1.3728127, -0.9805805], - ], - [ - [-0.8153741, 1.4083735, -0.5929993], - [0.43193412, -1.3821892, 0.95025504], - [-0.39223224, 1.3728127, -0.9805805], - ], - ] - ], - dtype=torch.float32, - ) - assert torch.eq(test_layer.forward(mixed_values), result).all() - - -def test_layernorm_dimensions(): - return diff --git a/src/refactor/tests/utils/test_ema.py 
b/src/refactor/tests/utils/test_ema.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/refactor/tests/utils/test_misc.py b/src/refactor/tests/utils/test_misc.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/refactor/tests/utils/test_network.py b/src/refactor/tests/utils/test_network.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/refactor/tests/utils/test_schedules.py b/src/refactor/tests/utils/test_schedules.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/refactor/utils/__pycache__/ema.cpython-39.pyc b/src/refactor/utils/__pycache__/ema.cpython-39.pyc deleted file mode 100644 index 1091cb03..00000000 Binary files a/src/refactor/utils/__pycache__/ema.cpython-39.pyc and /dev/null differ diff --git a/src/refactor/utils/__pycache__/misc.cpython-39.pyc b/src/refactor/utils/__pycache__/misc.cpython-39.pyc deleted file mode 100644 index b1fcac5f..00000000 Binary files a/src/refactor/utils/__pycache__/misc.cpython-39.pyc and /dev/null differ diff --git a/src/refactor/utils/__pycache__/network.cpython-39.pyc b/src/refactor/utils/__pycache__/network.cpython-39.pyc deleted file mode 100644 index 7e1ed11a..00000000 Binary files a/src/refactor/utils/__pycache__/network.cpython-39.pyc and /dev/null differ diff --git a/src/refactor/utils/__pycache__/schedules.cpython-39.pyc b/src/refactor/utils/__pycache__/schedules.cpython-39.pyc deleted file mode 100644 index 115b3151..00000000 Binary files a/src/refactor/utils/__pycache__/schedules.cpython-39.pyc and /dev/null differ diff --git a/src/refactor/utils/data.py b/src/refactor/utils/data.py deleted file mode 100644 index d1a8c17c..00000000 --- a/src/refactor/utils/data.py +++ /dev/null @@ -1,124 +0,0 @@ -import pandas as pd -from typing import Any, Dict, List -import os - - -def subset_by_experiment(df: pd.DataFrame, subset_components: List[str]) -> pd.DataFrame: - df_generate = df - if subset_components is not None: - query = " or ".join([f'TAG == "{c}" ' for c in subset_components]) - df_generate = df_generate.query(query).copy() - print("Subsetting...") - - return df_generate - - -def read_master_dataset(input_csv: str, limit_total_sequences=0, change_comp_index=False) -> pd.DataFrame: - df = pd.read_csv(input_csv, sep="\t") - if change_comp_index: - df["component"] = df["component"] + 1 - - if limit_total_sequences > 0: - print(f"Limiting total sequences {limit_total_sequences}") - df = df.sample(limit_total_sequences) - - return df - - -def motifs_from_fasta(fasta: str): - print("Computing Motifs....") - os.system(f"gimme scan {fasta} -p JASPAR2020_vertebrates -g hg38 > train_results_motifs.bed") - - df_results_seq_guime = pd.read_csv("train_results_motifs.bed", sep="\t", skiprows=5, header=None) - df_results_seq_guime["motifs"] = df_results_seq_guime[8].apply(lambda x: x.split('motif_name "')[1].split('"')[0]) - - df_results_seq_guime[0] = df_results_seq_guime[0].apply(lambda x: "_".join(x.split("_")[:-1])) - df_results_seq_guime_count_out = df_results_seq_guime[[0, "motifs"]].drop_duplicates().groupby("motifs").count() - return df_results_seq_guime_count_out - - -def get_motif( - df_train: pd.DataFrame, - df_shuffled: pd.DataFrame, - df_test: pd.DataFrame, - subset_components: List[str], - number_of_sequences_to_motif_creation: int, -) -> None: - train = generate_motifs_and_fastas( - df_train, - "train", - subset_components=subset_components, - number_of_sequences_to_motif_creation=number_of_sequences_to_motif_creation, - ) - test = generate_motifs_and_fastas( - 
df_test,
-        "test",
-        subset_components=subset_components,
-        number_of_sequences_to_motif_creation=number_of_sequences_to_motif_creation,
-    )
-    train_shuffle = generate_motifs_and_fastas(
-        df_shuffled,
-        "val",
-        subset_components=subset_components,
-        number_of_sequences_to_motif_creation=number_of_sequences_to_motif_creation,
-    )
-    return train, test, train_shuffle
-
-
-def generate_motifs_and_fastas(
-    df: pd.DataFrame, name: str, subset_components: List[str], number_of_sequences_to_motif_creation
-) -> Dict[str, Any]:
-    """Return the saved fasta name and a dict with per-component motifs."""
-    print("Generating Fasta and Motifs:", name)
-    print("---" * 10)
-    name_fasta = f"{name}_{'_'.join([str(c) for c in subset_components])}"
-    # pass the sample count by keyword: the third positional slot of save_fasta
-    # is to_seq_groups_comparison, not the number of sequences
-    fasta_saved = save_fasta(
-        df, name_fasta, number_of_sequences_to_motif_creation=number_of_sequences_to_motif_creation
-    )
-    print("FASTA SAVED", fasta_saved)
-    print("Generating Motifs (all seqs)")
-    motif_all_components = motifs_from_fasta(fasta_saved)
-    print("Generating Motifs per component")
-    train_comp_motifs_dict = generate_motifs_components(df, number_of_sequences_to_motif_creation)
-
-    return {
-        "fasta_name": fasta_saved,
-        "motifs": motif_all_components,
-        "motifs_per_components_dict": train_comp_motifs_dict,
-        "dataset": df,
-    }
-
-
-def save_fasta(
-    df: pd.DataFrame, name_fasta: str, to_seq_groups_comparison: bool = False, number_of_sequences_to_motif_creation=1
-) -> str:
-    fasta_final_name = name_fasta + ".fasta"
-    save_fasta_file = open(fasta_final_name, "w")
-    number_to_sample = df.shape[0]
-
-    if to_seq_groups_comparison and number_of_sequences_to_motif_creation:
-        number_to_sample = number_of_sequences_to_motif_creation
-
-    print(number_to_sample, "#seq used")
-    write_fasta_component = "\n".join(
-        df[["dhs_id", "sequence", "TAG"]]
-        .head(number_to_sample)
-        .apply(lambda x: f">{x[0]}_TAG_{x[2]}\n{x[1]}", axis=1)
-        .values.tolist()
-    )
-    save_fasta_file.write(write_fasta_component)
-    save_fasta_file.close()
-    return fasta_final_name
-
-
-def generate_motifs_components(df: pd.DataFrame, number_of_sequences_to_motif_creation) -> dict:
-    final_comp_values = {}
-    for comp, v_comp in df.groupby("TAG"):
-        print(comp)
-        print("number of sequences used to generate the motifs")
-        name_c_fasta = save_fasta(
-            v_comp,
-            "temp_component",
-            to_seq_groups_comparison=True,
-            number_of_sequences_to_motif_creation=number_of_sequences_to_motif_creation,
-        )
-        final_comp_values[comp] = motifs_from_fasta(name_c_fasta)
-    return final_comp_values
diff --git a/src/refactor/utils/ema.py b/src/refactor/utils/ema.py
deleted file mode 100644
index 5fc4e158..00000000
--- a/src/refactor/utils/ema.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from copy import deepcopy
-
-from torch import nn
-
-
-class EMA:
-    def __init__(self, model: nn.Module, beta: float):
-        super().__init__()
-        self.beta = beta
-        self.step = 0
-        self.ema_model = deepcopy(model).eval().requires_grad_(False)
-
-    def update_model_average(self, current_model):
-        for current_params, ema_params in zip(current_model.parameters(), self.ema_model.parameters()):
-            old_weight, up_weight = ema_params.data, current_params.data
-            ema_params.data = self.update_average(old_weight, up_weight)
-
-    def update_average(self, old, new):
-        if old is None:
-            return new
-        return old * self.beta + (1 - self.beta) * new
-
-    def step_ema(self, model, step_start_ema=2000):
-        if self.step < step_start_ema:
-            self.reset_parameters(model)
-            self.step += 1
-            return
-        self.update_model_average(model)
-        self.step += 1
-
-    def reset_parameters(self, model):
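-        # hard reset: copy the online model's weights into the EMA copy
-        # (step_ema falls back to this until step_start_ema steps have passed).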
self.ema_model.load_state_dict(model.state_dict()) diff --git a/src/refactor/utils/metrics.py b/src/refactor/utils/metrics.py deleted file mode 100644 index 6d766e4f..00000000 --- a/src/refactor/utils/metrics.py +++ /dev/null @@ -1,183 +0,0 @@ -import os -from typing import Callable, Dict - -import matplotlib.pyplot as plt -import numpy as np -import pandas as pd -import seaborn as sns -import torch -from scipy.special import rel_entr -from tqdm.auto import tqdm - - -def motif_scoring_KL_divergence(original: pd.Series, generated: pd.Series) -> torch.Tensor: - """ - This function encapsulates the logic of evaluating the KL divergence metric - between two sequences. - Returns - ------- - kl_divergence: Float - The KL divergence between the input and output (generated) - sequences' distribution - """ - - kl_pq = rel_entr(original, generated) - return np.sum(kl_pq) - - -def compare_motif_list( - df_motifs_a: pd.DataFrame, - df_motifs_b: pd.DataFrame, - motif_scoring_metric: Callable = motif_scoring_KL_divergence, - plot_motif_probs: bool = False, -) -> torch.Tensor: - """ - This function encapsulates the logic of evaluating the difference between the distribution - of frequencies between generated (diffusion/df_motifs_a) and the input (training/df_motifs_b) for an arbitrary metric ("motif_scoring_metric") - - Please note that some metrics, like KL_divergence, are not metrics in official sense. Reason - for that is that they dont satisfy certain properties, such as in KL case, the simmetry property. - Hence it makes a big difference what are the positions of input. - """ - set_all_mot = set(df_motifs_a.index.values.tolist() + df_motifs_b.index.values.tolist()) - create_new_matrix = [] - for x in set_all_mot: - list_in = [] - list_in.append(x) # adding the name - if x in df_motifs_a.index: - list_in.append(df_motifs_a.loc[x][0]) - else: - list_in.append(1) - - if x in df_motifs_b.index: - list_in.append(df_motifs_b.loc[x][0]) - else: - list_in.append(1) - - create_new_matrix.append(list_in) - - df_motifs = pd.DataFrame(create_new_matrix, columns=["motif", "motif_a", "motif_b"]) - - df_motifs["Diffusion_seqs"] = df_motifs["motif_a"] / df_motifs["motif_a"].sum() - df_motifs["Training_seqs"] = df_motifs["motif_b"] / df_motifs["motif_b"].sum() - if plot_motif_probs: - plt.rcParams["figure.figsize"] = (3, 3) - sns.regplot(x="Diffusion_seqs", y="Training_seqs", data=df_motifs) - plt.xlabel("Diffusion Seqs") - plt.ylabel("Training Seqs") - plt.title("Motifs Probs") - plt.show() - - return motif_scoring_metric(df_motifs["Diffusion_seqs"].values, df_motifs["Training_seqs"].values) - - -def sampling_to_metric( - model, - cell_types, - image_size, - nucleotides, - number_of_samples=20, - specific_group=False, - group_number=None, - cond_weight_to_metric=0, -): - """ - Might need to add to the DDPM class since if we can't call the sample() method outside PyTorch Lightning. - - This function encapsulates the logic of sampling from the trained model in order to generate counts of the motifs. - The reasoning is that we are interested only in calculating the evaluation metric - for the count of occurances and not the nucleic acids themselves. 
- """ - final_sequences = [] - for n_a in tqdm(range(number_of_samples)): - sample_bs = 10 - if specific_group: - sampled = torch.from_numpy(np.array([group_number] * sample_bs)) - print("specific") - else: - sampled = torch.from_numpy(np.random.choice(cell_types, sample_bs)) - - random_classes = sampled.float().cuda() - sampled_images = model.sample( - classes=random_classes, - image_size=image_size, - batch_size=sample_bs, - channels=1, - cond_weight=cond_weight_to_metric, - ) - for n_b, x in enumerate(sampled_images[-1]): - seq_final = f">seq_test_{n_a}_{n_b}\n" + "".join( - [nucleotides[s] for s in np.argmax(x.reshape(4, 200), axis=0)] - ) - final_sequences.append(seq_final) - - save_motifs_syn = open("synthetic_motifs.fasta", "w") - - save_motifs_syn.write("\n".join(final_sequences)) - save_motifs_syn.close() - - # Scan for motifs - os.system("gimme scan synthetic_motifs.fasta -p JASPAR2020_vertebrates -g hg38 > syn_results_motifs.bed") - df_results_syn = pd.read_csv("syn_results_motifs.bed", sep="\t", skiprows=5, header=None) - df_results_syn["motifs"] = df_results_syn[8].apply(lambda x: x.split('motif_name "')[1].split('"')[0]) - df_results_syn[0] = df_results_syn[0].apply(lambda x: "_".join(x.split("_")[:-1])) - df_motifs_count_syn = df_results_syn[[0, "motifs"]].drop_duplicates().groupby("motifs").count() - plt.rcParams["figure.figsize"] = (30, 2) - df_motifs_count_syn.sort_values(0, ascending=False).head(50)[0].plot.bar() - plt.show() - - return df_motifs_count_syn - - -def metric_comparison_between_components( - original_data: Dict, - generated_data: Dict, - x_label_plot: str, - y_label_plot: str, - cell_components, -) -> None: - """ - This functions takes as inputs dictionaries, which contain as keys different components (cell types) - and as values the distribution of occurances of different motifs. These two dictionaries represent two different datasets, i.e. - generated dataset and the input (train) dataset. - - The goal is to then plot a the main evaluation metric (KL or otherwise) across all different types of cell types - in a heatmap fashion. 
- """ - ENUMARATED_CELL_NAME = """7 Trophoblasts - 5 CD8_cells - 15 CD34_cells - 9 Fetal_heart - 12 Fetal_muscle - 14 HMVEC(vascular) - 3 hESC(Embryionic) - 8 Fetal(Neural) - 13 Intestine - 2 Skin(stromalA) - 4 Fibroblast(stromalB) - 6 Renal(Cancer) - 16 Esophageal(Cancer) - 11 Fetal_Lung - 10 Fetal_kidney - 1 Tissue_Invariant""".split( - "\n" - ) - CELL_NAMES = {int(x.split(" ")[0]): x.split(" ")[1] for x in ENUMARATED_CELL_NAME} - - final_comparison_all_components = [] - for components_1, motif_occurance_frequency in original_data.items(): - comparisons_single_component = [] - for components_2 in generated_data.keys(): - compared_motifs_occurances = compare_motif_list(motif_occurance_frequency, generated_data[components_2]) - comparisons_single_component.append(compared_motifs_occurances) - - final_comparison_all_components.append(comparisons_single_component) - - plt.rcParams["figure.figsize"] = (10, 10) - df_plot = pd.DataFrame(final_comparison_all_components) - df_plot.columns = [CELL_NAMES[x] for x in cell_components] - df_plot.index = df_plot.columns - sns.heatmap(df_plot, cmap="Blues_r", annot=True, lw=0.1, vmax=1, vmin=0) - plt.title(f"Kl divergence \n {x_label_plot} sequences x {y_label_plot} sequences \n MOTIFS probabilities") - plt.xlabel(f"{x_label_plot} Sequences \n(motifs dist)") - plt.ylabel(f"{y_label_plot} \n (motifs dist)") diff --git a/src/refactor/utils/misc.py b/src/refactor/utils/misc.py deleted file mode 100644 index 33ed8a13..00000000 --- a/src/refactor/utils/misc.py +++ /dev/null @@ -1,149 +0,0 @@ -import argparse -import importlib -import math -import os -import random -from typing import Any, Dict, Generator - -import numpy as np -import torch - - -def get_parser(**parser_kwargs): - parser = argparse.ArgumentParser(**parser_kwargs) - parser.add_argument("--logdir", type=str, default="logs", help="where to save logs and ckpts") - parser.add_argument("--name", type=str, default="dummy", help="postfix for logdir") - parser.add_argument( - "--resume", - type=str, - default="", - help="resume training from given folder or checkpoint", - ) - return parser.parse_args() - - -def seed_everything(seed: int) -> None: - """ " - Seed everything. - """ - random.seed(seed) - os.environ["PYTHONHASHSEED"] = str(seed) - np.random.seed(seed) - torch.manual_seed(seed) - torch.cuda.manual_seed(seed) - torch.cuda.manual_seed_all(seed) - torch.backends.cudnn.deterministic = True - - -def exists(x): - return x is not None - - -def default(val, d): - if exists(val): - return val - return d() if callable(d) else d - - -def extract(a, t, x_shape): - batch_size = t.shape[0] - out = a.gather(-1, t.cpu()) - return out.reshape(batch_size, *((1,) * (len(x_shape) - 1))).to(t.device) - - -def extract_data_from_batch(): - return None - - -def cycle(dl): - while True: - yield from dl - - -def has_int_squareroot(num): - return (math.sqrt(num) ** 2) == num - - -def num_to_groups(num, divisor): - groups = num // divisor - remainder = num % divisor - arr = [divisor] * groups - if remainder > 0: - arr.append(remainder) - return arr - - -def convert_image_to(img_type, image): - if image.mode != img_type: - return image.convert(img_type) - return image - - -def one_hot_encode(seq, nucleotides, max_seq_len: int) -> np.ndarray: - """ - One-hot encode a sequence of nucleotides. 
- """ - seq_len = len(seq) - seq_array = np.zeros((max_seq_len, len(nucleotides))) - for i in range(seq_len): - seq_array[i, nucleotides.index(seq[i])] = 1 - return seq_array - - -def log(t: torch.Tensor, eps=1e-20) -> torch.Tensor: - """ - Toch log for the purporses of diffusion time steps t. - """ - return torch.log(t.clamp(min=eps)) - - -def right_pad_dims_to(x, t): - padding_dims = x.ndim - t.ndim - if padding_dims <= 0: - return t - return t.view(*t.shape, *((1,) * padding_dims)) - - -def instantiate_from_config(config, **kwargs): - if not "_target_" in config: - raise KeyError("Expected key `_target_` to instantiate.") - return get_obj_from_str(config["_target_"])(**config.get("params", {}), **kwargs) - - -def get_obj_from_str(string, reload=False): - module, class_ = string.rsplit(".", 1) - if reload: - module_to_reload = importlib.import_module(module) - importlib.reload(module_to_reload) - return getattr(importlib.import_module(module, package=None), class_) - - -def mean_flat(tensor): - """ - Take the mean over all non-batch dimensions. - From Perception Prioritized Training of Diffusion Models: https://arxiv.org/abs/2204.00227. - """ - return tensor.mean(dim=list(range(1, len(tensor.shape)))) - - -def load_obj(obj_path: str, default_obj_path: str = "") -> Any: - """ - from - https://github.com/Erlemar/pytorch_tempest/blob/3d593b91fc025a2d0bea2342478f811961acf79a/src/utils/technical_utils.py#L11 - Extract an object from a given path. - https://github.com/quantumblacklabs/kedro/blob/9809bd7ca0556531fa4a2fc02d5b2dc26cf8fa97/kedro/utils.py - Args: - obj_path: Path to an object to be extracted, including the object name. - default_obj_path: Default object path. - Returns: - Extracted object. - Raises: - AttributeError: When the object does not have the given named attribute. 
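-    Illustrative example: one_hot_encode("AC", ["A", "C", "G", "T"], 3) returns the
-    (3, 4) array [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 0]]; rows past len(seq)
-    stay all-zero padding.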
- """ - obj_path_list = obj_path.rsplit(".", 1) - obj_path = obj_path_list.pop(0) if len(obj_path_list) > 1 else default_obj_path - obj_name = obj_path_list[0] - module_obj = importlib.import_module(obj_path) - if not hasattr(module_obj, obj_name): - raise AttributeError(f"Object `{obj_name}` cannot be loaded from `{obj_path}`.") - return getattr(module_obj, obj_name) diff --git a/src/refactor/utils/network.py b/src/refactor/utils/network.py deleted file mode 100644 index 238075d3..00000000 --- a/src/refactor/utils/network.py +++ /dev/null @@ -1,213 +0,0 @@ -import math -from typing import Callable, List, Optional - -import torch -import torch.nn.functional as F -from einops import rearrange -from torch import einsum, nn -from utils.misc import default, exists - - -def l2norm(t): - return F.normalize(t, dim=-1) - - -class Residual(nn.Module): - def __init__(self, fn: Callable) -> None: - super().__init__() - self.fn = fn - - def forward(self, x: torch.Tensor, *args, **kwargs) -> torch.Tensor: - return self.fn(x, *args, **kwargs) + x - - -def Upsample(dim: int, dim_out: Optional[int] = None): - return nn.Sequential( - nn.Upsample(scale_factor=2, mode="nearest"), - nn.Conv2d(dim, default(dim_out, dim), 3, padding=1), - ) - - -def Downsample(dim: int, dim_out: Optional[int] = None): - return nn.Conv2d(dim, default(dim_out, dim), 4, 2, 1) - - -class LayerNorm(nn.Module): - def __init__(self, dim: int) -> None: - super().__init__() - self.g = nn.Parameter(torch.ones(1, dim, 1, 1)) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - eps = 1e-5 if x.dtype == torch.float32 else 1e-3 - var = torch.var(x, dim=1, unbiased=False, keepdim=True) - mean = torch.mean(x, dim=1, keepdim=True) - return (x - mean) * (var + eps).rsqrt() * self.g - - -class PreNorm(nn.Module): - def __init__(self, dim: int, fn: Callable) -> None: - super().__init__() - self.fn = fn - self.norm = LayerNorm(dim) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - x = self.norm(x) - return self.fn(x) - - -class LearnedSinusoidalPosEmb(nn.Module): - """following @crowsonkb 's lead with learned sinusoidal pos emb""" - - """ https://github.com/crowsonkb/v-diffusion-jax/blob/master/diffusion/models/danbooru_128.py#L8 """ - - def __init__(self, dim: int) -> None: - super().__init__() - assert (dim % 2) == 0 - half_dim = dim // 2 - self.weights = nn.Parameter(torch.randn(half_dim)) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - x = rearrange(x, "b -> b 1") - freqs = x * rearrange(self.weights, "d -> 1 d") * 2 * math.pi - fouriered = torch.cat((freqs.sin(), freqs.cos()), dim=-1) - fouriered = torch.cat((x, fouriered), dim=-1) - return fouriered - - -class EmbedFC(nn.Module): - def __init__(self, input_dim: int, emb_dim: int) -> None: - super().__init__() - """ - generic one layer FC NN for embedding things - """ - self.input_dim = input_dim - layers = [nn.Linear(input_dim, emb_dim), nn.GELU(), nn.Linear(emb_dim, emb_dim)] - self.model = nn.Sequential(*layers) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - return self.model(x) - - -# Building blocks of UNET, convolution + group norm blocks - - -class Block(nn.Module): - def __init__(self, dim: int, dim_out: int, groups: int = 8) -> None: - super().__init__() - self.proj = nn.Conv2d(dim, dim_out, 3, padding=1) - self.norm = nn.GroupNorm(groups, dim_out) - self.act = nn.SiLU() - - def forward(self, x: torch.Tensor, scale_shift=None) -> torch.Tensor: - x = self.proj(x) - x = self.norm(x) - - if exists(scale_shift): - scale, shift = scale_shift - x = x * 
diff --git a/src/refactor/utils/schedules.py b/src/refactor/utils/schedules.py
deleted file mode 100644
index 6d441021..00000000
--- a/src/refactor/utils/schedules.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import math
-from math import exp, log
-
-import torch
-
-
-def beta_linear_log_snr(t: torch.Tensor) -> torch.Tensor:
-    return -torch.log(exp(1e-4 + 10 * (t**2)))
-
-
-def alpha_cosine_log_snr(t: torch.Tensor, s: float = 0.008) -> torch.Tensor:
-    # not sure if this accounts for beta being clipped to 0.999 in discrete version
-    return -log((torch.cos((t + s) / (1 + s) * math.pi * 0.5) ** -2) - 1, eps=1e-5)
-
-
-def log_snr_to_alpha_sigma(log_snr) -> torch.Tensor:
-    return torch.sqrt(torch.sigmoid(log_snr)), torch.sqrt(torch.sigmoid(-log_snr))
-
-
-def cosine_beta_schedule(timesteps, s=0.008):
-    """
-    cosine schedule as proposed in https://arxiv.org/abs/2102.09672
-    """
-    steps = timesteps + 1
-    x = torch.linspace(0, timesteps, steps)
-    alphas_cumprod = torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2
-    alphas_cumprod = alphas_cumprod / alphas_cumprod[0]
-    betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1])
-    return torch.clip(betas, 0.0001, 0.9999)
-
-
-def linear_beta_schedule(timesteps, beta_end=0.005) -> torch.Tensor:
-    beta_start = 0.0001
-    return torch.linspace(beta_start, beta_end, timesteps)
-
-
-def quadratic_beta_schedule(timesteps) -> torch.Tensor:
-    beta_start = 0.0001
-    beta_end = 0.02
-    return torch.linspace(beta_start**0.5, beta_end**0.5, timesteps) ** 2
-
-
-def sigmoid_beta_schedule(timesteps) -> torch.Tensor:
-    beta_start = 0.001
-    beta_end = 0.02
-    betas = torch.linspace(-6, 6, timesteps)
-    return torch.sigmoid(betas) * (beta_end - beta_start) + beta_start
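As a quick sanity check of the schedules removed above: the cosine schedule keeps every beta inside the clipped range while the cumulative product of alphas decays from near 1 toward 0. A sketch reproducing the deleted `cosine_beta_schedule` verbatim, with assertions added only for illustration:

import torch

def cosine_beta_schedule(timesteps, s=0.008):
    # cosine schedule as proposed in https://arxiv.org/abs/2102.09672
    steps = timesteps + 1
    x = torch.linspace(0, timesteps, steps)
    alphas_cumprod = torch.cos(((x / timesteps) + s) / (1 + s) * torch.pi * 0.5) ** 2
    alphas_cumprod = alphas_cumprod / alphas_cumprod[0]
    betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1])
    return torch.clip(betas, 0.0001, 0.9999)

betas = cosine_beta_schedule(1000)
alphas_cumprod = torch.cumprod(1.0 - betas, dim=0)
assert betas.min() >= 0.0001 and betas.max() <= 0.9999
assert alphas_cumprod[0] > 0.99 and alphas_cumprod[-1] < 0.01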
diff --git a/src/refactor/wandb/settings b/src/refactor/wandb/settings
deleted file mode 100644
index b08e2014..00000000
--- a/src/refactor/wandb/settings
+++ /dev/null
@@ -1,3 +0,0 @@
-[default]
-disabled = true
-mode = offline
diff --git a/tests/conftest.py b/tests/conftest.py
index 428134b1..21c60859 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -5,6 +5,9 @@
 @pytest.fixture(scope="session")
 def event_loop():
-    loop = asyncio.new_event_loop()
+    try:
+        loop = asyncio.get_running_loop()
+    except RuntimeError:
+        loop = asyncio.new_event_loop()
     yield loop
     loop.close()
diff --git a/tests/test_data_util.py b/tests/test_data_util.py
index 0982def7..f5cf6cd5 100644
--- a/tests/test_data_util.py
+++ b/tests/test_data_util.py
@@ -18,7 +18,13 @@ def sample_df():
         {
             "SEQUENCE": ["ACTG", "GATC", "TAGC", "GCTA", "ATCG"],
             "CELL_TYPE": ["GM12878", "HEPG2", "HESCT0", "K562", "NO"],
-            "TAG": ["GENERATED", "PROMOTERS", "RANDOM_GENOME_REGIONS", "test", "training"],
+            "TAG": [
+                "GENERATED",
+                "PROMOTERS",
+                "RANDOM_GENOME_REGIONS",
+                "test",
+                "training",
+            ],
         }
     )
@@ -28,7 +34,9 @@ def test_seq_extract(tmp_path, sample_df):
     assert len(seqs) == 1
     assert seqs["SEQUENCE"].tolist() == ["ACTG"]

-    expected = pd.DataFrame({"SEQUENCE": ["ACTG"], "CELL_TYPE": ["GM12878"], "TAG": ["GENERATED"]})
+    expected = pd.DataFrame(
+        {"SEQUENCE": ["ACTG"], "CELL_TYPE": ["GM12878"], "TAG": ["GENERATED"]}
+    )
     pd.testing.assert_frame_equal(seqs, expected)
@@ -40,7 +48,14 @@ def sequence(self, seqname, start, end):

 @pytest.fixture
 def mock_df():
-    return pd.DataFrame({"seqname": ["chr1", "chr2"], "start": [10, 20], "end": [100, 200], "summit": [50, 150]})
+    return pd.DataFrame(
+        {
+            "seqname": ["chr1", "chr2"],
+            "start": [10, 20],
+            "end": [100, 200],
+            "summit": [50, 150],
+        }
+    )


 def test_add_sequence_column(mock_df):
@@ -81,7 +96,9 @@ def test_sequence_bounds():
     assert sequence_bounds(summit, start, end, length) == expected


-def test_seq_extract(data_path: str = "tests/test_data/data_util/seq_extract_data.txt"):
+def test_seq_extract(
+    data_path: str = "tests/test_data/data_util/seq_extract_data.txt",
+):
     seqs = SEQ_EXTRACT(data_path)

     # Dict of all the tag combinations
@@ -99,12 +116,18 @@
     if isinstance(cell_type, list):
         for cell in cell_type:
             seq_input = seqs.extract_seq(tag, cell).reset_index(drop=True)
-            seq_output = pd.read_csv(f"tests/test_data/data_util/{tag}_{cell}.txt", sep="\t", dtype=object)
+            seq_output = pd.read_csv(
+                f"tests/test_data/data_util/{tag}_{cell}.txt",
+                sep="\t",
+                dtype=object,
+            )
             # Assert the two dataframes are equal
             pd.testing.assert_frame_equal(seq_input, seq_output)
     else:
         seq_input = seqs.extract_seq(tag, cell_type).reset_index(drop=True)
-        seq_output = pd.read_csv(f"tests/test_data/data_util/{tag}.txt", sep="\t", dtype=object)
+        seq_output = pd.read_csv(
+            f"tests/test_data/data_util/{tag}.txt", sep="\t", dtype=object
+        )
         # Assert the two dataframes are equal
         pd.testing.assert_frame_equal(seq_input, seq_output)
@@ -152,7 +175,9 @@ def mock_fasta(tmpdir):


 @pytest.fixture
 def sample_df():
-    return pd.read_csv("tests/test_data/data_util/motif_composition_helper_data.txt", sep="\t")
+    return pd.read_csv(
+        "tests/test_data/data_util/motif_composition_helper_data.txt", sep="\t"
+    )


 def test_motif_composition_helper(sample_df, mock_fasta, mock_bed):
diff --git a/tests/test_metrics.py b/tests/test_metrics.py
index d2a1da18..de926f79 100644
--- a/tests/test_metrics.py
+++ b/tests/test_metrics.py
@@ -1,4 +1,3 @@
-import numpy as np
 import pandas as pd
 import pytest
diff --git a/tests/test_motif_composition.py b/tests/test_motif_composition.py
index afcb1a5b..70d40bce 100644
--- a/tests/test_motif_composition.py
+++ b/tests/test_motif_composition.py
@@ -1,9 +1,8 @@
-from unittest.mock import patch
-
-import pandas as pd
 import pytest

-from dnadiffusion.metrics.motif_composition import motif_composition_matrix, parse_motif_file
+from dnadiffusion.metrics.motif_composition import (
+    parse_motif_file,
+)


 def test_parse_motif_file():
diff --git a/tests/test_preprocessing.py b/tests/test_preprocessing.py
index ca3cbf46..b2521fae 100644
--- a/tests/test_preprocessing.py
+++ b/tests/test_preprocessing.py
@@ -8,15 +8,27 @@ def test_filtering_data():
     data_path = "tests/test_data/preprocessing"
     df_path = "/test_dataset.ftr"
-    cell_list = ["K562_ENCLB843GMH", "hESCT0_ENCLB449ZZZ", "HepG2_ENCLB029COU", "GM12878_ENCLB441ZZZ"]
+    cell_list = [
+        "K562_ENCLB843GMH",
+        "hESCT0_ENCLB449ZZZ",
+        "HepG2_ENCLB029COU",
+        "GM12878_ENCLB441ZZZ",
+    ]

     # Running filtering data and saving the output to a temporary directory
-    FilteringData(data_path, df_path, cell_list).filter_exclusive_replicates(sort=True, balance=True)
+    FilteringData(data_path, df_path, cell_list).filter_exclusive_replicates(
+        sort=True, balance=True
+    )

     # Assert file path exists
-    assert os.path.exists(data_path + "/K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt")
+    assert os.path.exists(
+        data_path + "/K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt"
+    )

     # Loading the output
-    df = pd.read_csv(data_path + "/K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt", sep="\t")
+    df = pd.read_csv(
+        data_path + "/K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt",
+        sep="\t",
+    )

     # Checking filtering
     assert df["TAG"].unique().tolist().sort() == cell_list.sort()
@@ -25,4 +37,6 @@
     assert len(set(df.groupby("TAG").value_counts().tolist())) == 1

     # Remove output file
-    os.remove(data_path + "/K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt")
+    os.remove(
+        data_path + "/K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt"
+    )
diff --git a/tests/test_sample_util.py b/tests/test_sample_util.py
index 7e0df003..2c2984bb 100644
--- a/tests/test_sample_util.py
+++ b/tests/test_sample_util.py
@@ -3,7 +3,10 @@
 import pandas as pd
 import pytest

-from dnadiffusion.utils.sample_util import convert_sample_to_fasta, extract_motifs
+from dnadiffusion.utils.sample_util import (
+    convert_sample_to_fasta,
+    extract_motifs,
+)


 @pytest.fixture
diff --git a/tests/test_validation_preprocessing.py b/tests/test_validation_preprocessing.py
index 6f83f70a..0078f30a 100644
--- a/tests/test_validation_preprocessing.py
+++ b/tests/test_validation_preprocessing.py
@@ -13,7 +13,9 @@ def mock_k562_sequences():
     sequence_list = []
     for i in range(10):
         sequence_list.append("".join(random.choices("ATGC", k=200)))
-    temp_file = tempfile.NamedTemporaryFile(prefix="K562_", delete=False, dir=os.getcwd())
+    temp_file = tempfile.NamedTemporaryFile(
+        prefix="K562_", delete=False, dir=os.getcwd()
+    )
     temp_path = temp_file.name
     with open(temp_path, "w") as f:
         for seq in sequence_list:
@@ -27,7 +29,9 @@ def mock_hESCT0_sequences(tmpdir):
     sequence_list = []
     for i in range(10):
         sequence_list.append("".join(random.choices("ATGC", k=200)))
-    temp_file = tempfile.NamedTemporaryFile(prefix="hESCT0_", delete=False, dir=os.getcwd())
+    temp_file = tempfile.NamedTemporaryFile(
+        prefix="hESCT0_", delete=False, dir=os.getcwd()
+    )
     temp_path = temp_file.name
     with open(temp_path, "w") as f:
         for seq in sequence_list:
@@ -41,7 +45,9 @@ def mock_HepG2_sequences(tmpdir):
     sequence_list = []
     for i in range(10):
         sequence_list.append("".join(random.choices("ATGC", k=200)))
-    temp_file = tempfile.NamedTemporaryFile(prefix="HepG2_", delete=False, dir=os.getcwd())
+    temp_file = tempfile.NamedTemporaryFile(
+        prefix="HepG2_", delete=False, dir=os.getcwd()
+    )
     temp_path = temp_file.name
     with open(temp_path, "w") as f:
         for seq in sequence_list:
@@ -56,7 +62,9 @@ def mock_GM12878_sequences(tmpdir):
     sequence_list = []
     for i in range(10):
         sequence_list.append("".join(random.choices("ATGC", k=200)))
-    temp_file = tempfile.NamedTemporaryFile(prefix="GM12878_", delete=False, dir=os.getcwd())
+    temp_file = tempfile.NamedTemporaryFile(
+        prefix="GM12878_", delete=False, dir=os.getcwd()
+    )
     temp_path = temp_file.name
     with open(temp_path, "w") as f:
         for seq in sequence_list:
@@ -64,10 +72,17 @@
     return temp_path


-def test_combine_all_seqs(mock_GM12878_sequences, mock_k562_sequences, mock_HepG2_sequences, mock_hESCT0_sequences):
+def test_combine_all_seqs(
+    mock_GM12878_sequences,
+    mock_k562_sequences,
+    mock_HepG2_sequences,
+    mock_hESCT0_sequences,
+):
     # Call function with mock sample file
     cell_list = ["GM12878", "HepG2", "hESCT0", "K562"]
-    sequences = combine_all_seqs(cell_list, "tests/test_data/validation_preprocessing/df_train.txt")
+    sequences = combine_all_seqs(
+        cell_list, "tests/test_data/validation_preprocessing/df_train.txt"
+    )

     # Assert format is correct
     assert sequences["SEQUENCE"].str.len().max() == 200
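The four mock sequence fixtures above share one pattern: write ten random 200-mers to a `NamedTemporaryFile` created with `delete=False`, so the path outlives the handle and can be handed to the code under test. A compact sketch of that pattern (the `K562_` prefix is just one of the fixtures' values):

import os
import random
import tempfile

temp_file = tempfile.NamedTemporaryFile(prefix="K562_", delete=False, dir=os.getcwd())
temp_path = temp_file.name
with open(temp_path, "w") as f:
    for _ in range(10):
        f.write("".join(random.choices("ATGC", k=200)) + "\n")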
diff --git a/train.py b/train.py
index b129c1cf..a8247b5e 100644
--- a/train.py
+++ b/train.py
@@ -7,7 +7,9 @@


 def train():
-    accelerator = Accelerator(split_batches=True, log_with=["wandb"], mixed_precision="bf16")
+    accelerator = Accelerator(
+        split_batches=True, log_with=["wandb"], mixed_precision="bf16"
+    )
     data = load_data(
         data_path="src/dnadiffusion/data/K562_hESCT0_HepG2_GM12878_12k_sequences_per_group.txt",