diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml
index b0d9f90c..591dc586 100644
--- a/.github/workflows/run-tests.yml
+++ b/.github/workflows/run-tests.yml
@@ -41,16 +41,16 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
- - name: Install pipenv and wheel for managing dependencies
+ - name: Install tooling for managing dependencies
run: |
python -m pip install --upgrade uv wheel
- - name: Set up a cache-key for installations of dependencies, in .venv
- id: cache-pipenv
- uses: actions/cache@v4
- with:
- path: ./.venv
- key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
+ # - name: Set up a cache-key for installations of dependencies, in .venv
+ # id: cache-venv
+ # uses: actions/cache@v4
+ # with:
+ # path: ./.venv
+ # key: ${{ runner.os }}-venv-${{ hashFiles('**/Pipfile.lock') }}
- name: Install dependencies
# skipping this check to see if we have a collision between cache keys
diff --git a/.gitpod.yml b/.gitpod.yml
index a413bde7..94ca7e12 100644
--- a/.gitpod.yml
+++ b/.gitpod.yml
@@ -25,14 +25,15 @@ tasks:
init: |
cp ./.env.gitpod ./.env
mysqladmin create greencheck
- python -m pipenv install --dev
- python -m pipenv run python ./manage.py migrate
- python -m pipenv run python ./manage.py tailwind install
- python -m pipenv run python ./manage.py tailwind build
+ source .venv/bin/activate
+ dotenv run -- python -m pip install -r requirements/requirements.linux.generated.txt
+ dotenv run -- ./manage.py migrate
+ dotenv run -- ./manage.py tailwind install
+ dotenv run -- ./manage.py tailwind build
cd ./apps/theme/static_src/
npx rollup --config
cd ../../../
- python -m pipenv run python ./manage.py collectstatic --no-input
+ dotenv run -- ./manage.py collectstatic --no-input
command: ls
- name: rabbitmq
diff --git a/ansible/_add_system_dependencies.yml b/ansible/_add_system_dependencies.yml
index b34b78e5..3bfa51ca 100644
--- a/ansible/_add_system_dependencies.yml
+++ b/ansible/_add_system_dependencies.yml
@@ -71,6 +71,6 @@
when: install_system_reqs is true
tags: [python]
-- name: Install pipenv libraries
- ansible.builtin.command: "python -m pip install pipenv wheel"
+- name: Install libraries for handling dependencies
+ ansible.builtin.command: "python3.11 -m pip install uv wheel"
when: install_system_reqs is true
diff --git a/ansible/_assemble_deploy_assets.yml b/ansible/_assemble_deploy_assets.yml
index 88a50e05..a3cbfc3f 100644
--- a/ansible/_assemble_deploy_assets.yml
+++ b/ansible/_assemble_deploy_assets.yml
@@ -1,8 +1,11 @@
---
- name: Build minified tailwind css file
- ansible.builtin.command: "python -m pipenv run ./manage.py tailwind build"
+ ansible.builtin.shell: |
+ source .venv/bin/activate
+ dotenv run -- ./manage.py tailwind build
args:
chdir: "{{ project_root }}/current"
+ executable: "/usr/bin/bash"
changed_when: false
- name: Build javascript bundle with rollup, using the local config file
@@ -12,7 +15,10 @@
changed_when: false
- name: Collect static files for django
- ansible.builtin.command: "python -m pipenv run ./manage.py collectstatic --no-input"
+ ansible.builtin.shell: |
+ source .venv/bin/activate
+ dotenv run -- ./manage.py collectstatic --no-input
args:
chdir: "{{ project_root }}/current"
+ executable: "/usr/bin/bash"
changed_when: false
diff --git a/ansible/_install_deploy_dependencies.yml b/ansible/_install_deploy_dependencies.yml
index ac46e7ed..95491712 100644
--- a/ansible/_install_deploy_dependencies.yml
+++ b/ansible/_install_deploy_dependencies.yml
@@ -1,36 +1,32 @@
---
-- name: Set up venv for pipenv to use. this is needed for the step below to work
- # without creating the venv in .venv, pipenv can't find the version
- # of python or pip to use, and errors out
- ansible.builtin.command: "python -m pipenv --python /usr/bin/python3.11"
+- name: Set up a virtual environment for this project
+ ansible.builtin.command: "python3.11 -m venv .venv"
args:
chdir: "{{ project_root }}/current"
changed_when: false
-- name: Install latest version of pipenv
- ansible.builtin.pip:
- name: pipenv
- virtualenv: "{{ project_root }}/current/.venv"
-
-- name: Install python dependencies with pipenv
- ansible.builtin.command: "python -m pipenv sync" # noqa no-changed-when
+- name: Install python dependencies into the project virtual environment
+ ansible.builtin.shell: |
+ source .venv/bin/activate
+ python3.11 -m pip install -r requirements/requirements.linux.generated.txt
args:
chdir: "{{ project_root }}/current"
- environment:
- # make sure we install in the project root.
- # this way when we clear up old releases we also
- # remove the deps.
- # using env vars triggers the 'schema[playbook]', hence the noqa
- PIPENV_VENV_IN_PROJECT: "1" # noqa schema[playbook]
+ executable: "/usr/bin/bash"
- name: Update node deps for building tailwind
- ansible.builtin.command: "python -m pipenv run ./manage.py tailwind update"
+ ansible.builtin.shell: |
+ source .venv/bin/activate
+ dotenv run -- python3.11 ./manage.py tailwind update
args:
chdir: "{{ project_root }}/current"
+ executable: "/usr/bin/bash"
when: update_front_end_deps is true
- name: Install node deps for building tailwind
- ansible.builtin.command: "python -m pipenv run ./manage.py tailwind install"
+ ansible.builtin.shell: |
+ source .venv/bin/activate
+ dotenv run -- python3.11 ./manage.py tailwind install
args:
+ executable: "/usr/bin/bash"
chdir: "{{ project_root }}/current"
changed_when: false
diff --git a/ansible/cloud-ip-range-import.yml b/ansible/cloud-ip-range-import.yml
deleted file mode 100644
index e4d807ec..00000000
--- a/ansible/cloud-ip-range-import.yml
+++ /dev/null
@@ -1,12 +0,0 @@
----
-- name: Run the import tasks for the big cloud providers who expose IP-ranges over an API
- hosts:
- - all
- remote_user: "deploy"
- become: no
-
- tasks:
- - name: run management task import latest set of ip ranges from AWS
- shell: "pipenv run ./manage.py update_aws_ip_ranges"
- args:
- chdir: "{{ project_root }}/current"
diff --git a/ansible/domain-dataset-snapshot.yml b/ansible/domain-dataset-snapshot.yml
index fafbb71e..625ef5e4 100644
--- a/ansible/domain-dataset-snapshot.yml
+++ b/ansible/domain-dataset-snapshot.yml
@@ -7,6 +7,9 @@
tasks:
- name: run management task to create domains snapshot
- shell: "pipenv run ./manage.py dump_green_domains --upload"
+ shell: |
+ source .venv/bin/activate
+ dotenv run -- ./manage.py dump_green_domains --upload
args:
chdir: "{{ project_root }}/current"
+ executable: "/usr/bin/bash"
diff --git a/ansible/migrate.yml b/ansible/migrate.yml
index 4c2ed14d..4fc88338 100644
--- a/ansible/migrate.yml
+++ b/ansible/migrate.yml
@@ -1,12 +1,15 @@
---
- - name: Deploy the TGWF django admin
- hosts:
- - all
- remote_user: "deploy"
- become: no
+- name: Deploy the TGWF django admin
+ hosts:
+ - all
+ remote_user: "deploy"
+ become: no
- tasks:
- - name: run migration
- shell: "pipenv run ./manage.py migrate"
- args:
- chdir: "{{ project_root }}/current"
\ No newline at end of file
+ tasks:
+ - name: run migration
+ shell: |
+ source .venv/bin/activate
+ dotenv run -- ./manage.py migrate
+ args:
+ chdir: "{{ project_root }}/current"
+ executable: "/usr/bin/bash"
diff --git a/ansible/templates/dotenv.j2 b/ansible/templates/dotenv.j2
index da79a291..68aaf4e5 100644
--- a/ansible/templates/dotenv.j2
+++ b/ansible/templates/dotenv.j2
@@ -44,5 +44,3 @@ API_URL = "{{ lookup('env', 'API_URL') }}"
TRELLO_REGISTRATION_EMAIL_TO_BOARD_ADDRESS = "{{ lookup('env', 'TRELLO_REGISTRATION_EMAIL_TO_BOARD_ADDRESS') }}"
-
-PIPENV_VENV_IN_PROJECT=True
diff --git a/ansible/templates/export_green_domains.sh.j2 b/ansible/templates/export_green_domains.sh.j2
index d89276a4..c12d9350 100644
--- a/ansible/templates/export_green_domains.sh.j2
+++ b/ansible/templates/export_green_domains.sh.j2
@@ -10,4 +10,5 @@ set -euo pipefail
cd {{ project_root }}/current/
# run the domains export and upload to object storage
-python -m pipenv run ./manage.py dump_green_domains --upload
+source .venv/bin/activate
+dotenv run -- python ./manage.py dump_green_domains --upload
diff --git a/ansible/templates/import_ips_for_large_providers.sh.j2 b/ansible/templates/import_ips_for_large_providers.sh.j2
index 7b02a2bb..1592351d 100644
--- a/ansible/templates/import_ips_for_large_providers.sh.j2
+++ b/ansible/templates/import_ips_for_large_providers.sh.j2
@@ -10,6 +10,7 @@ set -euo pipefail
cd {{ project_root }}/current/
# run our ip imports
-python -m pipenv run ./manage.py update_networks_in_db_amazon
-python -m pipenv run ./manage.py update_networks_in_db_google
-python -m pipenv run ./manage.py update_networks_in_db_microsoft
+source .venv/bin/activate
+dotenv run -- ./manage.py update_networks_in_db_amazon
+dotenv run -- ./manage.py update_networks_in_db_google
+dotenv run -- ./manage.py update_networks_in_db_microsoft
diff --git a/ansible/templates/run_gunicorn.sh.j2 b/ansible/templates/run_gunicorn.sh.j2
index 67b6fbfe..e3409134 100644
--- a/ansible/templates/run_gunicorn.sh.j2
+++ b/ansible/templates/run_gunicorn.sh.j2
@@ -1,4 +1,9 @@
# supervisor can only control processes it started itself.
# So we need to use exec to replace the parent shell script process
-# that starts pipenv
-exec dotenv run -- gunicorn greenweb.wsgi -b {{ internal_ip }}:{{ gunicorn_port }} -t 300 -c gunicorn.conf.py --statsd-host=10.0.0.2:9125 --statsd-prefix=member.app
+# that calls gunicorn
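+# activate the project virtualenv so gunicorn and the dotenv CLI resolve from .venv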
+source .venv/bin/activate
+exec dotenv run -- gunicorn greenweb.wsgi \
+ --bind {{ internal_ip }}:{{ gunicorn_port }} \
+ --timeout 300 \
+ --config gunicorn.conf.py \
+ --statsd-host=10.0.0.2:9125 --statsd-prefix=member.app
diff --git a/ansible/templates/run_worker.sh.j2 b/ansible/templates/run_worker.sh.j2
index cadb8da8..d8cfe365 100644
--- a/ansible/templates/run_worker.sh.j2
+++ b/ansible/templates/run_worker.sh.j2
@@ -1,4 +1,8 @@
# supervisor can only control processes it started itself.
# So we need to use exec to replace the parent shell script process
-# that starts pipenv
-exec python -m pipenv run ./manage.py rundramatiq --threads {{ dramatiq_threads }} --processes {{ dramatiq_processes }} --queues default
+# that calls manage.py
+source .venv/bin/activate
+exec dotenv run -- ./manage.py rundramatiq \
+ --threads {{ dramatiq_threads }} \
+ --processes {{ dramatiq_processes }} \
+ --queues default
diff --git a/docs/how-to.md b/docs/how-to.md
index d3a2c398..0f3884b0 100644
--- a/docs/how-to.md
+++ b/docs/how-to.md
@@ -18,7 +18,7 @@ Important: make sure to be outside of an enviroment (deactivate).
#### Run all tests until one fails
```
-pipenv run pytest -x
+dotenv run -- pytest -x
```
## Set up Gitpod environment
diff --git a/docs/installation.md b/docs/installation.md
index 67aa345e..13e456a6 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -26,19 +26,27 @@ sudo apt install python3 python3-dev build-essential libmariadb3 libmariadb-dev
__Note__ In the context of development, it is recommended to manage Python versions using [`pyenv`](https://github.com/pyenv/pyenv) instead of relying on the version shipped in the operating system.
-__Note__ Currently Python version 3.8.5 is used on production.
+__Note__ Currently Python version 3.11.9 is used on production.
### Setup
Before working through the following list, make sure you are in the root directory (workspace/admin-portal).
+
1. Make sure you have the right Python version installed: `python --version`
-2. Install a package named `pipenv` with `pip`: `python -m pip install pipenv`.
-3. Once installed, use this command to install all project packages: `pipenv install --dev`. The project packages are defined in `Pipfile`, and the `--dev` option installs both: develop and default packages.
-4. You can activate the virtual environment created for you by `pipenv` by running the command: `pipenv shell`.
-5. As a final step, make sure to copy the content of `.env.test` to `.env` and add the necessary credentials.
+2. Create a virtual environment: `python -m venv .venv`
+3. Activate the virtual environment created for you: `source .venv/bin/activate`
+4. Install a package named `uv` with `pip`: `python -m pip install uv`.
+5. Once installed, use `uv pip install -r requirements.dev.generated.txt` to install all project packages, which are defined in `requirements.dev.generated.txt` (the full setup sequence is sketched after this list).
+6. As a final step, make sure to copy the content of `.env.test` to `.env` and add the necessary credentials.
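+
+Putting those steps together, a minimal setup from a fresh checkout might look like this (a sketch only; adjust the requirements file path if it lives in a `requirements/` directory):
+
+```
+python -m venv .venv
+source .venv/bin/activate
+python -m pip install uv
+uv pip install -r requirements.dev.generated.txt
+cp .env.test .env
+```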
__Note__ that this project relies on object storage. In production this is needed to store static files, but in development it is not required.
-By default `pipenv` loads the content of the `.env` file.
+By default `dotenv run` loads the content of the `.env` file before running the command that follows, so
+
+```
+dotenv run -- my-command
+```
+
+This will run `my-command` with all the environment variables in `.env` set.
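+
+For example, assuming a standard Django development setup, you could start the local server with the variables from `.env` loaded:
+
+```
+dotenv run -- ./manage.py runserver
+```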
## Working with email
diff --git a/docs/recurring-tasks.md b/docs/recurring-tasks.md
index c2dcb652..f8cd0092 100644
--- a/docs/recurring-tasks.md
+++ b/docs/recurring-tasks.md
@@ -16,9 +16,10 @@ While the Green Web Foundation offers a self-service way to maintain up to date
These can be run on the command line using the following commands
```
-pipnev run ./manage.py update_networks_in_db_amazon
-pipnev run ./manage.py update_networks_in_db_google
-pipnev run ./manage.py update_networks_in_db_microsoft
+source .venv/bin/activate
+dotenv run -- ./manage.py update_networks_in_db_amazon
+dotenv run -- ./manage.py update_networks_in_db_google
+dotenv run -- ./manage.py update_networks_in_db_microsoft
```
Look in the `import_ips_for_large_providers.sh.j2` file to see the specific shell script run each week, and the `setup_cronjobs.yml` ansible playbook to see the specific tasks used to set up a server to run these on a recurring schedule.
@@ -52,13 +53,13 @@ We use the yaml folded scalar literal to make the formatting easier to read, alo
If you wanted to check that a cronjob is set up on the necessary server, you would run the following ansible command, passing in the correct inventory file to tell ansible which set of servers to connect to, and passing in `--check` to see what changes might take effect:
```
-pipenv run ansible-playbook -i ansible/inventories/prod.yml ansible/setup_cronjobs.yml --check
+dotenv run -- ansible-playbook -i ansible/inventories/prod.yml ansible/setup_cronjobs.yml --check
```
To run the actual command, and update the cronjobs, you would run the same command, without the `--check` flag.
```
-pipenv run ansible-playbook -i ansible/inventories/prod.yml ansible/setup_cronjobs.yml
+dotenv run -- ansible-playbook -i ansible/inventories/prod.yml ansible/setup_cronjobs.yml
```
### Daily Green Domain Exports
@@ -70,14 +71,14 @@ These are intended for cases when hitting an external API is either impractical,
This export is normally run with the following django management command.
```
-pipenv run ./manage.py dump_green_domains
+dotenv run -- ./manage.py dump_green_domains
```
To upload the database snapshot to object storage, pass along the `--upload` flag.
```
-pipenv run ./manage.py dump_green_domains --upload
+dotenv run -- ./manage.py dump_green_domains --upload
```
This is currently set to run every day, but historically this job has not run consistently every single day.
diff --git a/makefile b/makefile
index 6c08841c..7ea587fd 100644
--- a/makefile
+++ b/makefile
@@ -6,10 +6,10 @@ venv:
## Installing
release:
- PIPENV_DOTENV_LOCATION=.env.prod pipenv run sentry-cli releases new -p admin-portal $(shell sentry-cli releases propose-version)
- PIPENV_DOTENV_LOCATION=.env.prod pipenv run sentry-cli releases set-commits --auto $(shell sentry-cli releases propose-version)
- PIPENV_DOTENV_LOCATION=.env.prod pipenv run ansible-playbook ansible/deploy.yml -i ansible/inventories/prod.yml
- PIPENV_DOTENV_LOCATION=.env.prod pipenv run sentry-cli releases finalize $(shell sentry-cli releases propose-version)
+ dotenv -f .env.prod run -- sentry-cli releases new -p admin-portal $(shell sentry-cli releases propose-version)
+ dotenv -f .env.prod run -- sentry-cli releases set-commits --auto $(shell sentry-cli releases propose-version)
+ dotenv -f .env.prod run -- ansible-playbook ansible/deploy.yml -i ansible/inventories/prod.yml
+ dotenv -f .env.prod run -- sentry-cli releases finalize $(shell sentry-cli releases propose-version)
dev.createsuperuser:
python ./manage.py createsuperuser --username admin --email admin@admin.commits --noinput
@@ -52,16 +52,6 @@ test:
test.only:
pytest -s --create-db -m only -v --ds=greenweb.settings.testing
-flake:
- flake8 ./greenweb ./apps ./*.py --count --statistics
-black:
- black ./greenweb ./apps ./*.py $(ARGS)
-
-black.check:
- @ARGS="--check --color --diff" make black
-
-ci: | black.check flake
-
# Build the documentation using Sphinx
docs:
sphinx-build ./docs _build/
diff --git a/run-tests.sh b/run-tests.sh
deleted file mode 100755
index 3e9f63ab..00000000
--- a/run-tests.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env bash
-pipenv run pytest
\ No newline at end of file