diff --git a/README.md b/README.md
index e5769f7..222805a 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 lpcjobqueue
 ===========
-A dask-jobqueue plugin for the LPC Condor queue designed to work with the `coffea-dask` singularity image.
+A dask-jobqueue plugin for the LPC Condor queue designed to work with the `coffea-dask` apptainer image.
 
 __For users of LXPLUS:__ a similar implementation is available in https://github.com/cernops/dask-lxplus
 
@@ -13,16 +13,16 @@ curl -OL https://raw.githubusercontent.com/CoffeaTeam/lpcjobqueue/main/bootstrap
 bash bootstrap.sh
 ```
 This creates two new files in this directory: `shell` and `.bashrc`. The `./shell`
-executable can then be used to start a singularity shell with a coffea environment.
+executable can then be used to start an apptainer shell with a coffea environment.
 Optionally, one can choose a specific image using e.g.
 `./shell coffeateam/coffea-dask:coffea-dask:0.7.1-gd5339d7`. You can list all
 choices of image with `ls /cvmfs/unpacked.cern.ch/registry.hub.docker.com/coffeateam/`.
-Note the singularity environment does inherit from your calling environemnt, so
+Note the apptainer environment does inherit from your calling environment, so
 it should be "clean" (i.e. no cmsenv, LCG release, etc.)
 
 # Usage
 
-The singularity shell can spawn dask clusters on the LPC condor farm, using the same image for the workers
+The apptainer shell can spawn dask clusters on the LPC condor farm, using the same image for the workers
 as the shell environment. Be sure your x509 grid proxy certificate is up to date before
 starting the shell. The package assumes your proxy is located in your home directory
 (as is usual for LPC interactive nodes)
@@ -35,7 +35,7 @@ and provides the following additional options:
         run workers from that environent. This allows user-installed packages
         to be available on the worker
     image: str
-        Name of the singularity image to use (default: $COFFEA_IMAGE)
+        Name of the apptainer image to use (default: $COFFEA_IMAGE)
     transfer_input_files: str, List[str]
         Files to be shipped along with the job. They will be placed in the
         working directory of the workers, as usual for HTCondor. Any paths
diff --git a/bootstrap.sh b/bootstrap.sh
index ae6cb60..4908e39 100755
--- a/bootstrap.sh
+++ b/bootstrap.sh
@@ -12,7 +12,9 @@ fi
 
 grep -v '^include' /etc/condor/config.d/01_cmslpc_interactive > .condor_config
 
-SINGULARITY_SHELL=\$(which bash) singularity exec -B \${PWD}:/srv -B /cvmfs -B /uscmst1b_scratch --pwd /srv \\
+export APPTAINER_BINDPATH=/uscmst1b_scratch,/cvmfs,/cvmfs/grid.cern.ch/etc/grid-security/vomses:/etc/vomses,/cvmfs/grid.cern.ch/etc/grid-security:/etc/grid-security
+
+APPTAINER_SHELL=\$(which bash) apptainer exec -B \${PWD}:/srv --pwd /srv \\
   /cvmfs/unpacked.cern.ch/registry.hub.docker.com/\${COFFEA_IMAGE} \\
   /bin/bash --rcfile /srv/.bashrc
 EOF
@@ -45,4 +47,4 @@ pip show lpcjobqueue 2>/dev/null | grep -q "Version: \${LPCJQ_VERSION}" || pip i
 EOF
 
 chmod u+x shell .bashrc
-echo "Wrote shell and .bashrc to current directory. You can delete this file. Run ./shell to start the singularity shell"
+echo "Wrote shell and .bashrc to current directory. You can delete this file. Run ./shell to start the apptainer shell"
diff --git a/src/lpcjobqueue/cluster.py b/src/lpcjobqueue/cluster.py
index 923fff6..8318332 100644
--- a/src/lpcjobqueue/cluster.py
+++ b/src/lpcjobqueue/cluster.py
@@ -211,7 +211,7 @@ class LPCCondorCluster(HTCondorCluster):
             run workers from that environent. This allows user-installed packages
             to be available on the worker
         image: str
-            Name of the singularity image to use (default: $COFFEA_IMAGE)
+            Name of the apptainer image to use (default: $COFFEA_IMAGE)
         transfer_input_files: str, List[str]
             Files to be shipped along with the job. They will be placed in the
             working directory of the workers, as usual for HTCondor. Any paths
diff --git a/src/lpcjobqueue/schedd.py b/src/lpcjobqueue/schedd.py
index 1689d94..1351422 100644
--- a/src/lpcjobqueue/schedd.py
+++ b/src/lpcjobqueue/schedd.py
@@ -7,7 +7,7 @@ logger = logging.getLogger(__name__)
 
 os.environ["CONDOR_CONFIG"] = os.path.join("/srv/.condor_config")
 if not os.path.isfile(os.environ['CONDOR_CONFIG']):
-    logger.warn(f"Condor configuration not found! run the following command outside the singularity instance")
+    logger.warning("Condor configuration not found! Run the following command outside the apptainer instance")
     logger.warn(f"grep -v '^include' /etc/condor/config.d/01_cmslpc_interactive > .condor_config")
 
 import htcondor  # noqa: E402
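For reference, a minimal usage sketch of the cluster class touched by this diff, run from inside the `./shell` environment. The `image` and `transfer_input_files` options are the ones documented in the hunks above; the image tag, the input file name, and the worker count are illustrative, not prescribed by this change:

```python
# Minimal sketch, run inside the apptainer shell started by ./shell.
# The image tag and "myanalysis.py" are hypothetical placeholders.
from distributed import Client
from lpcjobqueue import LPCCondorCluster

cluster = LPCCondorCluster(
    # Defaults to $COFFEA_IMAGE when omitted, per the docstring above.
    image="coffeateam/coffea-dask:0.7.1-gd5339d7",
    # Shipped to the worker sandbox, as usual for HTCondor.
    transfer_input_files=["myanalysis.py"],
)
cluster.scale(10)  # request 10 condor worker jobs

client = Client(cluster)
# ... submit dask work through `client` ...
client.close()
cluster.close()
```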