From 2c5da59c23ef1758705357f6f223b2fee1706ee4 Mon Sep 17 00:00:00 2001
From: Xudong Sun
Date: Sat, 23 Sep 2023 23:54:44 +0200
Subject: [PATCH 1/2] exp_utils.py: append the SLURM job id to the model name

---
 domainlab/compos/exp/exp_utils.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/domainlab/compos/exp/exp_utils.py b/domainlab/compos/exp/exp_utils.py
index 3e39a11a0..d423958f2 100644
--- a/domainlab/compos/exp/exp_utils.py
+++ b/domainlab/compos/exp/exp_utils.py
@@ -63,6 +63,9 @@ def mk_model_na(self, tag=None, dd_cut=19):
         model_name = "_".join(list4mname)
         if self.host.args.debug:
             model_name = "debug_" + model_name
+        slurm = os.environ.get('SLURM_JOB_ID')
+        if slurm:
+            model_name = model_name + '_' + slurm
         logger = Logger.get_logger()
         logger.info(f"model name: {model_name}")
         return model_name

From e47e39e580851237d26a2f6a9261b6237b17ca56 Mon Sep 17 00:00:00 2001
From: smilesun
Date: Thu, 28 Sep 2023 09:00:18 +0200
Subject: [PATCH 2/2] Revert "Merge pull request #311 from marrlab/fishr"

This reverts commit d7f3c3135c69b068ab640b3295f56adf0f7af962, reversing
changes made to 174ad27ef7ebb6e034fa5c3ea309455d864fac40.
---
 domainlab/algos/trainers/a_trainer.py   |   2 -
 domainlab/algos/trainers/train_fishr.py | 172 ------------------------
 domainlab/algos/trainers/zoo_trainer.py |   3 -
 domainlab/models/a_model.py             |   8 --
 domainlab/models/a_model_classif.py     |  13 +-
 domainlab/models/model_deep_all.py      |  12 --
 domainlab/tasks/a_task.py               |   1 -
 domainlab/tasks/b_task.py               |   1 -
 poetry.lock                             | 139 ++++++-------------
 pyproject.toml                          |   1 -
 requirements.txt                        |   7 +-
 test_fishr.sh                           |   5 -
 tests/test_fishr.py                     |  12 --
 13 files changed, 45 insertions(+), 331 deletions(-)
 delete mode 100644 domainlab/algos/trainers/train_fishr.py
 delete mode 100644 test_fishr.sh
 delete mode 100644 tests/test_fishr.py

diff --git a/domainlab/algos/trainers/a_trainer.py b/domainlab/algos/trainers/a_trainer.py
index 224620c63..4dbe745cc 100644
--- a/domainlab/algos/trainers/a_trainer.py
+++ b/domainlab/algos/trainers/a_trainer.py
@@ -37,7 +37,6 @@ def __init__(self, successor_node=None):
         #
         self.loader_tr = None
         self.loader_te = None
-        self.dict_loader_tr = None
         self.num_batches = None
         self.flag_update_hyper_per_epoch = None
         self.flag_update_hyper_per_batch = None
@@ -67,7 +66,6 @@ def init_business(self, model, task, observer, device, aconf, flag_accept=True):
         #
         self.loader_tr = task.loader_tr
         self.loader_te = task.loader_te
-        self.dict_loader_tr = task.dict_loader_tr
         if flag_accept:
             self.observer.accept(self)
diff --git a/domainlab/algos/trainers/train_fishr.py b/domainlab/algos/trainers/train_fishr.py
deleted file mode 100644
index a33f4d90d..000000000
--- a/domainlab/algos/trainers/train_fishr.py
+++ /dev/null
@@ -1,172 +0,0 @@
-"""
-Fishr trainer: minimize the variance, across domains, of the domain-level gradient variances
-"""
-
-from collections import OrderedDict
-import torch
-from torch import nn
-
-try:
-    from backpack import backpack, extend
-    from backpack.extensions import Variance
-except:
-    backpack = None
-
-from domainlab.algos.trainers.train_basic import TrainerBasic
-
-_bce_extended = extend(nn.CrossEntropyLoss(reduction='none'))
-
-
-class TrainerFishr(TrainerBasic):
-    """
-    The goal is to minimize the variance of the domain-level variance of the gradients.
-    This aligns the domain-level loss landscapes locally around the final weights, reducing
-    inconsistencies across domains.
-
-    For more details, see: Alexandre Ramé, Corentin Dancette, and Matthieu Cord.
-    "Fishr: Invariant gradient variances for out-of-distribution generalization."
-    International Conference on Machine Learning. PMLR, 2022.
-    """
-    def tr_epoch(self, epoch):
-        list_loaders = list(self.dict_loader_tr.values())
-        loaders_zip = zip(*list_loaders)
-        self.model.train()
-        self.model.convert4backpack()
-        self.epo_loss_tr = 0
-
-        for ind_batch, tuple_data_domains_batch in enumerate(loaders_zip):
-            self.optimizer.zero_grad()
-            list_dict_var_grads, list_loss_erm = self.var_grads_and_loss(tuple_data_domains_batch)
-            dict_layerwise_var_var_grads = self.variance_between_dict(list_dict_var_grads)
-            dict_layerwise_var_var_grads_sum = \
-                {key: val.sum() for key, val in dict_layerwise_var_var_grads.items()}
-            loss_fishr = sum(dict_layerwise_var_var_grads_sum.values())
-            loss = sum(list_loss_erm) + self.aconf.gamma_reg * loss_fishr
-            loss.backward()
-            self.optimizer.step()
-            self.epo_loss_tr += loss.detach().item()
-            self.after_batch(epoch, ind_batch)
-
-        flag_stop = self.observer.update(epoch)  # notify observer
-        return flag_stop
-
-    def var_grads_and_loss(self, tuple_data_domains_batch):
-        """
-        Calculate the domain-level variance of the gradients and the per-domain ERM loss.
-        Input: a tuple containing lists with the data per domain.
-        Return: two lists. The first contains dictionaries with the gradient variances: the
-        keys are the layers and the values are tensors holding the gradient variances.
-        The second contains the losses; each entry is the summed ERM loss of a single
-        domain.
-        """
-
-        list_dict_var_grads = []
-        list_loss_erm = []
-        for list_x_y_d_single_domain in tuple_data_domains_batch:  # traverse each domain
-            # first dimension of tensor_x is batchsize
-            tensor_x, vec_y, vec_d, *_ = tuple(list_x_y_d_single_domain)
-            tensor_x, vec_y, vec_d = \
-                tensor_x.to(self.device), vec_y.to(self.device), vec_d.to(self.device)
-            dict_var_grads_single_domain = self.cal_dict_variance_grads(tensor_x, vec_y)
-            list_dict_var_grads.append(dict_var_grads_single_domain)
-            loss_erm = self.model.cal_loss(tensor_x, vec_y, vec_d)
-            list_loss_erm.append(loss_erm.sum())  # FIXME: make the sum() reduction configurable
-        # now len(list_dict_var_grads) = (# domains)
-        return list_dict_var_grads, list_loss_erm
-
-
-    def variance_between_dict(self, list_dict_var_paragrad):
-        """
-        Computes the variance of the domain-level gradient variances, layer-wise.
-        Let $\\bar{v}=1/n\\sum_{i=1}^n v_i$ represent the mean across n domains, with
-        $$v_i = var(\\nabla_{\\theta}\\ell(x^{(d_i)}, y^{(d_i)}))$$, where $$d_i$$ means data
-        coming from domain i. We are interested in
-        $1/n\\sum_i(v_i-\\bar{v})^2 = 1/n\\sum_i v_i^2 - \\bar{v}^2$.
-
-        Input: list of dictionaries, each dictionary has the structure
-        {"layer1": tensor[64, 3, 11, 11],
-        "layer2": tensor[8, 3, 5, 5]}.....
-        The scalar values in the dictionary are the variances of the gradient of the loss
-        w.r.t. the scalar component of the weight tensor for the layer in question, where
-        the variance is computed w.r.t. the minibatch of a particular domain.
-
-        Return: dictionary, containing the layers as keys and tensors as values. The variances
-        are stored in the tensors as scalars.
-        """
-
-        dict_d1 = list_dict_var_paragrad[0]
-        # first we determine \\bar(v^2)
-        list_dict_var_paragrad_squared = [{key: torch.pow(dict_ele[key], 2) for key in dict_d1}
-                                          for dict_ele in list_dict_var_paragrad]
-        dict_mean_square_var_paragrad = self.cal_mean_across_dict(list_dict_var_paragrad_squared)
-
-        # now we determine $\\bar(v)^2$
-        dict_mean_var_paragrad = \
-            {key: torch.mean(torch.stack([ele[key] for ele in list_dict_var_paragrad]), dim=0)
-             for key in dict_d1.keys()}
-        dict_square_mean_var_paragrad = self.cal_power_single_dict(dict_mean_var_paragrad)
-
-        # now we do \\bar(v^2) - (\\bar(v))^2
-        dict_layerwise_var_var_grads = \
-            {key: dict_mean_square_var_paragrad[key] - dict_square_mean_var_paragrad[key]
-             for key in dict_square_mean_var_paragrad.keys()}
-        return dict_layerwise_var_var_grads
-
-    def cal_power_single_dict(self, mdict):
-        """
-        Calculates the element-wise square of the values in a dictionary, when the values
-        are tensors.
-        Input: dictionary, where the values are tensors.
-        Return: dictionary, where the values are tensors. The scalar values of the tensors
-        are the element-wise squares of the scalars in the input dictionary.
-        """
-
-        dict_rst = {key: torch.pow(mdict[key], 2) for key in mdict}
-        return dict_rst
-
-    def cal_mean_across_dict(self, list_dict):
-        """
-        Calculates the mean across several dictionaries.
-        Input: list of dictionaries, where the values of each dictionary are tensors.
-        Return: dictionary, where the values are tensors. The tensors contain, element-wise,
-        the mean across the dictionaries in the input list.
-        """
-
-        dict_d1 = list_dict[0]
-        dict_mean_var_paragrad = \
-            {key: torch.mean(torch.stack([ele[key] for ele in list_dict]), dim=0)
-             for key in dict_d1.keys()}
-        return dict_mean_var_paragrad
-
-    def cal_dict_variance_grads(self, tensor_x, vec_y):
-        """
-        Calculates the domain-level variances of the gradients w.r.t. the scalar component of the
-        weight tensor for the layer in question, i.e.
-        $$v_i = var(\\nabla_{\\theta}\\ell(x^{(d_i)}, y^{(d_i)}))$$, where $$d_i$$ means data
-        coming from domain i. The computation is done using the package backpack.
-
-        Input: tensor_x, a tensor whose first dimension is the batch size, and vec_y,
-        a vector representing the output labels.
-
-        Return: dictionary, where the key is the name of a neural network layer and the
-        value is the diagonal variance of each scalar component of the gradient of the loss
-        w.r.t. the parameter.
-
-        Return Example:
-        {"layer1": Tensor[64, 3, 11, 11]} for a convolution kernel
-        """
-
-        loss = self.model.cal_task_loss(tensor_x.clone(), vec_y)
-
-        with backpack(Variance()):
-            loss.backward(
-                inputs=list(self.model.parameters()), retain_graph=True, create_graph=True
-            )
-
-        for name, param in self.model.named_parameters():
-            print(name)
-            print(".grad.shape: ", param.variance.shape)
-
-        dict_variance = OrderedDict(
-            [(name, weights.variance.clone())
-             for name, weights in self.model.named_parameters()
-             ])
-        return dict_variance
diff --git a/domainlab/algos/trainers/zoo_trainer.py b/domainlab/algos/trainers/zoo_trainer.py
index 5579f3e7e..069e1d3d4 100644
--- a/domainlab/algos/trainers/zoo_trainer.py
+++ b/domainlab/algos/trainers/zoo_trainer.py
@@ -5,11 +5,9 @@
 from domainlab.algos.trainers.train_dial import TrainerDIAL
 from domainlab.algos.trainers.train_matchdg import TrainerMatchDG
 from domainlab.algos.trainers.train_mldg import TrainerMLDG
-from domainlab.algos.trainers.train_fishr import TrainerFishr
 from domainlab.algos.trainers.train_hyper_scheduler import TrainerHyperScheduler
 
-
 class TrainerChainNodeGetter(object):
     """
     Chain of Responsibility: node is named in pattern Trainer[XXX] where the string
@@ -40,7 +38,6 @@ def __call__(self, lst_candidates=None, default=None, lst_excludes=None):
         chain = TrainerDIAL(chain)
         chain = TrainerMatchDG(chain)
         chain = TrainerMLDG(chain)
-        chain = TrainerFishr(chain)
         chain = TrainerHyperScheduler(chain)  # FIXME: change to warmup
         node = chain.handle(self.request)
         return node
diff --git a/domainlab/models/a_model.py b/domainlab/models/a_model.py
index 8a079a9a4..7625d246a 100644
--- a/domainlab/models/a_model.py
+++ b/domainlab/models/a_model.py
@@ -6,13 +6,6 @@
 from torch import nn
 
 
-try:
-    from backpack import extend
-except:
-    backpack = None
-
-
-
 class AModel(nn.Module, metaclass=abc.ABCMeta):
     """
@@ -65,4 +58,3 @@ def forward(self, tensor_x, tensor_y, tensor_d, others=None):
         :param d:
         """
         return self.cal_loss(tensor_x, tensor_y, tensor_d, others)
-
diff --git a/domainlab/models/a_model_classif.py b/domainlab/models/a_model_classif.py
index e3f2853e8..2e3d20b13 100644
--- a/domainlab/models/a_model_classif.py
+++ b/domainlab/models/a_model_classif.py
@@ -10,15 +10,6 @@
 from torch import nn as nn
 from torch.nn import functional as F
 
-
-try:
-    from backpack import backpack, extend
-    from backpack.extensions import BatchGrad, Variance
-except:
-    backpack = None
-
-
-
 from domainlab.models.a_model import AModel
 from domainlab.utils.utils_class import store_args
 from domainlab.utils.utils_classif import get_label_na, logit2preds_vpic
@@ -26,8 +17,6 @@
 from domainlab.utils.perf_metrics import PerfMetricClassif
 from domainlab.utils.logger import Logger
 
-loss_cross_entropy_extended = extend(nn.CrossEntropyLoss())
-
 
 class AModelClassif(AModel, metaclass=abc.ABCMeta):
     """
@@ -124,7 +113,7 @@ def cal_task_loss(self, tensor_x, tensor_y):
             y_target = tensor_y
         else:
             _, y_target = tensor_y.max(dim=1)
-        lc_y = loss_cross_entropy_extended(logit_y, y_target)
+        lc_y = F.cross_entropy(logit_y, y_target, reduction="none")
         # with reduction="none", cross entropy returns one loss per instance, no reduction here
         return lc_y
diff --git a/domainlab/models/model_deep_all.py b/domainlab/models/model_deep_all.py
index f8fdf8c95..85e713060 100644
--- a/domainlab/models/model_deep_all.py
+++ b/domainlab/models/model_deep_all.py
@@ -1,10 +1,3 @@
-
-try:
-    from backpack import extend
-except:
-    backpack = None
-
-
 from domainlab.models.a_model_classif import
AModelClassif from domainlab.utils.override_interface import override_interface @@ -59,9 +52,4 @@ def cal_loss(self, tensor_x, tensor_y, tensor_d, others=None): lc_y = self.cal_task_loss(tensor_x, tensor_y) return lc_y - def convert4backpack(self): - """ - convert the module to backpack for 2nd order gradients - """ - self.net = extend(self.net, use_converter=True) return ModelDeepAll diff --git a/domainlab/tasks/a_task.py b/domainlab/tasks/a_task.py index c750fa765..583a2c28a 100644 --- a/domainlab/tasks/a_task.py +++ b/domainlab/tasks/a_task.py @@ -24,7 +24,6 @@ def __init__(self, succ=None): self._args = None self.dict_dset_all = {} # persist self.dict_dset_tr = {} # versatile variable: which domains to use as training - self.dict_loader_tr = {} self.dict_dset_te = {} # versatile self.dict_dset_val = {} # versatile self.dict_domain_class_count = {} diff --git a/domainlab/tasks/b_task.py b/domainlab/tasks/b_task.py index 7b27c057a..160f09ed7 100644 --- a/domainlab/tasks/b_task.py +++ b/domainlab/tasks/b_task.py @@ -40,7 +40,6 @@ def init_business(self, args, node_algo_builder=None): ddset_tr = node_algo_builder.dset_decoration_args_algo(args, ddset_tr) ddset_val = node_algo_builder.dset_decoration_args_algo(args, ddset_val) self.dict_dset_tr.update({na_domain: ddset_tr}) - self.dict_loader_tr.update({na_domain: mk_loader(ddset_tr, args.bs)}) self.dict_dset_val.update({na_domain: ddset_val}) ddset_mix = ConcatDataset(tuple(self.dict_dset_tr.values())) self._loader_tr = mk_loader(ddset_mix, args.bs) diff --git a/poetry.lock b/poetry.lock index 535f2bdf6..86a04168e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -29,25 +29,6 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib- tests = ["attrs[tests-no-zope]", "zope-interface"] tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -[[package]] -name = "backpack-for-pytorch" -version = "1.6.0" -description = "BackPACK: Packing more into backprop" -category = "main" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -einops = ">=0.3.0,<1.0.0" -torch = ">=1.9.0" -torchvision = ">=0.7.0" -unfoldNd = ">=0.2.0,<1.0.0" - -[package.extras] -docs = ["matplotlib", "memory-profiler", "sphinx-gallery", "sphinx-rtd-theme", "tabulate"] -lint = ["black", "darglint", "flake8", "flake8-bugbear", "flake8-comprehensions", "flake8-tidy-imports", "isort", "mccabe", "pep8-naming", "pycodestyle", "pydocstyle", "pyflakes"] -test = ["coveralls", "pytest (>=4.5.0,<5.0.0)", "pytest-benchmark (>=3.2.2,<4.0.0)", "pytest-cov", "pytest-optional-tests (>=0.1.1)", "scipy"] - [[package]] name = "beautifulsoup4" version = "4.12.2" @@ -157,14 +138,6 @@ category = "main" optional = false python-versions = ">=3.7" -[[package]] -name = "einops" -version = "0.6.1" -description = "A new flavour of deep learning operations" -category = "main" -optional = false -python-versions = ">=3.7" - [[package]] name = "fastjsonschema" version = "2.18.0" @@ -190,7 +163,7 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p [[package]] name = "fonttools" -version = "4.42.1" +version = "4.42.0" description = "Tools to manipulate font files" category = "main" optional = false @@ -760,15 +733,15 @@ stats = ["scipy (>=1.3)", "statsmodels (>=0.10)"] [[package]] name = "setuptools" -version = "68.1.2" +version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = 
false -python-versions = ">=3.8" +python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -1020,22 +993,6 @@ category = "main" optional = false python-versions = ">=3.7" -[[package]] -name = "unfoldNd" -version = "0.2.0" -description = "N-dimensional unfold (im2col) and fold (col2im) in PyTorch" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -numpy = "*" -torch = "*" - -[package.extras] -lint = ["black", "darglint", "flake8", "flake8-bugbear", "flake8-comprehensions", "flake8-tidy-imports", "isort", "mccabe", "pep8-naming", "pycodestyle", "pydocstyle", "pyflakes"] -test = ["coveralls", "pytest", "pytest-cov", "pytest-optional-tests"] - [[package]] name = "urllib3" version = "2.0.4" @@ -1086,7 +1043,7 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "5877107da4ddaf7d716c60c46485d45678e306d5cf13d0f8c76e23a52f65f2e1" +content-hash = "e979b00df0bbb4b95dc24bc53376e7396b7a2f3ea7c1917edceb63dc1e69381c" [metadata.files] appdirs = [ @@ -1100,10 +1057,6 @@ attrs = [ {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] -backpack-for-pytorch = [ - {file = "backpack-for-pytorch-1.6.0.tar.gz", hash = "sha256:af6495b71bacf82a1c7cab01aa85bebabccfe74d87d89f108ea72a4a0d384de3"}, - {file = "backpack_for_pytorch-1.6.0-py3-none-any.whl", hash = "sha256:ac708dbb86dbb36f70fc81a1ccb1df5c7ba46d62bc9d10239d4b0e406ba41a6f"}, -] beautifulsoup4 = [ {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, {file = 
"beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, @@ -1280,10 +1233,6 @@ dpath = [ {file = "dpath-2.1.6-py3-none-any.whl", hash = "sha256:31407395b177ab63ef72e2f6ae268c15e938f2990a8ecf6510f5686c02b6db73"}, {file = "dpath-2.1.6.tar.gz", hash = "sha256:f1e07c72e8605c6a9e80b64bc8f42714de08a789c7de417e49c3f87a19692e47"}, ] -einops = [ - {file = "einops-0.6.1-py3-none-any.whl", hash = "sha256:99149e46cc808956b174932fe563d920db4d6e5dadb8c6ecdaa7483b7ef7cfc3"}, - {file = "einops-0.6.1.tar.gz", hash = "sha256:f95f8d00f4ded90dbc4b19b6f98b177332614b0357dde66997f3ae5d474dc8c8"}, -] fastjsonschema = [ {file = "fastjsonschema-2.18.0-py3-none-any.whl", hash = "sha256:128039912a11a807068a7c87d0da36660afbfd7202780db26c4aa7153cfdc799"}, {file = "fastjsonschema-2.18.0.tar.gz", hash = "sha256:e820349dd16f806e4bd1467a138dced9def4bc7d6213a34295272a6cac95b5bd"}, @@ -1293,40 +1242,40 @@ filelock = [ {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, ] fonttools = [ - {file = "fonttools-4.42.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ed1a13a27f59d1fc1920394a7f596792e9d546c9ca5a044419dca70c37815d7c"}, - {file = "fonttools-4.42.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c9b1ce7a45978b821a06d375b83763b27a3a5e8a2e4570b3065abad240a18760"}, - {file = "fonttools-4.42.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f720fa82a11c0f9042376fd509b5ed88dab7e3cd602eee63a1af08883b37342b"}, - {file = "fonttools-4.42.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db55cbaea02a20b49fefbd8e9d62bd481aaabe1f2301dabc575acc6b358874fa"}, - {file = "fonttools-4.42.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a35981d90feebeaef05e46e33e6b9e5b5e618504672ca9cd0ff96b171e4bfff"}, - {file = "fonttools-4.42.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:68a02bbe020dc22ee0540e040117535f06df9358106d3775e8817d826047f3fd"}, - {file = "fonttools-4.42.1-cp310-cp310-win32.whl", hash = "sha256:12a7c247d1b946829bfa2f331107a629ea77dc5391dfd34fdcd78efa61f354ca"}, - {file = "fonttools-4.42.1-cp310-cp310-win_amd64.whl", hash = "sha256:a398bdadb055f8de69f62b0fc70625f7cbdab436bbb31eef5816e28cab083ee8"}, - {file = "fonttools-4.42.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:689508b918332fb40ce117131633647731d098b1b10d092234aa959b4251add5"}, - {file = "fonttools-4.42.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e36344e48af3e3bde867a1ca54f97c308735dd8697005c2d24a86054a114a71"}, - {file = "fonttools-4.42.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19b7db825c8adee96fac0692e6e1ecd858cae9affb3b4812cdb9d934a898b29e"}, - {file = "fonttools-4.42.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:113337c2d29665839b7d90b39f99b3cac731f72a0eda9306165a305c7c31d341"}, - {file = "fonttools-4.42.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:37983b6bdab42c501202500a2be3a572f50d4efe3237e0686ee9d5f794d76b35"}, - {file = "fonttools-4.42.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6ed2662a3d9c832afa36405f8748c250be94ae5dfc5283d668308391f2102861"}, - {file = "fonttools-4.42.1-cp311-cp311-win32.whl", hash = "sha256:179737095eb98332a2744e8f12037b2977f22948cf23ff96656928923ddf560a"}, - {file = "fonttools-4.42.1-cp311-cp311-win_amd64.whl", hash = "sha256:f2b82f46917d8722e6b5eafeefb4fb585d23babd15d8246c664cd88a5bddd19c"}, - {file = 
"fonttools-4.42.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:62f481ac772fd68901573956231aea3e4b1ad87b9b1089a61613a91e2b50bb9b"}, - {file = "fonttools-4.42.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f2f806990160d1ce42d287aa419df3ffc42dfefe60d473695fb048355fe0c6a0"}, - {file = "fonttools-4.42.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db372213d39fa33af667c2aa586a0c1235e88e9c850f5dd5c8e1f17515861868"}, - {file = "fonttools-4.42.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d18fc642fd0ac29236ff88ecfccff229ec0386090a839dd3f1162e9a7944a40"}, - {file = "fonttools-4.42.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8708b98c278012ad267ee8a7433baeb809948855e81922878118464b274c909d"}, - {file = "fonttools-4.42.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c95b0724a6deea2c8c5d3222191783ced0a2f09bd6d33f93e563f6f1a4b3b3a4"}, - {file = "fonttools-4.42.1-cp38-cp38-win32.whl", hash = "sha256:4aa79366e442dbca6e2c8595645a3a605d9eeabdb7a094d745ed6106816bef5d"}, - {file = "fonttools-4.42.1-cp38-cp38-win_amd64.whl", hash = "sha256:acb47f6f8680de24c1ab65ebde39dd035768e2a9b571a07c7b8da95f6c8815fd"}, - {file = "fonttools-4.42.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb289b7a815638a7613d46bcf324c9106804725b2bb8ad913c12b6958ffc4ec"}, - {file = "fonttools-4.42.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:53eb5091ddc8b1199330bb7b4a8a2e7995ad5d43376cadce84523d8223ef3136"}, - {file = "fonttools-4.42.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46a0ec8adbc6ff13494eb0c9c2e643b6f009ce7320cf640de106fb614e4d4360"}, - {file = "fonttools-4.42.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cc7d685b8eeca7ae69dc6416833fbfea61660684b7089bca666067cb2937dcf"}, - {file = "fonttools-4.42.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:be24fcb80493b2c94eae21df70017351851652a37de514de553435b256b2f249"}, - {file = "fonttools-4.42.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:515607ec756d7865f23070682622c49d922901943697871fc292277cf1e71967"}, - {file = "fonttools-4.42.1-cp39-cp39-win32.whl", hash = "sha256:0eb79a2da5eb6457a6f8ab904838454accc7d4cccdaff1fd2bd3a0679ea33d64"}, - {file = "fonttools-4.42.1-cp39-cp39-win_amd64.whl", hash = "sha256:7286aed4ea271df9eab8d7a9b29e507094b51397812f7ce051ecd77915a6e26b"}, - {file = "fonttools-4.42.1-py3-none-any.whl", hash = "sha256:9398f244e28e0596e2ee6024f808b06060109e33ed38dcc9bded452fd9bbb853"}, - {file = "fonttools-4.42.1.tar.gz", hash = "sha256:c391cd5af88aacaf41dd7cfb96eeedfad297b5899a39e12f4c2c3706d0a3329d"}, + {file = "fonttools-4.42.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9c456d1f23deff64ffc8b5b098718e149279abdea4d8692dba69172fb6a0d597"}, + {file = "fonttools-4.42.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:150122ed93127a26bc3670ebab7e2add1e0983d30927733aec327ebf4255b072"}, + {file = "fonttools-4.42.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48e82d776d2e93f88ca56567509d102266e7ab2fb707a0326f032fe657335238"}, + {file = "fonttools-4.42.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58c1165f9b2662645de9b19a8c8bdd636b36294ccc07e1b0163856b74f10bafc"}, + {file = "fonttools-4.42.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2d6dc3fa91414ff4daa195c05f946e6a575bd214821e26d17ca50f74b35b0fe4"}, + {file = "fonttools-4.42.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:fae4e801b774cc62cecf4a57b1eae4097903fced00c608d9e2bc8f84cd87b54a"}, + {file = "fonttools-4.42.0-cp310-cp310-win32.whl", hash = "sha256:b8600ae7dce6ec3ddfb201abb98c9d53abbf8064d7ac0c8a0d8925e722ccf2a0"}, + {file = "fonttools-4.42.0-cp310-cp310-win_amd64.whl", hash = "sha256:57b68eab183fafac7cd7d464a7bfa0fcd4edf6c67837d14fb09c1c20516cf20b"}, + {file = "fonttools-4.42.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0a1466713e54bdbf5521f2f73eebfe727a528905ff5ec63cda40961b4b1eea95"}, + {file = "fonttools-4.42.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3fb2a69870bfe143ec20b039a1c8009e149dd7780dd89554cc8a11f79e5de86b"}, + {file = "fonttools-4.42.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae881e484702efdb6cf756462622de81d4414c454edfd950b137e9a7352b3cb9"}, + {file = "fonttools-4.42.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27ec3246a088555629f9f0902f7412220c67340553ca91eb540cf247aacb1983"}, + {file = "fonttools-4.42.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ece1886d12bb36c48c00b2031518877f41abae317e3a55620d38e307d799b7e"}, + {file = "fonttools-4.42.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:10dac980f2b975ef74532e2a94bb00e97a95b4595fb7f98db493c474d5f54d0e"}, + {file = "fonttools-4.42.0-cp311-cp311-win32.whl", hash = "sha256:83b98be5d291e08501bd4fc0c4e0f8e6e05b99f3924068b17c5c9972af6fff84"}, + {file = "fonttools-4.42.0-cp311-cp311-win_amd64.whl", hash = "sha256:e35bed436726194c5e6e094fdfb423fb7afaa0211199f9d245e59e11118c576c"}, + {file = "fonttools-4.42.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c36c904ce0322df01e590ba814d5d69e084e985d7e4c2869378671d79662a7d4"}, + {file = "fonttools-4.42.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d54e600a2bcfa5cdaa860237765c01804a03b08404d6affcd92942fa7315ffba"}, + {file = "fonttools-4.42.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01cfe02416b6d416c5c8d15e30315cbcd3e97d1b50d3b34b0ce59f742ef55258"}, + {file = "fonttools-4.42.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f81ed9065b4bd3f4f3ce8e4873cd6a6b3f4e92b1eddefde35d332c6f414acc3"}, + {file = "fonttools-4.42.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:685a4dd6cf31593b50d6d441feb7781a4a7ef61e19551463e14ed7c527b86f9f"}, + {file = "fonttools-4.42.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:329341ba3d86a36e482610db56b30705384cb23bd595eac8cbb045f627778e9d"}, + {file = "fonttools-4.42.0-cp38-cp38-win32.whl", hash = "sha256:4655c480a1a4d706152ff54f20e20cf7609084016f1df3851cce67cef768f40a"}, + {file = "fonttools-4.42.0-cp38-cp38-win_amd64.whl", hash = "sha256:6bd7e4777bff1dcb7c4eff4786998422770f3bfbef8be401c5332895517ba3fa"}, + {file = "fonttools-4.42.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9b55d2a3b360e0c7fc5bd8badf1503ca1c11dd3a1cd20f2c26787ffa145a9c7"}, + {file = "fonttools-4.42.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0df8ef75ba5791e873c9eac2262196497525e3f07699a2576d3ab9ddf41cb619"}, + {file = "fonttools-4.42.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd2363ea7728496827658682d049ffb2e98525e2247ca64554864a8cc945568"}, + {file = "fonttools-4.42.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d40673b2e927f7cd0819c6f04489dfbeb337b4a7b10fc633c89bf4f34ecb9620"}, + {file = "fonttools-4.42.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:c8bf88f9e3ce347c716921804ef3a8330cb128284eb6c0b6c4b3574f3c580023"}, + {file = "fonttools-4.42.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:703101eb0490fae32baf385385d47787b73d9ea55253df43b487c89ec767e0d7"}, + {file = "fonttools-4.42.0-cp39-cp39-win32.whl", hash = "sha256:f0290ea7f9945174bd4dfd66e96149037441eb2008f3649094f056201d99e293"}, + {file = "fonttools-4.42.0-cp39-cp39-win_amd64.whl", hash = "sha256:ae7df0ae9ee2f3f7676b0ff6f4ebe48ad0acaeeeaa0b6839d15dbf0709f2c5ef"}, + {file = "fonttools-4.42.0-py3-none-any.whl", hash = "sha256:dfe7fa7e607f7e8b58d0c32501a3a7cac148538300626d1b930082c90ae7f6bd"}, + {file = "fonttools-4.42.0.tar.gz", hash = "sha256:614b1283dca88effd20ee48160518e6de275ce9b5456a3134d5f235523fc5065"}, ] gdown = [ {file = "gdown-4.7.1-py3-none-any.whl", hash = "sha256:65d495699e7c2c61af0d0e9c32748fb4f79abaf80d747a87456c7be14aac2560"}, @@ -1970,8 +1919,8 @@ seaborn = [ {file = "seaborn-0.12.2.tar.gz", hash = "sha256:374645f36509d0dcab895cba5b47daf0586f77bfe3b36c97c607db7da5be0139"}, ] setuptools = [ - {file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"}, - {file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"}, + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, ] setuptools-scm = [ {file = "setuptools_scm-7.1.0-py3-none-any.whl", hash = "sha256:73988b6d848709e2af142aa48c986ea29592bbcfca5375678064708205253d8e"}, @@ -2083,10 +2032,6 @@ typing-extensions = [ {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] -unfoldNd = [ - {file = "unfoldNd-0.2.0-py3-none-any.whl", hash = "sha256:2d4b5efa1dcd7aad3d9a1d9408fbfcbd125d106b10dc1c54c72f37bc53ea1416"}, - {file = "unfoldNd-0.2.0.tar.gz", hash = "sha256:3e159de7860c233399a777bc90a18df5d4b406349836f15fc3a077d5836ebd0a"}, -] urllib3 = [ {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, diff --git a/pyproject.toml b/pyproject.toml index e1e6824a2..30419abf9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,6 @@ pandas = "^1.5.1" scikit-learn = "^1.2.1" pyyaml = "^6.0" gdown = "^4.7.1" -backpack-for-pytorch = "^1.6.0" [tool.poetry.dev-dependencies] diff --git a/requirements.txt b/requirements.txt index f38f9e657..58baf5fec 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,5 @@ appdirs==1.4.4 ; python_version >= "3.9" and python_version < "4.0" attrs==23.1.0 ; python_version >= "3.9" and python_version < "4.0" -backpack-for-pytorch==1.6.0 ; python_version >= "3.9" and python_version < "4.0" beautifulsoup4==4.12.2 ; python_version >= "3.9" and python_version < "4.0" certifi==2023.7.22 ; python_version >= "3.9" and python_version < "4.0" charset-normalizer==3.2.0 ; python_version >= "3.9" and python_version < "4.0" @@ -12,10 +11,9 @@ cycler==0.11.0 ; python_version >= "3.9" and python_version < "4.0" datrie==0.8.2 ; python_version >= "3.9" and python_version < "4.0" 
 docutils==0.20.1 ; python_version >= "3.9" and python_version < "4.0"
 dpath==2.1.6 ; python_version >= "3.9" and python_version < "4.0"
-einops==0.6.1 ; python_version >= "3.9" and python_version < "4.0"
 fastjsonschema==2.18.0 ; python_version >= "3.9" and python_version < "4.0"
 filelock==3.12.2 ; python_version >= "3.9" and python_version < "4.0"
-fonttools==4.42.1 ; python_version >= "3.9" and python_version < "4.0"
+fonttools==4.42.0 ; python_version >= "3.9" and python_version < "4.0"
 gdown==4.7.1 ; python_version >= "3.9" and python_version < "4.0"
 gitdb==4.0.10 ; python_version >= "3.9" and python_version < "4.0"
 gitpython==3.1.32 ; python_version >= "3.9" and python_version < "4.0"
@@ -59,7 +57,7 @@ scikit-learn==1.3.0 ; python_version >= "3.9" and python_version < "4.0"
 scipy==1.9.3 ; python_version >= "3.9" and python_version < "4.0"
 seaborn==0.12.2 ; python_version >= "3.9" and python_version < "4.0"
 setuptools-scm==7.1.0 ; python_version >= "3.9" and python_version < "4.0"
-setuptools==68.1.2 ; python_version >= "3.9" and python_version < "4.0"
+setuptools==68.0.0 ; python_version >= "3.9" and python_version < "4.0"
 six==1.16.0 ; python_version >= "3.9" and python_version < "4.0"
 smart-open==6.3.0 ; python_version >= "3.9" and python_version < "4.0"
 smmap==5.0.0 ; python_version >= "3.9" and python_version < "4.0"
@@ -77,7 +75,6 @@ torchvision==0.13.1 ; python_version >= "3.9" and python_version < "4.0"
 tqdm==4.66.1 ; python_version >= "3.9" and python_version < "4.0"
 traitlets==5.9.0 ; python_version >= "3.9" and python_version < "4.0"
 typing-extensions==4.7.1 ; python_version >= "3.9" and python_version < "4.0"
-unfoldnd==0.2.0 ; python_version >= "3.9" and python_version < "4.0"
 urllib3==2.0.4 ; python_version >= "3.9" and python_version < "4.0"
 wrapt==1.15.0 ; python_version >= "3.9" and python_version < "4.0"
 yte==1.5.1 ; python_version >= "3.9" and python_version < "4.0"
diff --git a/test_fishr.sh b/test_fishr.sh
deleted file mode 100644
index 72bf16311..000000000
--- a/test_fishr.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-export CUDA_VISIBLE_DEVICES=""
-# although the garbage collector has been called explicitly, a CUDA out-of-memory error can still occur,
-# so it is better to run this pytest on CPU to make sure no CUDA out-of-memory error occurs
-pytest tests/test_fishr.py
diff --git a/tests/test_fishr.py b/tests/test_fishr.py
deleted file mode 100644
index 276058b5e..000000000
--- a/tests/test_fishr.py
+++ /dev/null
@@ -1,12 +0,0 @@
-"""
-unit and end-to-end test for deepall with the fishr trainer
-"""
-from tests.utils_test import utils_test_algo
-
-
-def test_deepall_fishr():
-    """
-    train DeepAll with the Fishr trainer
-    """
-    args = "--te_d=caltech --task=mini_vlcs --debug --bs=2 --aname=deepall --trainer=fishr --nname=alexnet"
-    utils_test_algo(args)
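
For context on PATCH 1/2: SLURM exports the SLURM_JOB_ID environment variable inside a job
allocation, so the suffix is only appended on cluster runs and local results keep their old
names. A minimal sketch of the resulting naming behavior (the model name and job id below are
made up for illustration):

    import os

    # outside a SLURM allocation, SLURM_JOB_ID is unset and the name stays unchanged
    model_name = "mname_caltech_deepall"    # hypothetical name built by mk_model_na
    os.environ["SLURM_JOB_ID"] = "4711"     # normally set by SLURM; faked here for the demo
    slurm = os.environ.get('SLURM_JOB_ID')
    if slurm:
        model_name = model_name + '_' + slurm
    assert model_name == "mname_caltech_deepall_4711"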
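
For context on PATCH 2/2: the reverted TrainerFishr penalized, per layer, the variance across
domains of the per-domain gradient variances, using the identity Var(v) = E[v^2] - E[v]^2 from
variance_between_dict. A minimal torch-only sketch of that penalty (fishr_penalty is a
hypothetical helper, not part of the codebase; it consumes per-domain variance dictionaries
such as those built by cal_dict_variance_grads):

    from collections import OrderedDict

    import torch


    def fishr_penalty(list_dict_var_grads):
        """list_dict_var_grads holds one dict per domain, mapping a parameter name
        to the variance tensor of the loss gradient within that domain's minibatch."""
        penalty = torch.tensor(0.0)
        for key in list_dict_var_grads[0]:
            # stack the per-domain variances: shape (n_domains, *param_shape)
            stacked = torch.stack([dic[key] for dic in list_dict_var_grads])
            # Var(v) = E[v^2] - E[v]^2, element-wise across the domain dimension
            var_across_domains = (stacked ** 2).mean(dim=0) - stacked.mean(dim=0) ** 2
            penalty = penalty + var_across_domains.sum()
        return penalty


    # toy usage: two domains, one fake convolution kernel of shape (8, 3, 5, 5)
    dict_dom1 = OrderedDict([("conv1.weight", torch.rand(8, 3, 5, 5))])
    dict_dom2 = OrderedDict([("conv1.weight", torch.rand(8, 3, 5, 5))])
    print(fishr_penalty([dict_dom1, dict_dom2]))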