Traceback (most recent call last):
  File "src/sparseml/transformers/question_answering.py", line 945, in <module>
    main()
  File "/venv/lib/python3.8/site-packages/torch/distributed/elastic/multiprocessing/errors/__init__.py", line 346, in wrapper
    return f(*args, **kwargs)
  File "src/sparseml/transformers/question_answering.py", line 518, in main
    train_result = trainer.train(resume_from_checkpoint=checkpoint)
  File "/host/code/sparseml/src/sparseml/transformers/sparsification/trainer.py", line 793, in train
    output = super().train(*args, **kwargs)
  File "/venv/lib/python3.8/site-packages/transformers/trainer.py", line 1591, in train
    return inner_training_loop(
  File "/venv/lib/python3.8/site-packages/transformers/trainer.py", line 1971, in _inner_training_loop
    self.optimizer.step()
  File "/venv/lib/python3.8/site-packages/accelerate/optimizer.py", line 159, in step
    self.scaler.step(self.optimizer, closure)
  File "/venv/lib/python3.8/site-packages/torch/cuda/amp/grad_scaler.py", line 416, in step
    retval = self._maybe_opt_step(optimizer, optimizer_state, *args, **kwargs)
  File "/venv/lib/python3.8/site-packages/torch/cuda/amp/grad_scaler.py", line 315, in _maybe_opt_step
    retval = optimizer.step(*args, **kwargs)
  File "/venv/lib/python3.8/site-packages/accelerate/optimizer.py", line 214, in patched_step
    return method(*args, **kwargs)
  File "/venv/lib/python3.8/site-packages/torch/optim/lr_scheduler.py", line 68, in wrapper
    return wrapped(*args, **kwargs)
  File "/host/code/sparseml/src/sparseml/pytorch/optim/optimizer.py", line 165, in step
    self._wrapper.step(closure)
  File "/host/code/sparseml/src/sparseml/pytorch/optim/manager.py", line 173, in step
    return self._perform_wrapped_step(*args, **kwargs)
  File "/host/code/sparseml/src/sparseml/pytorch/optim/manager.py", line 210, in _perform_wrapped_step
    self._wrapped_manager.update(
  File "/host/code/sparseml/src/sparseml/pytorch/optim/manager.py", line 591, in update
    mod.scheduled_update(module, optimizer, epoch, steps_per_epoch)
  File "/host/code/sparseml/src/sparseml/pytorch/sparsification/modifier.py", line 624, in scheduled_update
    self.update(module, optimizer, epoch=epoch, steps_per_epoch=steps_per_epoch)
  File "/host/code/sparseml/src/sparseml/pytorch/sparsification/modifier.py", line 454, in wrapper
    out = func(*args, **kwargs)
  File "/host/code/sparseml/src/sparseml/pytorch/sparsification/pruning/modifier_pruning_base.py", line 354, in update
    self.check_mask_update(module, epoch, steps_per_epoch)
  File "/host/code/sparseml/src/sparseml/pytorch/sparsification/pruning/modifier_pruning_obs.py", line 277, in check_mask_update
    self._collect_grad_samples(module, self._grad_sampler)
  File "/host/code/sparseml/src/sparseml/pytorch/sparsification/pruning/modifier_pruning_obs.py", line 340, in _collect_grad_samples
    for _ in grad_sampler.iter_module_backwards(module, self._num_grads):
  File "/host/code/sparseml/src/sparseml/pytorch/utils/sparsification.py", line 246, in iter_module_backwards
    for forward_args, forward_kwargs, loss_target in data_loader:
  File "/host/code/sparseml/src/sparseml/transformers/sparsification/trainer.py", line 661, in _data_loader_builder
    data_loader = type(default_loader)(**template)
TypeError: __init__() missing 1 required positional argument: 'dataset'
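For context on where this blows up: the last frame rebuilds the training dataloader from a kwargs template via `type(default_loader)(**template)`. A minimal sketch of that failure mode, assuming the template simply no longer carries the `dataset` argument (the toy dataset and template below are illustrative, not SparseML's actual code):

```python
from torch.utils.data import DataLoader, Dataset

class ToyDataset(Dataset):  # hypothetical stand-in dataset
    def __len__(self):
        return 4
    def __getitem__(self, idx):
        return idx

loader = DataLoader(ToyDataset(), batch_size=2)

# Rebuild the loader from a kwargs template, the same pattern as
# trainer.py line 661: data_loader = type(default_loader)(**template).
# If the template lacks `dataset`, DataLoader.__init__ fails with the
# exact error in the traceback above.
template = {"batch_size": loader.batch_size, "num_workers": loader.num_workers}
rebuilt = type(loader)(**template)
# TypeError: __init__() missing 1 required positional argument: 'dataset'
```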
How can this error be resolved?
sparseml: 1.7.0
transformers: 4.34.1
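If the root cause is just the missing `dataset` key (an assumption read off the traceback, not a confirmed diagnosis), one possible local workaround is to put it back before the loader class is re-instantiated; `default_loader` and `template` here refer to the locals in `_data_loader_builder`:

```python
# Hypothetical patch near trainer.py line 661 (unverified assumption):
# carry the required `dataset` argument into the rebuilt loader's kwargs.
template.setdefault("dataset", default_loader.dataset)
data_loader = type(default_loader)(**template)
```

Note that the accelerate frames in the traceback suggest transformers 4.34 hands the trainer an accelerate-wrapped loader rather than a plain `torch.utils.data.DataLoader`, so `type(default_loader)` may not be the class SparseML expects to reconstruct; checking which transformers versions SparseML 1.7.0 officially supports would also be worth doing.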