Skip to content

Commit

Permalink
Test memory usage via the size of the exp_avg optimizer state instead of the profiler
Browse files Browse the repository at this point in the history
  • Loading branch information
youssef62 committed Dec 9, 2024
1 parent 57292cc commit ec9cfee
Showing 1 changed file with 9 additions and 24 deletions.
33 changes: 9 additions & 24 deletions test/test_optim.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
register_optimizer_step_post_hook,
register_optimizer_step_pre_hook,
)
from torch.profiler import profile, ProfilerActivity
from torch.testing._internal.common_cuda import TEST_MULTIGPU
from torch.testing._internal.common_device_type import (
instantiate_device_type_tests,
Expand Down Expand Up @@ -2182,32 +2181,18 @@ def test_less_mem_beta1_zero_adam(self, device, dtype, optim_info):
"betas" in optim_input.kwargs and optim_input.kwargs["betas"][0] == 0.0
)
if beta1_zero or optim_input.desc == "default":
activities = (
[ProfilerActivity.CUDA]
if device == "cuda"
else [ProfilerActivity.CPU]
)

with profile(
activities=activities, profile_memory=True, record_shapes=True
) as prof:
optim = optim_info.optim_cls(
model.parameters(), **optim_input.kwargs
)
optim.zero_grad()
output = model(inpt)
loss = output.sum()
loss.backward()
optim = optim_info.optim_cls(model.parameters(), **optim_input.kwargs)
optim.zero_grad()
output = model(inpt)
loss = output.sum()
loss.backward()

optim.step()
optim.step()

case_to_mem_usage["beta1-zero" if beta1_zero else "default"] = sum(
[
item.cuda_memory_usage
if device == "cuda"
else item.cpu_memory_usage
for item in prof.key_averages()
]
optim.state[p]["exp_avg"].element_size()
* optim.state[p]["exp_avg"].numel()
for p in model.parameters()
)

self.assertGreater(
Expand Down

0 comments on commit ec9cfee

Please sign in to comment.