Commit
[tests] adjust tolerance of adjoint tests
Zymrael authored Aug 24, 2020
1 parent a5cd555 · commit 145a2bf
Showing 1 changed file with 2 additions and 2 deletions.

test/test_adjoint.py (4 changes: 2 additions & 2 deletions)
@@ -36,7 +36,7 @@ def test_adjoint_autograd():
loss = nn.CrossEntropyLoss()(y_hat, y)
loss.backward()
bp_grad = torch.cat([p.grad.flatten() for p in model.parameters()])
-    assert (torch.abs(bp_grad - adj_grad) <= 1e-4).all(), f'Gradient error: {torch.abs(bp_grad - adj_grad).sum()}'
+    assert (torch.abs(bp_grad - adj_grad) <= 1e-3).all(), f'Gradient error: {torch.abs(bp_grad - adj_grad).sum()}'



@@ -74,4 +74,4 @@ def test_integral_adjoint_integral_autograd():
loss = loss.backward()
g_adjoint= deepcopy(x.grad)

-    assert torch.abs(g_autograd - g_adjoint).norm(dim=1, p=2).mean() < 1e-3
+    assert torch.abs(g_autograd - g_adjoint).norm(dim=1, p=2).mean() < 1e-3
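
For context, both hunks relax a tolerance in a check that compares gradients computed via the adjoint method against gradients from standard autograd. Below is a minimal, self-contained sketch of the two comparison styles seen in the assertions above (elementwise absolute difference, and mean row-wise L2 norm), using only PyTorch and hypothetical stand-in tensors rather than the repository's models:

    # Sketch only: bp_grad/adj_grad and g_autograd/g_adjoint are illustrative
    # stand-ins for backprop and adjoint gradients, not the repo's actual tests.
    import torch

    tol = 1e-3  # the loosened tolerance from this commit

    # Style 1: every element of the gradient must agree within the tolerance.
    bp_grad = torch.randn(100)
    adj_grad = bp_grad + 5e-4  # small, deterministic numerical discrepancy
    assert (torch.abs(bp_grad - adj_grad) <= tol).all(), \
        f'Gradient error: {torch.abs(bp_grad - adj_grad).sum()}'

    # Style 2: the mean L2 norm of the per-row difference must stay below the tolerance.
    g_autograd = torch.randn(8, 16)
    g_adjoint = g_autograd + 1e-4
    assert torch.abs(g_autograd - g_adjoint).norm(dim=1, p=2).mean() < tol

The elementwise check is the stricter of the two, since a single outlier coordinate fails it, whereas the norm-based check averages discrepancies across rows; loosening both from 1e-4 to 1e-3 reduces flakiness from solver and floating-point noise.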
