print NaN loss when labeled data is not found #3047

Merged (2 commits) on Dec 12, 2023. The diff below shows changes from 1 commit.
deepmd/loss/dos.py: 8 additions & 4 deletions
@@ -143,16 +143,20 @@
         more_loss = {}
         if self.has_dos:
             l2_loss += atom_norm_ener * (pref_dos * l2_dos_loss)
-            more_loss["l2_dos_loss"] = l2_dos_loss
+            more_loss["l2_dos_loss"] = self.display_if_exist(l2_dos_loss, find_dos)
         if self.has_cdf:
             l2_loss += atom_norm_ener * (pref_cdf * l2_cdf_loss)
-            more_loss["l2_cdf_loss"] = l2_cdf_loss
+            more_loss["l2_cdf_loss"] = self.display_if_exist(l2_cdf_loss, find_dos)
         if self.has_ados:
             l2_loss += global_cvt_2_ener_float(pref_ados * l2_atom_dos_loss)
-            more_loss["l2_atom_dos_loss"] = l2_atom_dos_loss
+            more_loss["l2_atom_dos_loss"] = self.display_if_exist(
+                l2_atom_dos_loss, find_atom_dos
+            )
         if self.has_acdf:
             l2_loss += global_cvt_2_ener_float(pref_acdf * l2_atom_cdf_loss)
-            more_loss["l2_atom_cdf_loss"] = l2_atom_cdf_loss
+            more_loss["l2_atom_cdf_loss"] = self.display_if_exist(
+                l2_atom_cdf_loss, find_atom_dos
+            )

         # only used when tensorboard was set as true
         self.l2_loss_summary = tf.summary.scalar("l2_loss_" + suffix, tf.sqrt(l2_loss))
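Each displayed loss term above is now routed through display_if_exist together with the matching find_* flag (find_dos, find_atom_dos), so a term whose labels are absent from the training data shows up as NaN instead of a misleading zero. A minimal sketch of the convention, assuming the find_* values are 0.0/1.0 floats carried in the batch's label dict; the label_dict contents here are hypothetical:

# Hedged illustration, not verbatim deepmd-kit code: the find_* flags are
# assumed to be 0.0/1.0 floats in the batch's label dict.
label_dict = {"find_dos": 0.0, "find_atom_dos": 1.0}  # hypothetical batch

find_dos = label_dict["find_dos"]            # 0.0: no dos labels in this batch
find_atom_dos = label_dict["find_atom_dos"]  # 1.0: atomic dos labels present

# display_if_exist(l2_dos_loss, find_dos) would then display NaN, while
# display_if_exist(l2_atom_dos_loss, find_atom_dos) displays the real value.
print(find_dos, find_atom_dos)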
deepmd/loss/ener.py: 31 additions & 13 deletions
@@ -291,22 +291,32 @@
         more_loss = {}
         if self.has_e:
             l2_loss += atom_norm_ener * (pref_e * l2_ener_loss)
-            more_loss["l2_ener_loss"] = l2_ener_loss
+            more_loss["l2_ener_loss"] = self.display_if_exist(l2_ener_loss, find_energy)
         if self.has_f:
             l2_loss += global_cvt_2_ener_float(pref_f * l2_force_loss)
-            more_loss["l2_force_loss"] = l2_force_loss
+            more_loss["l2_force_loss"] = self.display_if_exist(
+                l2_force_loss, find_force
+            )
         if self.has_v:
             l2_loss += global_cvt_2_ener_float(atom_norm * (pref_v * l2_virial_loss))
-            more_loss["l2_virial_loss"] = l2_virial_loss
+            more_loss["l2_virial_loss"] = self.display_if_exist(
+                l2_virial_loss, find_virial
+            )
         if self.has_ae:
             l2_loss += global_cvt_2_ener_float(pref_ae * l2_atom_ener_loss)
-            more_loss["l2_atom_ener_loss"] = l2_atom_ener_loss
+            more_loss["l2_atom_ener_loss"] = self.display_if_exist(
+                l2_atom_ener_loss, find_atom_ener
+            )
         if self.has_pf:
             l2_loss += global_cvt_2_ener_float(pref_pf * l2_pref_force_loss)
-            more_loss["l2_pref_force_loss"] = l2_pref_force_loss
+            more_loss["l2_pref_force_loss"] = self.display_if_exist(
+                l2_pref_force_loss, find_atom_pref
+            )
         if self.has_gf:
             l2_loss += global_cvt_2_ener_float(pref_gf * l2_gen_force_loss)
-            more_loss["l2_gen_force_loss"] = l2_gen_force_loss
+            more_loss["l2_gen_force_loss"] = self.display_if_exist(
+                l2_gen_force_loss, find_drdq
+            )

         # only used when tensorboard was set as true
         self.l2_loss_summary = tf.summary.scalar("l2_loss_" + suffix, tf.sqrt(l2_loss))
@@ -553,19 +563,25 @@
         more_loss = {}
         if self.has_e:
             l2_loss += atom_norm_ener * (pref_e * l2_ener_loss)
-            more_loss["l2_ener_loss"] = l2_ener_loss
+            more_loss["l2_ener_loss"] = self.display_if_exist(l2_ener_loss, find_energy)
         if self.has_fr:
             l2_loss += global_cvt_2_ener_float(pref_fr * l2_force_r_loss)
-            more_loss["l2_force_r_loss"] = l2_force_r_loss
+            more_loss["l2_force_r_loss"] = self.display_if_exist(
+                l2_force_r_loss, find_force
+            )
         if self.has_fm:
             l2_loss += global_cvt_2_ener_float(pref_fm * l2_force_m_loss)
-            more_loss["l2_force_m_loss"] = l2_force_m_loss
+            more_loss["l2_force_m_loss"] = self.display_if_exist(
+                l2_force_m_loss, find_force
+            )
         if self.has_v:
             l2_loss += global_cvt_2_ener_float(atom_norm * (pref_v * l2_virial_loss))
-            more_loss["l2_virial_loss"] = l2_virial_loss
+            more_loss["l2_virial_loss"] = self.display_if_exist(l2_virial_loss, find_virial)
         if self.has_ae:
             l2_loss += global_cvt_2_ener_float(pref_ae * l2_atom_ener_loss)
-            more_loss["l2_atom_ener_loss"] = l2_atom_ener_loss
+            more_loss["l2_atom_ener_loss"] = self.display_if_exist(
+                l2_atom_ener_loss, find_atom_ener
+            )

         # only used when tensorboard was set as true
         self.l2_loss_summary = tf.summary.scalar("l2_loss", tf.sqrt(l2_loss))
@@ -785,8 +801,10 @@
         more_loss = {}
         l2_loss += atom_norm_ener * (pref_e * l2_ener_loss)
         l2_loss += global_cvt_2_ener_float(pref_ed * l2_ener_dipole_loss)
-        more_loss["l2_ener_loss"] = l2_ener_loss
-        more_loss["l2_ener_dipole_loss"] = l2_ener_dipole_loss
+        more_loss["l2_ener_loss"] = self.display_if_exist(l2_ener_loss, find_energy)
+        more_loss["l2_ener_dipole_loss"] = self.display_if_exist(
+            l2_ener_dipole_loss, find_ener_dipole
+        )

         self.l2_loss_summary = tf.summary.scalar("l2_loss_" + suffix, tf.sqrt(l2_loss))
         self.l2_loss_ener_summary = tf.summary.scalar(
deepmd/loss/loss.py: 19 additions & 0 deletions
@@ -8,6 +8,8 @@
     Tuple,
 )

+import numpy as np
+
 from deepmd.env import (
     tf,
 )
@@ -72,3 +74,20 @@ def eval(
         A dictionary that maps keys to values. It
         should contain key `natoms`
         """
+
+    @staticmethod
+    def display_if_exist(loss: tf.Tensor, find_property: float) -> tf.Tensor:
+        """Display NaN if labeled property is not found.
+
+        Parameters
+        ----------
+        loss : tf.Tensor
+            the loss tensor
+        find_property : float
+            whether the property is found
+        """
+        return tf.cond(
+            tf.cast(find_property, tf.bool),
+            lambda: loss,
+            lambda: tf.cast(np.nan, dtype=loss.dtype),
+        )
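To see the new helper's behavior in isolation, here is a short runnable sketch; it re-declares display_if_exist as a free function and assumes TensorFlow 2 eager mode for illustration (deepmd-kit itself gets tf from deepmd.env):

import numpy as np
import tensorflow as tf

def display_if_exist(loss, find_property):
    # Return the loss unchanged when the property was found (flag != 0);
    # otherwise return NaN so the printed loss cannot be mistaken for 0.
    return tf.cond(
        tf.cast(find_property, tf.bool),
        lambda: loss,
        lambda: tf.cast(np.nan, dtype=loss.dtype),
    )

loss = tf.constant(0.25, dtype=tf.float64)
print(display_if_exist(loss, 1.0).numpy())  # 0.25: label was found
print(display_if_exist(loss, 0.0).numpy())  # nan: label missing

Note that tf.cond requires both branches to agree in dtype and shape: the NaN branch is cast to loss.dtype, so the helper works for float32 and float64 losses alike.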
deepmd/loss/tensor.py: 2 additions & 2 deletions
@@ -87,7 +87,7 @@
             local_loss = global_cvt_2_tf_float(find_atomic) * tf.reduce_mean(
                 tf.square(self.scale * (polar - atomic_polar_hat)), name="l2_" + suffix
             )
-            more_loss["local_loss"] = local_loss
+            more_loss["local_loss"] = self.display_if_exist(local_loss, find_atomic)
             l2_loss += self.local_weight * local_loss
             self.l2_loss_local_summary = tf.summary.scalar(
                 "l2_local_loss_" + suffix, tf.sqrt(more_loss["local_loss"])
@@ -118,7 +118,7 @@
                 tf.square(self.scale * (global_polar - polar_hat)), name="l2_" + suffix
             )

-            more_loss["global_loss"] = global_loss
+            more_loss["global_loss"] = self.display_if_exist(global_loss, find_global)
             self.l2_loss_global_summary = tf.summary.scalar(
                 "l2_global_loss_" + suffix,
                 tf.sqrt(more_loss["global_loss"]) / global_cvt_2_tf_float(atoms),
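A side effect worth noting: the tensorboard summaries above take tf.sqrt of the stored more_loss entries, and sqrt propagates NaN, so a property whose labels were missing is reported as NaN in tensorboard as well, rather than as a spurious zero. A trivial numpy check of that propagation:

import numpy as np

# sqrt propagates NaN: tf.sqrt(more_loss[...]) stays NaN when the label
# was absent, instead of silently reporting 0.
print(np.sqrt(np.nan))  # nan
print(np.sqrt(0.25))    # 0.5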