
Commit 95104f9: ci imports fix
Signed-off-by: dimapihtar <[email protected]>
dimapihtar committed Jan 30, 2024
Parent: 608197b
Showing 2 changed files with 6 additions and 13 deletions.
nemo/collections/nlp/parts/nlp_overrides.py (2 changes: 1 addition & 1 deletion)

@@ -60,14 +60,14 @@
 from nemo.collections.nlp.parts import utils_funcs
 from nemo.core.connectors.save_restore_connector import SaveRestoreConnector
 from nemo.core.optim import MainParamsOptimizerWrapper
-from nemo.core.optim.distributed_adam import MegatronDistributedFusedAdam
 from nemo.core.optim.optimizers import init_optimizer_states
 from nemo.utils import AppState, logging
 from nemo.utils.get_rank import is_global_rank_zero
 from nemo.utils.model_utils import ckpt_to_dir, inject_model_parallel_rank, uninject_model_parallel_rank

 try:
     from apex.transformer.pipeline_parallel.utils import get_num_microbatches
+    from nemo.core.optim.distributed_adam import MegatronDistributedFusedAdam

     HAVE_APEX = True
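
This hunk moves the MegatronDistributedFusedAdam import out of the module's top-level imports and into the existing Apex guard, so importing nlp_overrides.py no longer fails on systems without Apex. A minimal sketch of the guarded-import pattern, assuming the except branch sets HAVE_APEX = False as in similar NeMo modules (only the try side is visible in the hunk):

# Sketch of the optional-dependency guard; the except branch is an assumption, not shown in the hunk.
try:
    from apex.transformer.pipeline_parallel.utils import get_num_microbatches
    from nemo.core.optim.distributed_adam import MegatronDistributedFusedAdam

    HAVE_APEX = True
except (ImportError, ModuleNotFoundError):
    # Apex (and therefore the Apex-backed optimizer) is unavailable; callers check this flag.
    HAVE_APEX = False
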
nemo/core/optim/distributed_adam.py (17 changes: 5 additions & 12 deletions)

@@ -24,18 +24,11 @@

 from nemo.utils import str_to_dtype

-try:
-    from apex.contrib.optimizers.distributed_fused_adam import (
-        DistributedFusedAdam,
-        _disable_pre_forward_hook,
-        _multi_tensor_copy,
-    )
-
-    HAVE_APEX = True
-
-except (ImportError, ModuleNotFoundError):
-
-    HAVE_APEX = False
+from apex.contrib.optimizers.distributed_fused_adam import (
+    DistributedFusedAdam,
+    _disable_pre_forward_hook,
+    _multi_tensor_copy,
+)

 # Check if Transformer Engine has FP8 tensor class
 HAVE_TE_FP8TENSOR = False
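
This hunk drops the try/except guard around the Apex import in distributed_adam.py, so the module now raises ImportError at import time when Apex is missing; that is why the first hunk moves the MegatronDistributedFusedAdam import inside the caller's guard. A hedged sketch of how a consumer without a hard Apex dependency could probe for the class after this change (the helper name is illustrative, not from this commit):

# Sketch: defer the module import so the ImportError surfaces only when Apex is absent.
def get_distributed_adam_cls():
    try:
        # After this commit, importing the module requires Apex to be installed.
        from nemo.core.optim.distributed_adam import MegatronDistributedFusedAdam
    except (ImportError, ModuleNotFoundError):
        return None
    return MegatronDistributedFusedAdam
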
