
Commit

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Apr 8, 2024
1 parent 2402e66 commit 8cdfcb2
Showing 2 changed files with 8 additions and 9 deletions.
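The diff below is import re-ordering of the kind isort produces: alphabetical order within sections, duplicate from-imports merged, and a blank line inserted before first-party imports. As a rough illustration, assuming isort is among the configured pre-commit hooks (the hook list is not shown on this page), the same kind of fix can be reproduced with isort's Python API:

import isort

# Hypothetical reproduction of the kind of fix applied in this commit.
unsorted = (
    "from megatron.core.datasets.retro.config import RetroGPTChunkDatasets\n"
    "from megatron.core.models.retro import RetroConfig\n"
    "from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder\n"
)
# isort.code() returns the re-ordered source: blended_megatron_dataset_builder
# sorts ahead of retro.config, and models.retro sorts after the datasets imports.
print(isort.code(unsorted))

Running pre-commit run --all-files locally applies whichever hooks the repository configures and produces the same result as the bot.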
@@ -33,14 +33,15 @@

 try:
     from megatron.core import mpu, tensor_parallel
-    from megatron.core.datasets.retro.config import RetroGPTChunkDatasets
-    from megatron.core.models.retro import RetroConfig
     from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder
+    from megatron.core.datasets.retro.config import RetroGPTChunkDatasets
     from megatron.core.datasets.retro.query.multi_split_gpt_dataset import (
         MultiSplitGPTDataset,
         MultiSplitGPTDatasetConfig,
     )
     from megatron.core.datasets.retro.query.retro_dataset import get_retro_datasets
+    from megatron.core.models.retro import RetroConfig
+
     from nemo.collections.nlp.modules.common.megatron.utils import get_ltor_masks_and_position_ids

     HAVE_MEGATRON_CORE = True
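For context, the imports in this hunk live inside an optional-dependency guard (the try: and HAVE_MEGATRON_CORE = True lines above). The matching except clause falls outside the shown lines, so the completion below is an assumed sketch of the pattern, not the file's exact code:

try:
    # stand-in for the megatron.core imports shown in the hunk above
    from megatron.core import mpu, tensor_parallel

    HAVE_MEGATRON_CORE = True
except (ImportError, ModuleNotFoundError):
    # Assumed fallback: the flag lets the module degrade gracefully when
    # Megatron-LM is not installed.
    HAVE_MEGATRON_CORE = False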
@@ -16,16 +16,15 @@
 import json
 import os
 import queue
-import warnings
 import types
+import warnings
 from dataclasses import fields
 from functools import partial
 from typing import Any, Dict, Iterator, List, Optional, Union

 import torch
-from omegaconf import OmegaConf
+from omegaconf import OmegaConf, open_dict
 from omegaconf.dictconfig import DictConfig
-from omegaconf import open_dict
 from pytorch_lightning.accelerators import CPUAccelerator
 from pytorch_lightning.trainer.trainer import Trainer

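The consolidated omegaconf import above pulls in open_dict alongside OmegaConf. As a minimal sketch of what open_dict does (illustrative only, not taken from the changed file): it temporarily lifts struct mode so new keys can be assigned to a config.

from omegaconf import OmegaConf, open_dict

cfg = OmegaConf.create({"precision": 16})
OmegaConf.set_struct(cfg, True)  # struct mode: assigning unknown keys now raises
with open_dict(cfg):
    cfg.retro_num_neighbors = 2  # hypothetical key, permitted inside open_dict
print(OmegaConf.to_yaml(cfg))

Merging the two omegaconf imports onto one line only normalizes style; open_dict behaves the same either way.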
@@ -83,14 +82,12 @@
     from megatron.core.models.retro import RetroModel as MCoreRetroModel
     from megatron.core.models.retro.config import RetroConfig
     from megatron.core.models.retro.decoder_spec import get_retro_decoder_block_spec
+    from megatron.core.models.retro.utils import get_config_path as get_retro_config_path
+    from megatron.core.models.retro.utils import get_gpt_data_dir as get_retro_data_dir
     from megatron.core.pipeline_parallel.schedules import get_forward_backward_func
     from megatron.core.transformer.module import Float16Module as MCoreFloat16Module
     from megatron.core.transformer.transformer_config import TransformerConfig
     from megatron.core.utils import init_method_normal, scaled_init_method_normal
-    from megatron.core.models.retro.utils import (
-        get_config_path as get_retro_config_path,
-        get_gpt_data_dir as get_retro_data_dir,
-    )

     # TODO @tmoon: Use once available in Megatron-LM
     # from megatron.core.pipeline_parallel.schedules import DataIteratorList
Expand Down Expand Up @@ -419,6 +416,7 @@ def build_retro_config(self) -> RetroConfig:

# Validate Transformer Engine version.
from importlib.metadata import version

from pkg_resources import packaging

te_version = packaging.version.Version(version("transformer-engine"))
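The hunk above shows only the start of the Transformer Engine version check; the inserted blank line separating the stdlib and third-party imports is the whole change. A minimal sketch of how such a check typically continues (the minimum version and the error handling are assumptions, not taken from the file):

from importlib.metadata import version

from pkg_resources import packaging

# Parse the installed transformer-engine version, as in the hunk above.
te_version = packaging.version.Version(version("transformer-engine"))

# Hypothetical floor and handling, purely illustrative.
min_te_version = packaging.version.Version("1.0")
if te_version < min_te_version:
    raise ValueError(f"transformer-engine >= {min_te_version} is required, found {te_version}")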
