Commit d9f9a4c

Apply isort and black reformatting
Signed-off-by: yaoyu-33 <[email protected]>
yaoyu-33 committed Nov 26, 2024
1 parent 9cdf6ae commit d9f9a4c
Showing 2 changed files with 6 additions and 8 deletions.
7 changes: 3 additions & 4 deletions nemo/collections/vlm/recipes/llava15_13b.py
@@ -18,6 +18,7 @@
 import nemo_run as run
 import pytorch_lightning as pl
 import torch
+from megatron.core.distributed import DistributedDataParallelConfig
 
 from nemo import lightning as nl
 from nemo.collections import llm, vlm
@@ -26,10 +27,8 @@
 from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
 from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
 from nemo.collections.vlm.neva.data.mock import MockDataModule
-from nemo.utils.exp_manager import TimingCallback
 from nemo.lightning.pytorch.callbacks.megatron_comm_overlap import MegatronCommOverlapCallback
-
-from megatron.core.distributed import DistributedDataParallelConfig
+from nemo.utils.exp_manager import TimingCallback
 
 NAME = "llava15_13b"
 
@@ -105,7 +104,7 @@ def finetune_recipe(
             overlap_grad_reduce=True,
             overlap_param_gather=True,
             average_in_collective=True,
-        )
+        ),
     )
 
     trainer = run.Config(
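Both files receive the same two fixes. The first two hunks are isort's work: the third-party megatron.core import moves up beside the other external packages, and the nemo imports collapse into one alphabetized block. A sketch of the resulting import header, reconstructed only from the context lines visible above (imports hidden in the collapsed regions of the diff are omitted):

# Post-commit import order in llava15_13b.py, reconstructed from the hunks
# above; imports hidden in the collapsed diff regions are omitted.
import nemo_run as run
import pytorch_lightning as pl
import torch
from megatron.core.distributed import DistributedDataParallelConfig

from nemo import lightning as nl
from nemo.collections import llm, vlm
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
from nemo.collections.vlm.neva.data.mock import MockDataModule
from nemo.lightning.pytorch.callbacks.megatron_comm_overlap import MegatronCommOverlapCallback
from nemo.utils.exp_manager import TimingCallback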
7 changes: 3 additions & 4 deletions nemo/collections/vlm/recipes/llava15_7b.py
@@ -18,6 +18,7 @@
 import nemo_run as run
 import pytorch_lightning as pl
 import torch
+from megatron.core.distributed import DistributedDataParallelConfig
 
 from nemo import lightning as nl
 from nemo.collections import llm, vlm
@@ -26,10 +27,8 @@
 from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
 from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
 from nemo.collections.vlm.neva.data.mock import MockDataModule
-from nemo.utils.exp_manager import TimingCallback
 from nemo.lightning.pytorch.callbacks.megatron_comm_overlap import MegatronCommOverlapCallback
-
-from megatron.core.distributed import DistributedDataParallelConfig
+from nemo.utils.exp_manager import TimingCallback
 
 NAME = "llava15_7b"
 
@@ -105,7 +104,7 @@ def finetune_recipe(
             overlap_grad_reduce=True,
             overlap_param_gather=True,
             average_in_collective=True,
-        )
+        ),
     )
 
     trainer = run.Config(
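The third hunk in each file is black's work: the options shown belong to a DistributedDataParallelConfig built with run.Config, and black adds a trailing comma after the inner closing parenthesis because that nested config is the last argument of a call split across multiple lines. A minimal sketch of the surrounding structure, assuming the usual NeMo recipe layout (the enclosing nl.MegatronStrategy and the ddp= keyword are assumptions, not shown in this diff):

# Hypothetical reconstruction of the code around the ")" -> ")," change.
# nl.MegatronStrategy and the ddp= keyword are assumed from typical NeMo
# recipes; only the DistributedDataParallelConfig options appear in the diff.
strategy = run.Config(
    nl.MegatronStrategy,
    ddp=run.Config(
        DistributedDataParallelConfig,
        overlap_grad_reduce=True,
        overlap_param_gather=True,
        average_in_collective=True,
    ),  # black: trailing comma after the last argument of a multi-line call
)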
