Skip to content
This repository has been archived by the owner on Oct 16, 2023. It is now read-only.

Fixed bugs with checkpoint path check #26

Merged
Merged 1 commit on Apr 13, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 5 additions & 4 deletions example/bert/bert.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from energon.nn import Linear1D_Col, Linear1D_Row, Classifier1D
from energon.nn import LayerNorm1D
from energon.nn import VocabParallelEmbedding1D
from energon.utils import get_current_device
from energon.utils import get_current_device, is_using_pp

__all__ = [
'BertEmbedding1D'
Expand Down Expand Up @@ -208,7 +208,7 @@ def __init__(self,
layernorm_epsilon=layernorm_epsilon,
dtype=dtype)
self.blocks = nn.ModuleList()
self.pp_rank = gpc.get_local_rank(ParallelMode.PIPELINE)
self.pp_rank = gpc.get_local_rank(ParallelMode.PIPELINE) if is_using_pp() else 0
for id_ in range(depth):
self.blocks.register_module("blk_{}".format(id_ + self.pp_rank * depth),
BertTransformerLayer1D(
Expand Down Expand Up @@ -310,8 +310,9 @@ def _create_bert_pipeline_model(depth=48, num_chunks=1, layer_partitions=None, *

if "checkpoint" in model_kwargs.keys():
if model_kwargs["checkpoint"] is True:
assert "checkpoint_path" in model_kwargs.keys(), "You have to specify a file path to use checkpoint loading"
assert os.path.exists(model_kwargs["checkpoint_path"]), "Checkpoint file not found"
if gpc.get_global_rank() == 0:
assert "checkpoint_path" in model_kwargs.keys(), "You have to specify a file path to use checkpoint loading"
assert os.path.exists(model_kwargs["checkpoint_path"]), "Checkpoint file not found"
load_checkpoint(model_kwargs["checkpoint_path"], model, **model_kwargs)

logger.info(f'Rank{rank}/{pipeline_rank} model size in FP16 = {numel * 2 / 1e9} GB')
Expand Down
6 changes: 4 additions & 2 deletions example/gpt/gpt.py
Original file line number Diff line number Diff line change
Expand Up @@ -431,8 +431,10 @@ def _create_gpt_pipeline_model(depth=48, num_chunks=1, layer_partitions=None, **
numel += param.numel()
if "checkpoint" in model_kwargs.keys():
if model_kwargs["checkpoint"] is True:
assert "checkpoint_path" in model_kwargs.keys(), "You have to specify a file path to use checkpoint loading"
assert os.path.exists(model_kwargs["checkpoint_path"]), "Checkpoint file not found"
if gpc.get_global_rank() == 0:
assert "checkpoint_path" in model_kwargs.keys(), "You have to specify a file path to use checkpoint loading"
print(model_kwargs["checkpoint_path"])
assert os.path.exists(model_kwargs["checkpoint_path"]), "Checkpoint file not found"
load_checkpoint(model_kwargs["checkpoint_path"], model, **model_kwargs)
logger.info(f'Rank{rank}/{pipeline_rank} model size = {numel * 2 / 1e9} GB')
return model
Expand Down