diff --git a/examples/llm/megatron_gpt_pretraining.py b/examples/llm/megatron_gpt_pretraining.py index bf36971d35d6..dd33f00c8b40 100644 --- a/examples/llm/megatron_gpt_pretraining.py +++ b/examples/llm/megatron_gpt_pretraining.py @@ -102,7 +102,7 @@ def get_args(): ) nemo_logger = NeMoLogger( - dir=args.experiment_dir, + log_dir=args.experiment_dir, ) train( diff --git a/nemo/collections/llm/recipes/log/default.py b/nemo/collections/llm/recipes/log/default.py index b59d549726c6..93bd9f9470fa 100644 --- a/nemo/collections/llm/recipes/log/default.py +++ b/nemo/collections/llm/recipes/log/default.py @@ -60,7 +60,7 @@ def default_log( name=name, tensorboard=tensorboard_logger, wandb=wandb_logger, - dir=dir, + log_dir=dir, ) diff --git a/nemo/collections/llm/tools/auto_configurator/core/base_config.py b/nemo/collections/llm/tools/auto_configurator/core/base_config.py index 9e7af4d5d189..a82823c71248 100644 --- a/nemo/collections/llm/tools/auto_configurator/core/base_config.py +++ b/nemo/collections/llm/tools/auto_configurator/core/base_config.py @@ -203,7 +203,7 @@ def get_logger(self) -> Config[nl.NeMoLogger]: ckpt=ckpt, tensorboard=tb_logger, wandb=None, - dir=self.config.path_to_logs, + log_dir=self.config.path_to_logs, ) def get_run_config(self) -> dict: diff --git a/nemo/lightning/nemo_logger.py b/nemo/lightning/nemo_logger.py index 8d8932edbb57..d611dccdcf5f 100644 --- a/nemo/lightning/nemo_logger.py +++ b/nemo/lightning/nemo_logger.py @@ -35,7 +35,7 @@ class NeMoLogger(IOMixin): Args: name (str): Name of the experiment. - dir (Optional[str]): Directory to save logs. + log_dir (Optional[str]): Directory to save logs. explicit_log_dir (Optional[str]): Explicit log directory. version (Optional[str]): Version of the experiment. use_datetime_version (bool): Whether to use datetime as version. 
@@ -56,7 +56,7 @@ class NeMoLogger(IOMixin): """ name: str = "default" - dir: Optional[str] = None + log_dir: Optional[str] = None explicit_log_dir: Optional[str] = None version: Optional[str] = None use_datetime_version: bool = True @@ -99,9 +99,9 @@ def setup(self, trainer: Union[pl.Trainer, fl.Fabric], resume_if_exists: bool = f"that was passed to nemo_logger container a logger, but update_logger_directory is False. This means " f"that the trainer's logger directory may not match with the explicit_log_dir." ) - if self.dir or self.version: + if self.log_dir or self.version: logging.error( - f"nemo logger received explicit_log_dir: {self.explicit_log_dir} and at least one of dir: {self.dir}, " + f"nemo logger received explicit_log_dir: {self.explicit_log_dir} and at least one of log_dir: {self.log_dir}, " f"or version: {self.version}. Please note that dir, name, and version will be ignored." ) if is_global_rank_zero() and Path(self.explicit_log_dir).exists(): @@ -110,8 +110,8 @@ def setup(self, trainer: Union[pl.Trainer, fl.Fabric], resume_if_exists: bool = else: # Default dir to ./nemo_experiments if None was passed - _dir = self.dir - if self.dir is None: + _dir = self.log_dir + if self.log_dir is None: _dir = str(Path.cwd() / "nemo_experiments") if not self.name: diff --git a/tests/collections/llm/auto_conf/test_base_configs.py b/tests/collections/llm/auto_conf/test_base_configs.py index 7c86d6758ad3..bfcf40315a63 100644 --- a/tests/collections/llm/auto_conf/test_base_configs.py +++ b/tests/collections/llm/auto_conf/test_base_configs.py @@ -118,7 +118,7 @@ def get_logger() -> run.Config[nl.NeMoLogger]: ckpt=ckpt, tensorboard=tb_logger, wandb=None, - dir="/", + log_dir="/", ) diff --git a/tests/collections/llm/gpt/model/megatron_ssm_pretraining.py b/tests/collections/llm/gpt/model/megatron_ssm_pretraining.py index 52daa21c2279..30284bb5b6f1 100644 --- a/tests/collections/llm/gpt/model/megatron_ssm_pretraining.py +++ 
b/tests/collections/llm/gpt/model/megatron_ssm_pretraining.py @@ -116,7 +116,7 @@ def get_args(): ) nemo_logger = NeMoLogger( - dir=args.experiment_dir, + log_dir=args.experiment_dir, ) train( diff --git a/tests/collections/llm/test_mnist_model_nemo2.py b/tests/collections/llm/test_mnist_model_nemo2.py index cd1d8c866b06..3f0b804e8bd6 100644 --- a/tests/collections/llm/test_mnist_model_nemo2.py +++ b/tests/collections/llm/test_mnist_model_nemo2.py @@ -509,7 +509,7 @@ def run_train_mnist_litautoencoder_with_megatron_strategy_single_gpu(): tb_logger = TensorBoardLogger(save_dir=str(save_dir), name=name) # Setup the logger and train the model nemo_logger = NeMoLogger( - dir=str(root_dir), # WARNING: passing a path in here results in mutating the Path class. + log_dir=str(root_dir), # WARNING: passing a path in here results in mutating the Path class. name=name, tensorboard=tb_logger, ckpt=checkpoint_callback, diff --git a/tests/collections/llm/test_mnist_model_nemo2_fsdp.py b/tests/collections/llm/test_mnist_model_nemo2_fsdp.py index 3ef0f14f10d8..8a6c1f993d28 100644 --- a/tests/collections/llm/test_mnist_model_nemo2_fsdp.py +++ b/tests/collections/llm/test_mnist_model_nemo2_fsdp.py @@ -531,7 +531,7 @@ def run_train_mnist_litautoencoder_with_fsdp_strategy_single_gpu(): tb_logger = TensorBoardLogger(save_dir=str(save_dir), name=name) # Setup the logger and train the model nemo_logger = NeMoLogger( - dir=str(root_dir), # WARNING: passing a path in here results in mutating the Path class. + log_dir=str(root_dir), # WARNING: passing a path in here results in mutating the Path class. 
name=name, tensorboard=tb_logger, ckpt=checkpoint_callback, diff --git a/tests/lightning/test_nemo_logger.py b/tests/lightning/test_nemo_logger.py index 387d3540930f..3f8f7a1e0bb8 100644 --- a/tests/lightning/test_nemo_logger.py +++ b/tests/lightning/test_nemo_logger.py @@ -180,7 +180,7 @@ def test_resume(self, trainer, tmp_path): logger = nl.NeMoLogger( name="default", - dir=str(tmp_path) + "/test_resume", + log_dir=str(tmp_path) + "/test_resume", version="version_0", use_datetime_version=False, )