diff --git a/nequip/train/lightning.py b/nequip/train/lightning.py
index b96a17e9..d6ce9ec5 100644
--- a/nequip/train/lightning.py
+++ b/nequip/train/lightning.py
@@ -4,11 +4,15 @@
 from hydra.utils import instantiate
 from nequip.model import model_from_config
 from nequip.data import AtomicDataDict
+from nequip.utils import RankedLogger
 from ._metrics_utils import gather_all_tensors
 
 import warnings
 from typing import Optional, Dict
 
+logger = RankedLogger(__name__, rank_zero_only=True)
+
+
 # metrics are already synced before logging, but Lightning still sends a PossibleUserWarning about setting sync_dist=True in self.logdict()
 warnings.filterwarnings(
     "ignore",
@@ -62,6 +66,7 @@ def __init__(
         super().__init__()
         self.save_hyperparameters()
         self.model = model_from_config(config=model, initialize=True)
+        logger.debug(f"Built Model Details:\n{str(self.model)}")
         self.optimizer_config = optimizer
         self.lr_scheduler_config = lr_scheduler
 
diff --git a/nequip/utils/logger.py b/nequip/utils/logger.py
index 02e4046c..205d26b3 100644
--- a/nequip/utils/logger.py
+++ b/nequip/utils/logger.py
@@ -1,5 +1,6 @@
 """From https://github.com/ashleve/lightning-hydra-template"""
 
+import os
 import logging
 from typing import Mapping, Optional
 
@@ -26,6 +27,11 @@ def __init__(
         super().__init__(logger=logger, extra=extra)
         self.rank_zero_only = rank_zero_only
 
+        # get log level from the environment variable, default to 'INFO'
+        log_level = os.getenv("_NEQUIP_LOG_LEVEL", "INFO").upper()
+        log_level = getattr(logging, log_level, logging.INFO)
+        self.logger.setLevel(log_level)
+
     def log(
         self, level: int, msg: str, rank: Optional[int] = None, *args, **kwargs
     ) -> None:
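
For context, a minimal standalone sketch of the behavior the `logger.py` hunk adds: it replays the same stdlib calls against a plain `logging.Logger` instead of importing nequip's `RankedLogger`, so everything except the `_NEQUIP_LOG_LEVEL` name and the two-line level lookup is illustrative, not part of the patch.

```python
# Sketch of the environment-variable hook added in RankedLogger.__init__.
# Only `_NEQUIP_LOG_LEVEL` and the two lookup lines come from the diff;
# the surrounding setup is a hypothetical harness.
import logging
import os

os.environ["_NEQUIP_LOG_LEVEL"] = "debug"  # case-insensitive, thanks to .upper()

# Same logic as the diff: read the env var, default to 'INFO',
# and resolve the name to a logging level constant.
log_level = os.getenv("_NEQUIP_LOG_LEVEL", "INFO").upper()
log_level = getattr(logging, log_level, logging.INFO)

logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(log_level)
logger.debug("visible because _NEQUIP_LOG_LEVEL=debug resolves to logging.DEBUG")
```

Note that `getattr(logging, log_level, logging.INFO)` falls back to `INFO` for unrecognized names, so a typo in the variable downgrades silently rather than raising; this is also what lets the new `logger.debug(...)` call in `lightning.py` stay quiet by default and only print the built model when `_NEQUIP_LOG_LEVEL` is set to `DEBUG`.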