From 15e91ca54f10f57274ce17c4f5b3014fd084ddc5 Mon Sep 17 00:00:00 2001
From: Onuralp SEZER
Date: Tue, 4 Feb 2025 18:22:27 +0300
Subject: [PATCH 1/3] =?UTF-8?q?feat(logging):=20=F0=9F=94=8A=20add=20loggi?=
 =?UTF-8?q?ng=20configuration=20for=20PyTorch=20Lightning=20and=20Transfor?=
 =?UTF-8?q?mers?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Onuralp SEZER
---
 maestro/trainer/__init__.py |  6 ++++
 maestro/trainer/logger.py   | 63 +++++++++++++++++++++++++++++++++++++
 2 files changed, 69 insertions(+)
 create mode 100644 maestro/trainer/logger.py

diff --git a/maestro/trainer/__init__.py b/maestro/trainer/__init__.py
index e69de29..f335890 100644
--- a/maestro/trainer/__init__.py
+++ b/maestro/trainer/__init__.py
@@ -0,0 +1,6 @@
+from .logger import configure_logging, set_lightning_logging, set_transformers_progress
+
+# Configure default logging settings on import
+configure_logging()
+
+__all__ = ["set_lightning_logging", "set_transformers_progress"]
diff --git a/maestro/trainer/logger.py b/maestro/trainer/logger.py
new file mode 100644
index 0000000..f2bd205
--- /dev/null
+++ b/maestro/trainer/logger.py
@@ -0,0 +1,63 @@
+import logging
+import os
+
+from transformers.utils.logging import disable_progress_bar, enable_progress_bar
+
+
+def configure_logging() -> None:
+    """Configure global logging settings for PyTorch Lightning and Transformers.
+
+    Sets up logging based on environment variables:
+        MAESTRO_LIGHTNING_LOG_LEVEL: Sets Lightning's logging level (default: "INFO")
+        MAESTRO_TRANSFORMERS_PROGRESS: Controls Transformers progress bar (1=enabled, 0=disabled)
+
+    Example:
+        import os
+        from maestro.trainer import configure_logging
+        os.environ["MAESTRO_LIGHTNING_LOG_LEVEL"] = "DEBUG"
+        os.environ["MAESTRO_TRANSFORMERS_PROGRESS"] = "1"
+        configure_logging()
+    """
+    lightning_level = os.getenv("MAESTRO_LIGHTNING_LOG_LEVEL", "INFO")
+    set_lightning_logging(lightning_level)
+
+    if os.getenv("MAESTRO_TRANSFORMERS_PROGRESS", "") == "1":
+        set_transformers_progress(True)
+    else:
+        set_transformers_progress(False)
+
+
+def set_lightning_logging(level: str) -> None:
+    """Set PyTorch Lightning logging level while preserving transformers state.
+
+    Args:
+        level (str): Logging level (e.g., "INFO", "DEBUG", "WARNING", "ERROR")
+
+    Example:
+        from maestro.trainer import set_lightning_logging
+        set_lightning_logging("DEBUG")
+    """
+    pytorch_lightning_logging = logging.getLogger("pytorch_lightning")
+    cuda_log = logging.getLogger("lightning.pytorch.accelerators.cuda")
+    rank_zero = logging.getLogger("lightning.pytorch.utilities.rank_zero")
+
+    pytorch_lightning_logging.setLevel(getattr(logging, level))
+    cuda_log.setLevel(getattr(logging, level))
+    rank_zero.setLevel(getattr(logging, level))
+
+
+def set_transformers_progress(status: bool) -> None:
+    """Control visibility of Transformers progress bars.
+
+    Args:
+        status (bool): True to enable progress bars, False to disable
+
+    Example:
+        from maestro.trainer import set_transformers_progress
+        set_transformers_progress(True)   # Enable progress bars
+        set_transformers_progress(False)  # Disable progress bars
+    """
+    if status:
+        enable_progress_bar()
+    else:
+        disable_progress_bar()

From c82738afdb2f028256b90f06c129743f70491939 Mon Sep 17 00:00:00 2001
From: Onuralp SEZER
Date: Tue, 4 Feb 2025 22:31:13 +0300
Subject: [PATCH 2/3] =?UTF-8?q?fix(logging):=20=F0=9F=90=9E=20correct=20lo?=
 =?UTF-8?q?gger=20variable=20names=20and=20set=20logging=20levels=20for=20?=
 =?UTF-8?q?clarity?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Onuralp SEZER
---
 maestro/trainer/logger.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/maestro/trainer/logger.py b/maestro/trainer/logger.py
index f2bd205..0520b59 100644
--- a/maestro/trainer/logger.py
+++ b/maestro/trainer/logger.py
@@ -37,13 +37,16 @@ def set_lightning_logging(level: str) -> None:
         from maestro.trainer import set_lightning_logging
         set_lightning_logging("DEBUG")
     """
+
+    lightning_logging = logging.getLogger("lightning")
     pytorch_lightning_logging = logging.getLogger("pytorch_lightning")
-    cuda_log = logging.getLogger("lightning.pytorch.accelerators.cuda")
-    rank_zero = logging.getLogger("lightning.pytorch.utilities.rank_zero")
+    cuda_logging = logging.getLogger("lightning.pytorch.accelerators.cuda")
+    rank_zero_logging = logging.getLogger("lightning.pytorch.utilities.rank_zero")
 
+    lightning_logging.setLevel(getattr(logging, level))
     pytorch_lightning_logging.setLevel(getattr(logging, level))
-    cuda_log.setLevel(getattr(logging, level))
-    rank_zero.setLevel(getattr(logging, level))
+    cuda_logging.setLevel(getattr(logging, level))
+    rank_zero_logging.setLevel(getattr(logging, level))
 
 
 def set_transformers_progress(status: bool) -> None:

From 380f7d5426fb3ef299407972e4cf92cb34697892 Mon Sep 17 00:00:00 2001
From: Onuralp SEZER
Date: Tue, 4 Feb 2025 22:33:46 +0300
Subject: [PATCH 3/3] =?UTF-8?q?fix(logging):=20=F0=9F=90=9E=20reorder=20im?=
 =?UTF-8?q?port=20statement=20for=20configure=5Flogging=20in=20example=20u?=
 =?UTF-8?q?sage?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Onuralp SEZER
---
 maestro/trainer/logger.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/maestro/trainer/logger.py b/maestro/trainer/logger.py
index 0520b59..3171427 100644
--- a/maestro/trainer/logger.py
+++ b/maestro/trainer/logger.py
@@ -13,9 +13,9 @@ def configure_logging() -> None:
 
     Example:
         import os
-        from maestro.trainer import configure_logging
        os.environ["MAESTRO_LIGHTNING_LOG_LEVEL"] = "DEBUG"
        os.environ["MAESTRO_TRANSFORMERS_PROGRESS"] = "1"
+        from maestro.trainer import configure_logging
         configure_logging()
     """
     lightning_level = os.getenv("MAESTRO_LIGHTNING_LOG_LEVEL", "INFO")
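
A minimal usage sketch of the logging controls added in this series, assuming the maestro.trainer package from these patches is installed alongside pytorch_lightning and transformers; the environment variable values shown below are illustrative choices, not defaults enforced by the code.

    import os

    # Set the variables before maestro.trainer is first imported, because
    # __init__.py calls configure_logging() at import time (the example
    # reordering in PATCH 3/3 reflects the same requirement).
    os.environ["MAESTRO_LIGHTNING_LOG_LEVEL"] = "WARNING"  # quiet the Lightning loggers
    os.environ["MAESTRO_TRANSFORMERS_PROGRESS"] = "0"      # hide Transformers progress bars

    from maestro.trainer import (
        configure_logging,
        set_lightning_logging,
        set_transformers_progress,
    )

    # Re-read the environment variables explicitly if they change later in the process.
    configure_logging()

    # The helpers can also be called directly, bypassing the environment variables.
    set_lightning_logging("DEBUG")   # raise Lightning verbosity for a debugging session
    set_transformers_progress(True)  # re-enable Transformers progress bars

Because configure_logging() runs once when maestro.trainer is imported, values exported in the shell (e.g. MAESTRO_LIGHTNING_LOG_LEVEL=DEBUG) are picked up automatically; the explicit call is only needed when the variables are modified from within the running process.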