Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

rename logging -> loggers #767

Merged
merged 6 commits into from
Feb 1, 2020
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
113 changes: 113 additions & 0 deletions pytorch_lightning/loggers/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
"""
Lightning supports most popular logging frameworks (Tensorboard, comet, weights and biases, etc...).
To use a logger, simply pass it into the trainer.

.. code-block:: python

from pytorch_lightning import loggers

# lightning uses tensorboard by default
tb_logger = loggers.TensorBoardLogger()
trainer = Trainer(logger=tb_logger)

# or choose from any of the others such as MLFlow, Comet, Neptune, Wandb
comet_logger = loggers.CometLogger()
trainer = Trainer(logger=comet_logger)

.. note:: All loggers log by default to `os.getcwd()`. To change the path without creating a logger set
Trainer(default_save_path='/your/path/to/save/checkpoints')

Custom logger
-------------

You can implement your own logger by writing a class that inherits from
`LightningLoggerBase`. Use the `rank_zero_only` decorator to make sure that
only the first process in DDP training logs data.

.. code-block:: python

from pytorch_lightning.loggers import LightningLoggerBase, rank_zero_only

class MyLogger(LightningLoggerBase):

@rank_zero_only
def log_hyperparams(self, params):
# params is an argparse.Namespace
# your code to record hyperparameters goes here
pass

@rank_zero_only
def log_metrics(self, metrics, step):
# metrics is a dictionary of metric names and values
# your code to record metrics goes here
pass

def save(self):
# Optional. Any code necessary to save logger data goes here
pass

@rank_zero_only
def finalize(self, status):
# Optional. Any code that needs to be run after training
# finishes goes here


If you write a logger than may be useful to others, please send
a pull request to add it to Lighting!

Using loggers
-------------

Call the logger anywhere from your LightningModule by doing:

.. code-block:: python

def train_step(...):
# example
self.logger.experiment.whatever_method_summary_writer_supports(...)

def any_lightning_module_function_or_hook(...):
self.logger.experiment.add_histogram(...)

Supported Loggers
-----------------
"""
from os import environ

from .base import LightningLoggerBase, rank_zero_only
from .tensorboard import TensorBoardLogger

__all__ = ['TensorBoardLogger']

try:
# needed to prevent ImportError and duplicated logs.
environ["COMET_DISABLE_AUTO_LOGGING"] = "1"

from .comet import CometLogger
__all__.append('CometLogger')
except ImportError:
del environ["COMET_DISABLE_AUTO_LOGGING"]

try:
from .mlflow import MLFlowLogger
__all__.append('MLFlowLogger')
except ImportError:
pass

try:
from .neptune import NeptuneLogger
__all__.append('NeptuneLogger')
except ImportError:
pass

try:
from .test_tube import TestTubeLogger
__all__.append('TestTubeLogger')
except ImportError:
pass

try:
from .wandb import WandbLogger
__all__.append('WandbLogger')
except ImportError:
pass
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def __init__(self, api_key=None, save_dir=None, workspace=None,
.. code-block:: python

# ONLINE MODE
from pytorch_lightning.logging import CometLogger
from pytorch_lightning.loggers import CometLogger

# arguments made to CometLogger are passed on to the comet_ml.Experiment class
comet_logger = CometLogger(
Expand All @@ -47,7 +47,7 @@ def __init__(self, api_key=None, save_dir=None, workspace=None,
.. code-block:: python

# OFFLINE MODE
from pytorch_lightning.logging import CometLogger
from pytorch_lightning.loggers import CometLogger

# arguments made to CometLogger are passed on to the comet_ml.Experiment class
comet_logger = CometLogger(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

.. code-block:: python

from pytorch_lightning.logging import MLFlowLogger
from pytorch_lightning.loggers import MLFlowLogger
mlf_logger = MLFlowLogger(
experiment_name="default",
tracking_uri="file:/."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

.. code-block:: python

from pytorch_lightning.logging import NeptuneLogger
from pytorch_lightning.loggers import NeptuneLogger
# arguments made to NeptuneLogger are passed on to the neptune.experiments.Experiment class

neptune_logger = NeptuneLogger(
Expand Down Expand Up @@ -66,7 +66,7 @@ def __init__(self, api_key=None, project_name=None, offline_mode=False,
.. code-block:: python

# ONLINE MODE
from pytorch_lightning.logging import NeptuneLogger
from pytorch_lightning.loggers import NeptuneLogger
# arguments made to NeptuneLogger are passed on to the neptune.experiments.Experiment class

neptune_logger = NeptuneLogger(
Expand All @@ -81,7 +81,7 @@ def __init__(self, api_key=None, project_name=None, offline_mode=False,
.. code-block:: python

# OFFLINE MODE
from pytorch_lightning.logging import NeptuneLogger
from pytorch_lightning.loggers import NeptuneLogger
# arguments made to NeptuneLogger are passed on to the neptune.experiments.Experiment class

neptune_logger = NeptuneLogger(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

.. code-block:: python

from pytorch_lightning.logging import TestTubeLogger
from pytorch_lightning.loggers import TestTubeLogger
tt_logger = TestTubeLogger(
save_dir=".",
name="default",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ class WandbLogger(LightningLoggerBase):
--------
.. code-block:: python

from pytorch_lightning.logging import WandbLogger
from pytorch_lightning.loggers import WandbLogger
from pytorch_lightning import Trainer

wandb_logger = WandbLogger()
Expand Down
115 changes: 5 additions & 110 deletions pytorch_lightning/logging/__init__.py
Original file line number Diff line number Diff line change
@@ -1,115 +1,10 @@
"""
Lightning supports most popular logging frameworks (Tensorboard, comet, weights and biases, etc...).
To use a logger, simply pass it into the trainer.

.. code-block:: python

from pytorch_lightning import logging

# lightning uses tensorboard by default
tb_logger = logging.TensorBoardLogger()
trainer = Trainer(logger=tb_logger)

# or choose from any of the others such as MLFlow, Comet, Neptune, Wandb
comet_logger = logging.CometLogger()
trainer = Trainer(logger=comet_logger)

.. note:: All loggers log by default to `os.getcwd()`. To change the path without creating a logger set
Trainer(default_save_path='/your/path/to/save/checkpoints')

Custom logger
-------------

You can implement your own logger by writing a class that inherits from
`LightningLoggerBase`. Use the `rank_zero_only` decorator to make sure that
only the first process in DDP training logs data.

.. code-block:: python

from pytorch_lightning.logging import LightningLoggerBase, rank_zero_only

class MyLogger(LightningLoggerBase):

@rank_zero_only
def log_hyperparams(self, params):
# params is an argparse.Namespace
# your code to record hyperparameters goes here
pass

@rank_zero_only
def log_metrics(self, metrics, step):
# metrics is a dictionary of metric names and values
# your code to record metrics goes here
pass

def save(self):
# Optional. Any code necessary to save logger data goes here
pass

@rank_zero_only
def finalize(self, status):
# Optional. Any code that needs to be run after training
# finishes goes here


If you write a logger that may be useful to others, please send
a pull request to add it to Lightning!

Using loggers
-------------

Call the logger anywhere from your LightningModule by doing:

.. code-block:: python

def train_step(...):
# example
self.logger.experiment.whatever_method_summary_writer_supports(...)

def any_lightning_module_function_or_hook(...):
self.logger.experiment.add_histogram(...)

Supported Loggers
-----------------
.. warning:: `logging` package has been renamed to `loggers` since v0.6.1 and will be removed in v0.8.0
"""
from os import environ

from .base import LightningLoggerBase, rank_zero_only
from .tensorboard import TensorBoardLogger

loggers = ['TensorBoardLogger']

try:
# needed to prevent ImportError and duplicated logs.
environ["COMET_DISABLE_AUTO_LOGGING"] = "1"

from .comet import CometLogger
loggers.append('CometLogger')
except ImportError:
del environ["COMET_DISABLE_AUTO_LOGGING"]

try:
from .mlflow import MLFlowLogger
loggers.append('MLFlowLogger')
except ImportError:
pass

try:
from .neptune import NeptuneLogger
loggers.append('NeptuneLogger')
except ImportError:
pass

try:
from .test_tube import TestTubeLogger
loggers.append('TestTubeLogger')
except ImportError:
pass
import warnings

try:
from .wandb import WandbLogger
loggers.append('WandbLogger')
except ImportError:
pass
warnings.warn("`logging` package has been renamed to `loggers` since v0.6.1"
" and will be removed in v0.8.0", DeprecationWarning)

__all__ = loggers
from pytorch_lightning.loggers import * # noqa: E402
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def __init__(
logger (:class:`.Logger`): Logger for experiment tracking.
Example::

from pytorch_lightning.logging import TensorBoardLogger
from pytorch_lightning.loggers import TensorBoardLogger

# default logger used by trainer
logger = TensorBoardLogger(
Expand Down
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/training_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@
.. code-block:: python

from pytorch_lightning import Trainer
from pytorch_lightning.logging import TestTubeLogger
from pytorch_lightning.loggers import TestTubeLogger

logger = TestTubeLogger(
save_dir='./savepath',
Expand Down