Skip to content

Commit c74b8e6

Browse files
committed
change default logger to dedicated one
Fix test. Fix format.
1 parent 774d9be commit c74b8e6

19 files changed

+28
-24
lines changed

pl_examples/basic_examples/lightning_module_template.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
"""
22
Example template for defining a system
33
"""
4-
import logging as log
54
import os
65
from argparse import ArgumentParser
76
from collections import OrderedDict
@@ -14,6 +13,7 @@
1413
from torch.utils.data import DataLoader
1514
from torchvision.datasets import MNIST
1615

16+
from pytorch_lightning import _logger as log
1717
from pytorch_lightning.core import LightningModule
1818

1919

pytorch_lightning/__init__.py

+3
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,9 @@
2424
# We are not importing the rest of the scikit during the build
2525
# process, as it may not be compiled yet
2626
else:
27+
import logging
28+
_logger = logging.getLogger("lightning")
29+
2730
from .core import LightningModule
2831
from .trainer import Trainer
2932
from .callbacks import Callback

pytorch_lightning/callbacks/early_stopping.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -5,12 +5,12 @@
55
66
"""
77

8-
import logging as log
98
import warnings
109

1110
import numpy as np
1211

1312
from .base import Callback
13+
from pytorch_lightning import _logger as log
1414

1515

1616
class EarlyStopping(Callback):

pytorch_lightning/callbacks/model_checkpoint.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55
Automatically save model checkpoints during training.
66
"""
77

8-
import logging as log
98
import os
109
import shutil
1110
import warnings
@@ -14,6 +13,7 @@
1413
import numpy as np
1514

1615
from pytorch_lightning.callbacks.base import Callback
16+
from pytorch_lightning import _logger as log
1717

1818

1919
class ModelCheckpoint(Callback):

pytorch_lightning/core/lightning.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
import collections
22
import inspect
3-
import logging as log
43
import os
54
import warnings
65
from abc import ABC, abstractmethod
@@ -15,6 +14,7 @@
1514
from torch.optim.optimizer import Optimizer
1615
from torch.utils.data import DataLoader
1716

17+
from pytorch_lightning import _logger as log
1818
from pytorch_lightning.core.grads import GradInformation
1919
from pytorch_lightning.core.hooks import ModelHooks
2020
from pytorch_lightning.core.memory import ModelSummary

pytorch_lightning/core/memory.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
"""
44

55
import gc
6-
import logging as log
76
import os
87
import subprocess
98
from subprocess import PIPE
@@ -15,6 +14,8 @@
1514

1615
import pytorch_lightning as pl
1716

17+
from pytorch_lightning import _logger as log
18+
1819

1920
class ModelSummary(object):
2021

pytorch_lightning/core/saving.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
11
import csv
2-
import logging as log
32
import os
43
from argparse import Namespace
54
from typing import Union, Dict, Any
65

6+
from pytorch_lightning import _logger as log
7+
78

89
class ModelIO(object):
910

pytorch_lightning/loggers/comet.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66
-------------
77
"""
88

9-
import logging as log
109
from argparse import Namespace
1110
from typing import Optional, Dict, Union, Any
1211

@@ -27,8 +26,9 @@
2726
import torch
2827
from torch import is_tensor
2928

29+
from pytorch_lightning import _logger as log
30+
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
3031
from pytorch_lightning.utilities.debugging import MisconfigurationException
31-
from .base import LightningLoggerBase, rank_zero_only
3232

3333

3434
class CometLogger(LightningLoggerBase):

pytorch_lightning/loggers/mlflow.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@ def any_lightning_module_function_or_hook(...):
2323
self.logger.experiment.whatever_ml_flow_supports(...)
2424
2525
"""
26-
import logging as log
2726
from argparse import Namespace
2827
from time import time
2928
from typing import Optional, Dict, Any, Union
@@ -34,7 +33,8 @@ def any_lightning_module_function_or_hook(...):
3433
raise ImportError('You want to use `mlflow` logger which is not installed yet,'
3534
' install it with `pip install mlflow`.')
3635

37-
from .base import LightningLoggerBase, rank_zero_only
36+
from pytorch_lightning import _logger as log
37+
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
3838

3939

4040
class MLFlowLogger(LightningLoggerBase):

pytorch_lightning/loggers/neptune.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66
NeptuneLogger
77
--------------
88
"""
9-
import logging as log
109
from argparse import Namespace
1110
from typing import Optional, List, Dict, Any, Union, Iterable
1211

@@ -20,6 +19,7 @@
2019
import torch
2120
from torch import is_tensor
2221

22+
from pytorch_lightning import _logger as log
2323
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
2424

2525

pytorch_lightning/loggers/trains.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@ def any_lightning_module_function_or_hook(...):
2424
2525
"""
2626

27-
import logging as log
2827
from argparse import Namespace
2928
from pathlib import Path
3029
from typing import Any, Dict, Optional, Union
@@ -38,7 +37,8 @@ def any_lightning_module_function_or_hook(...):
3837
raise ImportError('You want to use `TRAINS` logger which is not installed yet,'
3938
' install it with `pip install trains`.')
4039

41-
from .base import LightningLoggerBase, rank_zero_only
40+
from pytorch_lightning import _logger as log
41+
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
4242

4343

4444
class TrainsLogger(LightningLoggerBase):

pytorch_lightning/profiler/profiler.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
import cProfile
22
import io
3-
import logging as log
43
import pstats
54
import time
65
from abc import ABC, abstractmethod
76
from collections import defaultdict
87
from contextlib import contextmanager
8+
from pytorch_lightning import _logger as log
99

1010
import numpy as np
1111

pytorch_lightning/trainer/auto_mix_precision.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
11

22
from abc import ABC
33

4+
from pytorch_lightning import _logger as log
5+
46
try:
57
from apex import amp
68

79
APEX_AVAILABLE = True
810
except ImportError:
911
APEX_AVAILABLE = False
10-
import logging as log
1112

1213

1314
class TrainerAMPMixin(ABC):

pytorch_lightning/trainer/distrib_data_parallel.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -113,15 +113,14 @@ def train_fx(trial_hparams, cluster_manager, _):
113113
114114
"""
115115

116-
import logging as log
117116
import os
118117
import re
119118
import warnings
120119
from abc import ABC, abstractmethod
121120
from typing import Union
122121

123122
import torch
124-
123+
from pytorch_lightning import _logger as log
125124
from pytorch_lightning.loggers import LightningLoggerBase
126125
from pytorch_lightning.utilities.debugging import MisconfigurationException
127126

pytorch_lightning/trainer/distrib_parts.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -334,12 +334,12 @@
334334
335335
"""
336336

337-
import logging as log
338337
import os
339338
from abc import ABC, abstractmethod
340339

341340
import torch
342341

342+
from pytorch_lightning import _logger as log
343343
from pytorch_lightning.overrides.data_parallel import (
344344
LightningDistributedDataParallel,
345345
LightningDataParallel,

pytorch_lightning/trainer/trainer.py

+2-3
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import inspect
2-
import logging as log
32
import os
43
import sys
54
import warnings
@@ -14,8 +13,8 @@
1413
from torch.utils.data import DataLoader
1514
from tqdm.auto import tqdm
1615

17-
from pytorch_lightning.callbacks import Callback
18-
from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping
16+
from pytorch_lightning import _logger as log
17+
from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping, Callback
1918
from pytorch_lightning.loggers import LightningLoggerBase
2019
from pytorch_lightning.profiler import Profiler, PassThroughProfiler
2120
from pytorch_lightning.profiler.profiler import BaseProfiler

pytorch_lightning/trainer/training_io.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,6 @@
8989
9090
"""
9191

92-
import logging as log
9392
import os
9493
import re
9594
import signal
@@ -102,6 +101,7 @@
102101
import torch
103102
import torch.distributed as dist
104103

104+
from pytorch_lightning import _logger as log
105105
from pytorch_lightning.core.lightning import LightningModule
106106
from pytorch_lightning.loggers import LightningLoggerBase
107107
from pytorch_lightning.overrides.data_parallel import (

pytorch_lightning/trainer/training_loop.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,6 @@ def training_step(self, batch, batch_idx):
122122
"""
123123

124124
import copy
125-
import logging as log
126125
import warnings
127126
from abc import ABC, abstractmethod
128127
from typing import Callable
@@ -131,6 +130,7 @@ def training_step(self, batch, batch_idx):
131130
import numpy as np
132131
from torch.utils.data import DataLoader
133132

133+
from pytorch_lightning import _logger as log
134134
from pytorch_lightning.callbacks.base import Callback
135135
from pytorch_lightning.core.lightning import LightningModule
136136
from pytorch_lightning.loggers import LightningLoggerBase

pytorch_lightning/trainer/training_tricks.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
1-
import logging as log
21
import math
32
from abc import ABC, abstractmethod
43

54
import torch
65

6+
from pytorch_lightning import _logger as log
77
from pytorch_lightning.callbacks import GradientAccumulationScheduler
88

99
EPSILON = 1e-6

0 commit comments

Comments
 (0)