1 parent b5b77e4 commit d4571d1
pytorch_lightning/trainer/training_tricks_mixin.py
@@ -13,7 +13,7 @@ def clip_gradients(self):
     def print_nan_gradients(self):
         model = self.get_model()
         for param in model.parameters():
-            if torch.isnan(param.grad.float()).any():
+            if (param.grad is not None) and torch.isnan(param.grad.float()).any():
                 logging.info(param, param.grad)
 
     def configure_accumulated_gradients(self, accumulate_grad_batches):
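Why the guard matters: parameters that never receive a gradient (for example, layers that do not contribute to the loss, or any parameter before the first backward pass) keep `grad = None`, so calling `param.grad.float()` on them raises an AttributeError. Below is a minimal sketch in plain PyTorch, not the Lightning mixin itself; the `TwoHeadNet` model and the standalone `print_nan_gradients` helper are hypothetical, illustrative names.

# Minimal sketch (assumed example, not the Lightning mixin): one head of the
# model is unused by the loss, so its parameters keep grad=None after backward().
import logging

import torch
import torch.nn as nn

logging.basicConfig(level=logging.INFO)


class TwoHeadNet(nn.Module):
    def __init__(self):
        super().__init__()
        self.used = nn.Linear(4, 1)
        self.unused = nn.Linear(4, 1)  # never touched by forward()

    def forward(self, x):
        return self.used(x)


def print_nan_gradients(model: nn.Module) -> None:
    # Mirrors the patched logic: skip parameters whose grad is None
    # before checking for NaNs.
    for param in model.parameters():
        if (param.grad is not None) and torch.isnan(param.grad.float()).any():
            logging.info("NaN gradient found in parameter: %s", param)


model = TwoHeadNet()
loss = model(torch.randn(2, 4)).sum()
loss.backward()

# Without the `param.grad is not None` guard, iterating over model.parameters()
# and calling param.grad.float() would raise AttributeError for self.unused.
print_nan_gradients(model)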