tb_logger.py
import torch.utils.tensorboard as tb


class DummyLogger:
    """No-op logger: accepts and ignores every logging call."""

    def log_scalar(self, *args, **kwargs):
        pass

    def log_params(self, *args, **kwargs):
        pass

    def log_grad(self, *args, **kwargs):
        pass

    def train_end(self, *args, **kwargs):
        pass


class TensorBoardLogger(DummyLogger):
    """Logs scalars and, optionally, parameter/gradient histograms to TensorBoard."""

    def __init__(self, path, model, histogram=False):
        self.writer = tb.SummaryWriter(log_dir=str(path))
        self.model = model
        self.histogram = histogram

    def log_scalar(self, name, value, step, stage='train'):
        # Group scalars under the stage prefix, e.g. 'train/loss' vs 'val/loss'.
        self.writer.add_scalar(
            f'{stage}/{name}',
            value,
            global_step=step
        )

    def log_grad(self, step):
        # Histogram logging is opt-in; it adds noticeable overhead per step.
        if not self.histogram:
            return
        for name, param in self.model.named_parameters():
            # Parameters that did not receive a gradient this step are skipped.
            if param.grad is not None:
                self.writer.add_histogram(
                    name.replace('.', '/'),
                    param.grad,
                    global_step=step
                )

    def log_params(self, step):
        if not self.histogram:
            return
        for name, param in self.model.named_parameters():
            self.writer.add_histogram(
                name.replace('.', '/'),
                param,
                global_step=step
            )

    def train_end(self):
        # Flush pending events and release the event-file handle.
        self.writer.close()
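

# Usage sketch: a minimal, hypothetical training loop showing how the logger
# API above might be driven. The toy model, optimizer, log directory, and
# data below are illustrative assumptions, not taken from this repository.
if __name__ == '__main__':
    import torch

    model = torch.nn.Linear(4, 1)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    logger = TensorBoardLogger('runs/demo', model, histogram=True)

    for step in range(100):
        x = torch.randn(8, 4)
        y = torch.randn(8, 1)
        loss = torch.nn.functional.mse_loss(model(x), y)

        optimizer.zero_grad()
        loss.backward()
        logger.log_grad(step)    # gradient histograms (only when histogram=True)
        optimizer.step()

        logger.log_scalar('loss', loss.item(), step)  # appears as 'train/loss'
        logger.log_params(step)  # parameter histograms

    logger.train_end()           # close the underlying SummaryWriter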