Skip to content

Commit

Permalink
log meta (#2086)
Browse files Browse the repository at this point in the history
* log meta

* move collect_env to mmdet/utils

* fix import error

* fix isort

* add dash line after env info
  • Loading branch information
yhcao6 authored Feb 15, 2020
1 parent 389bfeb commit 1bd65a5
Show file tree
Hide file tree
Showing 5 changed files with 45 additions and 17 deletions.
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/error-report.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ A placeholder for the command.

**Environment**

1. Please run `python tools/collect_env.py` to collect necessary environment information and paste it here.
1. Please run `python mmdet/utils/collect_env.py` to collect necessary environment information and paste it here.
2. You may add additional information that may be helpful for locating the problem, such as
- How you installed PyTorch [e.g., pip, conda, source]
- Other environment variables that may be related (such as `$PATH`, `$LD_LIBRARY_PATH`, `$PYTHONPATH`, etc.)
Expand Down
29 changes: 22 additions & 7 deletions mmdet/apis/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,8 @@ def train_detector(model,
cfg,
distributed=False,
validate=False,
timestamp=None):
timestamp=None,
meta=None):
logger = get_root_logger(cfg.log_level)

# start training
Expand All @@ -97,15 +98,17 @@ def train_detector(model,
cfg,
validate=validate,
logger=logger,
timestamp=timestamp)
timestamp=timestamp,
meta=meta)
else:
_non_dist_train(
model,
dataset,
cfg,
validate=validate,
logger=logger,
timestamp=timestamp)
timestamp=timestamp,
meta=meta)


def build_optimizer(model, optimizer_cfg):
Expand Down Expand Up @@ -193,7 +196,8 @@ def _dist_train(model,
cfg,
validate=False,
logger=None,
timestamp=None):
timestamp=None,
meta=None):
# prepare data loaders
dataset = dataset if isinstance(dataset, (list, tuple)) else [dataset]
data_loaders = [
Expand All @@ -210,7 +214,12 @@ def _dist_train(model,
# build runner
optimizer = build_optimizer(model, cfg.optimizer)
runner = Runner(
model, batch_processor, optimizer, cfg.work_dir, logger=logger)
model,
batch_processor,
optimizer,
cfg.work_dir,
logger=logger,
meta=meta)
    # an ugly workaround to make the .log and .log.json filenames the same
runner.timestamp = timestamp

Expand Down Expand Up @@ -244,7 +253,8 @@ def _non_dist_train(model,
cfg,
validate=False,
logger=None,
timestamp=None):
timestamp=None,
meta=None):
if validate:
raise NotImplementedError('Built-in validation is not implemented '
'yet in not-distributed training. Use '
Expand All @@ -267,7 +277,12 @@ def _non_dist_train(model,
# build runner
optimizer = build_optimizer(model, cfg.optimizer)
runner = Runner(
model, batch_processor, optimizer, cfg.work_dir, logger=logger)
model,
batch_processor,
optimizer,
cfg.work_dir,
logger=logger,
meta=meta)
    # an ugly workaround to make the .log and .log.json filenames the same
runner.timestamp = timestamp
# fp16 setting
Expand Down
3 changes: 2 additions & 1 deletion mmdet/utils/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from .collect_env import collect_env
from .flops_counter import get_model_complexity_info
from .logger import get_root_logger, print_log
from .registry import Registry, build_from_cfg

__all__ = [
'Registry', 'build_from_cfg', 'get_model_complexity_info',
'get_root_logger', 'print_log'
'get_root_logger', 'print_log', 'collect_env'
]
9 changes: 4 additions & 5 deletions tools/collect_env.py → mmdet/utils/collect_env.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
import torchvision

import mmdet
from mmdet.ops import get_compiler_version, get_compiling_cuda_version


def collect_env():
Expand Down Expand Up @@ -53,12 +52,12 @@ def collect_env():

env_info['MMCV'] = mmcv.__version__
env_info['MMDetection'] = mmdet.__version__
from mmdet.ops import get_compiler_version, get_compiling_cuda_version
env_info['MMDetection Compiler'] = get_compiler_version()
env_info['MMDetection CUDA Compiler'] = get_compiling_cuda_version()

for name, val in env_info.items():
print('{}: {}'.format(name, val))
return env_info


if __name__ == "__main__":
collect_env()
for name, val in collect_env().items():
print('{}: {}'.format(name, val))
19 changes: 16 additions & 3 deletions tools/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from mmdet.apis import set_random_seed, train_detector
from mmdet.datasets import build_dataset
from mmdet.models import build_detector
from mmdet.utils import get_root_logger
from mmdet.utils import collect_env, get_root_logger


def parse_args():
Expand Down Expand Up @@ -86,9 +86,20 @@ def main():
log_file = osp.join(cfg.work_dir, '{}.log'.format(timestamp))
logger = get_root_logger(log_file=log_file, log_level=cfg.log_level)

# init the meta dict to record some important information such as
# environment info and seed, which will be logged
meta = dict()
# log env info
env_info_dict = collect_env()
env_info = '\n'.join([('{}: {}'.format(k, v))
for k, v in env_info_dict.items()])
dash_line = '-' * 60 + '\n'
logger.info('Environment info:\n' + dash_line + env_info + '\n' +
dash_line)
meta['env_info'] = env_info

# log some basic info
logger.info('Distributed training: {}'.format(distributed))
logger.info('MMDetection Version: {}'.format(__version__))
logger.info('Config:\n{}'.format(cfg.text))

# set random seeds
Expand All @@ -97,6 +108,7 @@ def main():
args.seed, args.deterministic))
set_random_seed(args.seed, deterministic=args.deterministic)
cfg.seed = args.seed
meta['seed'] = args.seed

model = build_detector(
cfg.model, train_cfg=cfg.train_cfg, test_cfg=cfg.test_cfg)
Expand All @@ -119,7 +131,8 @@ def main():
cfg,
distributed=distributed,
validate=args.validate,
timestamp=timestamp)
timestamp=timestamp,
meta=meta)


if __name__ == '__main__':
Expand Down

0 comments on commit 1bd65a5

Please sign in to comment.