import argparse
import logging
import os
import os.path as osp

from mmengine.config import Config, DictAction
from mmengine.logging import print_log
from mmengine.registry import RUNNERS
from mmengine.runner import Runner

from mmdet.utils import setup_cache_size_limit_of_dynamo
def parse_args():
    parser = argparse.ArgumentParser(description='Train a detector')
    parser.add_argument('config', help='train config file path')
    parser.add_argument('--work-dir', help='the dir to save logs and models')
    parser.add_argument(
        '--amp',
        action='store_true',
        default=False,
        help='enable automatic-mixed-precision training')
    parser.add_argument(
        '--auto-scale-lr',
        action='store_true',
        help='enable automatically scaling LR.')
    parser.add_argument(
        '--resume',
        nargs='?',
        type=str,
        const='auto',
        help='If a checkpoint path is specified, resume from it; otherwise, '
        'try to auto-resume from the latest checkpoint in the work '
        'directory.')
    parser.add_argument(
        '--cfg-options',
        nargs='+',
        action=DictAction,
        help='override some settings in the used config; the key-value pairs '
        'in xxx=yyy format will be merged into the config file. If the value '
        'to be overwritten is a list, it should be like key="[a,b]" or '
        'key=a,b. It also allows nested list/tuple values, e.g. '
        'key="[(a,b),(c,d)]". Note that the quotation marks are necessary '
        'and that no white space is allowed.')
    parser.add_argument(
        '--launcher',
        choices=['none', 'pytorch', 'slurm', 'mpi'],
        default='none',
        help='job launcher')
    # PyTorch >= 2.0 launchers pass `--local-rank` instead of `--local_rank`,
    # so both spellings are accepted here.
    parser.add_argument('--local_rank', '--local-rank', type=int, default=0)
    args = parser.parse_args()
    if 'LOCAL_RANK' not in os.environ:
        os.environ['LOCAL_RANK'] = str(args.local_rank)

    return args
def main():
    args = parse_args()

    # Reduce the number of repeated torch.compile compilations
    # to improve training speed.
    setup_cache_size_limit_of_dynamo()

    # load the config and merge CLI overrides into it
    cfg = Config.fromfile(args.config)
    cfg.launcher = args.launcher
    if args.cfg_options is not None:
        cfg.merge_from_dict(args.cfg_options)

    # work_dir priority: CLI argument > value in the config > config filename
    if args.work_dir is not None:
        # update configs according to CLI args if args.work_dir is not None
        cfg.work_dir = args.work_dir
    elif cfg.get('work_dir', None) is None:
        # use the config filename as the default work_dir if cfg.work_dir is None
        cfg.work_dir = osp.join('./work_dirs',
                                osp.splitext(osp.basename(args.config))[0])

    # enable automatic-mixed-precision training
    if args.amp is True:
        optim_wrapper = cfg.optim_wrapper.type
        if optim_wrapper == 'AmpOptimWrapper':
            print_log(
                'AMP training is already enabled in your config.',
                logger='current',
                level=logging.WARNING)
        else:
            assert optim_wrapper == 'OptimWrapper', (
                '`--amp` is only supported when the optimizer wrapper type is '
                f'`OptimWrapper` but got {optim_wrapper}.')
            cfg.optim_wrapper.type = 'AmpOptimWrapper'
            cfg.optim_wrapper.loss_scale = 'dynamic'

    # enable automatic scaling of the learning rate
    if args.auto_scale_lr:
        if 'auto_scale_lr' in cfg and \
                'enable' in cfg.auto_scale_lr and \
                'base_batch_size' in cfg.auto_scale_lr:
            cfg.auto_scale_lr.enable = True
        else:
            raise RuntimeError('Can not find "auto_scale_lr" or '
                               '"auto_scale_lr.enable" or '
                               '"auto_scale_lr.base_batch_size" in your'
                               ' configuration file.')

    # resume training: 'auto' resumes from the latest checkpoint in work_dir,
    # while an explicit path resumes from that checkpoint
    if args.resume == 'auto':
        cfg.resume = True
        cfg.load_from = None
    elif args.resume is not None:
        cfg.resume = True
        cfg.load_from = args.resume

    if 'runner_type' not in cfg:
        # build the default runner
        runner = Runner.from_cfg(cfg)
    else:
        # build a customized runner from the registry
        # if 'runner_type' is set in the config
        runner = RUNNERS.build(cfg)

    # start training
    runner.train()


if __name__ == '__main__':
    main()
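
For context on the `--cfg-options` flag above: `DictAction` turns each `key=value` pair into a dict entry, and `cfg.merge_from_dict` then merges those dotted keys into the loaded config. The snippet below is a minimal sketch, not part of `tools/train.py`; the config keys are toy placeholders rather than keys from a real MMDetection config, and it only assumes `mmengine` is installed.

# Standalone illustration of the merge semantics used by --cfg-options.
from mmengine.config import Config

# A toy config defined inline; real configs are loaded with Config.fromfile,
# but merge_from_dict behaves the same way.
cfg = Config(
    dict(
        optim_wrapper=dict(
            type='OptimWrapper', optimizer=dict(type='SGD', lr=0.02)),
        train_cfg=dict(max_epochs=12)))

# Equivalent to passing on the command line:
#   --cfg-options optim_wrapper.optimizer.lr=0.01 train_cfg.max_epochs=24
cfg.merge_from_dict({
    'optim_wrapper.optimizer.lr': 0.01,
    'train_cfg.max_epochs': 24,
})

print(cfg.optim_wrapper.optimizer.lr)  # 0.01
print(cfg.train_cfg.max_epochs)  # 24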