_base_ = ['./mask2former_r50_8xb2-8e_youtubevis2021.py']
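# Swin-L stage depths; also reused below to generate per-block norm keys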
depths = [2, 2, 18, 2]
model = dict(
    type='Mask2FormerVideo',
    backbone=dict(
        _delete_=True,
        type='SwinTransformer',
        pretrain_img_size=384,
        embed_dims=192,
        depths=depths,
        num_heads=[6, 12, 24, 48],
        window_size=12,
        mlp_ratio=4,
        qkv_bias=True,
        qk_scale=None,
        drop_rate=0.,
        attn_drop_rate=0.,
        drop_path_rate=0.3,
        patch_norm=True,
        out_indices=(0, 1, 2, 3),
        with_cp=False,
        convert_weights=True,
        frozen_stages=-1,
        init_cfg=None),
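    # in_channels follow the Swin-L stage widths: embed_dims * 2**i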
    track_head=dict(
        type='Mask2FormerTrackHead',
        in_channels=[192, 384, 768, 1536],
        num_queries=200),
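    # initialize from Mask2Former Swin-L weights pretrained on COCO panoptic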
    init_cfg=dict(
        type='Pretrained',
        checkpoint=  # noqa: E251
        'https://download.openmmlab.com/mmdetection/v3.0/mask2former/'
        'mask2former_swin-l-p4-w12-384-in21k_16xb1-lsj-100e_coco-panoptic/'
        'mask2former_swin-l-p4-w12-384-in21k_16xb1-lsj-100e_coco-panoptic_'
        '20220407_104949-82f8d28d.pth'))

# set lr_mult=0.1 for all backbone layers and decay_mult=0.0 for all norm
# layers as well as the position, query and level embeddings
backbone_norm_multi = dict(lr_mult=0.1, decay_mult=0.0)
backbone_embed_multi = dict(lr_mult=0.1, decay_mult=0.0)
embed_multi = dict(lr_mult=1.0, decay_mult=0.0)
custom_keys = {
    'backbone': dict(lr_mult=0.1, decay_mult=1.0),
    'backbone.patch_embed.norm': backbone_norm_multi,
    'backbone.norm': backbone_norm_multi,
    'absolute_pos_embed': backbone_embed_multi,
    'relative_position_bias_table': backbone_embed_multi,
    'query_embed': embed_multi,
    'query_feat': embed_multi,
    'level_embed': embed_multi
}
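# apply backbone_norm_multi to the norm layer inside every Swin block,
# generated from the stage depths defined above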
custom_keys.update({
    f'backbone.stages.{stage_id}.blocks.{block_id}.norm': backbone_norm_multi
    for stage_id, num_blocks in enumerate(depths)
    for block_id in range(num_blocks)
})
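# downsample (patch merging) norms only exist between stages, hence the
# range over len(depths) - 1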
custom_keys.update({
    f'backbone.stages.{stage_id}.downsample.norm': backbone_norm_multi
    for stage_id in range(len(depths) - 1)
})

# optimizer
optim_wrapper = dict(
    paramwise_cfg=dict(custom_keys=custom_keys, norm_decay_mult=0.0))
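# note (assumption about mmengine internals): DefaultOptimWrapperConstructor
# matches custom_keys as substrings of each parameter name, trying longer keys
# first, so the specific norm/embedding entries above take precedence over the
# generic 'backbone' key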