forked from open-mmlab/mmdetection
-
Notifications
You must be signed in to change notification settings - Fork 0
/
grounding_dino_swin-t_finetune_8xb2_20e_cat.py
56 lines (45 loc) · 1.48 KB
/
grounding_dino_swin-t_finetune_8xb2_20e_cat.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
# MMDetection (MMEngine-style) config: fine-tune Grounding DINO (Swin-T
# backbone) on a single-class "cat" dataset for 20 epochs.
# Inherits everything not overridden here from the 1x COCO fine-tune base.
_base_ = 'grounding_dino_swin-t_finetune_16xb2_1x_coco.py'
data_root = 'data/cat/'
# Single foreground class; palette is the per-class visualization color (RGB).
class_name = ('cat', )
num_classes = len(class_name)
metainfo = dict(classes=class_name, palette=[(220, 20, 60)])
# Override the head's class count (base config was trained on COCO's 80).
model = dict(bbox_head=dict(num_classes=num_classes))
train_dataloader = dict(
    dataset=dict(
        data_root=data_root,
        metainfo=metainfo,
        ann_file='annotations/trainval.json',
        data_prefix=dict(img='images/')))
val_dataloader = dict(
    dataset=dict(
        metainfo=metainfo,
        data_root=data_root,
        ann_file='annotations/test.json',
        data_prefix=dict(img='images/')))
# Evaluate on the same split for val and test.
test_dataloader = val_dataloader
val_evaluator = dict(ann_file=data_root + 'annotations/test.json')
test_evaluator = val_evaluator
max_epoch = 20
default_hooks = dict(
    # Keep only the single best checkpoint (by the auto-selected metric).
    checkpoint=dict(interval=1, max_keep_ckpts=1, save_best='auto'),
    logger=dict(type='LoggerHook', interval=5))
train_cfg = dict(max_epochs=max_epoch, val_interval=1)
param_scheduler = [
    # Linear warmup over the first 30 iterations (by_epoch=False -> iters).
    dict(type='LinearLR', start_factor=0.001, by_epoch=False, begin=0, end=30),
    # Then step the LR down 10x at epoch 15.
    dict(
        type='MultiStepLR',
        begin=0,
        end=max_epoch,
        by_epoch=True,
        milestones=[15],
        gamma=0.1)
]
optim_wrapper = dict(
    # Low base LR for fine-tuning a pretrained model.
    optimizer=dict(lr=0.00005),
    paramwise_cfg=dict(
        custom_keys={
            # No weight decay on positional embeddings; reduced LR on the
            # backbone; language model fully frozen (lr_mult=0).
            'absolute_pos_embed': dict(decay_mult=0.),
            'backbone': dict(lr_mult=0.1),
            'language_model': dict(lr_mult=0),
        }))
# LR auto-scaling reference: config name implies 8 GPUs x 2 imgs = 16.
auto_scale_lr = dict(base_batch_size=16)