#!/usr/bin/env python

import sys
import os
from torch.distributed import launch as distributed_launch

########################################################################
# config

'''
Usage:
(1) Select one of the config files listed below.
(2) Inside the config file, make sure that the dataset to be used is uncommented
    (see the sketch in the comment after this docstring).
(3) Set the appropriate input resolution in the config file (input_size).
(4) It is recommended to run the first training with the voc0712 dataset, as it is widely used and reasonably small.
(5) To convert cityscapes to the coco format, run the script: tools/convert_datasets/cityscapes.py

config='./configs/ssd/ssd_mobilenet.py'
config='./configs/ssd/ssd_mobilenet_fpn.py'
config='./configs/ssd/ssd_resnet_fpn.py'
config='./configs/ssd/ssd_regnet_fpn_bgr.py'

config='./configs/retinanet/retinanet_regnet_fpn_bgr.py'
config='./configs/retinanet/retinanet_resnet_fpn.py'
config='./configs/retinanet/fcos_regnet_fpn_bgr.py'
'''
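
# A minimal sketch of what steps (2) and (3) refer to inside a config file. This is a
# hypothetical example of mmdetection-style dataset settings, not copied from the
# configs above; the exact variable names may differ per config:
#
#   dataset_type = 'VOCDataset'        # or 'CocoDataset', with the unused one commented out
#   data_root = 'data/VOCdevkit/'      # root folder of the selected dataset
#   input_size = (512, 512)            # input resolution mentioned in step (3)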

config='./configs/retinanet/retinanet_regnet_fpn_bgr.py'

########################################################################
# other settings
gpus = 4                  # number of GPUs / processes to launch
dataset_style = 'voc'     # 'voc' or 'coco'
master_port = 29500       # port used by the distributed launcher


########################################################################
# the evaluation metric depends on the dataset style: 'bbox' for coco, 'mAP' for voc
metric = ('bbox' if dataset_style == 'coco' else 'mAP')
basename = os.path.splitext(os.path.basename(config))[0]
outdir = os.path.join('./work_dirs', basename)
checkpoint = os.path.join(outdir, 'latest.pth')
out_file = os.path.join(outdir, 'result.pkl')
print(f'Saving to: {outdir}')

# build the argument list for torch.distributed.launch: launcher options first,
# then the test script followed by its own arguments
sys.argv = [sys.argv[0], f'--nproc_per_node={gpus}', f'--master_port={master_port}',
            './tools/test.py', f'--eval={metric}', f'--out={out_file}', '--launcher=pytorch', config, checkpoint]
distributed_launch.main()
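
# For reference, the launch above is roughly equivalent to the following shell command
# (with the default settings in this file: 4 GPUs, master port 29500, voc dataset style
# and the retinanet_regnet_fpn_bgr config):
#   python -m torch.distributed.launch --nproc_per_node=4 --master_port=29500 \
#       ./tools/test.py --eval=mAP --out=./work_dirs/retinanet_regnet_fpn_bgr/result.pkl \
#       --launcher=pytorch ./configs/retinanet/retinanet_regnet_fpn_bgr.py \
#       ./work_dirs/retinanet_regnet_fpn_bgr/latest.pth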