forked from tianzhi0549/FCOS
-
Notifications
You must be signed in to change notification settings - Fork 0
/
fcos_demo.py
110 lines (97 loc) · 4.14 KB
/
fcos_demo.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import argparse
import cv2, os
from fcos_core.config import cfg
from predictor import COCODemo
import time
def main():
    """Run the FCOS detector on every image in a directory and display results.

    Parses command-line options, builds the model configuration, runs
    inference on each readable image in ``--images-dir``, shows each
    annotated image in its own OpenCV window, then waits for a key press
    before closing all windows.
    """
    parser = argparse.ArgumentParser(description="PyTorch Object Detection Webcam Demo")
    parser.add_argument(
        "--config-file",
        default="configs/fcos/fcos_imprv_R_50_FPN_1x.yaml",
        metavar="FILE",
        help="path to config file",
    )
    parser.add_argument(
        "--weights",
        default="FCOS_imprv_R_50_FPN_1x.pth",
        metavar="FILE",
        help="path to the trained model",
    )
    parser.add_argument(
        "--images-dir",
        default="demo/images",
        metavar="DIR",
        help="path to demo images directory",
    )
    parser.add_argument(
        "--min-image-size",
        type=int,
        default=800,
        help="Smallest size of the image to feed to the model. "
             "Model was trained with 800, which gives best results",
    )
    parser.add_argument(
        "opts",
        help="Modify model config options using the command-line",
        default=None,
        nargs=argparse.REMAINDER,
    )
    args = parser.parse_args()

    # Load config from file first, then let command-line opts override it;
    # freeze() makes the config immutable for the rest of the run.
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    cfg.MODEL.WEIGHT = args.weights
    cfg.freeze()

    # The following per-class thresholds are computed by maximizing
    # per-class f-measure in their precision-recall curve.
    # Please see compute_thresholds_for_classes() in coco_eval.py for details.
    thresholds_for_classes = [
        0.4923645853996277, 0.4928510785102844, 0.5040897727012634,
        0.4912887513637543, 0.5016880631446838, 0.5278812646865845,
        0.5351834893226624, 0.5003424882888794, 0.4955945909023285,
        0.43564629554748535, 0.6089804172515869, 0.666087806224823,
        0.5932040214538574, 0.48406165838241577, 0.4062422513961792,
        0.5571075081825256, 0.5671307444572449, 0.5268378257751465,
        0.5112953186035156, 0.4647842049598694, 0.5324517488479614,
        0.5795850157737732, 0.5152440071105957, 0.5280804634094238,
        0.4791383445262909, 0.5261335372924805, 0.4906163215637207,
        0.523737907409668, 0.47027698159217834, 0.5103300213813782,
        0.4645252823829651, 0.5384289026260376, 0.47796186804771423,
        0.4403403103351593, 0.5101461410522461, 0.5535093545913696,
        0.48472103476524353, 0.5006796717643738, 0.5485560894012451,
        0.4863888621330261, 0.5061569809913635, 0.5235867500305176,
        0.4745445251464844, 0.4652363359928131, 0.4162440598011017,
        0.5252017974853516, 0.42710989713668823, 0.4550687372684479,
        0.4943239390850067, 0.4810051918029785, 0.47629663348197937,
        0.46629616618156433, 0.4662836790084839, 0.4854755401611328,
        0.4156557023525238, 0.4763634502887726, 0.4724511504173279,
        0.4915047585964203, 0.5006274580955505, 0.5124194622039795,
        0.47004589438438416, 0.5374764204025269, 0.5876904129981995,
        0.49395060539245605, 0.5102297067642212, 0.46571290493011475,
        0.5164387822151184, 0.540651798248291, 0.5323763489723206,
        0.5048757195472717, 0.5302401781082153, 0.48333442211151123,
        0.5109739303588867, 0.4077408015727997, 0.5764586925506592,
        0.5109297037124634, 0.4685552418231964, 0.5148998498916626,
        0.4224434792995453, 0.4998510777950287,
    ]

    demo_im_names = os.listdir(args.images_dir)

    # Prepare object that handles inference plus adds predictions on top of image.
    coco_demo = COCODemo(
        cfg,
        confidence_thresholds_for_classes=thresholds_for_classes,
        min_image_size=args.min_image_size,
    )

    for im_name in demo_im_names:
        img = cv2.imread(os.path.join(args.images_dir, im_name))
        if img is None:
            # Skip directory entries that are not readable images
            # (subdirectories, non-image files, corrupt files).
            continue
        start_time = time.time()
        composite = coco_demo.run_on_opencv_image(img)
        print("{}\tinference time: {:.2f}s".format(im_name, time.time() - start_time))
        cv2.imshow(im_name, composite)

    print("Press any keys to exit ...")
    cv2.waitKey()
    cv2.destroyAllWindows()


if __name__ == "__main__":
    main()