-
Notifications
You must be signed in to change notification settings - Fork 4
/
Copy pathtest.py
149 lines (130 loc) · 7.81 KB
/
test.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
import time
import logging
import torch
import torch.nn.functional as F
from torch import distributed as dist
from tools.eval_metrics import evaluate, evaluate_with_clothes
def concat_all_gather(tensors, num_total_examples):
    '''
    Gather each tensor in `tensors` from every distributed rank and
    concatenate the per-rank copies along dim 0.

    Args:
        tensors: list of tensors to gather (moved to the current GPU here).
        num_total_examples: true dataset size; each gathered result is
            truncated to this length to drop the padding appended by
            DistributedInferenceSampler so all ranks see equal batches.

    Returns:
        list of CPU tensors, one per input, in the same order.
    '''
    world_size = dist.get_world_size()
    gathered = []
    for tensor in tensors:
        tensor = tensor.cuda()
        buckets = [tensor.clone() for _ in range(world_size)]
        dist.all_gather(buckets, tensor)
        merged = torch.cat(buckets, dim=0).cpu()
        # truncate the dummy elements added by DistributedInferenceSampler
        gathered.append(merged[:num_total_examples])
    return gathered
@torch.no_grad()
def extract_img_feature(model, dataloader):
    """
    Run `model` over every batch of `dataloader` and collect flip-augmented,
    L2-normalized image features together with their labels.

    Each image and its horizontal flip are both forwarded; the two feature
    vectors are summed before normalization (simple test-time augmentation).

    Returns:
        features: (N, D) CPU tensor of normalized features.
        pids, camids, clothes_ids: 1-D CPU tensors aligned with `features`.
    """
    features = []
    pids = torch.tensor([])
    camids = torch.tensor([])
    clothes_ids = torch.tensor([])
    # last tuple element is the image path; unused during feature extraction
    for imgs, batch_pids, batch_camids, batch_clothes_ids, _ in dataloader:
        # dim 3 is width, so this is a horizontal flip
        flipped = torch.flip(imgs, [3])
        imgs, flipped = imgs.cuda(), flipped.cuda()
        # model returns a pair; the second element is the feature tensor
        _, feat = model(imgs)
        _, feat_flip = model(flipped)
        feat = F.normalize(feat + feat_flip, p=2, dim=1)
        features.append(feat.cpu())
        pids = torch.cat((pids, batch_pids.cpu()), dim=0)
        camids = torch.cat((camids, batch_camids.cpu()), dim=0)
        clothes_ids = torch.cat((clothes_ids, batch_clothes_ids.cpu()), dim=0)
    features = torch.cat(features, 0)
    return features, pids, camids, clothes_ids
def test(config, model, queryloader, galleryloader, dataset):
    """
    Full evaluation pipeline: extract query/gallery features on every rank,
    gather them, compute a negative-cosine distance matrix and report CMC/mAP
    under three protocols (overall, same-clothes, clothes-changing).

    Args:
        config: run configuration (unused here; kept for caller compatibility).
        model: feature extractor returning (logits, features).
        queryloader / galleryloader: dataloaders yielding
            (imgs, pids, camids, clothes_ids, img_path) batches.
        dataset: provides `query` and `gallery` lists for true set sizes.

    Returns:
        top-1 accuracy (float) of the clothes-changing protocol.
    """
    logger = logging.getLogger('reid.test')

    def _log_results(cmc, mAP):
        # One place for the results banner; previously duplicated three times.
        logger.info("Results ---------------------------------------------------")
        logger.info('top1:{:.1%} top5:{:.1%} top10:{:.1%} top20:{:.1%} mAP:{:.1%}'.format(
            cmc[0], cmc[4], cmc[9], cmc[19], mAP))
        logger.info("-----------------------------------------------------------")

    since = time.time()
    model.eval()
    # Extract features on this rank's shard
    qf, q_pids, q_camids, q_clothes_ids = extract_img_feature(model, queryloader)
    gf, g_pids, g_camids, g_clothes_ids = extract_img_feature(model, galleryloader)
    # Gather samples from different GPUs; truncate sampler padding to true sizes
    torch.cuda.empty_cache()
    qf, q_pids, q_camids, q_clothes_ids = concat_all_gather(
        [qf, q_pids, q_camids, q_clothes_ids], len(dataset.query))
    gf, g_pids, g_camids, g_clothes_ids = concat_all_gather(
        [gf, g_pids, g_camids, g_clothes_ids], len(dataset.gallery))
    torch.cuda.empty_cache()
    time_elapsed = time.time() - since
    logger.info("Extracted features for query set, obtained {} matrix".format(qf.shape))
    logger.info("Extracted features for gallery set, obtained {} matrix".format(gf.shape))
    logger.info('Extracting features complete in {:.0f}m {:.0f}s'.format(
        time_elapsed // 60, time_elapsed % 60))

    # Compute distance matrix between query and gallery
    since = time.time()
    m, n = qf.size(0), gf.size(0)
    distmat = torch.zeros((m, n))
    qf, gf = qf.cuda(), gf.cuda()
    # Features are L2-normalized, so the inner product is cosine similarity;
    # negate so smaller = closer. Row-by-row keeps GPU memory bounded for
    # large query sets.
    for i in range(m):
        distmat[i] = (- torch.mm(qf[i:i + 1], gf.t())).cpu()
    distmat = distmat.numpy()
    q_pids, q_camids, q_clothes_ids = q_pids.numpy(), q_camids.numpy(), q_clothes_ids.numpy()
    g_pids, g_camids, g_clothes_ids = g_pids.numpy(), g_camids.numpy(), g_clothes_ids.numpy()
    time_elapsed = time.time() - since
    logger.info('Distance computing in {:.0f}m {:.0f}s'.format(
        time_elapsed // 60, time_elapsed % 60))

    since = time.time()
    logger.info("Computing CMC and mAP")
    cmc, mAP = evaluate(distmat, q_pids, g_pids, q_camids, g_camids)
    _log_results(cmc, mAP)
    time_elapsed = time.time() - since
    logger.info('Using {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60))

    logger.info("Computing CMC and mAP only for the same clothes setting")
    cmc, mAP = evaluate_with_clothes(
        distmat, q_pids, g_pids, q_camids, g_camids, q_clothes_ids, g_clothes_ids, mode='SC')
    _log_results(cmc, mAP)

    logger.info("Computing CMC and mAP only for clothes-changing")
    cmc, mAP = evaluate_with_clothes(
        distmat, q_pids, g_pids, q_camids, g_camids, q_clothes_ids, g_clothes_ids, mode='CC')
    _log_results(cmc, mAP)
    # top-1 of the clothes-changing protocol (the last evaluation above)
    return cmc[0]
def test_prcc(model, queryloader_same, queryloader_diff, galleryloader, dataset):
    """
    PRCC evaluation: two query sets (same-clothes and clothes-changing)
    scored against one shared gallery.

    Returns:
        top-1 accuracy (float) of the clothes-changing protocol.
    """
    logger = logging.getLogger('reid.test')
    since = time.time()
    model.eval()
    local_rank = dist.get_rank()  # NOTE(review): unused; kept as-is — presumably parity with test()

    # Extract features for both query sets and the gallery
    qsf, qs_pids, qs_camids, qs_clothes_ids = extract_img_feature(model, queryloader_same)
    qdf, qd_pids, qd_camids, qd_clothes_ids = extract_img_feature(model, queryloader_diff)
    gf, g_pids, g_camids, g_clothes_ids = extract_img_feature(model, galleryloader)

    # Gather samples from different GPUs, truncating sampler padding
    torch.cuda.empty_cache()
    qsf, qs_pids, qs_camids, qs_clothes_ids = concat_all_gather(
        [qsf, qs_pids, qs_camids, qs_clothes_ids], len(dataset.query_same))
    qdf, qd_pids, qd_camids, qd_clothes_ids = concat_all_gather(
        [qdf, qd_pids, qd_camids, qd_clothes_ids], len(dataset.query_diff))
    gf, g_pids, g_camids, g_clothes_ids = concat_all_gather(
        [gf, g_pids, g_camids, g_clothes_ids], len(dataset.gallery))

    time_elapsed = time.time() - since
    logger.info("Extracted features for query set (with same clothes), obtained {} matrix".format(qsf.shape))
    logger.info("Extracted features for query set (with different clothes), obtained {} matrix".format(qdf.shape))
    logger.info("Extracted features for gallery set, obtained {} matrix".format(gf.shape))
    logger.info('Extracting features complete in {:.0f}m {:.0f}s'.format(
        time_elapsed // 60, time_elapsed % 60))

    def _neg_cosine(query, gallery):
        # Features are L2-normalized, so the inner product is cosine
        # similarity; negate so smaller = closer. Row-wise to bound memory.
        out = torch.zeros((query.size(0), gallery.size(0)))
        for row in range(query.size(0)):
            out[row] = (- torch.mm(query[row:row + 1], gallery.t())).cpu()
        return out.numpy()

    # Compute distance matrix between query and gallery
    qsf, qdf, gf = qsf.cuda(), qdf.cuda(), gf.cuda()
    distmat_same = _neg_cosine(qsf, gf)
    distmat_diff = _neg_cosine(qdf, gf)

    qs_pids, qs_camids, qs_clothes_ids = qs_pids.numpy(), qs_camids.numpy(), qs_clothes_ids.numpy()
    qd_pids, qd_camids, qd_clothes_ids = qd_pids.numpy(), qd_camids.numpy(), qd_clothes_ids.numpy()
    g_pids, g_camids, g_clothes_ids = g_pids.numpy(), g_camids.numpy(), g_clothes_ids.numpy()

    logger.info("Computing CMC and mAP for the same clothes setting")
    cmc, mAP = evaluate(distmat_same, qs_pids, g_pids, qs_camids, g_camids)
    logger.info("Results ---------------------------------------------------")
    logger.info('top1:{:.1%} top5:{:.1%} top10:{:.1%} top20:{:.1%} mAP:{:.1%}'.format(
        cmc[0], cmc[4], cmc[9], cmc[19], mAP))
    logger.info("-----------------------------------------------------------")

    logger.info("Computing CMC and mAP only for clothes changing")
    cmc, mAP = evaluate(distmat_diff, qd_pids, g_pids, qd_camids, g_camids)
    logger.info("Results ---------------------------------------------------")
    logger.info('top1:{:.1%} top5:{:.1%} top10:{:.1%} top20:{:.1%} mAP:{:.1%}'.format(
        cmc[0], cmc[4], cmc[9], cmc[19], mAP))
    logger.info("-----------------------------------------------------------")
    return cmc[0]