GitHub Repository: hukaixuan19970627/yolov5_obb
Path: blob/master/DOTA_devkit/ResultEnsembleNMS_multi_process.py
"""
Merge per-image detection result files and apply polygon NMS on the large-scale
(original) images. The source directory holds the per-class result files
(15 classes for DOTA Task1); for the file format, refer to
"http://captain.whu.edu.cn/DOTAweb/tasks.html".
Configure the source and destination directories via the --scrpath and
--dstpath arguments (see parse_args below).
"""
import os
import numpy as np
import re
import time
import sys
sys.path.insert(0, '..')
import DOTA_devkit.dota_utils as util
import DOTA_devkit.polyiou as polyiou
import pdb
import math
from multiprocessing import Pool
from functools import partial
import shutil
import argparse

# NMS threshold used when merging detections that belong to the same original image
nms_thresh = 0.2

def py_cpu_nms_poly(dets, thresh):
    """Polygon NMS: dets is an (N, 9) array of [x1, y1, x2, y2, x3, y3, x4, y4, score]."""
    scores = dets[:, 8]
    polys = []
    for i in range(len(dets)):
        tm_polygon = polyiou.VectorDouble([dets[i][0], dets[i][1],
                                           dets[i][2], dets[i][3],
                                           dets[i][4], dets[i][5],
                                           dets[i][6], dets[i][7]])
        polys.append(tm_polygon)
    order = scores.argsort()[::-1]

    keep = []
    while order.size > 0:
        ovr = []
        i = order[0]
        keep.append(i)
        # exact polygon IoU of the highest-scoring box against every remaining box
        for j in range(order.size - 1):
            iou = polyiou.iou_poly(polys[i], polys[order[j + 1]])
            ovr.append(iou)
        ovr = np.array(ovr)
        # leftover debug hook: drop into pdb if the first IoU is NaN
        try:
            if math.isnan(ovr[0]):
                pdb.set_trace()
        except:
            pass
        # keep only the boxes whose overlap with the current box is below the threshold
        inds = np.where(ovr <= thresh)[0]
        order = order[inds + 1]

    return keep

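# Illustrative sanity check (a sketch, assuming the polyiou extension is built):
# two heavily overlapping axis-aligned quads plus one far-away quad should reduce
# to two kept indices with the default threshold.
#   dets = np.array([[0, 0, 10, 0, 10, 10, 0, 10, 0.9],
#                    [1, 1, 11, 1, 11, 11, 1, 11, 0.8],
#                    [50, 50, 60, 50, 60, 60, 50, 60, 0.7]])
#   py_cpu_nms_poly(dets, nms_thresh)   # -> indices [0, 2]
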
def py_cpu_nms_poly_fast(dets, thresh):
    """Polygon NMS with a horizontal-bounding-box (HBB) prefilter.

    Exact polygon IoU is only computed for pairs whose axis-aligned bounding
    boxes overlap, which is much faster on dense results.
    """
    obbs = dets[:, 0:-1]
    x1 = np.min(obbs[:, 0::2], axis=1)
    y1 = np.min(obbs[:, 1::2], axis=1)
    x2 = np.max(obbs[:, 0::2], axis=1)
    y2 = np.max(obbs[:, 1::2], axis=1)
    scores = dets[:, 8]
    areas = (x2 - x1 + 1) * (y2 - y1 + 1)

    polys = []
    for i in range(len(dets)):
        tm_polygon = polyiou.VectorDouble([dets[i][0], dets[i][1],
                                           dets[i][2], dets[i][3],
                                           dets[i][4], dets[i][5],
                                           dets[i][6], dets[i][7]])
        polys.append(tm_polygon)
    order = scores.argsort()[::-1]

    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(i)
        # IoU of the enclosing horizontal boxes (cheap prefilter)
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])
        w = np.maximum(0.0, xx2 - xx1)
        h = np.maximum(0.0, yy2 - yy1)
        hbb_inter = w * h
        hbb_ovr = hbb_inter / (areas[i] + areas[order[1:]] - hbb_inter)
        # exact polygon IoU only where the horizontal boxes actually overlap
        h_inds = np.where(hbb_ovr > 0)[0]
        tmp_order = order[h_inds + 1]
        for j in range(tmp_order.size):
            iou = polyiou.iou_poly(polys[i], polys[tmp_order[j]])
            hbb_ovr[h_inds[j]] = iou
        # leftover debug hook: drop into pdb if the first overlap is NaN
        try:
            if math.isnan(hbb_ovr[0]):
                pdb.set_trace()
        except:
            pass
        inds = np.where(hbb_ovr <= thresh)[0]
        order = order[inds + 1]

    return keep

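# Illustrative usage (same input format as py_cpu_nms_poly): when two polygons'
# horizontal bounds do not overlap their polygon IoU is necessarily zero, so the
# prefilter skips those pairs and the kept indices should match the exact version.
#   keep = py_cpu_nms_poly_fast(dets, nms_thresh)
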
def py_cpu_nms(dets, thresh):
    """Pure Python NMS baseline for axis-aligned boxes [x1, y1, x2, y2, score]."""
    x1 = dets[:, 0]
    y1 = dets[:, 1]
    x2 = dets[:, 2]
    y2 = dets[:, 3]
    scores = dets[:, 4]

    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    # indices of the detections sorted by descending score
    order = scores.argsort()[::-1]

    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(i)
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])

        w = np.maximum(0.0, xx2 - xx1 + 1)
        h = np.maximum(0.0, yy2 - yy1 + 1)
        inter = w * h
        ovr = inter / (areas[i] + areas[order[1:]] - inter)

        inds = np.where(ovr <= thresh)[0]
        order = order[inds + 1]

    return keep

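# Illustrative check: boxes (0,0,10,10) and (2,2,12,12) have IoU
# 81 / (121 + 121 - 81) ≈ 0.50 under the +1 pixel convention above, so with
# thresh=0.2 the lower-scoring box is suppressed.
#   py_cpu_nms(np.array([[0, 0, 10, 10, 0.9], [2, 2, 12, 12, 0.8]]), 0.2)  # -> [0]
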
def nmsbynamedict(nameboxdict, nms, thresh):
    """Run the given nms function separately on the detections of each image name."""
    nameboxnmsdict = {x: [] for x in nameboxdict}
    for imgname in nameboxdict:
        keep = nms(np.array(nameboxdict[imgname]), thresh)
        outdets = []
        for index in keep:
            outdets.append(nameboxdict[imgname][index])
        nameboxnmsdict[imgname] = outdets
    return nameboxnmsdict

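# Illustrative structure (names hypothetical): nameboxdict maps an original image
# name to its raw detections, e.g. {'P0001': [[x1, ..., y4, score], ...]}; the
# returned dict has the same keys but keeps only the NMS survivors per image.
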
def poly2origpoly(poly, x, y, rate):
    """Map a polygon from a cropped/rescaled patch back to original-image coordinates.

    Unused in this ensemble variant, which already works on whole-image names
    (see the note in mergesingle).
    """
    origpoly = []
    for i in range(int(len(poly) / 2)):
        tmp_x = float(poly[i * 2] + x) / float(rate)
        tmp_y = float(poly[i * 2 + 1] + y) / float(rate)
        origpoly.append(tmp_x)
        origpoly.append(tmp_y)
    return origpoly

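# Illustrative mapping (numbers are made up): a point (50, 50) detected in a patch
# cropped at offset (x=1024, y=512) from an image scaled by rate=0.5 maps back to
# ((50 + 1024) / 0.5, (50 + 512) / 0.5) = (2148.0, 1124.0) in the original image.
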
def mergesingle(dstpath, nms, fullname):
    """Merge one per-class result file: group detections by image name, run NMS, write out."""
    name = util.custombasename(fullname)
    dstname = os.path.join(dstpath, name + '.txt')
    print(dstname)
    with open(fullname, 'r') as f_in:
        nameboxdict = {}
        lines = f_in.readlines()
        splitlines = [x.strip().split(' ') for x in lines]
        for splitline in splitlines:
            # splitline = [name of the image the object belongs to, confidence, x1, y1, x2, y2, x3, y3, x4, y4]
            oriname = splitline[0]
            # For DOTA-style split names ('name__rate__x___y'), the patch offset and
            # scale could be parsed here and undone via poly2origpoly; this ensemble
            # variant already uses whole-image names, so that step is skipped.
            confidence = splitline[1]
            poly = list(map(float, splitline[2:]))
            det = poly  # 8 polygon coordinates
            det.append(confidence)
            det = list(map(float, det))
            if oriname not in nameboxdict:
                nameboxdict[oriname] = []
            nameboxdict[oriname].append(det)
        nameboxnmsdict = nmsbynamedict(nameboxdict, nms, nms_thresh)
        with open(dstname, 'w') as f_out:
            for imgname in nameboxnmsdict:
                for det in nameboxnmsdict[imgname]:
                    confidence = round(det[-1], 2)
                    bbox = [round(coord, 1) for coord in det[0:-1]]
                    outline = imgname + ' ' + str(confidence) + ' ' + ' '.join(map(str, bbox))
                    f_out.write(outline + '\n')

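# Illustrative line format (one per-class file, whitespace separated), assuming the
# DOTA Task1 convention; the image name and values below are made up:
#   input : P0001 0.994 103.5 22.1 158.9 24.7 155.2 60.3 100.1 57.8
#   output: same layout, but only the detections that survive per-image NMS.
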
def mergebase_parallel(srcpath, dstpath, nms):
    """Merge every class result file under srcpath using a process pool."""
    pool = Pool(16)
    filelist = util.GetFileFromThisRootDir(srcpath)

    mergesingle_fn = partial(mergesingle, dstpath, nms)
    pool.map(mergesingle_fn, filelist)
    # release the worker processes once all files have been written
    pool.close()
    pool.join()

def mergebase(srcpath, dstpath, nms):
    """Merge every class result file under srcpath sequentially."""
    filelist = util.GetFileFromThisRootDir(srcpath)
    for filename in filelist:
        mergesingle(dstpath, nms, filename)

def mergebyrec(srcpath, dstpath):
    """
    srcpath: result files before merge and NMS
    dstpath: result files after merge and NMS
    """
    # srcpath = r'E:\bod-dataset\results\bod-v3_rfcn_2000000'
    # dstpath = r'E:\bod-dataset\results\bod-v3_rfcn_2000000_nms'
    if os.path.exists(dstpath):
        shutil.rmtree(dstpath)  # delete the existing output folder
    os.makedirs(dstpath)

    mergebase(srcpath,
              dstpath,
              py_cpu_nms)

def mergebypoly(srcpath, dstpath):
    """
    srcpath: result files before merge and NMS
    dstpath: result files after merge and NMS
    """
    # srcpath = r'/home/dingjian/evaluation_task1/result/faster-rcnn-59/comp4_test_results'
    # dstpath = r'/home/dingjian/evaluation_task1/result/faster-rcnn-59/testtime'
    if os.path.exists(dstpath):
        shutil.rmtree(dstpath)  # delete the existing output folder
    os.makedirs(dstpath)

    # mergebase(srcpath, dstpath, py_cpu_nms_poly)  # single-process variant
    mergebase_parallel(srcpath,
                       dstpath,
                       py_cpu_nms_poly_fast)

def parse_args():
    parser = argparse.ArgumentParser(description='Merge detection result files and apply polygon NMS')
    parser.add_argument('--scrpath',
                        default='/OrientedRepPoints/tools/parse_pkl/evaluation_results/orientedreppoints_ROIRT_ensemble',
                        help='directory containing the result files to be merged')
    parser.add_argument('--dstpath',
                        default='/OrientedRepPoints/tools/parse_pkl/evaluation_results/orientedreppoints_ROIRT_ensemble_nms',
                        help='output directory for the merged, NMS-filtered results')
    args = parser.parse_args()
    return args

if __name__ == '__main__':
    args = parse_args()

    mergebypoly(srcpath=args.scrpath,
                dstpath=args.dstpath)
    print('Result Merge Done!')
    # mergebyrec()
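
# Illustrative invocation (paths are placeholders):
#   python DOTA_devkit/ResultEnsembleNMS_multi_process.py \
#       --scrpath /path/to/ensemble_results \
#       --dstpath /path/to/ensemble_results_nms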