# box_utils.py

from itertools import product

import numpy as np
import torch


def nms_(dets, thresh):
    """Pure-NumPy non-maximum suppression.

    Courtesy of Ross Girshick
    [https://github.com/rbgirshick/py-faster-rcnn/blob/master/lib/nms/py_cpu_nms.py]
    """
    x1 = dets[:, 0]
    y1 = dets[:, 1]
    x2 = dets[:, 2]
    y2 = dets[:, 3]
    scores = dets[:, 4]

    areas = (x2 - x1) * (y2 - y1)
    order = scores.argsort()[::-1]  # indices sorted by descending score

    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(int(i))
        # Intersection of the highest-scoring box with every remaining box.
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])

        w = np.maximum(0.0, xx2 - xx1)
        h = np.maximum(0.0, yy2 - yy1)
        inter = w * h
        ovr = inter / (areas[i] + areas[order[1:]] - inter)

        # Keep only boxes whose IoU with the current box is below the threshold.
        inds = np.where(ovr <= thresh)[0]
        order = order[inds + 1]

    return np.array(keep).astype(int)  # np.int was removed in NumPy 1.24
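

# Minimal, hand-checked sanity sketch for nms_ with three hypothetical
# detections in [x1, y1, x2, y2, score] form. Box 1 overlaps box 0 with
# IoU 0.81 and is suppressed at a 0.5 threshold; box 2 is disjoint and
# survives. The values and this helper are illustrative only.
def _demo_nms_numpy():
    dets = np.array([
        [0.0, 0.0, 10.0, 10.0, 0.9],    # kept: highest score
        [1.0, 1.0, 10.0, 10.0, 0.8],    # suppressed: IoU 0.81 with box 0
        [20.0, 20.0, 30.0, 30.0, 0.7],  # kept: no overlap with box 0
    ])
    keep = nms_(dets, thresh=0.5)
    assert keep.tolist() == [0, 2]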


def decode(loc, priors, variances):
    """Decode locations from predictions using priors to undo
    the encoding we did for offset regression at train time.

    Args:
        loc (tensor): Location predictions for the loc layers.
            Shape: [num_priors, 4].
        priors (tensor): Prior boxes in center-offset form.
            Shape: [num_priors, 4].
        variances (list[float]): Variances of the prior boxes.

    Return:
        Decoded bounding box predictions.
    """
    # Recover centers and sizes, then convert to corner form (x1, y1, x2, y2).
    boxes = torch.cat((
        priors[:, :2] + loc[:, :2] * variances[0] * priors[:, 2:],
        priors[:, 2:] * torch.exp(loc[:, 2:] * variances[1])), 1)
    boxes[:, :2] -= boxes[:, 2:] / 2
    boxes[:, 2:] += boxes[:, :2]
    return boxes
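

# Illustrative sketch: with zero offsets, decode should return each prior
# unchanged, converted from center-size form (cx, cy, w, h) to corner form
# (x1, y1, x2, y2). The prior value is hypothetical; the variances follow
# the defaults used elsewhere in this file.
def _demo_decode():
    priors = torch.tensor([[0.5, 0.5, 0.2, 0.2]])  # center-size form
    loc = torch.zeros_like(priors)                 # no predicted offset
    boxes = decode(loc, priors, variances=[0.1, 0.2])
    # (0.5 +/- 0.1, 0.5 +/- 0.1) -> corners (0.4, 0.4, 0.6, 0.6)
    assert torch.allclose(boxes, torch.tensor([[0.4, 0.4, 0.6, 0.6]]))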


def nms(boxes, scores, overlap=0.5, top_k=200):
    """Apply non-maximum suppression at test time to avoid detecting too many
    overlapping bounding boxes for a given object.

    Args:
        boxes: (tensor) The location preds for the img. Shape: [num_priors, 4].
        scores: (tensor) The class prediction scores for the img.
            Shape: [num_priors].
        overlap: (float) The overlap threshold for suppressing unnecessary boxes.
        top_k: (int) The maximum number of box preds to consider.

    Return:
        The indices of the kept boxes with respect to num_priors.
    """
    keep = scores.new(scores.size(0)).zero_().long()
    if boxes.numel() == 0:
        return keep, 0
    x1 = boxes[:, 0]
    y1 = boxes[:, 1]
    x2 = boxes[:, 2]
    y2 = boxes[:, 3]
    area = torch.mul(x2 - x1, y2 - y1)
    _, idx = scores.sort(0)  # sort in ascending order
    idx = idx[-top_k:]  # indices of the top-k largest vals
    xx1 = boxes.new()
    yy1 = boxes.new()
    xx2 = boxes.new()
    yy2 = boxes.new()

    count = 0
    while idx.numel() > 0:
        i = idx[-1]  # index of the current largest val
        keep[count] = i
        count += 1
        if idx.size(0) == 1:
            break
        idx = idx[:-1]  # remove the kept element from the view
        # Load the bboxes of the next-highest-scoring candidates.
        torch.index_select(x1, 0, idx, out=xx1)
        torch.index_select(y1, 0, idx, out=yy1)
        torch.index_select(x2, 0, idx, out=xx2)
        torch.index_select(y2, 0, idx, out=yy2)
        # Clip each candidate to its intersection with the current box.
        xx1 = torch.clamp(xx1, min=x1[i])
        yy1 = torch.clamp(yy1, min=y1[i])
        xx2 = torch.clamp(xx2, max=x2[i])
        yy2 = torch.clamp(yy2, max=y2[i])
        w = torch.clamp(xx2 - xx1, min=0.0)
        h = torch.clamp(yy2 - yy1, min=0.0)
        inter = w * h
        # IoU = i / (area(a) + area(b) - i)
        rem_areas = torch.index_select(area, 0, idx)  # load remaining areas
        union = (rem_areas - inter) + area[i]
        IoU = inter / union
        # Keep only elements with an IoU <= overlap.
        idx = idx[IoU.le(overlap)]
    return keep, count
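

# Illustrative sketch: the same hypothetical boxes as in the NumPy demo
# above, run through the tensor-based nms. Note that the returned `keep`
# buffer is fixed-size, so only the first `count` entries are meaningful.
def _demo_nms_torch():
    boxes = torch.tensor([
        [0.0, 0.0, 10.0, 10.0],
        [1.0, 1.0, 10.0, 10.0],
        [20.0, 20.0, 30.0, 30.0],
    ])
    scores = torch.tensor([0.9, 0.8, 0.7])
    ids, count = nms(boxes, scores, overlap=0.5, top_k=200)
    assert count == 2 and ids[:count].tolist() == [0, 2]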


class Detect(object):
    """Turn loc and conf network outputs into final detections: decode the
    boxes against the priors, threshold by confidence, and run per-class NMS."""

    def __init__(self, num_classes=2,
                 top_k=750, nms_thresh=0.3, conf_thresh=0.05,
                 variance=[0.1, 0.2], nms_top_k=5000):
        self.num_classes = num_classes
        self.top_k = top_k
        self.nms_thresh = nms_thresh
        self.conf_thresh = conf_thresh
        self.variance = variance
        self.nms_top_k = nms_top_k

    def forward(self, loc_data, conf_data, prior_data):
        num = loc_data.size(0)
        num_priors = prior_data.size(0)

        conf_preds = conf_data.view(num, num_priors, self.num_classes).transpose(2, 1)
        batch_priors = prior_data.view(-1, num_priors, 4).expand(num, num_priors, 4)
        batch_priors = batch_priors.contiguous().view(-1, 4)

        decoded_boxes = decode(loc_data.view(-1, 4), batch_priors, self.variance)
        decoded_boxes = decoded_boxes.view(num, num_priors, 4)

        output = torch.zeros(num, self.num_classes, self.top_k, 5)
        for i in range(num):
            boxes = decoded_boxes[i].clone()
            conf_scores = conf_preds[i].clone()
            for cl in range(1, self.num_classes):  # skip the background class
                c_mask = conf_scores[cl].gt(self.conf_thresh)
                scores = conf_scores[cl][c_mask]
                if scores.numel() == 0:  # nothing above the confidence threshold
                    continue
                l_mask = c_mask.unsqueeze(1).expand_as(boxes)
                boxes_ = boxes[l_mask].view(-1, 4)
                ids, count = nms(boxes_, scores, self.nms_thresh, self.nms_top_k)
                count = count if count < self.top_k else self.top_k
                output[i, cl, :count] = torch.cat(
                    (scores[ids[:count]].unsqueeze(1), boxes_[ids[:count]]), 1)
        return output
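

# Illustrative sketch, shapes only, with random inputs. For a batch of 2
# images, 100 priors, and 2 classes, forward returns one
# [num_classes, top_k, 5] slab per image, each row holding
# [score, x1, y1, x2, y2]. All sizes here are hypothetical.
def _demo_detect():
    num, num_priors, num_classes = 2, 100, 2
    loc = torch.randn(num, num_priors, 4) * 0.1
    conf = torch.softmax(torch.randn(num, num_priors, num_classes), dim=-1)
    priors = torch.rand(num_priors, 4) * 0.5 + 0.25  # center-size in (0, 1)
    detect = Detect(num_classes=num_classes, top_k=750)
    output = detect.forward(loc, conf, priors)
    assert output.shape == (num, num_classes, detect.top_k, 5)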


class PriorBox(object):
    """Generate prior (anchor) boxes in center-size form for every cell of
    each feature map, normalized to the input image size."""

    def __init__(self, input_size, feature_maps,
                 variance=[0.1, 0.2],
                 min_sizes=[16, 32, 64, 128, 256, 512],
                 steps=[4, 8, 16, 32, 64, 128],
                 clip=False):
        super(PriorBox, self).__init__()
        self.imh = input_size[0]
        self.imw = input_size[1]
        self.feature_maps = feature_maps
        self.variance = variance
        self.min_sizes = min_sizes
        self.steps = steps
        self.clip = clip

    def forward(self):
        mean = []
        for k, fmap in enumerate(self.feature_maps):
            feath = fmap[0]
            featw = fmap[1]
            for i, j in product(range(feath), range(featw)):
                f_kw = self.imw / self.steps[k]
                f_kh = self.imh / self.steps[k]
                # Cell center, normalized to [0, 1].
                cx = (j + 0.5) / f_kw
                cy = (i + 0.5) / f_kh
                # Box size, normalized to the image dimensions.
                s_kw = self.min_sizes[k] / self.imw
                s_kh = self.min_sizes[k] / self.imh
                mean += [cx, cy, s_kw, s_kh]
        output = torch.FloatTensor(mean).view(-1, 4)
        if self.clip:
            output.clamp_(max=1, min=0)
        return output
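

# Illustrative sketch: priors for a hypothetical 640x640 input with two
# feature maps. Each cell of each map contributes one prior, so the output
# has sum(h * w) rows of (cx, cy, w, h) in normalized image coordinates.
def _demo_priorbox():
    priorbox = PriorBox(input_size=(640, 640),
                        feature_maps=[(160, 160), (80, 80)])
    priors = priorbox.forward()
    assert priors.shape == (160 * 160 + 80 * 80, 4)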