
postprocess.py

import numpy as np
import math
from ext_pb2 import Predict
from google.protobuf.any_pb2 import Any
from scipy.spatial.distance import mahalanobis
# Cosine similarity between two feature vectors
def get_cosine_similarity(vec1, vec2):
    # print(vec1)
    # print(vec2)
    cos_sim = vec1.dot(vec2) / (np.linalg.norm(vec1) * np.linalg.norm(vec2))
    return cos_sim
# ENMS: entropy-based non-maximum suppression.
# Repeatedly pick the detection with the highest binary entropy, add its
# entropy to the image-level total, then suppress remaining detections of the
# same class whose instance features exceed the cosine-similarity threshold.
def get_entropy_by_enms(classes, scores, instance_features, threshold=0.5):
    i_entropy = 0
    entropys = -scores*np.log(scores+0.0000001)-(1-scores)*np.log(1-scores+0.0000001)
    # classes, scores, instance_features, entropys = classes[0], scores[0], instance_features[0], entropys[0]
    while len(entropys):
        pick = np.argmax(entropys)
        c_pick = classes[pick]
        f_pick = instance_features[pick]
        e_pick = entropys[pick]
        classes = np.delete(classes, pick)
        scores = np.delete(scores, pick)
        instance_features = np.delete(instance_features, pick, axis=0)
        entropys = np.delete(entropys, pick)
        i_entropy += e_pick
        remove_index = []
        for j in range(len(entropys)):
            if classes[j] == c_pick and get_cosine_similarity(instance_features[j], f_pick) > threshold:
                remove_index.append(j)
        classes = np.delete(classes, remove_index)
        scores = np.delete(scores, remove_index)
        instance_features = np.delete(instance_features, remove_index, axis=0)
        entropys = np.delete(entropys, remove_index)
    return i_entropy
# LNMS: learning-loss-based non-maximum suppression.
# Same suppression scheme as ENMS, but detections are ranked and aggregated by
# exp(predicted learning loss) instead of entropy.
def get_lloss_by_lnms(classes, llosses, instance_features, threshold=0.5):
    i_lloss = 0
    llosses = np.exp(llosses)
    # classes, llosses, instance_features = classes[0], llosses[0], instance_features[0]
    while len(llosses):
        pick = np.argmax(llosses)
        c_pick = classes[pick]
        l_pick = llosses[pick]
        f_pick = instance_features[pick]
        classes = np.delete(classes, pick)
        llosses = np.delete(llosses, pick)
        instance_features = np.delete(instance_features, pick, axis=0)
        i_lloss += l_pick
        remove_index = []
        for j in range(len(llosses)):
            if classes[j] == c_pick and get_cosine_similarity(instance_features[j], f_pick) > threshold:
                remove_index.append(j)
        classes = np.delete(classes, remove_index)
        llosses = np.delete(llosses, remove_index)
        instance_features = np.delete(instance_features, remove_index, axis=0)
    return i_lloss
class Filter:
    # Optional: provide an init function, called once with the deployment parameters.
    def init(self, params):
        self.class_name = []
        with open(params['class_name_file'], 'r', encoding='utf-8') as f:
            names = f.readlines()
        for name in names:
            self.class_name.append(name.strip())  # strip trailing newlines from label names
        self.threshold = params['threshold']

    # A filter function must be provided; it is called with a batch of model outputs.
    def filter(self, inputs, meta_list):
        results = []
        for dets, labels, feature, det_features, det_lloss, meta in zip(
                inputs['dets'], inputs['labels'], inputs['feature'],
                inputs['entropy'], inputs['learning_loss'], meta_list):
            pred = Predict()
            # No detections: return an empty Predict message for this image.
            if len(labels) == 0:
                result = Any()
                result.Pack(pred, pred.DESCRIPTOR.file.package)
                results.append([result.SerializeToString()])
                continue
            meta['result'] = 'OK'
            meta['score'] = 0
            entropy = 0
            learning_loss = np.exp(det_lloss).sum()
            for det, index in zip(dets, labels):
                # Accumulate the binary entropy of each detection score (det[4]).
                entropy += (-det[4]*math.log(det[4]+0.0000001)-(1-det[4])*math.log(1-det[4]+0.0000001))
                if meta['score'] < det[4]:
                    meta['score'] = float(det[4])
                # Any detection above the threshold marks the image result as NG
                # and is kept as a bounding box, rescaled back to the original image.
                if det[4] > float(self.threshold):
                    meta['result'] = 'NG'
                    pred.bboxes.append(
                        Predict.BBox(
                            x0=det[0]/meta['scale_factor'],
                            y0=det[1]/meta['scale_factor'],
                            x1=det[2]/meta['scale_factor'],
                            y1=det[3]/meta['scale_factor'],
                            score=det[4],
                            label_name=self.class_name[index],
                            label_id=index))
            #scores = dets[:,4]
            #entropy_nms = get_entropy_by_enms(labels, scores, det_features)
            #learning_loss_nms = get_lloss_by_lnms(labels, det_lloss, det_features)
            #meta['attributes']={}
            #meta['attributes']['feature'] = feature.tolist()
            #meta['attributes']['entropy'] = float(entropy)
            #meta['attributes']['learning_loss'] = float(learning_loss)
            #meta['attributes']['entropy_nms'] = float(entropy_nms)
            #meta['attributes']['learning_loss_nms'] = float(learning_loss_nms)
            #meta['attributes']['distance'] = 0
            result = Any()
            result.Pack(pred, pred.DESCRIPTOR.file.package)
            results.append([result.SerializeToString()])
        outputs = {
            'OUTPUT': np.asarray(results, dtype=object)
        }
        return outputs

    # Optional: provide a finalize function, called on shutdown.
    def finalize(self):
        pass
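
A minimal sketch of how the two aggregation helpers above could be exercised on synthetic detections. The array shapes, the random data, and the `from postprocess import ...` line are assumptions for illustration, not part of the repository.

import numpy as np
from postprocess import get_entropy_by_enms, get_lloss_by_lnms

rng = np.random.default_rng(0)
N, D = 6, 128
classes = np.array([0, 0, 1, 1, 2, 2])             # per-detection class ids
scores = rng.uniform(0.05, 0.95, size=N)           # detection confidences in (0, 1)
llosses = rng.uniform(0.0, 1.0, size=N)            # predicted learning-loss values
instance_features = rng.normal(size=(N, D))        # per-detection feature vectors

# Image-level uncertainty scores after class-aware, feature-similarity suppression.
entropy_score = get_entropy_by_enms(classes, scores, instance_features, threshold=0.5)
lloss_score = get_lloss_by_lnms(classes, llosses, instance_features, threshold=0.5)
print(entropy_score, lloss_score)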
