import numpy as np
import math
from ext_pb2 import Predict
from google.protobuf.any_pb2 import Any
from scipy.spatial.distance import mahalanobis


def get_cosine_similarity(vec1, vec2):
    """Return the cosine similarity between two 1-D feature vectors.

    NOTE(review): yields nan if either vector has zero norm — callers
    appear to pass non-degenerate instance features; confirm upstream.
    """
    return vec1.dot(vec2) / (np.linalg.norm(vec1) * np.linalg.norm(vec2))


def _nms_value_aggregate(classes, values, instance_features, threshold):
    """Greedy class-aware NMS aggregation shared by ENMS and LNMS.

    Repeatedly picks the instance with the largest value, adds that value
    to the running total, then suppresses every remaining instance of the
    same class whose feature cosine similarity to the pick exceeds
    ``threshold``. Returns the accumulated total.
    """
    total = 0.0
    while len(values):
        pick = np.argmax(values)
        c_pick = classes[pick]
        f_pick = instance_features[pick]
        total += values[pick]
        classes = np.delete(classes, pick)
        values = np.delete(values, pick)
        instance_features = np.delete(instance_features, pick, axis=0)
        # Indices of near-duplicate instances of the picked class.
        remove_index = [
            j for j in range(len(values))
            if classes[j] == c_pick
            and get_cosine_similarity(instance_features[j], f_pick) > threshold
        ]
        classes = np.delete(classes, remove_index)
        values = np.delete(values, remove_index)
        instance_features = np.delete(instance_features, remove_index, axis=0)
    return total


def get_entropy_by_enms(classes, scores, instance_features, threshold=0.5):
    """ENMS: total binary entropy of detections after class-aware NMS.

    ``scores`` are per-instance confidences in [0, 1]; the epsilon keeps
    log() finite at 0 and 1.
    """
    entropys = (-scores * np.log(scores + 0.0000001)
                - (1 - scores) * np.log(1 - scores + 0.0000001))
    return _nms_value_aggregate(classes, entropys, instance_features, threshold)


def get_lloss_by_lnms(classes, llosses, instance_features, threshold=0.5):
    """LNMS: total exp(learning loss) of detections after class-aware NMS."""
    return _nms_value_aggregate(classes, np.exp(llosses), instance_features,
                                threshold)


class Filter:
    # Optional hook: called once with the plugin parameters.
    def init(self, params):
        """Load the class-name list and the NG score threshold.

        Fixes: the name file is now closed deterministically (with-block)
        and names are stripped of trailing newlines, which previously
        leaked into ``label_name`` in the serialized result.
        """
        with open(params['class_name_file'], 'r', encoding='utf-8') as f:
            self.class_name = [name.strip() for name in f]
        self.threshold = params['threshold']

    # Required hook: convert raw per-image detections into serialized
    # ``Predict`` messages and annotate ``meta`` with OK/NG verdicts.
    def filter(self, inputs, meta_list):
        """Build one packed ``Any(Predict)`` per image.

        ``inputs`` carries parallel per-image arrays ('dets', 'labels',
        'feature', 'entropy', 'learning_loss'); each ``meta`` dict gains
        'result' ('OK'/'NG') and 'score' (max confidence). Boxes are
        rescaled by meta['scale_factor'] back to original-image coords.
        Returns {'OUTPUT': object ndarray of [serialized bytes]}.
        """
        results = []
        # Hoisted: previously re-converted per detection inside the loop.
        ng_threshold = float(self.threshold)
        for dets, labels, feature, det_features, det_lloss, meta in zip(
                inputs['dets'], inputs['labels'], inputs['feature'],
                inputs['entropy'], inputs['learning_loss'], meta_list):
            pred = Predict()
            if len(labels) == 0:
                # No detections: emit an empty Predict, leave meta untouched.
                result = Any()
                result.Pack(pred, pred.DESCRIPTOR.file.package)
                results.append([result.SerializeToString()])
                continue
            meta['result'] = 'OK'
            meta['score'] = 0
            # NOTE(review): dead per-image entropy/learning-loss diagnostics
            # (and the commented-out meta['attributes'] block consuming them)
            # were removed; restore from history if those attributes return.
            for det, index in zip(dets, labels):
                score = det[4]
                if meta['score'] < score:
                    meta['score'] = float(score)
                if score > ng_threshold:
                    meta['result'] = 'NG'
                pred.bboxes.append(
                    Predict.BBox(
                        x0=det[0] / meta['scale_factor'],
                        y0=det[1] / meta['scale_factor'],
                        x1=det[2] / meta['scale_factor'],
                        y1=det[3] / meta['scale_factor'],
                        score=score,
                        label_name=self.class_name[index],
                        label_id=index))
            result = Any()
            result.Pack(pred, pred.DESCRIPTOR.file.package)
            results.append([result.SerializeToString()])
        return {'OUTPUT': np.asarray(results, dtype=object)}

    # Optional hook: nothing to clean up.
    def finalize(self):
        pass