def auc_score(res_list):
    """Compute and print per-class, micro-averaged, and macro-averaged ROC-AUC.

    Each element of ``res_list`` is expected to expose two array-like
    attributes with classes along the last axis (assumed from usage —
    TODO confirm against the producer of these result objects):

      - ``g_pred``:     predicted scores
      - ``annotation``: ground-truth labels

    If a result's prediction and annotation disagree on the number of
    classes, the wider array is truncated along the last axis so both
    share the smaller class count (preserves the original behavior that
    used ``np.delete`` on the surplus columns).

    Args:
        res_list: Iterable of result objects as described above.

    Returns:
        tuple: ``(class_auc, micro_auc, macro_auc)`` as produced by
        ``sklearn.metrics.roc_auc_score`` (per-class array, then two
        scalars). The original printed these values; they are now also
        returned for programmatic use — callers ignoring the return
        value are unaffected.

    Raises:
        ValueError: If ``res_list`` is empty or the stacked prediction
        and annotation arrays end up with mismatched shapes.
    """
    from sklearn.metrics import roc_auc_score

    pred_parts = []
    anno_parts = []
    for res in res_list:
        g_pred = res.g_pred
        anno = res.annotation
        # Align class counts: keep only the first `n_classes` columns of
        # whichever array is wider (equivalent to the np.delete calls in
        # the original, without building an index range).
        n_classes = min(g_pred.shape[-1], anno.shape[-1])
        g_pred = g_pred[..., :n_classes]
        anno = anno[..., :n_classes]
        # Transpose so per-result samples stack along axis 0.
        pred_parts.append(g_pred.T)
        anno_parts.append(anno.T)

    if not pred_parts:
        raise ValueError('res_list is empty; nothing to score')

    # Single concatenation instead of repeated np.append in the loop,
    # which reallocated and copied the whole accumulator each iteration.
    gp_list = np.concatenate(pred_parts, axis=0)
    anno_list = np.concatenate(anno_parts, axis=0)

    # Explicit check instead of `assert`, which is stripped under -O.
    if gp_list.shape != anno_list.shape:
        raise ValueError(
            'prediction/annotation shape mismatch: {} vs {}'.format(
                gp_list.shape, anno_list.shape))

    class_auc = roc_auc_score(anno_list, gp_list, average=None)
    print('AUC of Classes:')
    print(class_auc)
    all_micro_auc = roc_auc_score(anno_list, gp_list, average='micro')
    print('Total micro AUC: {}'.format(all_micro_auc))
    all_macro_auc = roc_auc_score(anno_list, gp_list, average='macro')
    print('Total macro AUC: {}'.format(all_macro_auc))
    return class_auc, all_micro_auc, all_macro_auc
# (removed: stray blog-page scrape residue — "评论列表" / "文章目录"
#  are page-navigation labels, not code, and would be a syntax error)