modelData.py source code

python

Project: rdocChallenge    Author: Elyne
from sklearn.metrics import accuracy_score, mean_absolute_error, precision_recall_fscore_support


def calc_and_append_scores(y_test, y_pred, metrics, featImportance):

    metrics['scores_mae'].append(mean_absolute_error(y_test, y_pred))
    # mae() is not an sklearn function; it appears to be a project-local helper whose
    # second return value is the "official" MAE score for the challenge
    _, score_off = mae(y_test, y_pred)
    metrics['scores_mae_official'].append(score_off)
    prec, rec, fmeasure, _ = precision_recall_fscore_support(y_test, y_pred, average='macro')

    metrics['scores_prec'].append(prec)
    metrics['scores_recall'].append(rec)
    metrics['scores_f1'].append(fmeasure)
    metrics['scores_accuracy'].append(accuracy_score(y_test, y_pred))
    metrics['feature_importance'].append(featImportance)


    # Getting class-individual metrics: per-class TP/FP/TN/FN counts for the four labels (0-3)
    tTP = [0,0,0,0]
    tFP = [0,0,0,0]
    tTN = [0,0,0,0]
    tFN = [0,0,0,0]

    for act, pred in zip(y_test, y_pred):
        if act == pred:
            for i in range(0,4):
                if i == act: #add to true positive
                    tTP[i] += 1
                else: #add to true negative
                    tTN[i] += 1
        else:
            for i in range(0,4):
                if i == act: #add to false negative
                    tFN[i] += 1
                else: #add to false positive
                    tFP[i] += 1

    tpre = [0,0,0,0]
    trec = [0,0,0,0]
    tfm = [0,0,0,0]
    ttp = [0,0,0,0]
    for i in range(0, 4):
        if tTP[i] > 0:
            tpre[i] = tTP[i] / (tTP[i] + tFP[i])
            trec[i] = tTP[i] / (tTP[i] + tFN[i])
        if (trec[i] > 0.) or (tpre[i] > 0.):  # guard against division by zero
            tfm[i] = (2 * (tpre[i] * trec[i])) / (tpre[i] + trec[i])
        ttp[i] = tTP[i]

    # Per-label metrics, to see how well the model performs on each class separately
    metrics['indRec'].append(trec)
    metrics['indPrec'].append(tpre)
    metrics['indFmeasure'].append(tfm)
    metrics['indTP'].append(ttp)
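
A minimal usage sketch follows (all names and values here are hypothetical: a defaultdict(list) stands in for the metrics dict, and a trivial stub replaces the project-local mae() helper just so the snippet runs):

from collections import defaultdict
from sklearn.metrics import mean_absolute_error

def mae(y_test, y_pred):
    # Stand-in for the project-local mae() helper; only the second return value is used above
    err = mean_absolute_error(y_test, y_pred)
    return err, err

metrics = defaultdict(list)
y_test = [0, 1, 2, 3, 1, 0]           # true severity labels (0-3)
y_pred = [0, 1, 1, 3, 2, 0]           # predicted labels
feat_importance = [0.4, 0.35, 0.25]   # e.g. feature importances from a tree-based model

calc_and_append_scores(y_test, y_pred, metrics, feat_importance)

print(metrics['scores_f1'][-1])   # macro F1 for this call
print(metrics['indRec'][-1])      # per-class recall, one value per label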