MXNet metric: custom evaluation and validation functions

InsightFace custom loss:

params = [1.e-10]
# fc7: final classification layer; gt_label: ground-truth labels;
# embedding: feature layer; args.incay: weighting factor
# (all defined elsewhere in the InsightFace training script).
sel = mx.symbol.argmax(data=fc7, axis=1)
sel = (sel == gt_label)                 # mask of correctly classified samples
norm = embedding * embedding
norm = mx.symbol.sum(norm, axis=1)      # squared L2 norm of each embedding
norm = norm + params[0]                 # epsilon to avoid division by zero
feature_incay = sel / norm              # reciprocal-norm penalty, correct samples only
feature_incay = mx.symbol.mean(feature_incay) * args.incay
extra_loss = mx.symbol.MakeLoss(feature_incay)
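In the InsightFace training script this extra_loss is then grouped with the main softmax head so that both terms are optimized together. A rough sketch (reusing fc7, gt_label, and extra_loss from above; the head name is an assumption):

# Main classification head plus the auxiliary feature-incay loss.
softmax = mx.symbol.SoftmaxOutput(data=fc7, label=gt_label, name='softmax')
out = mx.symbol.Group([softmax, extra_loss])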

 

Custom loss function

# -*- coding=utf-8 -*-
import mxnet as mx
import numpy as np
import logging

logging.basicConfig(level=logging.INFO)

x = mx.sym.Variable('data')
y = mx.sym.FullyConnected(data=x, num_hidden=1)
label = mx.sym.Variable('label')
out = mx.sym.Activation(data=y, act_type='sigmoid')
# Negated log-likelihood: MakeLoss minimizes its input.
cross_entropy = -(label * mx.sym.log(out) + (1 - label) * mx.sym.log(1 - out))
loss = mx.sym.MakeLoss(cross_entropy)
# BlockGrad exposes the prediction without letting gradients flow through it.
pred_loss = mx.sym.Group([mx.sym.BlockGrad(y), loss])
ex = pred_loss.simple_bind(mx.cpu(), data=(32, 2))

# test
test_data = mx.nd.array(np.random.random(size=(32, 2)))
test_label = mx.nd.array(np.random.random(size=(32, 1)))
ex.forward(is_train=True, data=test_data, label=test_label)
ex.backward()

print(ex.arg_dict)
# Argument order is (data, weight, bias, label), so grad_arrays[1] and
# grad_arrays[2] belong to the FullyConnected weight and bias.
fc_w = ex.arg_dict['fullyconnected0_weight'].asnumpy()
fc_w_grad = ex.grad_arrays[1].asnumpy()
fc_bias = ex.arg_dict['fullyconnected0_bias'].asnumpy()
fc_bias_grad = ex.grad_arrays[2].asnumpy()
logging.info('fc_weight:{}, fc_weights_grad:{}'.format(fc_w, fc_w_grad))
logging.info('fc_bias:{}, fc_bias_grad:{}'.format(fc_bias, fc_bias_grad))

 

MakeLoss alone yields only the loss, not the predictions. To get both the predictions and the loss, wrap them with mx.sym.Group() and block the prediction's gradient with mx.sym.BlockGrad().
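The grouped executor then exposes both heads through ex.outputs, in the order they were grouped. Continuing from the executor ex bound in the example above:

pred = ex.outputs[0].asnumpy()      # FC output; gradients blocked by BlockGrad
loss_val = ex.outputs[1].asnumpy()  # per-sample cross-entropy from MakeLoss
print(pred.shape, loss_val.mean())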

 

label = mx.sym.Variable('label')
# final: the network's output symbol, defined upstream
out = mx.sym.Activation(data=final, act_type='sigmoid')
# Negated log-likelihood so that MakeLoss minimizes it
ce = -(label * mx.sym.log(out) + (1 - label) * mx.sym.log(1 - out))
weights = mx.sym.Variable('weights')   # per-sample loss weights
loss = mx.sym.MakeLoss(weights * ce, normalization='batch')
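Since weights is a free variable, it is bound and fed exactly like data and label. A minimal sketch, assuming final is a 1-unit FullyConnected over 2-dimensional inputs and batch size 32:

import mxnet as mx

x = mx.sym.Variable('data')
final = mx.sym.FullyConnected(data=x, num_hidden=1)  # assumed definition of `final`
out = mx.sym.Activation(data=final, act_type='sigmoid')
label = mx.sym.Variable('label')
weights = mx.sym.Variable('weights')
ce = -(label * mx.sym.log(out) + (1 - label) * mx.sym.log(1 - out))
loss = mx.sym.MakeLoss(weights * ce, normalization='batch')

ex = loss.simple_bind(mx.cpu(), data=(32, 2), label=(32, 1), weights=(32, 1))
ex.forward(is_train=True,
           data=mx.nd.uniform(shape=(32, 2)),
           label=mx.nd.round(mx.nd.uniform(shape=(32, 1))),
           weights=mx.nd.ones((32, 1)))  # all-ones weights reduce to plain CE
ex.backward()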

 

Custom evaluation metric:

import mxnet as mx

class Siamise_metric(mx.metric.EvalMetric):
    def __init__(self, name='siamise_acc'):
        super(Siamise_metric, self).__init__(name=name)

    def update(self, label, pred):
        preds = pred[0]
        labels = label[0]
        preds_label = preds.asnumpy().ravel()
        labels = labels.asnumpy().ravel()
        # A distance below 0.5 is predicted as a "same" pair (label 1).
        pred_cls = (preds_label < 0.5)
        acc = (pred_cls == labels).sum()
        self.sum_metric += acc
        self.num_inst += len(labels)

class Contrastive_loss(mx.metric.EvalMetric):
    def __init__(self, name='contrastive_loss'):
        super(Contrastive_loss, self).__init__(name=name)

    def update(self, label, pred):
        loss = pred[1].asnumpy()
        self.sum_metric += loss.sum()   # accumulate per-sample losses
        self.num_inst += len(loss)
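Both metrics plug into the standard Module training loop through eval_metric, and mx.metric.CompositeEvalMetric reports them side by side. A minimal sketch, assuming an existing bound Module mod and DataIter train_iter:

metrics = mx.metric.CompositeEvalMetric()
metrics.add(Siamise_metric())
metrics.add(Contrastive_loss())

# mod and train_iter are assumed: an mx.mod.Module and a DataIter
mod.fit(train_iter,
        eval_metric=metrics,
        optimizer='sgd',
        optimizer_params={'learning_rate': 0.01},
        num_epoch=10)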

 
