We extracted the following 10 code examples from open-source Python projects to illustrate how to use chainer.functions.mean_absolute_error().
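Before the project examples, here is a minimal standalone sketch of the call itself (the toy arrays are made up purely for illustration): F.mean_absolute_error takes two arrays or Variables of the same shape and returns a scalar Variable holding the mean of the elementwise absolute differences.

    import numpy as np
    import chainer.functions as F

    # Hypothetical toy inputs, just to show the call signature.
    x = np.array([[0.5, 1.0], [2.0, 3.0]], dtype=np.float32)
    t = np.array([[0.0, 1.0], [2.5, 2.0]], dtype=np.float32)

    loss = F.mean_absolute_error(x, t)  # scalar Variable: mean of |x - t|
    print(loss.data)                    # 0.5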
def loss_func_rec_l1(x_out, t):
    return F.mean_absolute_error(x_out, t)
def pixel_wise_loss(self, x, y):
    if self.loss_norm == 1:
        return F.mean_absolute_error(x, y)
    elif self.loss_norm == 2:
        return F.mean_squared_error(x, y)
    else:
        raise ValueError('Invalid norm {}'.format(self.loss_norm))
def line_loss(self, x, t, k=3):
    lx = x - F.max_pooling_2d(x, k, 1, 1)
    lt = t - F.max_pooling_2d(t, k, 1, 1)
    return 2 * F.mean_absolute_error(lx, lt)

# 0 for dataset
# 1 for fake
# G_p_rough: output of Generator (paired rough sketch)
# p_line: paired line art
def loss_G(self, real_B, fake_B, fake_D):
    # L1 reconstruction term between the real and generated images
    loss_l1 = F.mean_absolute_error(real_B, fake_B)
    chainer.report({'loss_l1': loss_l1}, self.G)
    # adversarial term: push the discriminator output on fake samples toward 1
    batch_size, _, h, w = fake_D.shape
    loss_D = -F.sum(F.log(fake_D + self.eps)) / (batch_size * h * w)
    chainer.report({'loss_D': loss_D}, self.G)
    loss = loss_D + self.lambd * loss_l1
    chainer.report({'loss': loss}, self.G)
    return loss
def loss_enc(self, enc, x_out, t_out, y_out, lam1=100, lam2=1):
    batchsize, _, w, h = y_out.data.shape
    # weighted L1 reconstruction term plus softplus adversarial term
    loss_rec = lam1 * F.mean_absolute_error(x_out, t_out)
    loss_adv = lam2 * F.sum(F.softplus(-y_out)) / batchsize / w / h
    loss = loss_rec + loss_adv
    chainer.report({'loss': loss}, enc)
    return loss
def loss_dec(self, dec, x_out, t_out, y_out, lam1=100, lam2=1):
    batchsize, _, w, h = y_out.data.shape
    # same weighted L1 + adversarial loss, reported against the decoder
    loss_rec = lam1 * F.mean_absolute_error(x_out, t_out)
    loss_adv = lam2 * F.sum(F.softplus(-y_out)) / batchsize / w / h
    loss = loss_rec + loss_adv
    chainer.report({'loss': loss}, dec)
    return loss
def __call__(self, x):
    # encode, decode, and return the L1 reconstruction error against the input
    h = x
    h = F.leaky_relu(self.c0(h))
    h = F.leaky_relu(self.c1(h))
    h = F.leaky_relu(self.c2(h))
    h = F.leaky_relu(self.c3(h))
    h = F.leaky_relu(self.l4(h))
    h = F.reshape(F.leaky_relu(self.l5(h)), (x.data.shape[0], self.ch, 4, 4))
    h = F.leaky_relu(self.dc3(h))
    h = F.leaky_relu(self.dc2(h))
    h = F.leaky_relu(self.dc1(h))
    h = F.tanh(self.dc0(h))
    return F.mean_absolute_error(h, x)
def __init__(self, model, lossfunc=F.mean_absolute_error):
    super(LossEval, self).__init__()
    self.lossfunc = lossfunc
    with self.init_scope():
        self.model = model
def loss_cnn(self, cnn, x_out, dst, dis_out, lam1=100, lam2=1):
    # weighted L1 reconstruction term plus softplus adversarial term
    loss_rec = lam1 * F.mean_absolute_error(x_out, dst)
    batchsize, _, w, h = dis_out.data.shape
    loss_adv = lam2 * F.sum(F.softplus(-dis_out)) / batchsize / w / h
    loss = loss_rec + loss_adv
    chainer.report({'loss': loss, 'loss_rec': loss_rec, 'loss_adv': loss_adv}, cnn)
    return loss