The following 3 code examples, extracted from open-source Python projects, illustrate how to use keras.objectives.mean_squared_error().
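Before the project code, a minimal sketch of the function's basic contract may help: it takes y_true and y_pred tensors and returns the per-sample mean squared error, so the function object can be passed directly to model.compile as a loss. The model below is illustrative only, assuming an older Keras release in which the keras.objectives module still exists (it was renamed keras.losses in Keras 2).

import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.objectives import mean_squared_error

# Passing the function object is equivalent to the string shorthand loss='mse'.
model = Sequential([Dense(1, input_dim=4)])
model.compile(optimizer='sgd', loss=mean_squared_error)

model.fit(np.random.rand(32, 4), np.random.rand(32, 1), epochs=1, verbose=0)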
def print_regression_model_summary(prefix, y_test, y_pred, parmsFromNormalization):
    # mean_squared_error, almost_correct_based_accuracy, calculate_rmsle and the
    # log file handle come from the surrounding project module.
    # Undo the normalization so all metrics are reported on the original scale.
    y_test = (y_test * parmsFromNormalization.std * parmsFromNormalization.sqrtx2) + parmsFromNormalization.mean
    y_pred = (y_pred * parmsFromNormalization.std * parmsFromNormalization.sqrtx2) + parmsFromNormalization.mean
    mse = mean_squared_error(y_test, y_pred)
    error_AC, rmsep, mape, rmse = almost_correct_based_accuracy(y_test, y_pred, 10)
    rmsle = calculate_rmsle(y_test, y_pred)
    summary = "%s AC_errorRate=%.1f RMSEP=%.6f MAPE=%.6f RMSE=%.6f mse=%f rmsle=%.5f" % (
        prefix, error_AC, rmsep, mape, rmse, mse, rmsle)
    print(">> " + summary)
    log.write(summary)
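The page does not show how this helper is called. The snippet below is a hedged, self-contained illustration of just the denormalization step, using a hypothetical NormParams namedtuple in place of the project's parmsFromNormalization object; the forward formula is inferred from the inverse applied in the helper above.

import numpy as np
from collections import namedtuple

# Hypothetical stand-in: the helper only needs .mean, .std and .sqrtx2 attributes.
NormParams = namedtuple('NormParams', ['mean', 'std', 'sqrtx2'])
p = NormParams(mean=120.0, std=15.5, sqrtx2=1.0)

y_raw = np.array([100.0, 125.0, 140.0])
y_norm = (y_raw - p.mean) / (p.std * p.sqrtx2)   # assumed forward normalization
y_back = y_norm * p.std * p.sqrtx2 + p.mean      # the inverse used in the helper
assert np.allclose(y_raw, y_back)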
def gmse_factory(gamma):
    # Returns an MSE objective scaled by a fixed weight gamma
    def gamma_mse(y_true, y_pred):
        return gamma * mean_squared_error(y_true, y_pred)
    return gamma_mse
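A factory like this is the usual way to hand compile() a loss with a baked-in hyperparameter, since Keras losses must keep the two-argument (y_true, y_pred) signature. The usage below is a sketch reusing gmse_factory as defined above; the model, optimizer, and gamma value are illustrative, and mean_squared_error is assumed to be imported from keras.objectives.

from keras.models import Sequential
from keras.layers import Dense
from keras.objectives import mean_squared_error

model = Sequential([Dense(1, input_dim=8)])
# Each factory call produces an independent loss closure with its own gamma,
# so differently weighted models can share one definition.
model.compile(optimizer='adam', loss=gmse_factory(0.5))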
def vae_loss(x, x_decoded_mean):
    # input_dim, z_mean and z_log_var are captured from the enclosing VAE definition
    x_d = Flatten()(x)
    x_dec_d = Flatten()(x_decoded_mean)
    # Reconstruction term: per-feature MSE scaled up by the input dimensionality
    xent_loss = input_dim * objectives.mean_squared_error(x_d, x_dec_d)
    # KL divergence between the approximate posterior and a unit Gaussian prior
    kl_loss = -0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return xent_loss + kl_loss
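input_dim, z_mean, and z_log_var are free variables captured from the enclosing VAE definition, which this page does not include. The sketch below shows one plausible enclosing scope, assuming Keras with the TensorFlow backend; the dense encoder/decoder and all layer sizes are illustrative stand-ins (the original model may well be convolutional, which is why the loss flattens its inputs).

import numpy as np
from keras import objectives
from keras import backend as K
from keras.layers import Input, Dense, Flatten, Lambda, Reshape
from keras.models import Model

img_rows, img_cols, img_chns = 28, 28, 1
input_dim = img_rows * img_cols * img_chns  # scales the reconstruction term
latent_dim = 2

x = Input(shape=(img_rows, img_cols, img_chns))
h = Dense(128, activation='relu')(Flatten()(x))  # placeholder encoder
z_mean = Dense(latent_dim)(h)                    # captured by vae_loss
z_log_var = Dense(latent_dim)(h)                 # captured by vae_loss

def sampling(args):
    # Reparameterization trick: z = mu + sigma * epsilon
    mu, log_var = args
    eps = K.random_normal(shape=K.shape(mu))
    return mu + K.exp(0.5 * log_var) * eps

z = Lambda(sampling)([z_mean, z_log_var])
decoded = Dense(input_dim, activation='sigmoid')(Dense(128, activation='relu')(z))
x_decoded_mean = Reshape((img_rows, img_cols, img_chns))(decoded)

vae = Model(x, x_decoded_mean)
vae.compile(optimizer='rmsprop', loss=vae_loss)  # trained with vae.fit(X, X, ...)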