loss_functions_test.cpp File Reference
#include <mlpack/core.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>
#include <mlpack/methods/ann/loss_functions/huber_loss.hpp>
#include <mlpack/methods/ann/loss_functions/poisson_nll_loss.hpp>
#include <mlpack/methods/ann/loss_functions/kl_divergence.hpp>
#include <mlpack/methods/ann/loss_functions/earth_mover_distance.hpp>
#include <mlpack/methods/ann/loss_functions/mean_squared_error.hpp>
#include <mlpack/methods/ann/loss_functions/sigmoid_cross_entropy_error.hpp>
#include <mlpack/methods/ann/loss_functions/binary_cross_entropy_loss.hpp>
#include <mlpack/methods/ann/loss_functions/reconstruction_loss.hpp>
#include <mlpack/methods/ann/loss_functions/margin_ranking_loss.hpp>
#include <mlpack/methods/ann/loss_functions/mean_squared_logarithmic_error.hpp>
#include <mlpack/methods/ann/loss_functions/mean_bias_error.hpp>
#include <mlpack/methods/ann/loss_functions/dice_loss.hpp>
#include <mlpack/methods/ann/loss_functions/log_cosh_loss.hpp>
#include <mlpack/methods/ann/loss_functions/hinge_embedding_loss.hpp>
#include <mlpack/methods/ann/loss_functions/cosine_embedding_loss.hpp>
#include <mlpack/methods/ann/loss_functions/l1_loss.hpp>
#include <mlpack/methods/ann/loss_functions/soft_margin_loss.hpp>
#include <mlpack/methods/ann/loss_functions/mean_absolute_percentage_error.hpp>
#include <mlpack/methods/ann/loss_functions/triplet_margin_loss.hpp>
#include <mlpack/methods/ann/loss_functions/hinge_loss.hpp>
#include <mlpack/methods/ann/init_rules/nguyen_widrow_init.hpp>
#include <mlpack/methods/ann/ffn.hpp>
#include "catch.hpp"
#include "test_catch_tools.hpp"
#include "ann_test_tools.hpp"
Include dependency graph for loss_functions_test.cpp:

Functions

 TEST_CASE ("HuberLossTest", "[LossFunctionsTest]")
 Simple Huber Loss test.
 
 TEST_CASE ("PoissonNLLLossTest", "[LossFunctionsTest]")
 Poisson Negative Log Likelihood Loss function test.
 
 TEST_CASE ("SimpleKLDivergenceTest", "[LossFunctionsTest]")
 Simple KL Divergence test.
 
 TEST_CASE ("SimpleMeanSquaredLogarithmicErrorTest", "[LossFunctionsTest]")
 
 TEST_CASE ("KLDivergenceMeanTest", "[LossFunctionsTest]")
 Test to check KL Divergence loss function when we take mean.
 
 TEST_CASE ("KLDivergenceNoMeanTest", "[LossFunctionsTest]")
 Test to check KL Divergence loss function when we do not take mean.
 
 TEST_CASE ("SimpleMeanSquaredErrorTest", "[LossFunctionsTest]")
 
 TEST_CASE ("SimpleBinaryCrossEntropyLossTest", "[LossFunctionsTest]")
 
 TEST_CASE ("SimpleSigmoidCrossEntropyErrorTest", "[LossFunctionsTest]")
 Simple test for the Sigmoid Cross Entropy performance function.
 
 TEST_CASE ("SimpleEarthMoverDistanceLayerTest", "[LossFunctionsTest]")
 Simple test for the Earth Mover Distance Layer.
 
 TEST_CASE ("GradientMeanSquaredErrorTest", "[LossFunctionsTest]")
 
 TEST_CASE ("GradientReconstructionLossTest", "[LossFunctionsTest]")
 
 TEST_CASE ("DiceLossTest", "[LossFunctionsTest]")
 
 TEST_CASE ("SimpleMeanBiasErrorTest", "[LossFunctionsTest]")
 
 TEST_CASE ("LogCoshLossTest", "[LossFunctionsTest]")
 Simple test for the Log-Hyperbolic-Cosine loss function.
 
 TEST_CASE ("HingeEmbeddingLossTest", "[LossFunctionsTest]")
 Simple test for the Hinge Embedding loss function.
 
 TEST_CASE ("SimpleL1LossTest", "[LossFunctionsTest]")
 Simple test for the L1 loss function.
 
 TEST_CASE ("CosineEmbeddingLossTest", "[LossFunctionsTest]")
 Simple test for the Cosine Embedding loss function.
 
 TEST_CASE ("MarginRankingLossTest", "[LossFunctionsTest]")
 
 TEST_CASE ("SoftMarginLossTest", "[LossFunctionsTest]")
 Simple test for the Soft Margin loss function.
 
 TEST_CASE ("MeanAbsolutePercentageErrorTest", "[LossFunctionsTest]")
 Simple test for the Mean Absolute Percentage Error function.
 
 TEST_CASE ("TripletMarginLossTest")
 
 TEST_CASE ("HingeLossTest", "[LossFunctionsTest]")
 Simple test for the Hinge loss function.
 

Detailed Description

Author
Dakshit Agrawal
Sourabh Varshney
Atharva Khandait
Saksham Rastogi

Tests for loss functions in mlpack::methods::ann::loss_functions.

mlpack is free software; you may redistribute it and/or modify it under the terms of the 3-clause BSD license. You should have received a copy of the 3-clause BSD license along with mlpack. If not, see http://www.opensource.org/licenses/BSD-3-Clause for more information.
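
The test cases above exercise each loss class either directly or as the objective of a small network. As a hypothetical illustration (not part of this test file), the following minimal sketch shows how one of these losses plugs into a network, assuming the mlpack 3.x-era ANN API implied by the includes above, where the loss function is the first template parameter of FFN:

// Minimal sketch, assuming the mlpack 3.x-era ANN API; names such as the
// layer sizes and data shapes are illustrative only.
#include <mlpack/core.hpp>
#include <mlpack/methods/ann/ffn.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>
#include <mlpack/methods/ann/loss_functions/mean_squared_error.hpp>
#include <mlpack/methods/ann/init_rules/nguyen_widrow_init.hpp>

using namespace mlpack::ann;

int main()
{
  // Toy regression data: 10-dimensional inputs, 1-dimensional targets.
  arma::mat data = arma::randu<arma::mat>(10, 256);
  arma::mat targets = arma::randu<arma::mat>(1, 256);

  // The network is trained to minimize MeanSquaredError; any of the loss
  // classes included by this test file could be substituted here.
  FFN<MeanSquaredError<>, NguyenWidrowInitialization> model;
  model.Add<Linear<>>(10, 8);
  model.Add<SigmoidLayer<>>();
  model.Add<Linear<>>(8, 1);

  model.Train(data, targets);
  return 0;
}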

Function Documentation

◆ TEST_CASE()

TEST_CASE ( "SimpleKLDivergenceTest"  ,
""  [LossFunctionsTest] 
)

Simple KL Divergence test.

The loss should be zero if input = target.
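
As a hypothetical illustration of this property (not taken from the test file), the sketch below calls the loss module's Forward() method directly, assuming the mlpack 3.x-era API where loss layers expose Forward(prediction, target) and KLDivergence accepts a takeMean flag in its constructor (as exercised by KLDivergenceMeanTest and KLDivergenceNoMeanTest above):

// Minimal sketch, assuming the mlpack 3.x-era loss-layer API; values are
// illustrative only.
#include <mlpack/core.hpp>
#include <mlpack/methods/ann/loss_functions/kl_divergence.hpp>
#include <cmath>

using namespace mlpack::ann;

int main()
{
  // Identical prediction and target distributions.
  arma::mat input(10, 1);
  input.fill(0.1);
  arma::mat target = input;

  KLDivergence<> module(true);  // takeMean = true.
  const double loss = module.Forward(input, target);

  // Since input == target, the KL divergence is (numerically) zero.
  return std::abs(loss) < 1e-10 ? 0 : 1;
}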