mlpack: base_layer.hpp File Reference
#include <mlpack/prereqs.hpp>
#include <mlpack/methods/ann/activation_functions/logistic_function.hpp>
#include <mlpack/methods/ann/activation_functions/identity_function.hpp>
#include <mlpack/methods/ann/activation_functions/rectifier_function.hpp>
#include <mlpack/methods/ann/activation_functions/tanh_function.hpp>
#include <mlpack/methods/ann/activation_functions/softplus_function.hpp>
#include <mlpack/methods/ann/activation_functions/hard_sigmoid_function.hpp>
#include <mlpack/methods/ann/activation_functions/swish_function.hpp>
#include <mlpack/methods/ann/activation_functions/mish_function.hpp>
#include <mlpack/methods/ann/activation_functions/lisht_function.hpp>
#include <mlpack/methods/ann/activation_functions/gelu_function.hpp>
#include <mlpack/methods/ann/activation_functions/elliot_function.hpp>
#include <mlpack/methods/ann/activation_functions/elish_function.hpp>
#include <mlpack/methods/ann/activation_functions/gaussian_function.hpp>
#include <mlpack/methods/ann/activation_functions/hard_swish_function.hpp>
#include <mlpack/methods/ann/activation_functions/tanh_exponential_function.hpp>
#include <mlpack/methods/ann/activation_functions/silu_function.hpp>
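These headers supply the activation functions that the layer aliases below are built from. Each exposes the same static interface: a scalar and a vectorized Fn() overload, plus matching Deriv() overloads. A minimal sketch of that interface, assuming mlpack 3.x (note that many Deriv() overloads take the activation value f(x), not the raw input x):

  #include <mlpack/prereqs.hpp>
  #include <mlpack/methods/ann/activation_functions/logistic_function.hpp>

  int main()
  {
    using mlpack::ann::LogisticFunction;

    // Scalar form: logistic(0) == 0.5.
    const double y = LogisticFunction::Fn(0.0);

    // Vectorized form: applies the function element-wise to an Armadillo matrix.
    arma::mat x(3, 3, arma::fill::randn);
    arma::mat out;
    LogisticFunction::Fn(x, out);

    // The logistic derivative is written in terms of the activation value:
    // f'(x) = f(x) * (1 - f(x)) = y * (1 - y).
    const double dy = LogisticFunction::Deriv(y);

    (void) dy;
    (void) out;
    return 0;
  }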
Classes

  class mlpack::ann::BaseLayer< ActivationFunction, InputDataType, OutputDataType >
    Implementation of the base layer.
Namespaces

  mlpack
    Linear algebra utility functions, generally performed on matrices or vectors.

  mlpack::ann
    Artificial Neural Network.
Typedefs | |
template<class ActivationFunction = LogisticFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::SigmoidLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard Sigmoid-Layer using the logistic activation function. | |
template<class ActivationFunction = IdentityFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::IdentityLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard Identity-Layer using the identity activation function. | |
template<class ActivationFunction = RectifierFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::ReLULayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard rectified linear unit non-linearity layer. | |
template<class ActivationFunction = TanhFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::TanHLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard hyperbolic tangent layer. | |
template<class ActivationFunction = SoftplusFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::SoftPlusLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard Softplus-Layer using the Softplus activation function. | |
template<class ActivationFunction = HardSigmoidFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::HardSigmoidLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard HardSigmoid-Layer using the HardSigmoid activation function. | |
template<class ActivationFunction = SwishFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::SwishFunctionLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard Swish-Layer using the Swish activation function. | |
template<class ActivationFunction = MishFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::MishFunctionLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard Mish-Layer using the Mish activation function. | |
template<class ActivationFunction = LiSHTFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::LiSHTFunctionLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard LiSHT-Layer using the LiSHT activation function. | |
template<class ActivationFunction = GELUFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::GELUFunctionLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard GELU-Layer using the GELU activation function. | |
template<class ActivationFunction = ElliotFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::ElliotFunctionLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard Elliot-Layer using the Elliot activation function. | |
template<class ActivationFunction = ElishFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::ElishFunctionLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard ELiSH-Layer using the ELiSH activation function. | |
template<class ActivationFunction = GaussianFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::GaussianFunctionLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard Gaussian-Layer using the Gaussian activation function. | |
template<class ActivationFunction = HardSwishFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::HardSwishFunctionLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard HardSwish-Layer using the HardSwish activation function. | |
template<class ActivationFunction = TanhExpFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::TanhExpFunctionLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard TanhExp-Layer using the TanhExp activation function. | |
template<class ActivationFunction = SILUFunction, typename InputDataType = arma::mat, typename OutputDataType = arma::mat> | |
using | mlpack::ann::SILUFunctionLayer = BaseLayer< ActivationFunction, InputDataType, OutputDataType > |
Standard SILU-Layer using the SILU activation function. | |
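The usage sketch referenced above: these aliases are meant to be dropped between weighted layers when composing a network. This assumes the mlpack 3.x FFN API; the layer sizes are arbitrary illustration values.

  #include <mlpack/methods/ann/ffn.hpp>
  #include <mlpack/methods/ann/layer/layer.hpp>

  using namespace mlpack::ann;

  int main()
  {
    // FFN<> defaults to NegativeLogLikelihood<> loss and RandomInitialization.
    FFN<> model;
    model.Add<Linear<>>(10, 32);  // 10 inputs -> 32 hidden units.
    model.Add<ReLULayer<>>();     // element-wise rectifier (a BaseLayer alias).
    model.Add<Linear<>>(32, 2);
    model.Add<LogSoftMax<>>();    // pairs with the default NLL loss.
    return 0;
  }

Because InputDataType and OutputDataType default to arma::mat, an alias for an activation without a predefined layer name is a one-liner, e.g. using MyGaussianLayer = BaseLayer<GaussianFunction>; (MyGaussianLayer is a hypothetical name).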
Detailed Description

Definition of the BaseLayer class, which attaches various activation functions to the embedding layer.
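"Attaching" a function here means the layer's Forward pass applies ActivationFunction::Fn element-wise, and its Backward pass scales the incoming gradient by ActivationFunction::Deriv via the chain rule. A simplified sketch of that delegation pattern (not the verbatim mlpack implementation, which also keeps member matrices and accessors driven by the InputDataType/OutputDataType parameters):

  #include <mlpack/prereqs.hpp>

  // Simplified sketch of the delegation pattern; ActivationLayerSketch is a
  // hypothetical name, not an mlpack class.
  template<class ActivationFunction>
  class ActivationLayerSketch
  {
   public:
    // Forward pass: output = f(input), element-wise.
    void Forward(const arma::mat& input, arma::mat& output)
    {
      ActivationFunction::Fn(input, output);
    }

    // Backward pass: g = gy % f'(.), where % is Armadillo's element-wise
    // product. mlpack passes the layer's forward *output* here, since many
    // Deriv() overloads are written in terms of the activation value.
    void Backward(const arma::mat& activation, const arma::mat& gy, arma::mat& g)
    {
      arma::mat derivative;
      ActivationFunction::Deriv(activation, derivative);
      g = gy % derivative;
    }
  };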
mlpack is free software; you may redistribute it and/or modify it under the terms of the 3-clause BSD license. You should have received a copy of the 3-clause BSD license along with mlpack. If not, see http://www.opensource.org/licenses/BSD-3-Clause for more information.