mlpack
layer_types.hpp
Go to the documentation of this file.
1 
12 #ifndef MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
13 #define MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
14 
15 #include <boost/variant.hpp>
16 
17 // Layer modules.
63 
64 // Convolution modules.
68 
69 // Regularizers.
71 
72 // Loss function modules.
74 
75 namespace mlpack {
76 namespace ann {
77 
// Forward declarations of the simple ANN layer class templates, each
// parameterized only on the input and output data (matrix) types.
78 template<typename InputDataType, typename OutputDataType> class BatchNorm;
79 template<typename InputDataType, typename OutputDataType> class DropConnect;
80 template<typename InputDataType, typename OutputDataType> class Glimpse;
81 template<typename InputDataType, typename OutputDataType> class LayerNorm;
82 template<typename InputDataType, typename OutputDataType> class LSTM;
83 template<typename InputDataType, typename OutputDataType> class GRU;
84 template<typename InputDataType, typename OutputDataType> class FastLSTM;
85 template<typename InputDataType, typename OutputDataType> class VRClassReward;
86 template<typename InputDataType, typename OutputDataType> class Concatenate;
87 template<typename InputDataType, typename OutputDataType> class Padding;
88 template<typename InputDataType, typename OutputDataType> class ReLU6;
89 
// Forward declaration of the Linear (fully-connected) layer; takes an
// additional RegularizerType template parameter.
90 template<typename InputDataType,
91  typename OutputDataType,
92  typename RegularizerType>
93 class Linear;
94 
// Forward declaration of the Radial Basis Function layer; parameterized on
// its Activation type.
95 template<typename InputDataType,
96  typename OutputDataType,
97  typename Activation>
98 class RBF;
99 
100 template<typename InputDataType,
101  typename OutputDataType,
102  typename RegularizerType>
104 
105 template<typename InputDataType,
106  typename OutputDataType>
108 
// Forward declaration of the Linear3D layer; like Linear, it also takes a
// RegularizerType template parameter.
109 template<typename InputDataType,
110  typename OutputDataType,
111  typename RegularizerType>
112 class Linear3D;
113 
114 template<typename InputDataType,
115  typename OutputDataType
116 >
118 
119 template<typename InputDataType,
120  typename OutputDataType
121 >
123 
124 template <typename InputDataType,
125  typename OutputDataType,
126  typename RegularizerType>
128 
129 template<typename InputDataType,
130  typename OutputDataType
131 >
133 
// Forward declaration of the AddMerge module; the variadic CustomLayers pack
// lets user-defined layer types be held inside the merge container.
134 template<typename InputDataType,
135  typename OutputDataType,
136  typename... CustomLayers
137 >
138 class AddMerge;
139 
140 template<typename InputDataType,
141  typename OutputDataType,
142  bool residual,
143  typename... CustomLayers
144 >
146 
// Forward declaration of the Highway layer; variadic CustomLayers allows
// user-defined layer types inside the container.
147 template<typename InputDataType,
148  typename OutputDataType,
149  typename... CustomLayers
150 >
151 class Highway;
152 
// Forward declaration of the Recurrent container layer; variadic CustomLayers
// allows user-defined layer types inside the container.
153 template<typename InputDataType,
154  typename OutputDataType,
155  typename... CustomLayers
156 >
157 class Recurrent;
158 
// Forward declaration of the Concat container layer; variadic CustomLayers
// allows user-defined layer types inside the container.
159 template<typename InputDataType,
160  typename OutputDataType,
161  typename... CustomLayers
162 >
163 class Concat;
164 
// Forward declaration of the ConcatPerformance layer; unlike the other
// containers it is additionally parameterized on an OutputLayerType.
165 template<
166  typename OutputLayerType,
167  typename InputDataType,
168  typename OutputDataType
169 >
170 class ConcatPerformance;
171 
// Forward declaration of the Convolution layer; parameterized on the
// convolution rules used for the forward, backward, and gradient passes.
172 template<
173  typename ForwardConvolutionRule,
174  typename BackwardConvolutionRule,
175  typename GradientConvolutionRule,
176  typename InputDataType,
177  typename OutputDataType
178 >
179 class Convolution;
180 
181 template<
182  typename ForwardConvolutionRule,
183  typename BackwardConvolutionRule,
184  typename GradientConvolutionRule,
185  typename InputDataType,
186  typename OutputDataType
187 >
189 
// Forward declaration of the AtrousConvolution (dilated convolution) layer;
// takes the same three convolution-rule parameters as Convolution.
190 template<
191  typename ForwardConvolutionRule,
192  typename BackwardConvolutionRule,
193  typename GradientConvolutionRule,
194  typename InputDataType,
195  typename OutputDataType
196 >
197 class AtrousConvolution;
198 
199 template<
200  typename InputDataType,
201  typename OutputDataType
202 >
204 
205 template<typename InputDataType,
206  typename OutputDataType,
207  typename... CustomLayers
208 >
210 
211 template <typename InputDataType,
212  typename OutputDataType,
213  typename... CustomLayers
214 >
216 
// Forward declaration of the AdaptiveMaxPooling layer.
217 template <typename InputDataType,
218  typename OutputDataType
219 >
220 class AdaptiveMaxPooling;
221 
// Forward declaration of the AdaptiveMeanPooling layer.
222 template <typename InputDataType,
223  typename OutputDataType
224 >
225 class AdaptiveMeanPooling;
226 
227 using MoreTypes = boost::variant<
251 >;
252 
253 template <typename... CustomLayers>
254 using LayerTypes = boost::variant<
263  arma::mat, arma::mat>*,
275  arma::mat, arma::mat>*,
278  NaiveConvolution<FullConvolution>,
279  NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
308  NaiveConvolution<ValidConvolution>,
309  NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
311  MoreTypes,
312  CustomLayers*...
313 >;
314 
315 } // namespace ann
316 } // namespace mlpack
317 
318 #endif
Implementation of the variance reduced classification reinforcement layer.
Definition: layer_types.hpp:85
Implementation of the Add module class.
Definition: add.hpp:34
Implementation of the AdaptiveMaxPooling layer.
Definition: adaptive_max_pooling.hpp:33
Implementation of the Concatenate module class.
Definition: concatenate.hpp:36
The ISRLU activation function, defined by.
Definition: isrlu.hpp:60
Implementation of the log softmax layer.
Definition: log_softmax.hpp:36
Implementation of the AddMerge module class.
Definition: add_merge.hpp:42
Definition and Implementation of the Nearest Interpolation Layer.
Definition: nearest_interpolation.hpp:34
Linear algebra utility functions, generally performed on matrices or vectors.
Definition: cv.hpp:1
Implementation of the Padding module class.
Definition: layer_types.hpp:87
Declaration of the VirtualBatchNorm layer class.
Definition: layer_types.hpp:117
The FlexibleReLU activation function, defined by.
Definition: flexible_relu.hpp:59
Implementation of the Transposed Convolution class.
Definition: layer_types.hpp:188
Implementation of the reinforce normal layer.
Definition: reinforce_normal.hpp:34
Implementation of the LPPooling.
Definition: lp_pooling.hpp:32
Implementation of the Linear layer class.
Definition: layer_types.hpp:93
The LeakyReLU activation function, defined by.
Definition: leaky_relu.hpp:44
This class implements the Recurrent Model for Visual Attention, using a variety of possible layer imp...
Definition: layer_types.hpp:203
Implementation of the Convolution class.
Definition: convolution.hpp:77
Positional Encoding injects some information about the relative or absolute position of the tokens in...
Definition: positional_encoding.hpp:37
Implementation of the MeanPooling.
Definition: mean_pooling.hpp:33
Implementation of the Reparametrization layer class.
Definition: layer_types.hpp:132
Implementation of the Join module class.
Definition: join.hpp:33
Implementation of the concat performance class.
Definition: concat_performance.hpp:37
Declaration of the WeightNorm layer class.
Definition: layer_types.hpp:215
The Hard Tanh activation function, defined by.
Definition: hard_tanh.hpp:49
The select module selects the specified column from a given input matrix.
Definition: select.hpp:32
Implementation of the negative log likelihood layer.
Definition: negative_log_likelihood.hpp:35
Implementation of the Softmax layer.
Definition: softmax.hpp:38
Multihead Attention allows the model to jointly attend to information from different representation s...
Definition: layer_types.hpp:127
The PReLU activation function, defined by (where alpha is trainable)
Definition: parametric_relu.hpp:45
Implementation of the AdaptiveMeanPooling.
Definition: adaptive_mean_pooling.hpp:34
Implementation of the base layer.
Definition: base_layer.hpp:71
Implementation of the PixelShuffle layer.
Definition: pixel_shuffle.hpp:49
Implementation of the Concat class.
Definition: concat.hpp:43
Implementation of the Highway layer.
Definition: highway.hpp:58
Implementation of the LSTM module class.
Definition: layer_types.hpp:82
Implementation of the Linear3D layer class.
Definition: layer_types.hpp:112
Declaration of the Layer Normalization class.
Definition: layer_norm.hpp:65
The Lookup class stores word embeddings and retrieves them using tokens.
Definition: lookup.hpp:41
Implementation of the NoisyLinear layer class.
Definition: layer_types.hpp:107
Implementation of the subview layer.
Definition: subview.hpp:34
Implementation of the MiniBatchDiscrimination layer.
Definition: layer_types.hpp:122
Implementation of the MultiplyMerge module class.
Definition: layer_types.hpp:209
Implementation of the LinearNoBias class.
Definition: layer_types.hpp:103
A concatenated ReLU has two outputs, one ReLU and one negative ReLU, concatenated together...
Definition: c_relu.hpp:50
Computes the two-dimensional convolution.
Definition: naive_convolution.hpp:35
An implementation of a gru network layer.
Definition: gru.hpp:58
The dropout layer is a regularizer that randomly with probability 'ratio' sets input values to zero a...
Definition: dropout.hpp:53
The glimpse layer returns a retina-like representation (down-scaled cropped images) of increasing sca...
Definition: glimpse.hpp:88
The DropConnect layer is a regularizer that randomly with probability ratio sets the connection value...
Definition: dropconnect.hpp:63
Implementation of the multiply constant layer.
Definition: multiply_constant.hpp:34
Definition and implementation of the Channel Shuffle Layer.
Definition: channel_shuffle.hpp:46
The alpha-dropout layer is a regularizer that randomly with probability 'ratio' sets input values t...
Definition: alpha_dropout.hpp:50
The CELU activation function, defined by.
Definition: celu.hpp:60
Definition: layer_types.hpp:88
Declaration of the Batch Normalization layer class.
Definition: batch_norm.hpp:56
Implementation of the RecurrentLayer class.
Definition: layer_types.hpp:157
Implementation of the Sequential class.
Definition: layer_types.hpp:145
Implementation of the constant layer.
Definition: constant.hpp:34
Implementation of the MaxPooling layer.
Definition: max_pooling.hpp:52
The ELU activation function, defined by.
Definition: elu.hpp:111
Implementation of the Radial Basis Function layer.
Definition: layer_types.hpp:98
Implementation of the SpatialDropout layer.
Definition: spatial_dropout.hpp:48
Definition and Implementation of the Bilinear Interpolation Layer.
Definition: bilinear_interpolation.hpp:39
An implementation of a faster version of the Fast LSTM network layer.
Definition: fast_lstm.hpp:66
Implementation of the Atrous Convolution class.
Definition: atrous_convolution.hpp:52