mlpack
ffn_impl.hpp

/**
 * @file ffn_impl.hpp
 *
 * Definition of the FFN class, which implements feed forward neural networks.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_ANN_FFN_IMPL_HPP
#define MLPACK_METHODS_ANN_FFN_IMPL_HPP

// In case it hasn't been included yet.
#include "ffn.hpp"

#include "visitor/forward_visitor.hpp"
#include "visitor/backward_visitor.hpp"
#include "visitor/deterministic_set_visitor.hpp"
#include "visitor/gradient_set_visitor.hpp"
#include "visitor/gradient_visitor.hpp"
#include "visitor/set_input_height_visitor.hpp"
#include "visitor/set_input_width_visitor.hpp"

#include "util/check_input_shape.hpp"

namespace mlpack {
namespace ann {

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::FFN(
    OutputLayerType outputLayer, InitializationRuleType initializeRule) :
    outputLayer(std::move(outputLayer)),
    initializeRule(std::move(initializeRule)),
    width(0),
    height(0),
    reset(false),
    numFunctions(0),
    deterministic(false)
{
  /* Nothing to do here. */
}
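
// A minimal construction sketch, assuming the standard mlpack layer types
// (Linear<>, LogSoftMax<>) and the default template arguments
// (NegativeLogLikelihood<> output layer, RandomInitialization); `inputSize`
// is a placeholder for the dimensionality of your data:
//
//   FFN<> model;
//   model.Add<Linear<> >(inputSize, 10);
//   model.Add<LogSoftMax<> >();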

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::~FFN()
{
  // Release the heap-allocated layers held in the network.
  std::for_each(network.begin(), network.end(),
      boost::apply_visitor(deleteVisitor));
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
void FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::ResetData(
    arma::mat predictors, arma::mat responses)
{
  numFunctions = responses.n_cols;
  this->predictors = std::move(predictors);
  this->responses = std::move(responses);
  this->deterministic = false;
  ResetDeterministic();

  if (!reset)
    ResetParameters();
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename OptimizerType>
typename std::enable_if<
      HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>
      ::value, void>::type
FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::
WarnMessageMaxIterations(OptimizerType& optimizer, size_t samples) const
{
  if (optimizer.MaxIterations() < samples &&
      optimizer.MaxIterations() != 0)
  {
    Log::Warn << "The optimizer's maximum number of iterations "
        << "is less than the size of the dataset; the "
        << "optimizer will not pass over the entire "
        << "dataset. To fix this, modify the maximum "
        << "number of iterations to be at least equal "
        << "to the number of points of your dataset "
        << "(" << samples << ")." << std::endl;
  }
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename OptimizerType>
typename std::enable_if<
      !HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>
      ::value, void>::type
FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::
WarnMessageMaxIterations(OptimizerType& /* optimizer */, size_t /* samples */)
    const
{
  // Nothing to do: this optimizer type has no MaxIterations() member.
  return;
}
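
// The two WarnMessageMaxIterations() overloads are selected via SFINAE:
// HasMaxIterations<...> detects whether OptimizerType exposes a
// `size_t& MaxIterations()` member, so optimizers without that method fall
// through to the empty overload instead of causing a compile error.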

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename OptimizerType, typename... CallbackTypes>
double FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Train(
    arma::mat predictors,
    arma::mat responses,
    OptimizerType& optimizer,
    CallbackTypes&&... callbacks)
{
  CheckInputShape<std::vector<LayerTypes<CustomLayers...> > >(network,
      predictors.n_rows,
      "FFN<>::Train()");

  ResetData(std::move(predictors), std::move(responses));

  WarnMessageMaxIterations<OptimizerType>(optimizer, this->predictors.n_cols);

  // Train the model.
  Timer::Start("ffn_optimization");
  const double out = optimizer.Optimize(*this, parameter, callbacks...);
  Timer::Stop("ffn_optimization");

  Log::Info << "FFN::Train(): final objective of trained model is " << out
      << "." << std::endl;
  return out;
}
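
// A minimal training sketch, assuming data in `trainData` / `trainLabels`
// and ensmallen's SGD optimizer (step size 0.01, batch size 32, at most
// 10000 iterations); `model` is the network sketched above:
//
//   ens::SGD<> optimizer(0.01, 32, 10000);
//   const double objective = model.Train(trainData, trainLabels, optimizer);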

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename OptimizerType, typename... CallbackTypes>
double FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Train(
    arma::mat predictors,
    arma::mat responses,
    CallbackTypes&&... callbacks)
{
  CheckInputShape<std::vector<LayerTypes<CustomLayers...> > >(network,
      predictors.n_rows,
      "FFN<>::Train()");

  ResetData(std::move(predictors), std::move(responses));

  // Use a default-constructed optimizer when the caller does not supply one.
  OptimizerType optimizer;

  WarnMessageMaxIterations<OptimizerType>(optimizer, this->predictors.n_cols);

  // Train the model.
  Timer::Start("ffn_optimization");
  const double out = optimizer.Optimize(*this, parameter, callbacks...);
  Timer::Stop("ffn_optimization");

  Log::Info << "FFN::Train(): final objective of trained model is " << out
      << "." << std::endl;
  return out;
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename PredictorsType, typename ResponsesType>
void FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Forward(
    const PredictorsType& inputs, ResponsesType& results)
{
  if (parameter.is_empty())
    ResetParameters();

  Forward(inputs);
  results = boost::apply_visitor(outputParameterVisitor, network.back());
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename PredictorsType, typename ResponsesType>
void FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Forward(
    const PredictorsType& inputs,
    ResponsesType& results,
    const size_t begin,
    const size_t end)
{
  // Pass the input through the first layer of the requested range.
  boost::apply_visitor(ForwardVisitor(inputs,
      boost::apply_visitor(outputParameterVisitor, network[begin])),
      network[begin]);

  // Feed each layer's output into the next layer, up to layer `end`.
  for (size_t i = 1; i < end - begin + 1; ++i)
  {
    boost::apply_visitor(ForwardVisitor(boost::apply_visitor(
        outputParameterVisitor, network[begin + i - 1]),
        boost::apply_visitor(outputParameterVisitor, network[begin + i])),
        network[begin + i]);
  }

  results = boost::apply_visitor(outputParameterVisitor, network[end]);
}
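
// A partial-forward sketch: run only layers `begin` through `end` (both
// inclusive, zero-indexed) on some input, e.g. to inspect an intermediate
// representation of the model sketched above:
//
//   arma::mat hidden;
//   model.Forward(input, hidden, 0, 0);  // output of the first layer only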

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename PredictorsType, typename TargetsType, typename GradientsType>
double FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Backward(
    const PredictorsType& inputs,
    const TargetsType& targets,
    GradientsType& gradients)
{
  // Objective: the output layer's loss plus any layer-internal losses.
  double res = outputLayer.Forward(boost::apply_visitor(
      outputParameterVisitor, network.back()), targets);

  for (size_t i = 0; i < network.size(); ++i)
  {
    res += boost::apply_visitor(lossVisitor, network[i]);
  }

  outputLayer.Backward(boost::apply_visitor(outputParameterVisitor,
      network.back()), targets, error);

  gradients = arma::zeros<arma::mat>(parameter.n_rows, parameter.n_cols);

  Backward();
  ResetGradients(gradients);
  Gradient(inputs);

  return res;
}
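
// Forward()/Backward() can be paired for a hand-rolled training step; a
// sketch, assuming `inputs` and `targets` are matching batch matrices:
//
//   arma::mat results, gradients;
//   model.Forward(inputs, results);
//   const double loss = model.Backward(inputs, targets, gradients);
//   // ... apply `gradients` to model.Parameters() with any update rule.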

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
void FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Predict(
    arma::mat predictors, arma::mat& results)
{
  CheckInputShape<std::vector<LayerTypes<CustomLayers...> > >(
      network, predictors.n_rows, "FFN<>::Predict()");

  if (parameter.is_empty())
    ResetParameters();

  if (!deterministic)
  {
    deterministic = true;
    ResetDeterministic();
  }

  // Forward the first point to learn the output dimension, then allocate the
  // results matrix and predict the remaining points column by column.
  arma::mat resultsTemp;
  Forward(arma::mat(predictors.colptr(0), predictors.n_rows, 1, false, true));
  resultsTemp = boost::apply_visitor(outputParameterVisitor,
      network.back()).col(0);

  results = arma::mat(resultsTemp.n_elem, predictors.n_cols);
  results.col(0) = resultsTemp.col(0);

  for (size_t i = 1; i < predictors.n_cols; ++i)
  {
    Forward(arma::mat(predictors.colptr(i), predictors.n_rows, 1, false, true));

    resultsTemp = boost::apply_visitor(outputParameterVisitor,
        network.back());
    results.col(i) = resultsTemp.col(0);
  }
}
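
// Prediction sketch for a trained model; each column of `predictions` holds
// the network output for the matching column of `testData`:
//
//   arma::mat predictions;
//   model.Predict(testData, predictions);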

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename PredictorsType, typename ResponsesType>
double FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Evaluate(
    const PredictorsType& predictors, const ResponsesType& responses)
{
  CheckInputShape<std::vector<LayerTypes<CustomLayers...> > >(
      network, predictors.n_rows, "FFN<>::Evaluate()");

  if (parameter.is_empty())
    ResetParameters();

  if (!deterministic)
  {
    deterministic = true;
    ResetDeterministic();
  }

  Forward(predictors);

  double res = outputLayer.Forward(boost::apply_visitor(
      outputParameterVisitor, network.back()), responses);

  for (size_t i = 0; i < network.size(); ++i)
  {
    res += boost::apply_visitor(lossVisitor, network[i]);
  }

  return res;
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
double FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Evaluate(
    const arma::mat& parameters)
{
  // Sum the objective over every point, one point at a time.
  double res = 0;
  for (size_t i = 0; i < predictors.n_cols; ++i)
    res += Evaluate(parameters, i, 1, true);

  return res;
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
double FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Evaluate(
    const arma::mat& /* parameters */,
    const size_t begin,
    const size_t batchSize,
    const bool deterministic)
{
  if (parameter.is_empty())
    ResetParameters();

  if (deterministic != this->deterministic)
  {
    this->deterministic = deterministic;
    ResetDeterministic();
  }

  Forward(predictors.cols(begin, begin + batchSize - 1));
  double res = outputLayer.Forward(
      boost::apply_visitor(outputParameterVisitor, network.back()),
      responses.cols(begin, begin + batchSize - 1));

  for (size_t i = 0; i < network.size(); ++i)
  {
    res += boost::apply_visitor(lossVisitor, network[i]);
  }

  return res;
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
double FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Evaluate(
    const arma::mat& parameters, const size_t begin, const size_t batchSize)
{
  return Evaluate(parameters, begin, batchSize, true);
}
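
// The Evaluate() / EvaluateWithGradient() / Gradient() overloads taking
// (parameters, begin, batchSize), together with NumFunctions() and
// Shuffle(), form the separable differentiable function interface that
// ensmallen optimizers such as ens::SGD expect; that interface is what lets
// optimizer.Optimize(*this, parameter, ...) work in Train().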

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename GradType>
double FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::
EvaluateWithGradient(const arma::mat& parameters, GradType& gradient)
{
  double res = 0;
  for (size_t i = 0; i < predictors.n_cols; ++i)
    res += EvaluateWithGradient(parameters, i, gradient, 1);

  return res;
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename GradType>
double FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::
EvaluateWithGradient(const arma::mat& /* parameters */,
                     const size_t begin,
                     GradType& gradient,
                     const size_t batchSize)
{
  if (gradient.is_empty())
  {
    if (parameter.is_empty())
      ResetParameters();

    gradient = arma::zeros<arma::mat>(parameter.n_rows, parameter.n_cols);
  }
  else
  {
    gradient.zeros();
  }

  if (this->deterministic)
  {
    this->deterministic = false;
    ResetDeterministic();
  }

  Forward(predictors.cols(begin, begin + batchSize - 1));
  double res = outputLayer.Forward(
      boost::apply_visitor(outputParameterVisitor, network.back()),
      responses.cols(begin, begin + batchSize - 1));

  for (size_t i = 0; i < network.size(); ++i)
  {
    res += boost::apply_visitor(lossVisitor, network[i]);
  }

  outputLayer.Backward(
      boost::apply_visitor(outputParameterVisitor, network.back()),
      responses.cols(begin, begin + batchSize - 1),
      error);

  Backward();
  ResetGradients(gradient);
  Gradient(predictors.cols(begin, begin + batchSize - 1));

  return res;
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
void FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Gradient(
    const arma::mat& parameters,
    const size_t begin,
    arma::mat& gradient,
    const size_t batchSize)
{
  this->EvaluateWithGradient(parameters, begin, gradient, batchSize);
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
void FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Shuffle()
{
  math::ShuffleData(predictors, responses, predictors, responses);
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
void FFN<OutputLayerType, InitializationRuleType,
         CustomLayers...>::ResetParameters()
{
  ResetDeterministic();

  // Reset the network parameter with the given initialization rule.
  NetworkInitialization<InitializationRuleType,
                        CustomLayers...> networkInit(initializeRule);
  networkInit.Initialize(network, parameter);
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
void FFN<OutputLayerType, InitializationRuleType,
         CustomLayers...>::ResetDeterministic()
{
  DeterministicSetVisitor deterministicSetVisitor(deterministic);
  std::for_each(network.begin(), network.end(),
      boost::apply_visitor(deterministicSetVisitor));
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
void FFN<OutputLayerType, InitializationRuleType,
         CustomLayers...>::ResetGradients(arma::mat& gradient)
{
  size_t offset = 0;
  for (size_t i = 0; i < network.size(); ++i)
  {
    offset += boost::apply_visitor(GradientSetVisitor(gradient, offset),
        network[i]);
  }
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename InputType>
void FFN<OutputLayerType, InitializationRuleType,
         CustomLayers...>::Forward(const InputType& input)
{
  boost::apply_visitor(ForwardVisitor(input,
      boost::apply_visitor(outputParameterVisitor, network.front())),
      network.front());

  if (!reset)
  {
    if (boost::apply_visitor(outputWidthVisitor, network.front()) != 0)
    {
      width = boost::apply_visitor(outputWidthVisitor, network.front());
    }

    if (boost::apply_visitor(outputHeightVisitor, network.front()) != 0)
    {
      height = boost::apply_visitor(outputHeightVisitor, network.front());
    }
  }

  for (size_t i = 1; i < network.size(); ++i)
  {
    if (!reset)
    {
      // Set the input width.
      boost::apply_visitor(SetInputWidthVisitor(width), network[i]);

      // Set the input height.
      boost::apply_visitor(SetInputHeightVisitor(height), network[i]);
    }

    boost::apply_visitor(ForwardVisitor(boost::apply_visitor(
        outputParameterVisitor, network[i - 1]),
        boost::apply_visitor(outputParameterVisitor, network[i])), network[i]);

    if (!reset)
    {
      // Get the output width.
      if (boost::apply_visitor(outputWidthVisitor, network[i]) != 0)
      {
        width = boost::apply_visitor(outputWidthVisitor, network[i]);
      }

      // Get the output height.
      if (boost::apply_visitor(outputHeightVisitor, network[i]) != 0)
      {
        height = boost::apply_visitor(outputHeightVisitor, network[i]);
      }
    }
  }

  if (!reset)
    reset = true;
}
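
// On the first pass (reset == false), Forward() also threads each layer's
// output width/height into the following layer so that shape-aware layers
// (e.g. convolutions) can infer their input dimensions; once done, `reset`
// is set and the shape bookkeeping is skipped on later passes.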

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
void FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::Backward()
{
  boost::apply_visitor(BackwardVisitor(boost::apply_visitor(
      outputParameterVisitor, network.back()), error,
      boost::apply_visitor(deltaVisitor, network.back())), network.back());

  for (size_t i = 2; i < network.size(); ++i)
  {
    boost::apply_visitor(BackwardVisitor(boost::apply_visitor(
        outputParameterVisitor, network[network.size() - i]),
        boost::apply_visitor(deltaVisitor, network[network.size() - i + 1]),
        boost::apply_visitor(deltaVisitor, network[network.size() - i])),
        network[network.size() - i]);
  }
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename InputType>
void FFN<OutputLayerType, InitializationRuleType,
         CustomLayers...>::Gradient(const InputType& input)
{
  boost::apply_visitor(GradientVisitor(input,
      boost::apply_visitor(deltaVisitor, network[1])), network.front());

  for (size_t i = 1; i < network.size() - 1; ++i)
  {
    boost::apply_visitor(GradientVisitor(boost::apply_visitor(
        outputParameterVisitor, network[i - 1]),
        boost::apply_visitor(deltaVisitor, network[i + 1])), network[i]);
  }

  boost::apply_visitor(GradientVisitor(boost::apply_visitor(
      outputParameterVisitor, network[network.size() - 2]), error),
      network[network.size() - 1]);
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
template<typename Archive>
void FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::serialize(
    Archive& ar, const uint32_t /* version */)
{
  ar(CEREAL_NVP(parameter));
  ar(CEREAL_NVP(width));
  ar(CEREAL_NVP(height));

  ar(CEREAL_NVP(reset));

  // Be sure to clear other layers before loading.
  if (cereal::is_loading<Archive>())
  {
    std::for_each(network.begin(), network.end(),
        boost::apply_visitor(deleteVisitor));
    network.clear();
  }

  ar(CEREAL_VECTOR_VARIANT_POINTER(network));

  // If we are loading, we need to initialize the weights.
  if (cereal::is_loading<Archive>())
  {
    size_t offset = 0;
    for (size_t i = 0; i < network.size(); ++i)
    {
      offset += boost::apply_visitor(WeightSetVisitor(parameter, offset),
          network[i]);

      boost::apply_visitor(resetVisitor, network[i]);
    }

    deterministic = true;
    ResetDeterministic();
  }
}
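
// Serialization sketch: the usual entry point is mlpack's data::Save() /
// data::Load(), which drive serialize() through cereal:
//
//   mlpack::data::Save("model.bin", "model", model, false);
//   mlpack::data::Load("model.bin", "model", model);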

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
void FFN<OutputLayerType, InitializationRuleType,
         CustomLayers...>::Swap(FFN& network)
{
  std::swap(outputLayer, network.outputLayer);
  std::swap(initializeRule, network.initializeRule);
  std::swap(width, network.width);
  std::swap(height, network.height);
  std::swap(reset, network.reset);
  std::swap(this->network, network.network);
  std::swap(predictors, network.predictors);
  std::swap(responses, network.responses);
  std::swap(parameter, network.parameter);
  std::swap(numFunctions, network.numFunctions);
  std::swap(error, network.error);
  std::swap(deterministic, network.deterministic);
  std::swap(delta, network.delta);
  std::swap(inputParameter, network.inputParameter);
  std::swap(outputParameter, network.outputParameter);
  std::swap(gradient, network.gradient);
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::FFN(
    const FFN& network) :
    outputLayer(network.outputLayer),
    initializeRule(network.initializeRule),
    width(network.width),
    height(network.height),
    reset(network.reset),
    predictors(network.predictors),
    responses(network.responses),
    parameter(network.parameter),
    numFunctions(network.numFunctions),
    error(network.error),
    deterministic(network.deterministic),
    delta(network.delta),
    inputParameter(network.inputParameter),
    outputParameter(network.outputParameter),
    gradient(network.gradient)
{
  // Build new layers according to the source network.
  for (size_t i = 0; i < network.network.size(); ++i)
  {
    this->network.push_back(boost::apply_visitor(copyVisitor,
        network.network[i]));
    boost::apply_visitor(resetVisitor, this->network.back());
  }
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
FFN<OutputLayerType, InitializationRuleType, CustomLayers...>::FFN(
    FFN&& network) :
    outputLayer(std::move(network.outputLayer)),
    initializeRule(std::move(network.initializeRule)),
    width(network.width),
    height(network.height),
    reset(network.reset),
    predictors(std::move(network.predictors)),
    responses(std::move(network.responses)),
    parameter(std::move(network.parameter)),
    numFunctions(network.numFunctions),
    error(std::move(network.error)),
    deterministic(network.deterministic),
    delta(std::move(network.delta)),
    inputParameter(std::move(network.inputParameter)),
    outputParameter(std::move(network.outputParameter)),
    gradient(std::move(network.gradient))
{
  // Take ownership of the source network's layers.
  this->network = std::move(network.network);
}

template<typename OutputLayerType, typename InitializationRuleType,
         typename... CustomLayers>
FFN<OutputLayerType, InitializationRuleType, CustomLayers...>&
FFN<OutputLayerType, InitializationRuleType,
    CustomLayers...>::operator=(FFN network)
{
  Swap(network);
  return *this;
}
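
// Taking the argument by value makes this single operator= serve as both
// copy- and move-assignment (copy-and-swap): the by-value parameter is
// copy- or move-constructed as appropriate, then swapped into *this.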

} // namespace ann
} // namespace mlpack

#endif