mlpack
Functions
adaboost_main.cpp File Reference
#include <mlpack/prereqs.hpp>
#include <mlpack/core/util/io.hpp>
#include <mlpack/core/data/normalize_labels.hpp>
#include <mlpack/core/util/mlpack_main.hpp>
#include "adaboost.hpp"
#include "adaboost_model.hpp"
Include dependency graph for adaboost_main.cpp:
This graph shows which files directly or indirectly include this file:

Functions

 BINDING_NAME ("AdaBoost")
 
 BINDING_SHORT_DESC ("An implementation of the AdaBoost.MH (Adaptive Boosting) algorithm for " "classification. This can be used to train an AdaBoost model on labeled " "data or use an existing AdaBoost model to predict the classes of new " "points.")
 
 BINDING_LONG_DESC ("This program implements the AdaBoost (or Adaptive " "Boosting) algorithm. The variant of AdaBoost implemented here is " "AdaBoost.MH. It uses a weak learner, either decision stumps or " "perceptrons, and over many iterations, creates a strong learner that is a " "weighted ensemble of weak learners. It runs these iterations until a " "tolerance value is crossed for change in the value of the weighted " "training error." "\n\n" "For more information about the algorithm, see the paper \"Improved " "Boosting Algorithms Using Confidence-Rated Predictions\" by R.E. Schapire" " and Y. Singer." "\n\n" "This program allows training of an AdaBoost model, and then application of" " that model to a test dataset. To train a model, a dataset must be passed" " with the "+PRINT_PARAM_STRING("training")+" option. Labels can be " "given with the "+PRINT_PARAM_STRING("labels")+" option; if no labels " "are specified, the labels will be assumed to be the last column of the " "input dataset. Alternately, an AdaBoost model may be loaded with the "+PRINT_PARAM_STRING("input_model")+" option." "\n\n" "Once a model is trained or loaded, it may be used to provide class " "predictions for a given test dataset. A test dataset may be specified " "with the "+PRINT_PARAM_STRING("test")+" parameter. The predicted " "classes for each point in the test dataset are output to the "+PRINT_PARAM_STRING("predictions")+" output parameter. The AdaBoost " "model itself is output to the "+PRINT_PARAM_STRING("output_model")+" output parameter." "\n\n" "Note: the following parameter is deprecated and " "will be removed in mlpack 4.0.0: "+PRINT_PARAM_STRING("output")+"." "\n" "Use "+PRINT_PARAM_STRING("predictions")+" instead of "+PRINT_PARAM_STRING("output")+".")
 
 BINDING_EXAMPLE ("For example, to run AdaBoost on an input dataset "+PRINT_DATASET("data")+" with labels "+PRINT_DATASET("labels")+" and perceptrons as the weak learner type, storing the trained model in "+PRINT_MODEL("model")+", one could use the following command: " "\n\n"+PRINT_CALL("adaboost", "training", "data", "labels", "labels", "output_model", "model", "weak_learner", "perceptron")+"\n\n" "Similarly, an already-trained model in "+PRINT_MODEL("model")+" can" " be used to provide class predictions from test data "+PRINT_DATASET("test_data")+" and store the output in "+PRINT_DATASET("predictions")+" with the following command: " "\n\n"+PRINT_CALL("adaboost", "input_model", "model", "test", "test_data", "predictions", "predictions"))
 
 BINDING_SEE_ALSO ("AdaBoost on Wikipedia", "https://en.wikipedia.org/wiki/" "AdaBoost")
 
 BINDING_SEE_ALSO ("Improved boosting algorithms using confidence-rated " "predictions (pdf)", "http://rob.schapire.net/papers/SchapireSi98.pdf")
 
 BINDING_SEE_ALSO ("Perceptron", "#perceptron")
 
 BINDING_SEE_ALSO ("Decision Stump", "#decision_stump")
 
 BINDING_SEE_ALSO ("mlpack::adaboost::AdaBoost C++ class documentation", "@doxygen/classmlpack_1_1adaboost_1_1AdaBoost.html")
 
 PARAM_MATRIX_IN ("training", "Dataset for training AdaBoost.", "t")
 
 PARAM_UROW_IN ("labels", "Labels for the training set.", "l")
 
 PARAM_MATRIX_IN ("test", "Test dataset.", "T")
 
 PARAM_UROW_OUT ("output", "Predicted labels for the test set.", "o")
 
 PARAM_UROW_OUT ("predictions", "Predicted labels for the test set.", "P")
 
 PARAM_MATRIX_OUT ("probabilities", "Predicted class probabilities for each " "point in the test set.", "p")
 
 PARAM_INT_IN ("iterations", "The maximum number of boosting iterations to be run" " (0 will run until convergence.)", "i", 1000)
 
 PARAM_DOUBLE_IN ("tolerance", "The tolerance for change in values of the " "weighted error during training.", "e", 1e-10)
 
 PARAM_STRING_IN ("weak_learner", "The type of weak learner to use: " "'decision_stump', or 'perceptron'.", "w", "decision_stump")
 
 PARAM_MODEL_IN (AdaBoostModel, "input_model", "Input AdaBoost model.", "m")
 
 PARAM_MODEL_OUT (AdaBoostModel, "output_model", "Output trained AdaBoost model.", "M")
 

Detailed Description

Author
Udit Saxena

Implementation of the AdaBoost main program.

@article{Schapire:1999:IBA:337859.337870,
author = {Schapire, Robert E. and Singer, Yoram},
title = {Improved Boosting Algorithms Using Confidence-rated Predictions},
journal = {Machine Learning},
issue_date = {Dec. 1999},
volume = {37},
number = {3},
month = dec,
year = {1999},
issn = {0885-6125},
pages = {297--336},
numpages = {40},
url = {http://dx.doi.org/10.1023/A:1007614523901},
doi = {10.1023/A:1007614523901},
acmid = {337870},
publisher = {Kluwer Academic Publishers},
address = {Hingham, MA, USA},
keywords = {boosting algorithms, decision trees, multiclass classification,
output coding}
}

mlpack is free software; you may redistribute it and/or modify it under the terms of the 3-clause BSD license. You should have received a copy of the 3-clause BSD license along with mlpack. If not, see http://www.opensource.org/licenses/BSD-3-Clause for more information.