GestureRecognitionToolkit  Version: 1.0 Revision: 04-03-15
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source C++ machine-learning library for real-time gesture recognition.
AdaBoost.cpp
1 /*
2 GRT MIT License
3 Copyright (c) <2012> <Nicholas Gillian, Media Lab, MIT>
4 
5 Permission is hereby granted, free of charge, to any person obtaining a copy of this software
6 and associated documentation files (the "Software"), to deal in the Software without restriction,
7 including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
9 subject to the following conditions:
10 
11 The above copyright notice and this permission notice shall be included in all copies or substantial
12 portions of the Software.
13 
14 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
15 LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
16 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
17 WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
18 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 */
20 
21 #include "AdaBoost.h"
22 
23 using namespace std;
24 
25 namespace GRT{
26 
//Register the AdaBoost module with the Classifier base class
//This static instance inserts "AdaBoost" into the Classifier factory table so instances can be created by name at runtime
RegisterClassifierModule< AdaBoost > AdaBoost::registerModule("AdaBoost");
29 
30 AdaBoost::AdaBoost(const WeakClassifier &weakClassifier,bool useScaling,bool useNullRejection,double nullRejectionCoeff,UINT numBoostingIterations,UINT predictionMethod)
31 {
32  setWeakClassifier( weakClassifier );
33  this->useScaling = useScaling;
34  this->useNullRejection = useNullRejection;
35  this->nullRejectionCoeff = nullRejectionCoeff;
36  this->numBoostingIterations = numBoostingIterations;
37  this->predictionMethod = predictionMethod;
38  classType = "AdaBoost";
39  classifierType = classType;
40  classifierMode = STANDARD_CLASSIFIER_MODE;
41  debugLog.setProceedingText("[DEBUG AdaBoost]");
42  errorLog.setProceedingText("[ERROR AdaBoost]");
43  trainingLog.setProceedingText("[TRAINING AdaBoost]");
44  warningLog.setProceedingText("[WARNING AdaBoost]");
45 }
46 
47 AdaBoost::AdaBoost(const AdaBoost &rhs){
48  classifierType = "AdaBoost";
49  classifierMode = STANDARD_CLASSIFIER_MODE;
50  debugLog.setProceedingText("[DEBUG AdaBoost]");
51  errorLog.setProceedingText("[ERROR AdaBoost]");
52  trainingLog.setProceedingText("[TRAINING AdaBoost]");
53  warningLog.setProceedingText("[WARNING AdaBoost]");
54  *this = rhs;
55 }
56 
57 AdaBoost::~AdaBoost(void)
58 {
59  //Clean up any weak classifiers
60  clearWeakClassifiers();
61 }
62 
63 AdaBoost& AdaBoost::operator=(const AdaBoost &rhs){
64  if( this != &rhs ){
65  //Clear the current weak classifiers
66  clearWeakClassifiers();
67 
68  this->numBoostingIterations = rhs.numBoostingIterations;
69  this->predictionMethod = rhs.predictionMethod;
70  this->models = rhs.models;
71 
72  if( rhs.weakClassifiers.size() > 0 ){
73  for(UINT i=0; i<rhs.weakClassifiers.size(); i++){
74  WeakClassifier *weakClassiferPtr = rhs.weakClassifiers[i]->createNewInstance();
75  weakClassifiers.push_back( weakClassiferPtr );
76  }
77  }
78 
79  //Clone the classifier variables
80  copyBaseVariables( (Classifier*)&rhs );
81  }
82  return *this;
83 }
84 
85 bool AdaBoost::deepCopyFrom(const Classifier *classifier){
86 
87  if( classifier == NULL ){
88  errorLog << "deepCopyFrom(const Classifier *classifier) - The classifier pointer is NULL!" << endl;
89  return false;
90  }
91 
92  if( this->getClassifierType() == classifier->getClassifierType() ){
93  //Clone the AdaBoost values
94  AdaBoost *ptr = (AdaBoost*)classifier;
95 
96  //Clear the current weak classifiers
97  clearWeakClassifiers();
98 
99  this->numBoostingIterations = ptr->numBoostingIterations;
100  this->predictionMethod = ptr->predictionMethod;
101  this->models = ptr->models;
102 
103  if( ptr->weakClassifiers.size() > 0 ){
104  for(UINT i=0; i<ptr->weakClassifiers.size(); i++){
105  WeakClassifier *weakClassiferPtr = ptr->weakClassifiers[i]->createNewInstance();
106  weakClassifiers.push_back( weakClassiferPtr );
107  }
108  }
109 
110  //Clone the classifier variables
111  return copyBaseVariables( classifier );
112  }
113  return false;
114 }
115 
//Trains one boosted committee per class (one-vs-all). For each class the training data is
//relabelled as POSITIVE/NEGATIVE, then boosting repeatedly trains every weak classifier on the
//weighted samples, keeps the one with the lowest weighted error, and re-weights the samples so
//misclassified ones get more influence in the next round.
//@param trainingData the labelled training set; it is scaled in place if useScaling is true
//@return true if a model was trained for every class, false otherwise
bool AdaBoost::train_(ClassificationData &trainingData){

    //Clear any previous model
    clear();

    if( trainingData.getNumSamples() <= 1 ){
        errorLog << "train_(ClassificationData &trainingData) - There are not enough training samples to train a model! Number of samples: " << trainingData.getNumSamples() << endl;
        return false;
    }

    numInputDimensions = trainingData.getNumDimensions();
    numClasses = trainingData.getNumClasses();
    const UINT M = trainingData.getNumSamples();
    const UINT POSITIVE_LABEL = WEAK_CLASSIFIER_POSITIVE_CLASS_LABEL;
    const UINT NEGATIVE_LABEL = WEAK_CLASSIFIER_NEGATIVE_CLASS_LABEL;
    double alpha = 0;                 //committee weight of the current best weak classifier
    const double beta = 0.001;        //early-stop margin: stop when epsilon is within beta of 0.5 (random guessing)
    double epsilon = 0;               //weighted error of the best weak classifier this round
    TrainingResult trainingResult;

    //K is the number of candidate weak classifiers; at least one must have been set
    const UINT K = (UINT)weakClassifiers.size();
    if( K == 0 ){
        errorLog << "train_(ClassificationData &trainingData) - No weakClassifiers have been set. You need to set at least one weak classifier first." << endl;
        return false;
    }

    classLabels.resize(numClasses);
    models.resize(numClasses);
    ranges = trainingData.getRanges();

    //Scale the training data if needed
    if( useScaling ){
        trainingData.scale(ranges,0,1);
    }

    //Create the weights vector (one weight per training sample)
    VectorDouble weights(M);

    //Create the error matrix (errorMatrix[k][i]==1 when weak classifier k mislabels sample i)
    MatrixDouble errorMatrix(K,M);

    for(UINT classIter=0; classIter<numClasses; classIter++){

        //Get the class label for the current class
        classLabels[classIter] = trainingData.getClassLabels()[classIter];

        //Set the class label of the current model
        models[ classIter ].setClassLabel( classLabels[classIter] );

        //Setup the labels for this class, POSITIVE_LABEL == 1, NEGATIVE_LABEL == 2
        ClassificationData classData;
        classData.setNumDimensions(trainingData.getNumDimensions());
        for(UINT i=0; i<M; i++){
            UINT label = trainingData[i].getClassLabel()==classLabels[classIter] ? POSITIVE_LABEL : NEGATIVE_LABEL;
            VectorDouble trainingSample = trainingData[i].getSample();
            classData.addSample(label,trainingSample);
        }

        //Setup the initial training sample weights (uniform: 1/M each)
        std::fill(weights.begin(),weights.end(),1.0/M);

        //Run the boosting loop
        bool keepBoosting = true;
        UINT t = 0;

        while( keepBoosting ){

            //Pick the classifier from the family of classifiers that minimizes the total error
            UINT bestClassifierIndex = 0;
            double minError = numeric_limits<double>::max();
            for(UINT k=0; k<K; k++){
                //Get the k'th possible classifier
                WeakClassifier *weakLearner = weakClassifiers[k];

                //Train the current classifier on the weighted samples
                if( !weakLearner->train(classData,weights) ){
                    errorLog << "Failed to train weakLearner!" << endl;
                    return false;
                }

                //Compute the weighted error for this clasifier
                double e = 0;
                double positiveLabel = weakLearner->getPositiveClassLabel();
                double numCorrect = 0;
                double numIncorrect = 0;
                for(UINT i=0; i<M; i++){
                    //Only penalize errors
                    double prediction = weakLearner->predict( classData[i].getSample() );

                    if( (prediction == positiveLabel && classData[i].getClassLabel() != POSITIVE_LABEL) || //False positive
                        (prediction != positiveLabel && classData[i].getClassLabel() == POSITIVE_LABEL) ){ //False negative
                        e += weights[i]; //Increase the error proportional to the weight of the example
                        errorMatrix[k][i] = 1; //Flag that there was an error
                        numIncorrect++;
                    }else{
                        errorMatrix[k][i] = 0; //Flag that there was no error
                        numCorrect++;
                    }
                }

                trainingLog << "PositiveClass: " << classLabels[classIter] << " Boosting Iter: " << t << " Classifier: " << k << " WeightedError: " << e << " NumCorrect: " << numCorrect/M << " NumIncorrect: " <<numIncorrect/M << endl;

                //Keep the weak classifier with the smallest weighted error so far
                if( e < minError ){
                    minError = e;
                    bestClassifierIndex = k;
                }

            }

            epsilon = minError;

            //Set alpha, using the M1 weight value, small weights (close to 0) will receive a strong weight in the final classifier
            //NOTE(review): alpha goes to +INF when epsilon==0 (a perfect weak classifier); this is handled below via grt_isinf
            alpha = 0.5 * log( (1.0-epsilon)/epsilon );

            trainingLog << "PositiveClass: " << classLabels[classIter] << " Boosting Iter: " << t << " Best Classifier Index: " << bestClassifierIndex << " MinError: " << minError << " Alpha: " << alpha << endl;

            //Check the three stopping conditions: perfect classifier, near-random classifier, or iteration budget reached
            if( grt_isinf(alpha) ){ keepBoosting = false; trainingLog << "Alpha is INF. Stopping boosting for current class" << endl; }
            if( 0.5 - epsilon <= beta ){ keepBoosting = false; trainingLog << "Epsilon <= Beta. Stopping boosting for current class" << endl; }
            if( ++t >= numBoostingIterations ) keepBoosting = false;

            //Notify any observers of the training progress for this iteration
            trainingResult.setClassificationResult(t, minError, this);
            trainingResults.push_back(trainingResult);
            trainingResultsObserverManager.notifyObservers( trainingResult );

            if( keepBoosting ){

                //Add the best weak classifier to the committee
                models[ classIter ].addClassifierToCommitee( weakClassifiers[bestClassifierIndex], alpha );

                //Update the weights for the next boosting iteration
                double reWeight = (1.0 - epsilon) / epsilon;
                double oldSum = 0;
                double newSum = 0;
                for(UINT i=0; i<M; i++){
                    oldSum += weights[i];
                    //Only update the weights that resulted in an incorrect prediction
                    if( errorMatrix[bestClassifierIndex][i] == 1 ) weights[i] *= reWeight;
                    newSum += weights[i];
                }

                //Normalize all the weights
                //This results to increasing the weights of the samples that were incorrectly labelled
                //While decreasing the weights of the samples that were correctly classified
                reWeight = oldSum/newSum;
                for(UINT i=0; i<M; i++){
                    weights[i] *= reWeight;
                }

            }else{
                trainingLog << "Stopping boosting training at iteration : " << t-1 << " with an error of " << epsilon << endl;
                if( t-1 == 0 ){
                    //Add the best weak classifier to the committee (we have to add it as this is the first iteration)
                    if( grt_isinf(alpha) ){ alpha = 1; } //If alpha is infinite then the first classifier got everything correct
                    models[ classIter ].addClassifierToCommitee( weakClassifiers[bestClassifierIndex], alpha );
                }
            }

        }
    }

    //Normalize the weights of each committee
    for(UINT k=0; k<numClasses; k++){
        models[k].normalizeWeights();
    }

    //Flag that the model has been trained
    trained = true;

    //Setup the data for prediction
    predictedClassLabel = 0;
    maxLikelihood = 0;
    classLikelihoods.resize(numClasses);
    classDistances.resize(numClasses);

    return true;
}
292 
//Runs all per-class boosted committees on the input vector and selects the winning class.
//MAX_POSITIVE_VALUE: only committees with a positive score count as valid predictions;
//MAX_VALUE: all committees count and scores are shifted to be non-negative before normalization.
//@param inputVector the feature vector; scaled in place to [0,1] if useScaling is true
//@return true if the prediction ran, false if the model is untrained or the vector size is wrong
bool AdaBoost::predict_(VectorDouble &inputVector){

    //Reset the prediction state before doing anything else
    predictedClassLabel = 0;
    maxLikelihood = -10000;

    if( !trained ){
        errorLog << "predict_(VectorDouble &inputVector) - AdaBoost Model Not Trained!" << endl;
        return false;
    }

    if( inputVector.size() != numInputDimensions ){
        //NOTE(review): this error message is missing a closing ')' after numInputDimensions
        errorLog << "predict_(VectorDouble &inputVector) - The size of the input vector (" << inputVector.size() << ") does not match the num features in the model (" << numInputDimensions << endl;
        return false;
    }

    //Scale the input to the [0,1] range used during training
    if( useScaling ){
        for(UINT n=0; n<numInputDimensions; n++){
            inputVector[n] = scale(inputVector[n], ranges[n].minValue, ranges[n].maxValue, 0, 1);
        }
    }

    //Make sure the result buffers match the number of classes
    if( classLikelihoods.size() != numClasses ) classLikelihoods.resize(numClasses,0);
    if( classDistances.size() != numClasses ) classDistances.resize(numClasses,0);

    UINT bestClassIndex = 0;
    UINT numPositivePredictions = 0;
    bestDistance = -numeric_limits<double>::max();
    double worstDistance = numeric_limits<double>::max();
    double sum = 0;
    for(UINT k=0; k<numClasses; k++){
        //Score the input against the k'th class committee
        double result = models[k].predict( inputVector );

        switch ( predictionMethod ) {
            case MAX_POSITIVE_VALUE:
                //Only positive committee scores count as a valid prediction for this class
                if( result > 0 ){
                    if( result > bestDistance ){
                        bestDistance = result;
                        bestClassIndex = k;
                    }
                    numPositivePredictions++;
                    classLikelihoods[k] = result;
                }else classLikelihoods[k] = 0;

                classDistances[k] = result;
                sum += classLikelihoods[k];

                break;
            case MAX_VALUE:
                //Track both the best and worst scores; the worst is used below to offset negatives
                if( result > bestDistance ){
                    bestDistance = result;
                    bestClassIndex = k;
                }
                if( result < worstDistance ){
                    worstDistance = result;
                }
                numPositivePredictions++; //In the MAX_VALUE mode we assume all samples are valid
                classLikelihoods[k] = result;
                classDistances[k] = result;

                break;
            default:
                errorLog << "predict_(VectorDouble &inputVector) - Unknown prediction method!" << endl;
                break;
        }
    }

    if( predictionMethod == MAX_VALUE ){
        //Some of the class likelihoods might be negative, so we add the most negative value to each to offset this
        worstDistance = fabs( worstDistance );
        for(UINT k=0; k<numClasses; k++){
            classLikelihoods[k] += worstDistance;
            sum += classLikelihoods[k];
        }
    }

    //Normalize the class likelihoods so they sum to 1
    if( sum > 0 ){
        for(UINT k=0; k<numClasses; k++)
            classLikelihoods[k] /= sum;
    }
    maxLikelihood = classLikelihoods[ bestClassIndex ];

    //If no committee produced a valid prediction, report the null-rejection class label
    if( numPositivePredictions == 0 ){
        predictedClassLabel = GRT_DEFAULT_NULL_CLASS_LABEL;
    }else predictedClassLabel = classLabels[ bestClassIndex ];

    return true;
}
381 
382 bool AdaBoost::recomputeNullRejectionThresholds(){
383 
384  if( trained ){
385  //Todo - need to add null rejection for AdaBoost
386  return false;
387  }
388  return false;
389 }
390 
391 bool AdaBoost::setNullRejectionCoeff(double nullRejectionCoeff){
392 
393  if( nullRejectionCoeff > 0 ){
394  this->nullRejectionCoeff = nullRejectionCoeff;
395  recomputeNullRejectionThresholds();
396  return true;
397  }
398  return false;
399 }
400 
401 bool AdaBoost::saveModelToFile(fstream &file) const{
402 
403  if(!file.is_open())
404  {
405  errorLog <<"saveModelToFile(fstream &file) - The file is not open!" << endl;
406  return false;
407  }
408 
409  //Write the header info
410  file<<"GRT_ADABOOST_MODEL_FILE_V2.0\n";
411 
412  //Write the classifier settings to the file
413  if( !Classifier::saveBaseSettingsToFile(file) ){
414  errorLog <<"saveModelToFile(fstream &file) - Failed to save classifier base settings to file!" << endl;
415  return false;
416  }
417 
418  //Write the AdaBoost settings to the file
419  file << "PredictionMethod: " << predictionMethod << endl;
420 
421  //If the model has been trained then write the model
422  if( trained ){
423  file << "Models: " << endl;
424  for(UINT i=0; i<models.size(); i++){
425  if( !models[i].saveModelToFile( file ) ){
426  errorLog <<"saveModelToFile(fstream &file) - Failed to write model " << i << " to file!" << endl;
427  file.close();
428  return false;
429  }
430  }
431  }
432 
433  return true;
434 }
435 
//Loads an AdaBoost model from an open file stream. Supports the current V2.0 format and
//dispatches V1.0 files to loadLegacyModelFromFile. Clears any existing model first.
//@param file an open fstream positioned at the start of the model data
//@return true if the model was loaded, false on any header/format error
bool AdaBoost::loadModelFromFile(fstream &file){

    //Reset any existing model before loading
    clear();

    if(!file.is_open())
    {
        errorLog << "loadModelFromFile(string filename) - Could not open file to load model!" << endl;
        return false;
    }

    std::string word;
    file >> word;

    //Check to see if we should load a legacy file
    if( word == "GRT_ADABOOST_MODEL_FILE_V1.0" ){
        return loadLegacyModelFromFile( file );
    }

    if( word != "GRT_ADABOOST_MODEL_FILE_V2.0" ){
        errorLog <<"loadModelFromFile(fstream &file) - Failed to read file header!" << endl;
        errorLog << word << endl;
        return false;
    }

    //Load the base settings from the file (this also sets the trained flag and numClasses)
    if( !Classifier::loadBaseSettingsFromFile(file) ){
        errorLog << "loadModelFromFile(string filename) - Failed to load base settings from file!" << endl;
        return false;
    }

    file >> word;
    if( word != "PredictionMethod:" ){
        errorLog <<"loadModelFromFile(fstream &file) - Failed to read PredictionMethod header!" << endl;
        return false;
    }
    file >> predictionMethod;

    //Only trained models contain the per-class boosted models
    if( trained ){
        file >> word;
        if( word != "Models:" ){
            errorLog <<"loadModelFromFile(fstream &file) - Failed to read Models header!" << endl;
            return false;
        }

        //Load the models
        models.resize( numClasses );
        for(UINT i=0; i<models.size(); i++){
            if( !models[i].loadModelFromFile( file ) ){
                errorLog << "loadModelFromFile(fstream &file) - Failed to load model " << i << " from file!" << endl;
                file.close();
                return false;
            }
        }

        //Recompute the null rejection thresholds
        recomputeNullRejectionThresholds();

        //Resize the prediction results to make sure it is setup for realtime prediction
        maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
        bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
        classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
        classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
    }

    return true;
}
502 
503 bool AdaBoost::clear(){
504 
505  //Clear the Classifier variables
506  Classifier::clear();
507 
508  //Clear the AdaBoost model
509  models.clear();
510 
511  return true;
512 }
513 
514 bool AdaBoost::setWeakClassifier(const WeakClassifier &weakClassifer){
515 
516  //Clear any previous weak classifers
517  clearWeakClassifiers();
518 
519  WeakClassifier *weakClassiferPtr = weakClassifer.createNewInstance();
520 
521  weakClassifiers.push_back( weakClassiferPtr );
522 
523  return true;
524 }
525 
526 bool AdaBoost::addWeakClassifier(const WeakClassifier &weakClassifer){
527 
528  WeakClassifier *weakClassiferPtr = weakClassifer.createNewInstance();
529  weakClassifiers.push_back( weakClassiferPtr );
530 
531  return true;
532 }
533 
534 bool AdaBoost::clearWeakClassifiers(){
535 
536  for(UINT i=0; i<weakClassifiers.size(); i++){
537  if( weakClassifiers[i] != NULL ){
538  delete weakClassifiers[i];
539  weakClassifiers[i] = NULL;
540  }
541  }
542  weakClassifiers.clear();
543  return true;
544 }
545 
546 bool AdaBoost::setNumBoostingIterations(UINT numBoostingIterations){
547  if( numBoostingIterations > 0 ){
548  this->numBoostingIterations = numBoostingIterations;
549  return true;
550  }
551  return false;
552 }
553 
554 bool AdaBoost::setPredictionMethod(UINT predictionMethod){
555  if( predictionMethod != MAX_POSITIVE_VALUE && predictionMethod != MAX_VALUE ){
556  return false;
557  }
558  this->predictionMethod = predictionMethod;
559  return true;
560 }
561 
//Prints a human readable summary of the model settings and each per-class committee to stdout.
//If the model is untrained numClasses is 0, so the per-class loop simply prints nothing.
void AdaBoost::printModel(){

    cout <<"AdaBoostModel: \n";
    cout<<"NumFeatures: " << numInputDimensions << endl;
    cout<<"NumClasses: " << numClasses << endl;
    cout <<"UseScaling: " << useScaling << endl;
    cout<<"UseNullRejection: " << useNullRejection << endl;

    //Print each class committee (the printed class index is 1-based)
    for(UINT k=0; k<numClasses; k++){
        cout << "Class: " << k+1 << " ClassLabel: " << classLabels[k] << endl;
        models[k].print();
    }

}
576 
//Loads a model stored in the legacy V1.0 file format. The V1.0 header token has already been
//consumed by loadModelFromFile, so parsing starts at the NumFeatures field.
//@param file an open fstream positioned just after the V1.0 header
//@return true if the legacy model was loaded, false on any header/format error
bool AdaBoost::loadLegacyModelFromFile( fstream &file ){

    string word;

    file >> word;
    if( word != "NumFeatures:" ){
        errorLog <<"loadModelFromFile(fstream &file) - Failed to read NumFeatures header!" << endl;
        return false;
    }
    file >> numInputDimensions;

    file >> word;
    if( word != "NumClasses:" ){
        errorLog <<"loadModelFromFile(fstream &file) - Failed to read NumClasses header!" << endl;
        return false;
    }
    file >> numClasses;

    file >> word;
    if( word != "UseScaling:" ){
        errorLog <<"loadModelFromFile(fstream &file) - Failed to read UseScaling header!" << endl;
        return false;
    }
    file >> useScaling;

    file >> word;
    if( word != "UseNullRejection:" ){
        errorLog <<"loadModelFromFile(fstream &file) - Failed to read UseNullRejection header!" << endl;
        return false;
    }
    file >> useNullRejection;

    //The scaling ranges are only stored when scaling was enabled at save time
    if( useScaling ){
        file >> word;
        if( word != "Ranges:" ){
            errorLog <<"loadModelFromFile(fstream &file) - Failed to read Ranges header!" << endl;
            return false;
        }
        ranges.resize( numInputDimensions );

        //Each range is stored as a min/max pair per input dimension
        for(UINT n=0; n<ranges.size(); n++){
            file >> ranges[n].minValue;
            file >> ranges[n].maxValue;
        }
    }

    file >> word;
    if( word != "Trained:" ){
        errorLog <<"loadModelFromFile(fstream &file) - Failed to read Trained header!" << endl;
        return false;
    }
    file >> trained;

    file >> word;
    if( word != "PredictionMethod:" ){
        errorLog <<"loadModelFromFile(fstream &file) - Failed to read PredictionMethod header!" << endl;
        return false;
    }
    file >> predictionMethod;

    //Only trained models contain the per-class boosted models
    if( trained ){
        file >> word;
        if( word != "Models:" ){
            errorLog <<"loadModelFromFile(fstream &file) - Failed to read Models header!" << endl;
            return false;
        }

        //Load the models
        models.resize( numClasses );
        classLabels.resize( numClasses );
        for(UINT i=0; i<models.size(); i++){
            if( !models[i].loadModelFromFile( file ) ){
                errorLog << "loadModelFromFile(fstream &file) - Failed to load model " << i << " from file!" << endl;
                file.close();
                return false;
            }

            //Set the class label (in the legacy format the label is stored inside each model)
            classLabels[i] = models[i].getClassLabel();
        }
    }

    //Recompute the null rejection thresholds
    recomputeNullRejectionThresholds();

    //Resize the prediction results to make sure it is setup for realtime prediction
    maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
    bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
    classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
    classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);

    return true;
}
670 
671 } //End of namespace GRT
672 
vector< UINT > getClassLabels() const
virtual double predict(const VectorDouble &x)
Definition: AdaBoost.cpp:25
bool setNumDimensions(UINT numDimensions)
This class contains the AdaBoost classifier. AdaBoost (Adaptive Boosting) is a powerful classifier th...
virtual bool train(ClassificationData &trainingData, VectorDouble &weights)
bool scale(const double minTarget, const double maxTarget)
WeakClassifier * createNewInstance() const
virtual void print() const
virtual double getPositiveClassLabel() const
bool setClassificationResult(unsigned int trainingIteration, double accuracy, MLBase *trainer)
vector< MinMax > getRanges() const
string getClassifierType() const
Definition: Classifier.cpp:159
bool addSample(UINT classLabel, const VectorDouble &sample)