GestureRecognitionToolkit  Version: 1.0 Revision: 04-03-15
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source, C++ machine learning library for real-time gesture recognition.
ANBC.cpp
1 /*
2 GRT MIT License
3 Copyright (c) <2012> <Nicholas Gillian, Media Lab, MIT>
4 
5 Permission is hereby granted, free of charge, to any person obtaining a copy of this software
6 and associated documentation files (the "Software"), to deal in the Software without restriction,
7 including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
9 subject to the following conditions:
10 
11 The above copyright notice and this permission notice shall be included in all copies or substantial
12 portions of the Software.
13 
14 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
15 LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
16 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
17 WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
18 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 */
20 
21 #include "ANBC.h"
22 
23 using namespace std;
24 
25 namespace GRT{
26 
//Register the ANBC module with the Classifier base class so instances can be
//created dynamically by the string name "ANBC" through the classifier factory
RegisterClassifierModule< ANBC > ANBC::registerModule("ANBC");
29 
30 ANBC::ANBC(bool useScaling,bool useNullRejection,double nullRejectionCoeff)
31 {
32  this->useScaling = useScaling;
33  this->useNullRejection = useNullRejection;
34  this->nullRejectionCoeff = nullRejectionCoeff;
35  supportsNullRejection = true;
36  weightsDataSet = false;
37  classType = "ANBC";
38  classifierType = classType;
39  classifierMode = STANDARD_CLASSIFIER_MODE;
40  debugLog.setProceedingText("[DEBUG ANBC]");
41  errorLog.setProceedingText("[ERROR ANBC]");
42  trainingLog.setProceedingText("[TRAINING ANBC]");
43  warningLog.setProceedingText("[WARNING ANBC]");
44 }
45 
//Copy constructor: initialises the module identity and log prefixes,
//then delegates the actual member copy to the assignment operator.
ANBC::ANBC(const ANBC &rhs){
    classType = "ANBC";
    classifierType = classType;
    classifierMode = STANDARD_CLASSIFIER_MODE;
    debugLog.setProceedingText("[DEBUG ANBC]");
    errorLog.setProceedingText("[ERROR ANBC]");
    trainingLog.setProceedingText("[TRAINING ANBC]");
    warningLog.setProceedingText("[WARNING ANBC]");
    //Reuse operator= so the copy logic lives in exactly one place
    *this = rhs;
}
56 
//Destructor: nothing to release explicitly, all members clean up
//through their own destructors.
ANBC::~ANBC(void)
{
}
60 
61 ANBC& ANBC::operator=(const ANBC &rhs){
62  if( this != &rhs ){
63  //ANBC variables
64  this->weightsDataSet = rhs.weightsDataSet;
65  this->weightsData = rhs.weightsData;
66  this->models = rhs.models;
67 
68  //Classifier variables
69  copyBaseVariables( (Classifier*)&rhs );
70  }
71  return *this;
72 }
73 
74 bool ANBC::deepCopyFrom(const Classifier *classifier){
75 
76  if( classifier == NULL ) return false;
77 
78  if( this->getClassifierType() == classifier->getClassifierType() ){
79 
80  ANBC *ptr = (ANBC*)classifier;
81  //Clone the ANBC values
82  this->weightsDataSet = ptr->weightsDataSet;
83  this->weightsData = ptr->weightsData;
84  this->models = ptr->models;
85 
86  //Clone the classifier variables
87  return copyBaseVariables( classifier );
88  }
89  return false;
90 }
91 
/**
 Trains the Adaptive Naive Bayes Classifier: one Gaussian model per class.
 If useScaling is enabled the training data is scaled to [0,1] in place.
 If a weights dataset has been set via setWeights, the per-dimension weights
 for each class are taken from it; otherwise every weight defaults to 1.
 @param labelledTrainingData the dataset to train from (modified in place when scaling is enabled)
 @return true if every class model trained successfully, false otherwise
*/
bool ANBC::train_(ClassificationData &labelledTrainingData){
    
    //Clear any previous model
    clear();
    
    const unsigned int M = labelledTrainingData.getNumSamples();
    const unsigned int N = labelledTrainingData.getNumDimensions();
    const unsigned int K = labelledTrainingData.getNumClasses();
    
    //At least one training sample is required
    if( M == 0 ){
        errorLog << "train_(ClassificationData &labelledTrainingData) - Training data has zero samples!" << endl;
        return false;
    }
    
    //If a weights dataset was supplied it must match the training data dimensionality
    if( weightsDataSet ){
        if( weightsData.getNumDimensions() != N ){
            errorLog << "train_(ClassificationData &labelledTrainingData) - The number of dimensions in the weights data (" << weightsData.getNumDimensions() << ") is not equal to the number of dimensions of the training data (" << N << ")" << endl;
            return false;
        }
    }
    
    //Record the model dimensions and the per-dimension ranges (needed for scaling at predict time)
    numInputDimensions = N;
    numClasses = K;
    models.resize(K);
    classLabels.resize(K);
    ranges = labelledTrainingData.getRanges();
    
    //Scale the training data if needed
    if( useScaling ){
        //Scale the training data between 0 and 1
        labelledTrainingData.scale(0, 1);
    }
    
    //Train each of the models
    for(UINT k=0; k<numClasses; k++){
        
        //Get the class label for the kth class
        UINT classLabel = labelledTrainingData.getClassTracker()[k].classLabel;
        
        //Set the kth class label
        classLabels[k] = classLabel;
        
        //Get the weights for this class
        VectorDouble weights(numInputDimensions);
        if( weightsDataSet ){
            //Search the weights dataset for the sample matching this class label
            bool weightsFound = false;
            for(UINT i=0; i<weightsData.getNumSamples(); i++){
                if( weightsData[i].getClassLabel() == classLabel ){
                    weights = weightsData[i].getSample();
                    weightsFound = true;
                    break;
                }
            }
            
            if( !weightsFound ){
                errorLog << "train_(ClassificationData &labelledTrainingData) - Failed to find the weights for class " << classLabel << endl;
                return false;
            }
        }else{
            //If the weights data has not been set then all the weights are 1
            for(UINT j=0; j<numInputDimensions; j++) weights[j] = 1.0;
        }
        
        //Get all the training data for this class
        ClassificationData classData = labelledTrainingData.getClassData(classLabel);
        MatrixDouble data(classData.getNumSamples(),N);
        
        //Copy the training data into a matrix
        for(UINT i=0; i<data.getNumRows(); i++){
            for(UINT j=0; j<data.getNumCols(); j++){
                data[i][j] = classData[i][j];
            }
        }
        
        //Train the model for this class; gamma controls the null rejection threshold
        models[k].gamma = nullRejectionCoeff;
        if( !models[k].train(classLabel,data,weights) ){
            errorLog << "train_(ClassificationData &labelledTrainingData) - Failed to train model for class: " << classLabel << endl;
            
            //Try and work out why the training failed
            if( models[k].N == 0 ){
                errorLog << "train_(ClassificationData &labelledTrainingData) - N == 0!" << endl;
                models.clear();
                return false;
            }
            //A zero standard deviation in any dimension makes the Gaussian degenerate
            for(UINT j=0; j<numInputDimensions; j++){
                if( models[k].sigma[j] == 0 ){
                    errorLog << "train_(ClassificationData &labelledTrainingData) - The standard deviation of column " << j+1 << " is zero! Check the training data" << endl;
                    models.clear();
                    return false;
                }
            }
            //Training failed for some other reason; discard the partial models
            models.clear();
            return false;
        }
        
    }
    
    //Store the null rejection thresholds
    nullRejectionThresholds.resize(numClasses);
    for(UINT k=0; k<numClasses; k++) {
        nullRejectionThresholds[k] = models[k].threshold;
    }
    
    //Flag that the models have been trained
    trained = true;
    return trained;
}
200 
201 bool ANBC::predict_(VectorDouble &inputVector){
202 
203  if( !trained ){
204  errorLog << "predict_(VectorDouble &inputVector) - ANBC Model Not Trained!" << endl;
205  return false;
206  }
207 
208  predictedClassLabel = 0;
209  maxLikelihood = -10000;
210 
211  if( !trained ) return false;
212 
213  if( inputVector.size() != numInputDimensions ){
214  errorLog << "predict_(VectorDouble &inputVector) - The size of the input vector (" << inputVector.size() << ") does not match the num features in the model (" << numInputDimensions << endl;
215  return false;
216  }
217 
218  if( useScaling ){
219  for(UINT n=0; n<numInputDimensions; n++){
220  inputVector[n] = scale(inputVector[n], ranges[n].minValue, ranges[n].maxValue, MIN_SCALE_VALUE, MAX_SCALE_VALUE);
221  }
222  }
223 
224  if( classLikelihoods.size() != numClasses ) classLikelihoods.resize(numClasses,0);
225  if( classDistances.size() != numClasses ) classDistances.resize(numClasses,0);
226 
227  double classLikelihoodsSum = 0;
228  double minDist = -99e+99;
229  for(UINT k=0; k<numClasses; k++){
230  classDistances[k] = models[k].predict( inputVector );
231 
232  //At this point the class likelihoods and class distances are the same thing
233  classLikelihoods[k] = classDistances[k];
234 
235  //If the distances are very far away then they could be -inf or nan so catch this so the sum still works
236  if( grt_isinf(classLikelihoods[k]) || grt_isnan(classLikelihoods[k]) ){
237  classLikelihoods[k] = 0;
238  }else{
239  classLikelihoods[k] = exp( classLikelihoods[k] );
240  classLikelihoodsSum += classLikelihoods[k];
241 
242  //The loglikelihood values are negative so we want the values closest to 0
243  if( classDistances[k] > minDist ){
244  minDist = classDistances[k];
245  predictedClassLabel = k;
246  }
247  }
248  }
249 
250  //If the class likelihoods sum is zero then all classes are -INF
251  if( classLikelihoodsSum == 0 ){
252  predictedClassLabel = GRT_DEFAULT_NULL_CLASS_LABEL;
253  maxLikelihood = 0;
254  return true;
255  }
256 
257  //Normalize the classlikelihoods
258  for(UINT k=0; k<numClasses; k++){
259  classLikelihoods[k] /= classLikelihoodsSum;
260  }
261  maxLikelihood = classLikelihoods[predictedClassLabel];
262 
263  if( useNullRejection ){
264  //Check to see if the best result is greater than the models threshold
265  if( minDist >= models[predictedClassLabel].threshold ) predictedClassLabel = models[predictedClassLabel].classLabel;
266  else predictedClassLabel = GRT_DEFAULT_NULL_CLASS_LABEL;
267  }else predictedClassLabel = models[predictedClassLabel].classLabel;
268 
269  return true;
270 }
271 
272 bool ANBC::recomputeNullRejectionThresholds(){
273 
274  if( trained ){
275  if( nullRejectionThresholds.size() != numClasses )
276  nullRejectionThresholds.resize(numClasses);
277  for(UINT k=0; k<numClasses; k++) {
278  models[k].recomputeThresholdValue(nullRejectionCoeff);
279  nullRejectionThresholds[k] = models[k].threshold;
280  }
281  return true;
282  }
283  return false;
284 }
285 
//Resets the classifier between predictions. ANBC keeps no per-prediction
//state, so there is nothing to do and the call always succeeds.
bool ANBC::reset(){
    return true;
}
289 
290 bool ANBC::clear(){
291 
292  //Clear the Classifier variables
293  Classifier::clear();
294 
295  //Clear the ANBC model
296  weightsData.clear();
297  models.clear();
298 
299  return true;
300 }
301 
/**
 Saves the model to a file in the GRT_ANBC_MODEL_FILE_V2.0 format: a format
 header, the base classifier settings, then one block per class model.
 @param file an open output fstream to write the model to
 @return true if the model was written successfully, false otherwise
*/
bool ANBC::saveModelToFile(fstream &file) const{
    
    if(!file.is_open())
    {
        errorLog <<"saveModelToFile(fstream &file) - The file is not open!" << endl;
        return false;
    }
    
    //Write the header info
    file<<"GRT_ANBC_MODEL_FILE_V2.0\n";
    
    //Write the classifier settings to the file
    if( !Classifier::saveBaseSettingsToFile(file) ){
        errorLog <<"saveModelToFile(fstream &file) - Failed to save classifier base settings to file!" << endl;
        return false;
    }
    
    //The per-class models are only written when a trained model exists
    if( trained ){
        //Write each of the models
        for(UINT k=0; k<numClasses; k++){
            file<<"*************_MODEL_*************\n";
            file<<"Model_ID: "<<k+1<<endl;
            file<<"N: "<<models[k].N<<endl;
            file<<"ClassLabel: "<<models[k].classLabel<<endl;
            file<<"Threshold: "<<models[k].threshold<<endl;
            file<<"Gamma: "<<models[k].gamma<<endl;
            file<<"TrainingMu: "<<models[k].trainingMu<<endl;
            file<<"TrainingSigma: "<<models[k].trainingSigma<<endl;
            
            //Per-dimension mean vector, tab separated on one line
            file<<"Mu:\n";
            for(UINT j=0; j<models[k].N; j++){
                file << "\t" << models[k].mu[j];
            }file<<endl;
            
            //Per-dimension standard deviation vector
            file<<"Sigma:\n";
            for(UINT j=0; j<models[k].N; j++){
                file << "\t" << models[k].sigma[j];
            }file<<endl;
            
            //Per-dimension weights vector
            file<<"Weights:\n";
            for(UINT j=0; j<models[k].N; j++){
                file << "\t" << models[k].weights[j];
            }file<<endl;
        }
    }
    
    return true;
}
350 
/**
 Loads an ANBC model from a file. Detects the format from the header line:
 legacy V1.0 files are delegated to loadLegacyModelFromFile, otherwise the
 V2.0 format is expected (base classifier settings followed by one block per
 class model).
 @param file an open input fstream to read the model from
 @return true if the model was loaded successfully, false otherwise
*/
bool ANBC::loadModelFromFile(fstream &file){
    
    //Invalidate any existing model before loading a new one
    trained = false;
    numInputDimensions = 0;
    numClasses = 0;
    models.clear();
    classLabels.clear();
    
    if(!file.is_open())
    {
        errorLog << "loadModelFromFile(string filename) - Could not open file to load model" << endl;
        return false;
    }
    
    std::string word;
    file >> word;
    
    //Check to see if we should load a legacy file
    if( word == "GRT_ANBC_MODEL_FILE_V1.0" ){
        return loadLegacyModelFromFile( file );
    }
    
    //Find the file type header
    if(word != "GRT_ANBC_MODEL_FILE_V2.0"){
        errorLog << "loadModelFromFile(string filename) - Could not find Model File Header" << endl;
        return false;
    }
    
    //Load the base settings from the file (this sets trained, numClasses,
    //numInputDimensions, ranges, etc.)
    if( !Classifier::loadBaseSettingsFromFile(file) ){
        errorLog << "loadModelFromFile(string filename) - Failed to load base settings from file!" << endl;
        return false;
    }
    
    if( trained ){
        
        //Resize the buffer
        models.resize(numClasses);
        
        //Load each of the K models
        for(UINT k=0; k<numClasses; k++){
            UINT modelID;
            file >> word;
            if(word != "*************_MODEL_*************"){
                errorLog << "loadModelFromFile(string filename) - Could not find header for the "<<k+1<<"th model" << endl;
                return false;
            }
            
            file >> word;
            if(word != "Model_ID:"){
                errorLog << "loadModelFromFile(string filename) - Could not find model ID for the "<<k+1<<"th model" << endl;
                return false;
            }
            file >> modelID;
            
            //Model IDs are written 1-based, so modelID-1 must equal k
            //NOTE(review): this failure path and the N check below report via
            //cout rather than errorLog, unlike every other error in this
            //function - confirm whether that is intentional
            if(modelID-1!=k){
                cout<<"ANBC: Model ID does not match the current class ID for the "<<k+1<<"th model" << endl;
                return false;
            }
            
            file >> word;
            if(word != "N:"){
                cout<<"ANBC: Could not find N for the "<<k+1<<"th model" << endl;
                return false;
            }
            file >> models[k].N;
            
            file >> word;
            if(word != "ClassLabel:"){
                errorLog << "loadModelFromFile(string filename) - Could not find ClassLabel for the "<<k+1<<"th model" << endl;
                return false;
            }
            file >> models[k].classLabel;
            //NOTE(review): assumes Classifier::loadBaseSettingsFromFile sized
            //classLabels to numClasses (it was cleared above) - verify
            classLabels[k] = models[k].classLabel;
            
            file >> word;
            if(word != "Threshold:"){
                errorLog << "loadModelFromFile(string filename) - Could not find the threshold for the "<<k+1<<"th model" << endl;
                return false;
            }
            file >> models[k].threshold;
            
            file >> word;
            if(word != "Gamma:"){
                errorLog << "loadModelFromFile(string filename) - Could not find the gamma parameter for the "<<k+1<<"th model" << endl;
                return false;
            }
            file >> models[k].gamma;
            
            file >> word;
            if(word != "TrainingMu:"){
                errorLog << "loadModelFromFile(string filename) - Could not find the training mu parameter for the "<<k+1<<"th model" << endl;
                return false;
            }
            file >> models[k].trainingMu;
            
            file >> word;
            if(word != "TrainingSigma:"){
                errorLog << "loadModelFromFile(string filename) - Could not find the training sigma parameter for the "<<k+1<<"th model" << endl;
                return false;
            }
            file >> models[k].trainingSigma;
            
            //Resize the buffers
            models[k].mu.resize(numInputDimensions);
            models[k].sigma.resize(numInputDimensions);
            models[k].weights.resize(numInputDimensions);
            
            //Load Mu, Sigma and Weights
            file >> word;
            if(word != "Mu:"){
                errorLog << "loadModelFromFile(string filename) - Could not find the Mu vector for the "<<k+1<<"th model" << endl;
                return false;
            }
            
            //Load Mu
            for(UINT j=0; j<models[k].N; j++){
                double value;
                file >> value;
                models[k].mu[j] = value;
            }
            
            file >> word;
            if(word != "Sigma:"){
                errorLog << "loadModelFromFile(string filename) - Could not find the Sigma vector for the "<<k+1<<"th model" << endl;
                return false;
            }
            
            //Load Sigma
            for(UINT j=0; j<models[k].N; j++){
                double value;
                file >> value;
                models[k].sigma[j] = value;
            }
            
            file >> word;
            if(word != "Weights:"){
                errorLog << "loadModelFromFile(string filename) - Could not find the Weights vector for the "<<k+1<<"th model" << endl;
                return false;
            }
            
            //Load Weights
            for(UINT j=0; j<models[k].N; j++){
                double value;
                file >> value;
                models[k].weights[j] = value;
            }
        }
        
        //Recompute the null rejection thresholds
        recomputeNullRejectionThresholds();
        
        //Resize the prediction results to make sure it is setup for realtime prediction
        maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
        bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
        classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
        classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
    }
    
    return true;
}
512 
513 VectorDouble ANBC::getNullRejectionThresholds() const{
514  if( !trained ) return VectorDouble();
515  return nullRejectionThresholds;
516 }
517 
518 bool ANBC::setNullRejectionCoeff(double nullRejectionCoeff){
519 
520  if( nullRejectionCoeff > 0 ){
521  this->nullRejectionCoeff = nullRejectionCoeff;
522  recomputeNullRejectionThresholds();
523  return true;
524  }
525  return false;
526 }
527 
528 bool ANBC::setWeights(const ClassificationData &weightsData){
529 
530  if( weightsData.getNumSamples() > 0 ){
531  weightsDataSet = true;
532  this->weightsData = weightsData;
533  return true;
534  }
535  return false;
536 }
537 
/**
 Loads a model stored in the legacy GRT_ANBC_MODEL_FILE_V1.0 format. Unlike
 the V2.0 loader, this format carries its own NumFeatures/NumClasses/
 UseScaling/UseNullRejection fields and a footer line after each model block.
 The model is flagged as trained once every block has been read.
 @param file an open input fstream positioned just after the V1.0 header line
 @return true if the model was loaded successfully, false otherwise
*/
bool ANBC::loadLegacyModelFromFile( fstream &file ){
    
    string word;
    
    file >> word;
    if(word != "NumFeatures:"){
        errorLog << "loadANBCModelFromFile(string filename) - Could not find NumFeatures " << endl;
        return false;
    }
    file >> numInputDimensions;
    
    file >> word;
    if(word != "NumClasses:"){
        errorLog << "loadANBCModelFromFile(string filename) - Could not find NumClasses" << endl;
        return false;
    }
    file >> numClasses;
    
    file >> word;
    if(word != "UseScaling:"){
        errorLog << "loadANBCModelFromFile(string filename) - Could not find UseScaling" << endl;
        return false;
    }
    file >> useScaling;
    
    file >> word;
    if(word != "UseNullRejection:"){
        errorLog << "loadANBCModelFromFile(string filename) - Could not find UseNullRejection" << endl;
        return false;
    }
    file >> useNullRejection;
    
    //The ranges section is only present in files saved with scaling enabled
    if( useScaling ){
        //Resize the ranges buffer
        ranges.resize(numInputDimensions);
        
        file >> word;
        if(word != "Ranges:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the Ranges" << endl;
            return false;
        }
        for(UINT n=0; n<ranges.size(); n++){
            file >> ranges[n].minValue;
            file >> ranges[n].maxValue;
        }
    }
    
    //Resize the buffer
    models.resize(numClasses);
    classLabels.resize(numClasses);
    
    //Load each of the K models
    for(UINT k=0; k<numClasses; k++){
        UINT modelID;
        file >> word;
        if(word != "*************_MODEL_*************"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find header for the "<<k+1<<"th model" << endl;
            return false;
        }
        
        file >> word;
        if(word != "Model_ID:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find model ID for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> modelID;
        
        //Model IDs are written 1-based, so modelID-1 must equal k
        //NOTE(review): this failure path and the N check below report via cout
        //rather than errorLog, unlike the rest of this function - confirm
        if(modelID-1!=k){
            cout<<"ANBC: Model ID does not match the current class ID for the "<<k+1<<"th model" << endl;
            return false;
        }
        
        file >> word;
        if(word != "N:"){
            cout<<"ANBC: Could not find N for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].N;
        
        file >> word;
        if(word != "ClassLabel:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find ClassLabel for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].classLabel;
        classLabels[k] = models[k].classLabel;
        
        file >> word;
        if(word != "Threshold:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the threshold for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].threshold;
        
        file >> word;
        if(word != "Gamma:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the gamma parameter for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].gamma;
        
        file >> word;
        if(word != "TrainingMu:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the training mu parameter for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].trainingMu;
        
        file >> word;
        if(word != "TrainingSigma:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the training sigma parameter for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].trainingSigma;
        
        //Resize the buffers
        models[k].mu.resize(numInputDimensions);
        models[k].sigma.resize(numInputDimensions);
        models[k].weights.resize(numInputDimensions);
        
        //Load Mu, Sigma and Weights
        file >> word;
        if(word != "Mu:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the Mu vector for the "<<k+1<<"th model" << endl;
            return false;
        }
        
        //Load Mu
        for(UINT j=0; j<models[k].N; j++){
            double value;
            file >> value;
            models[k].mu[j] = value;
        }
        
        file >> word;
        if(word != "Sigma:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the Sigma vector for the "<<k+1<<"th model" << endl;
            return false;
        }
        
        //Load Sigma
        for(UINT j=0; j<models[k].N; j++){
            double value;
            file >> value;
            models[k].sigma[j] = value;
        }
        
        file >> word;
        if(word != "Weights:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the Weights vector for the "<<k+1<<"th model" << endl;
            return false;
        }
        
        //Load Weights
        for(UINT j=0; j<models[k].N; j++){
            double value;
            file >> value;
            models[k].weights[j] = value;
        }
        
        //Each legacy model block ends with a footer line of asterisks
        file >> word;
        if(word != "*********************************"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the model footer for the "<<k+1<<"th model" << endl;
            return false;
        }
    }
    
    //Flag that the model is trained
    trained = true;
    
    //Recompute the null rejection thresholds
    recomputeNullRejectionThresholds();
    
    //Resize the prediction results to make sure it is setup for realtime prediction
    maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
    bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
    classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
    classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
    
    return true;
    
}
721 
722 } //End of namespace GRT
723 
Definition: AdaBoost.cpp:25
vector< ClassTracker > getClassTracker() const
This class implements the Adaptive Naive Bayes Classifier algorithm. The Adaptive Naive Bayes Classif...
bool scale(const double minTarget, const double maxTarget)
vector< MinMax > getRanges() const
string getClassifierType() const
Definition: Classifier.cpp:159
Definition: ANBC.h:50
ClassificationData getClassData(const UINT classLabel) const