// Registers the SVM classifier with the GRT classifier factory under the string id "SVM".
// NOTE(review): this whole chunk is an extraction-garbled dump — the original file's
// line numbers (e.g. the leading "26") are fused into the text, statements are split
// across lines, and many original lines are missing entirely.
26 RegisterClassifierModule< SVM > SVM::registerModule(
"SVM");
// Main constructor: seeds the LIBSVM svm_parameter struct with defaults, configures
// the GRT classifier base-class logging, then delegates all user-supplied settings
// to init(). NOTE(review): the dump is missing several original lines here (the fused
// numbering jumps 32->39, 40->45, 45->50, 52->54, 59->62) — presumably the remaining
// svm_parameter defaults (degree, gamma, coef0, nu, C, eps, p, nr_weight, ...); confirm
// against the complete source before relying on this fragment.
28 SVM::SVM(UINT kernelType,UINT svmType,
bool useScaling,
bool useNullRejection,
bool useAutoGamma,
double gamma,UINT degree,
double coef0,
double nu,
double C,
bool useCrossValidation,UINT kFoldValue){
32 param.weight_label = NULL;
39 param.svm_type = C_SVC;
40 param.kernel_type = LINEAR_KERNEL;
45 param.cache_size = 100;
50 param.probability = 1;
52 param.weight_label = NULL;
54 this->useScaling =
false;
55 this->useCrossValidation =
false;
56 this->useNullRejection =
false;
57 this->useAutoGamma =
true;
58 classificationThreshold = 0.5;
59 crossValidationResult = 0;
62 classifierType = classType;
63 classifierMode = STANDARD_CLASSIFIER_MODE;
64 debugLog.setProceedingText(
"[DEBUG SVM]");
65 errorLog.setProceedingText(
"[ERROR SVM]");
66 trainingLog.setProceedingText(
"[TRAINING SVM]");
67 warningLog.setProceedingText(
"[WARNING SVM]");
// All constructor arguments are forwarded to init(), which validates and applies them.
69 init(kernelType,svmType,useScaling,useNullRejection,useAutoGamma,gamma,degree,coef0,nu,C,useCrossValidation,kFoldValue);
// Interior of the copy constructor (the signature, presumably SVM::SVM(const SVM &rhs),
// is missing from this dump). Resets the weight-label pointer, sets the classifier
// type/mode, and configures the log prefixes before (presumably) invoking operator=.
74 param.weight_label = NULL;
80 classifierType = classType;
81 classifierMode = STANDARD_CLASSIFIER_MODE;
82 debugLog.setProceedingText(
"[DEBUG SVM]");
83 errorLog.setProceedingText(
"[ERROR SVM]");
84 trainingLog.setProceedingText(
"[TRAINING SVM]");
85 warningLog.setProceedingText(
"[WARNING SVM]");
// Interior of the assignment operator: performs a deep copy of the rhs SVM's LIBSVM
// model and parameters plus the GRT-level settings. The training problem itself is
// NOT copied (problemSet is reset to false).
100 this->problemSet =
false;
// deepCopyModel() allocates a fresh svm_model; deepCopyParam() duplicates the
// svm_parameter struct including any heap-allocated weight arrays.
101 this->model = rhs.deepCopyModel();
102 this->deepCopyParam( rhs.param, this->param );
103 this->numInputDimensions = rhs.numInputDimensions;
104 this->kFoldValue = rhs.kFoldValue;
105 this->classificationThreshold = rhs.classificationThreshold;
106 this->crossValidationResult = rhs.crossValidationResult;
107 this->useAutoGamma = rhs.useAutoGamma;
108 this->useCrossValidation = rhs.useCrossValidation;
// Interior of deepCopyFrom(const Classifier*): rejects a null source, then downcasts
// and performs the same deep copy as operator=.
118 if( classifier == NULL )
return false;
// NOTE(review): C-style cast — prefer dynamic_cast here; a mismatched classifier
// type would be silently reinterpreted. (Presumably a getClassifierType() check
// precedes this in the missing lines — confirm.)
121 SVM *ptr = (
SVM*)classifier;
126 this->problemSet =
false;
127 this->model = ptr->deepCopyModel();
128 this->deepCopyParam( ptr->param, this->param );
129 this->numInputDimensions = ptr->numInputDimensions;
130 this->kFoldValue = ptr->kFoldValue;
131 this->classificationThreshold = ptr->classificationThreshold;
132 this->crossValidationResult = ptr->crossValidationResult;
133 this->useAutoGamma = ptr->useAutoGamma;
134 this->useCrossValidation = ptr->useCrossValidation;
// Interior of train_(ClassificationData&): validates the data, converts it into the
// LIBSVM svm_problem layout, optionally derives gamma automatically, then trains.
149 errorLog <<
"train_(ClassificationData &trainingData) - Training data has zero samples!" << endl;
154 if( !convertClassificationDataToLIBSVMFormat(trainingData) ){
155 errorLog <<
"train_(ClassificationData &trainingData) - Failed To Convert Labelled Classification Data To LIBSVM Format!" << endl;
// Auto gamma = 1/numFeatures, the common LIBSVM default heuristic.
159 if( useAutoGamma ) param.gamma = 1.0/numInputDimensions;
162 bool trainingResult = trainSVM();
164 if(! trainingResult ){
165 errorLog <<
"train_(ClassificationData &trainingData) - Failed To Train SVM Model!" << endl;
// Interior of predict_(VectorDouble&): guards on trained state and input dimension,
// then dispatches to the probability-estimating or plain prediction path depending
// on whether the model was built with probability estimates enabled.
175 errorLog <<
"predict_(VectorDouble &inputVector) - The SVM model has not been trained!" << endl;
179 if( inputVector.size() != numInputDimensions ){
180 errorLog <<
"predict_(VectorDouble &inputVector) - The size of the input vector (" << inputVector.size() <<
") does not match the number of features of the model (" << numInputDimensions <<
")" << endl;
184 if( param.probability == 1 ){
185 if( !predictSVM( inputVector, maxLikelihood, classLikelihoods ) ){
186 errorLog <<
"predict(VectorDouble inputVector) - Prediction Failed!" << endl;
190 if( !predictSVM( inputVector ) ){
191 errorLog <<
"predict(VectorDouble inputVector) - Prediction Failed!" << endl;
// init(): validates the kernel/SVM type ids, then writes all user settings into the
// LIBSVM svm_parameter struct and the GRT-level flags. Returns false on an invalid
// kernelType or svmType. Interior lines are missing from this dump (numbering jumps
// 217->221, 221->226, etc. — presumably gamma/coef0/nu/C/eps assignments).
199 bool SVM::init(UINT kernelType,UINT svmType,
bool useScaling,
bool useNullRejection,
bool useAutoGamma,
double gamma,UINT degree,
double coef0,
double nu,
double C,
bool useCrossValidation,UINT kFoldValue){
205 if( !validateKernelType(kernelType) ){
206 errorLog <<
"init(...) - Unknown kernelType!\n";
210 if( !validateSVMType(svmType) ){
// BUG(review): copy-paste defect — this branch is reached when svmType is invalid,
// so the message should read "Unknown svmType!", not "Unknown kernelType!".
211 errorLog <<
"init(...) - Unknown kernelType!\n";
215 param.svm_type = (int)svmType;
216 param.kernel_type = (int)kernelType;
217 param.degree = (int)degree;
221 param.cache_size = 100;
226 param.probability = 1;
228 param.weight_label = NULL;
230 this->useScaling = useScaling;
231 this->useCrossValidation = useCrossValidation;
232 this->useNullRejection = useNullRejection;
233 this->useAutoGamma = useAutoGamma;
234 classificationThreshold = 0.5;
235 crossValidationResult = 0;
// deleteProblemSet(): frees the per-sample svm_node arrays in prob (the loop body
// and the prob.x/prob.y deallocation lines are missing from this dump).
240 void SVM::deleteProblemSet(){
242 for(
int i=0; i<prob.l; i++){
// Interior of initDefaultSVMSettings() (header missing from dump): restores the
// LIBSVM parameter defaults — C-SVC with a linear kernel, probability estimates on,
// cross-validation off. Interior parameter lines are missing (262->267 etc.).
261 param.svm_type = C_SVC;
262 param.kernel_type = LINEAR_KERNEL;
267 param.cache_size = 100;
272 param.probability = 1;
274 param.weight_label = NULL;
276 useCrossValidation =
false;
// validateProblemAndParameters(): delegates to LIBSVM's svm_check_parameter, which
// returns NULL on success or an error string on failure.
// NOTE(review): "¶m" is a mis-encoding of "&param" introduced by the extraction.
281 bool SVM::validateProblemAndParameters(){
283 const char *errorMsg = svm_check_parameter(&prob,¶m);
286 errorLog <<
"validateProblemAndParameters() - Parameters do not match problem!" << endl;
// trainSVM(): core training routine. Frees any prior model, validates the problem,
// optionally min/max-scales every sample into [SVM_MIN_SCALE_RANGE, SVM_MAX_SCALE_RANGE],
// optionally runs k-fold cross validation (MSE for regression types, accuracy % for
// classification), then calls svm_train and caches class labels/likelihood buffers.
// Many interior lines are missing from this dump (guards, braces, delete[] target, ...).
293 bool SVM::trainSVM(){
295 crossValidationResult = 0;
299 svm_free_and_destroy_model(&model);
305 errorLog <<
"trainSVM() - Problem not set!" << endl;
310 if( !validateProblemAndParameters() )
return false;
// Scale the training problem in place using the stored per-dimension ranges.
314 for(
int i=0; i<prob.l; i++)
315 for(UINT j=0; j<numInputDimensions; j++)
316 prob.x[i][j].value =
scale(prob.x[i][j].value,ranges[j].minValue,ranges[j].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
319 if( useCrossValidation ){
321 double total_correct = 0;
322 double total_error = 0;
323 double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
// NOTE(review): raw new[]; ownership of 'target' (and its delete[]) is in lines
// missing from this dump — verify it is freed on every path.
324 double *target =
new double[prob.l];
326 svm_cross_validation(&prob,¶m,kFoldValue,target);
// Regression types report mean squared error; classification reports accuracy %.
327 if( param.svm_type == EPSILON_SVR || param.svm_type == NU_SVR )
329 for(i=0;i<prob.l;i++)
331 double y = prob.y[i];
332 double v = target[i];
333 total_error += (v-y)*(v-y);
340 crossValidationResult = total_error/prob.l;
344 for(i=0;i<prob.l;i++){
345 if(target[i] == prob.y[i]){
349 crossValidationResult = total_correct/prob.l*100.0;
355 model = svm_train(&prob,¶m);
358 errorLog <<
"trainSVM() - Failed to train SVM Model!" << endl;
367 classLabels[k] = model->label[k];
369 classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
370 classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
// predictSVM(VectorDouble&): plain (non-probabilistic) prediction. Packs the input
// into a LIBSVM svm_node array terminated by index -1, optionally scales it with the
// training ranges, and takes svm_predict's label as the predicted class.
// NOTE(review): 'x' is assigned without a visible declaration — presumably declared
// in lines missing from this dump; its delete[] is likewise not visible here.
376 bool SVM::predictSVM(VectorDouble &inputVector){
378 if( !trained || inputVector.size() != numInputDimensions )
return false;
383 x =
new svm_node[numInputDimensions+1];
384 for(UINT j=0; j<numInputDimensions; j++){
// LIBSVM feature indices are 1-based.
385 x[j].index = (int)j+1;
386 x[j].value = inputVector[j];
// Sentinel node marking the end of the sparse feature vector.
389 x[numInputDimensions].index = -1;
390 x[numInputDimensions].value = 0;
394 for(UINT i=0; i<numInputDimensions; i++)
395 x[i].value =
scale(x[i].value,ranges[i].minValue,ranges[i].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
399 double predict_label = svm_predict(model,x);
402 predictedClassLabel = (UINT)predict_label;
// predictSVM overload with probability estimates: requires the model to have been
// trained with param.probability == 1. Fills 'probabilites' (sic — original spelling
// preserved) with the per-class estimates, tracks the max, and applies the
// null-rejection threshold to decide the final predicted class label.
410 bool SVM::predictSVM(VectorDouble &inputVector,
double &maxProbability, VectorDouble &probabilites){
412 if( !trained || param.probability == 0 || inputVector.size() != numInputDimensions )
return false;
414 double *prob_estimates = NULL;
418 prob_estimates =
new double[ model->nr_class ];
421 x =
new svm_node[numInputDimensions+1];
422 for(UINT j=0; j<numInputDimensions; j++){
423 x[j].index = (int)j+1;
424 x[j].value = inputVector[j];
427 x[numInputDimensions].index = -1;
428 x[numInputDimensions].value = 0;
432 for(UINT j=0; j<numInputDimensions; j++)
433 x[j].value =
scale(x[j].value,ranges[j].minValue,ranges[j].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
437 double predict_label = svm_predict_probability(model,x,prob_estimates);
439 predictedClassLabel = 0;
441 probabilites.resize(model->nr_class);
442 for(
int k=0; k<model->nr_class; k++){
443 if( maxProbability < prob_estimates[k] ){
444 maxProbability = prob_estimates[k];
// NOTE(review): k+1 assumes class labels are 1..nr_class in model order; the value
// is overwritten below by predict_label on the common paths anyway.
445 predictedClassLabel = k+1;
446 maxLikelihood = maxProbability;
448 probabilites[k] = prob_estimates[k];
// When null rejection is off, trust LIBSVM's label directly; otherwise only accept
// it if the best probability clears the classification threshold (an "else{" for
// the rejection path appears to be in a line missing from this dump — confirm).
451 if( !useNullRejection ) predictedClassLabel = (UINT)predict_label;
453 if( maxProbability >= classificationThreshold ){
454 predictedClassLabel = (UINT)predict_label;
455 }
else predictedClassLabel = GRT_DEFAULT_NULL_CLASS_LABEL;
459 delete[] prob_estimates;
// Converts a GRT ClassificationData set into the LIBSVM svm_problem layout:
// prob.y holds the class label of each sample, prob.x holds one svm_node array per
// sample (1-based feature indices, -1 sentinel terminator). Also caches the
// per-dimension ranges used later for scaling.
465 bool SVM::convertClassificationDataToLIBSVMFormat(ClassificationData &trainingData){
470 const UINT numTrainingExamples = trainingData.getNumSamples();
471 numInputDimensions = trainingData.getNumDimensions();
474 ranges = trainingData.getRanges();
477 prob.l = numTrainingExamples;
478 prob.x =
new svm_node*[numTrainingExamples];
479 prob.y =
new double[numTrainingExamples];
482 for(UINT i=0; i<numTrainingExamples; i++){
484 prob.y[i] = trainingData[i].getClassLabel();
487 prob.x[i] =
new svm_node[numInputDimensions+1];
488 for(UINT j=0; j<numInputDimensions; j++){
489 prob.x[i][j].index = j+1;
490 prob.x[i][j].value = trainingData[i].getSample()[j];
492 prob.x[i][numInputDimensions].index = -1;
493 prob.x[i][numInputDimensions].value = 0;
// Interior of saveModelToFile(fstream&) (header missing from dump): writes the
// "SVM_MODEL_FILE_V2.0" header, the classifier base settings, the SVM/kernel type
// names, the scalar parameters, and — when trained — the model arrays (rho, label,
// probA/probB, nSV) and the support vectors themselves, tab-separated.
501 if( !file.is_open() ){
505 file <<
"SVM_MODEL_FILE_V2.0\n";
509 errorLog <<
"saveModelToFile(fstream &file) - Failed to save classifier base settings to file!" << endl;
// Serialize from the trained model's parameters when available, else the configured ones.
513 const svm_parameter& param = trained ? model->param : this->param;
515 file <<
"ModelType: ";
516 switch( param.svm_type ){
527 file <<
"EPSILON_SVR";
533 errorLog <<
"saveModelToFile(fstream &file) - Invalid model type: " << param.svm_type << endl;
539 file <<
"KernelType: ";
540 switch(param.kernel_type){
545 file <<
"POLYNOMIAL";
554 file <<
"PRECOMPUTED";
557 errorLog <<
"saveModelToFile(fstream &file) - Invalid kernel type: " << param.kernel_type << endl;
562 file <<
"Degree: " << param.degree << endl;
563 file <<
"Gamma: " << param.gamma << endl;
564 file <<
"Coef0: " << param.coef0 << endl;
565 file <<
"NumberOfFeatures: " << numInputDimensions << endl;
566 file <<
"UseShrinking: " << param.shrinking << endl;
567 file <<
"UseProbability: " << param.probability << endl;
570 UINT numClasses = (UINT)model->nr_class;
571 UINT numSV = (UINT)model->l;
572 file <<
"NumberOfSupportVectors: " << numSV << endl;
// rho/probA/probB have one entry per class pair: n*(n-1)/2.
575 for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->rho[i] <<
"\t";
580 for(UINT i=0;i<numClasses;i++) file << model->label[i] <<
"\t";
586 for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->probA[i] <<
"\t";
592 for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->probB[i] <<
"\t";
597 file <<
"NumSupportVectorsPerClass: \n";
598 for(UINT i=0;i<numClasses;i++) file << model->nSV[i] <<
"\t";
602 file <<
"SupportVectors: \n";
604 const double *
const *sv_coef = model->sv_coef;
605 const svm_node *
const *SV = model->SV;
// Each support vector row: nr_class-1 coefficients, then (index,value) pairs until
// the -1 sentinel ('p' presumably iterates SV[i]; its declaration is in missing lines).
607 for(UINT i=0;i<numSV;i++){
608 for(UINT j=0;j<numClasses-1;j++)
609 file << sv_coef[j][i] <<
"\t";
613 if(param.kernel_type == PRECOMPUTED) file << (int) p->value <<
"\t";
615 while(p->index != -1){
616 file << p->index <<
"\t" << p->value <<
"\t";
// loadModelFromFile(fstream&): parses the V2.0 model format written by
// saveModelToFile. Detects the legacy V1.0 header and delegates to
// loadLegacyModelFromFile. Allocates a fresh svm_model, zeroes its parameter
// struct, then reads each "Header: value" section in order, erroring out on any
// mismatch. Many interior lines (word reads, returns, braces, allocations of SV)
// are missing from this dump.
627 bool SVM::loadModelFromFile(fstream &file){
631 UINT halfNumClasses = 0;
636 if( !file.is_open() ){
637 errorLog <<
"loadModelFromFile(fstream &file) - The file is not open!" << endl;
// Backwards compatibility with the legacy V1.0 file format.
645 if( word ==
"SVM_MODEL_FILE_V1.0" ){
646 return loadLegacyModelFromFile( file );
650 if( word !=
"SVM_MODEL_FILE_V2.0" ){
651 errorLog <<
"loadModelFromFile(fstream &file) - Invalid file format!" << endl;
657 if( !Classifier::loadBaseSettingsFromFile(file) ){
658 errorLog <<
"loadModelFromFile(string filename) - Failed to load base settings from file!" << endl;
// Initialize the freshly allocated model's fields to safe defaults before parsing.
667 model->sv_coef = NULL;
678 model->param.svm_type = 0;
679 model->param.kernel_type = 0;
680 model->param.degree = 0;
681 model->param.gamma = 0;
682 model->param.coef0 = 0;
683 model->param.cache_size = 0;
684 model->param.eps = 0;
686 model->param.nr_weight = 0;
687 model->param.weight_label = NULL;
688 model->param.weight = NULL;
691 model->param.shrinking = 0;
692 model->param.probability = 1;
696 if(word !=
"ModelType:"){
697 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find ModelType header!" << endl;
702 if( word ==
"C_SVC" ){
703 model->param.svm_type = C_SVC;
705 if( word ==
"NU_SVC" ){
706 model->param.svm_type = NU_SVC;
708 if( word ==
"ONE_CLASS" ){
709 model->param.svm_type = ONE_CLASS;
711 if( word ==
"EPSILON_SVR" ){
712 model->param.svm_type = EPSILON_SVR;
714 if( word ==
"NU_SVR" ){
715 model->param.svm_type = NU_SVR;
717 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find SVM type!" << endl;
728 if(word !=
"KernelType:"){
729 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find kernel type!" << endl;
734 if( word ==
"LINEAR" ){
735 model->param.kernel_type = LINEAR;
737 if( word ==
"POLYNOMIAL" ){
738 model->param.kernel_type = POLY;
741 model->param.kernel_type = RBF;
743 if( word ==
"SIGMOID" ){
744 model->param.kernel_type = SIGMOID;
746 if( word ==
"PRECOMPUTED" ){
747 model->param.kernel_type = PRECOMPUTED;
749 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find kernel type!" << endl;
760 if(word !=
"Degree:"){
761 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find Degree header!" << endl;
765 file >> model->param.degree;
769 if(word !=
"Gamma:"){
770 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find Gamma header!" << endl;
774 file >> model->param.gamma;
778 if(word !=
"Coef0:"){
779 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find Coef0 header!" << endl;
783 file >> model->param.coef0;
787 if(word !=
"NumberOfFeatures:"){
788 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find NumberOfFeatures header!" << endl;
792 file >> numInputDimensions;
796 if(word !=
"UseShrinking:"){
797 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find UseShrinking header!" << endl;
801 file >> model->param.shrinking;
805 if(word !=
"UseProbability:"){
806 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find UseProbability header!" << endl;
810 file >> model->param.probability;
815 if(word !=
"NumberOfSupportVectors:"){
816 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find NumberOfSupportVectors header!" << endl;
823 halfNumClasses = numClasses*(numClasses-1)/2;
824 model->nr_class = numClasses;
830 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find RHO header!" << endl;
834 model->rho =
new double[ halfNumClasses ];
835 for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->rho[i];
839 if(word !=
"Label:"){
842 model->label =
new int[ numClasses ];
843 for(UINT i=0;i<numClasses;i++) file >> model->label[i];
850 if(word !=
"ProbA:"){
853 model->probA =
new double[ halfNumClasses ];
854 for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probA[i];
861 if(word !=
"ProbB:"){
864 model->probB =
new double[ halfNumClasses ];
865 for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probB[i];
872 if( word ==
"NumSupportVectorsPerClass:" ){
873 model->nSV =
new int[ numClasses ];
874 for(UINT i=0; i<numClasses; i++) file >> model->nSV[i];
883 if(word !=
"SupportVectors:"){
884 errorLog <<
"loadModelFromFile(fstream &file) - Failed to find SupportVectors header!" << endl;
890 model->sv_coef =
new double*[numClasses-1];
891 for(UINT j=0;j<numClasses-1;j++) model->sv_coef[j] =
new double[numSV];
894 for(UINT i=0; i<numSV; i++){
895 for(UINT j=0; j<numClasses-1; j++){
896 file >> model->sv_coef[j][i];
899 model->SV[i] =
new svm_node[numInputDimensions+1];
901 if(model->param.kernel_type == PRECOMPUTED) file >> model->SV[i][0].value;
903 for(UINT j=0; j<numInputDimensions; j++){
904 file >> model->SV[i][j].index;
905 file >> model->SV[i][j].value;
907 model->SV[i][numInputDimensions].index = -1;
908 model->SV[i][numInputDimensions].value = 0;
// Rebuild the GRT-side class label cache and likelihood/distance buffers.
913 this->numClasses = getNumClasses();
914 classLabels.resize(getNumClasses());
915 for(UINT k=0; k<getNumClasses(); k++){
916 classLabels[k] = model->label[k];
923 maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
924 bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
925 classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
926 classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
// Interior of clear() (header missing from dump): resets the cross-validation result
// and releases the LIBSVM model and parameter heap data.
// NOTE(review): "¶m" is a mis-encoding of "&param" introduced by the extraction.
937 crossValidationResult = 0;
939 svm_free_and_destroy_model(&model);
940 svm_destroy_param(¶m);
946 bool SVM::getIsCrossValidationTrainingEnabled()
const{
947 return useCrossValidation;
// getIsAutoGammaEnabled(): accessor for the useAutoGamma flag (the return statement
// is in a line missing from this dump — presumably "return useAutoGamma;").
950 bool SVM::getIsAutoGammaEnabled()
const{
// getSVMType(): returns the SVM type as a human-readable string, reading from the
// trained model's parameters when available, else the configured parameters.
// Several case labels/breaks and the paramPtr declaration are missing from this dump.
954 string SVM::getSVMType()
const{
957 string modelName =
"UNKNOWN";
959 paramPtr = &model->param;
960 }
// NOTE(review): "¶m" is a mis-encoding of "&param" introduced by the extraction.
else paramPtr = ¶m;
962 switch(paramPtr->svm_type){
967 modelName =
"NU_SVC";
970 modelName =
"ONE_CLASS";
973 modelName =
"EPSILON_SVR";
976 modelName =
"NU_SVR";
// getKernelType(): returns the kernel type as a human-readable string, mirroring
// getSVMType()'s trained/untrained parameter selection.
985 string SVM::getKernelType()
const{
987 string modelName =
"UNKNOWN";
989 paramPtr = &model->param;
990 }
// NOTE(review): "¶m" is a mis-encoding of "&param" introduced by the extraction.
else paramPtr = ¶m;
992 switch(paramPtr->kernel_type){
994 modelName =
"LINEAR_KERNEL";
997 modelName =
"POLY_KERNEL";
1000 modelName =
"RBF_KERNEL";
1002 case(SIGMOID_KERNEL):
1003 modelName =
"SIGMOID_KERNEL";
1005 case(PRECOMPUTED_KERNEL):
1006 modelName =
"PRECOMPUTED_KERNEL";
1014 UINT SVM::getNumClasses()
const{
1015 if( !trained )
return 0;
1016 return (UINT) model->nr_class;
1019 UINT SVM::getDegree()
const{
1021 return (UINT)model->param.degree;
1023 return (UINT)param.gamma;
// Parameter accessors: each returns the value stored in the trained model when one
// exists. The "if( trained ){" guards and the untrained fallbacks (presumably
// "return param.X;") are in lines missing from this dump.
1026 double SVM::getGamma()
const{
1028 return model->param.gamma;
1033 double SVM::getNu()
const{
1035 return model->param.nu;
1040 double SVM::getCoef0()
const{
1042 return model->param.coef0;
1047 double SVM::getC()
const{
1049 return model->param.C;
1054 double SVM::getCrossValidationResult()
const{
return crossValidationResult; }
// setSVMType()/setKernelType(): validate the requested type id before writing it
// into the pending svm_parameter struct (takes effect on next train). The success/
// failure returns for setSVMType are in lines missing from this dump.
1056 bool SVM::setSVMType(
const UINT svmType){
1057 if( validateSVMType(svmType) ){
1058 param.svm_type = (int)svmType;
1064 bool SVM::setKernelType(
const UINT kernelType){
1065 if( validateKernelType(kernelType) ){
1066 param.kernel_type = (int)kernelType;
1069 warningLog <<
"setKernelType(UINT kernelType) - Failed to set kernel type, unknown kernelType!" << endl;
// Setters for the pending LIBSVM parameters (applied on the next train call).
// setGamma is refused while useAutoGamma is enabled. Return statements and braces
// for several setters are in lines missing from this dump; setC's body (presumably
// "this->param.C = C; return true;") is missing entirely.
1073 bool SVM::setGamma(
const double gamma){
1074 if( !useAutoGamma ){
1075 this->param.gamma = gamma;
1078 warningLog <<
"setGamma(double gamma) - Failed to set gamma, useAutoGamma is enabled, setUseAutoGamma to false first!" << endl;
1082 bool SVM::setDegree(
const UINT degree){
1083 this->param.degree = (int)degree;
1087 bool SVM::setNu(
const double nu){
1088 this->param.nu = nu;
1092 bool SVM::setCoef0(
const double coef0){
1093 this->param.coef0 = coef0;
1097 bool SVM::setC(
const double C){
1102 bool SVM::setKFoldCrossValidationValue(
const UINT kFoldValue){
1103 if( kFoldValue > 0 ){
1104 this->kFoldValue = kFoldValue;
1107 warningLog <<
"setKFoldCrossValidationValue(const UINT kFoldValue) - Failed to set kFoldValue, the kFoldValue must be greater than 0!" << endl;
1111 bool SVM::enableAutoGamma(
const bool useAutoGamma){
1112 this->useAutoGamma = useAutoGamma;
1116 bool SVM::enableCrossValidationTraining(
const bool useCrossValidation){
1117 this->useCrossValidation = useCrossValidation;
// validateSVMType()/validateKernelType(): whitelist checks for the type ids accepted
// by init/setSVMType/setKernelType. The "return true;" bodies and the final
// "return false;" of each function are in lines missing from this dump.
1121 bool SVM::validateSVMType(
const UINT svmType){
1122 if( svmType == C_SVC ){
1125 if( svmType == NU_SVC ){
1128 if( svmType == ONE_CLASS ){
1131 if( svmType == EPSILON_SVR ){
1134 if( svmType == NU_SVR ){
1140 bool SVM::validateKernelType(
const UINT kernelType){
1141 if( kernelType == LINEAR_KERNEL ){
1144 if( kernelType == POLY_KERNEL ){
1147 if( kernelType == RBF_KERNEL ){
1150 if( kernelType == SIGMOID_KERNEL ){
1153 if( kernelType == PRECOMPUTED_KERNEL ){
// deepCopyModel(): allocates and returns a complete heap copy of the trained LIBSVM
// svm_model (parameters, rho/label/probA/probB/nSV arrays, coefficient matrix, and
// the support vectors themselves). Returns NULL if no model exists. Caller owns the
// returned model. Several initialization and brace lines are missing from this dump.
1159 struct svm_model* SVM::deepCopyModel()
const{
1161 if( model == NULL )
return NULL;
1163 UINT halfNumClasses = 0;
1166 struct svm_model *m =
new svm_model;
// Zero the new model's parameter block before copying the real values below.
1181 m->param.svm_type = 0;
1182 m->param.kernel_type = 0;
1183 m->param.degree = 0;
1186 m->param.cache_size = 0;
1189 m->param.nr_weight = 0;
1190 m->param.weight_label = NULL;
1191 m->param.weight = NULL;
1194 m->param.shrinking = 0;
1195 m->param.probability = 1;
1198 m->param.svm_type = model->param.svm_type;
1199 m->param.kernel_type = model->param.kernel_type ;
1200 m->param.degree = model->param.degree;
1201 m->param.gamma = model->param.gamma;
1202 m->param.coef0 = model->param.coef0;
1203 m->nr_class = model->nr_class;
1205 m->param.shrinking = model->param.shrinking;
1206 m->param.probability = model->param.probability;
// Pairwise arrays are sized n*(n-1)/2 for n classes.
1209 halfNumClasses = model->nr_class*(model->nr_class-1)/2;
1211 m->rho =
new double[ halfNumClasses ];
1212 for(
int i=0;i <model->nr_class*(model->nr_class-1)/2; i++) m->rho[i] = model->rho[i];
1214 if( model->label != NULL ){
1215 m->label =
new int[ model->nr_class ];
1216 for(
int i=0;i<model->nr_class;i++) m->label[i] = model->label[i];
1219 if( model->probA != NULL ){
1220 m->probA =
new double[ halfNumClasses ];
1221 for(UINT i=0;i<halfNumClasses; i++) m->probA[i] = model->probA[i];
1224 if( model->probB != NULL ){
1225 m->probB =
new double[ halfNumClasses ];
1226 for(UINT i=0; i<halfNumClasses; i++) m->probB[i] = model->probB[i];
1229 if( model->nSV != NULL ){
1230 m->nSV =
new int[ model->nr_class ];
1231 for(
int i=0; i<model->nr_class; i++) m->nSV[i] = model->nSV[i];
// Copy the coefficient matrix (nr_class-1 rows x l support vectors) and each SV.
1235 m->sv_coef =
new double*[numClasses-1];
1236 for(UINT j=0;j<numClasses-1;j++) m->sv_coef[j] =
new double[model->l];
1237 m->SV =
new svm_node*[model->l];
1239 for(
int i=0; i<model->l; i++){
1240 for(
int j=0; j<model->nr_class-1; j++){
1241 m->sv_coef[j][i] = model->sv_coef[j][i];
1244 m->SV[i] =
new svm_node[numInputDimensions+1];
1246 if(model->param.kernel_type == PRECOMPUTED) m->SV[i][0].value = model->SV[i][0].value;
1248 for(UINT j=0; j<numInputDimensions; j++){
1249 m->SV[i][j].index = model->SV[i][j].index;
1250 m->SV[i][j].value = model->SV[i][j].value;
1252 m->SV[i][numInputDimensions].index = -1;
1253 m->SV[i][numInputDimensions].value = 0;
// deepCopyProblem(): frees any data held by 'target', then deep-copies the svm_problem
// from 'source' — one svm_node array (numInputDimensions+1 nodes, including the -1
// sentinel slot) per sample plus the label array. Some deallocation/brace lines are
// missing from this dump.
1263 bool SVM::deepCopyProblem(
const struct svm_problem &source,
struct svm_problem &target,
const unsigned int numInputDimensions )
const{
1266 if( target.y != NULL ){
1270 if( target.x != NULL ){
1271 for(
int i=0; i<target.l; i++){
1272 delete[] target.x[i];
1278 target.l = source.l;
1280 if( source.x != NULL ){
1281 target.x =
new svm_node*[ target.l ];
1282 for(
int i=0; i<target.l; i++){
1283 target.x[i] =
new svm_node[ numInputDimensions+1 ];
1284 for(
unsigned int j=0; j<numInputDimensions+1; j++){
1285 target.x[i][j] = source.x[i][j];
1290 if( source.y != NULL ){
1291 target.y =
new double[ target.l ];
1292 for(
int i=0; i<target.l; i++){
1293 target.y[i] = source.y[i];
// deepCopyParam(): releases the target's heap-allocated weight arrays, copies every
// scalar field of the svm_parameter struct, then (in lines missing from this dump)
// presumably duplicates source_param's weight_label/weight arrays when present.
1300 bool SVM::deepCopyParam(
const svm_parameter &source_param, svm_parameter &target_param )
const{
1303 if( target_param.weight_label != NULL ){
1304 delete[] target_param.weight_label;
1305 target_param.weight_label = NULL;
1307 if( target_param.weight != NULL ){
1308 delete[] target_param.weight;
1309 target_param.weight = NULL;
1313 target_param.svm_type = source_param.svm_type;
1314 target_param.kernel_type = source_param.kernel_type;
1315 target_param.degree = source_param.degree;
1316 target_param.gamma = source_param.gamma;
1317 target_param.coef0 = source_param.coef0;
1318 target_param.cache_size = source_param.cache_size;
1319 target_param.eps = source_param.eps;
1320 target_param.C = source_param.C;
1321 target_param.nr_weight = source_param.nr_weight;
1322 target_param.nu = source_param.nu;
1323 target_param.p = source_param.p;
1324 target_param.shrinking = source_param.shrinking;
1325 target_param.probability = source_param.probability;
1328 if( source_param.weight_label != NULL ){
1331 if( source_param.weight != NULL ){
// loadLegacyModelFromFile(): parser for the legacy "SVM_MODEL_FILE_V1.0" format.
// Mirrors loadModelFromFile but additionally reads NumberOfClasses, UseScaling and
// the per-dimension Ranges directly (in V2.0 those come from the base-class settings).
// Many interior lines are missing from this dump, and the function continues past the
// end of this chunk (the tail after line 1634 is not visible).
1338 bool SVM::loadLegacyModelFromFile( fstream &file ){
1343 UINT halfNumClasses = 0;
1344 numInputDimensions = 0;
// Zero-initialize the freshly allocated model before parsing.
1348 model->nr_class = 0;
1351 model->sv_coef = NULL;
1353 model->probA = NULL;
1354 model->probB = NULL;
1355 model->label = NULL;
1357 model->label = NULL;
1362 model->param.svm_type = 0;
1363 model->param.kernel_type = 0;
1364 model->param.degree = 0;
1365 model->param.gamma = 0;
1366 model->param.coef0 = 0;
1367 model->param.cache_size = 0;
1368 model->param.eps = 0;
1370 model->param.nr_weight = 0;
1371 model->param.weight_label = NULL;
1372 model->param.weight = NULL;
1373 model->param.nu = 0;
1375 model->param.shrinking = 0;
1376 model->param.probability = 1;
1380 if(word !=
"ModelType:"){
1381 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find ModelType header!" << endl;
1386 if( word ==
"C_SVC" ){
1387 model->param.svm_type = C_SVC;
1389 if( word ==
"NU_SVC" ){
1390 model->param.svm_type = NU_SVC;
1392 if( word ==
"ONE_CLASS" ){
1393 model->param.svm_type = ONE_CLASS;
1395 if( word ==
"EPSILON_SVR" ){
1396 model->param.svm_type = EPSILON_SVR;
1398 if( word ==
"NU_SVR" ){
1399 model->param.svm_type = NU_SVR;
1401 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find SVM type!" << endl;
1412 if(word !=
"KernelType:"){
1413 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find kernel type!" << endl;
1418 if( word ==
"LINEAR" ){
1419 model->param.kernel_type = LINEAR;
1421 if( word ==
"POLYNOMIAL" ){
1422 model->param.kernel_type = POLY;
1424 if( word ==
"RBF" ){
1425 model->param.kernel_type = RBF;
1427 if( word ==
"SIGMOID" ){
1428 model->param.kernel_type = SIGMOID;
1430 if( word ==
"PRECOMPUTED" ){
1431 model->param.kernel_type = PRECOMPUTED;
1433 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find kernel type!" << endl;
1444 if(word !=
"Degree:"){
1445 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find Degree header!" << endl;
1449 file >> model->param.degree;
1453 if(word !=
"Gamma:"){
1454 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find Gamma header!" << endl;
1458 file >> model->param.gamma;
1462 if(word !=
"Coef0:"){
1463 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find Coef0 header!" << endl;
1467 file >> model->param.coef0;
1471 if(word !=
"NumberOfClasses:"){
1472 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find NumberOfClasses header!" << endl;
1480 if(word !=
"NumberOfSupportVectors:"){
1481 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find NumberOfSupportVectors header!" << endl;
1489 if(word !=
"NumberOfFeatures:"){
1490 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find NumberOfFeatures header!" << endl;
1494 file >> numInputDimensions;
1498 if(word !=
"UseShrinking:"){
1499 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find UseShrinking header!" << endl;
1503 file >> model->param.shrinking;
1507 if(word !=
"UseProbability:"){
1508 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find UseProbability header!" << endl;
1512 file >> model->param.probability;
1516 if(word !=
"UseScaling:"){
1517 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find UseScaling header!" << endl;
1525 if(word !=
"Ranges:"){
1526 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find Ranges header!" << endl;
// Legacy format stores the per-dimension min/max ranges inline.
1532 ranges.resize(numInputDimensions);
1535 for(UINT i=0; i<ranges.size(); i++){
1536 file >> ranges[i].minValue;
1537 file >> ranges[i].maxValue;
1541 halfNumClasses = numClasses*(numClasses-1)/2;
1542 model->nr_class = numClasses;
1548 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find RHO header!" << endl;
1552 model->rho =
new double[ halfNumClasses ];
1553 for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->rho[i];
// Label/ProbA/ProbB sections are optional in the legacy format: a header mismatch
// leaves the corresponding array NULL rather than failing the load.
1557 if(word !=
"Label:"){
1558 model->label = NULL;
1560 model->label =
new int[ numClasses ];
1561 for(UINT i=0;i<numClasses;i++) file >> model->label[i];
1568 if(word !=
"ProbA:"){
1569 model->probA = NULL;
1571 model->probA =
new double[ halfNumClasses ];
1572 for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probA[i];
1579 if(word !=
"ProbB:"){
1580 model->probB = NULL;
1582 model->probB =
new double[ halfNumClasses ];
1583 for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probB[i];
1590 if(word !=
"NumSupportVectorsPerClass:"){
1593 model->nSV =
new int[ numClasses ];
1594 for(UINT i=0;i<numClasses;i++) file >> model->nSV[i];
1601 if(word !=
"SupportVectors:"){
1602 errorLog <<
"loadLegacyModelFromFile(fstream &file) - Failed to find SupportVectors header!" << endl;
1608 model->sv_coef =
new double*[numClasses-1];
1609 for(UINT j=0;j<numClasses-1;j++) model->sv_coef[j] =
new double[numSV];
1612 for(UINT i=0; i<numSV; i++){
1613 for(UINT j=0; j<numClasses-1; j++){
1614 file >> model->sv_coef[j][i];
1617 model->SV[i] =
new svm_node[numInputDimensions+1];
1619 if(model->param.kernel_type == PRECOMPUTED) file >> model->SV[i][0].value;
1621 for(UINT j=0; j<numInputDimensions; j++){
1622 file >> model->SV[i][j].index;
1623 file >> model->SV[i][j].value;
1625 model->SV[i][numInputDimensions].index = -1;
1626 model->SV[i][numInputDimensions].value = 0;
// Rebuild the GRT-side class label cache (function continues past this chunk).
1631 this->numClasses = getNumClasses();
1632 classLabels.resize(getNumClasses());
1633 for(UINT k=0; k<getNumClasses(); k++){
1634 classLabels[k] = model->label[k];
virtual bool train_(ClassificationData &trainingData)
virtual bool predict_(VectorDouble &inputVector)
bool copyBaseVariables(const Classifier *classifier)
UINT getNumSamples() const
This class acts as a front end for the LIBSVM library (http://www.csie.ntu.edu.tw/~cjlin/libsvm/). It implements a Support Vector Machine (SVM) classifier, a powerful classifier that works well on a wide range of classification problems, particularly on more complex problems that other classifiers (such as the KNN, GMM or ANBC algorithms) might not be able to solve.
bool init(UINT kernelType, UINT svmType, bool useScaling, bool useNullRejection, bool useAutoGamma, double gamma, UINT degree, double coef0, double nu, double C, bool useCrossValidation, UINT kFoldValue)
virtual UINT getNumClasses() const
virtual bool deepCopyFrom(const Classifier *classifier)
SVM(UINT kernelType=LINEAR_KERNEL, UINT svmType=C_SVC, bool useScaling=true, bool useNullRejection=false, bool useAutoGamma=true, double gamma=0.1, UINT degree=3, double coef0=0, double nu=0.5, double C=1, bool useCrossValidation=false, UINT kFoldValue=10)
virtual bool saveModelToFile(fstream &file) const
bool saveBaseSettingsToFile(fstream &file) const
double scale(const double &x, const double &minSource, const double &maxSource, const double &minTarget, const double &maxTarget, const bool constrain=false)
void initDefaultSVMSettings()
SVM & operator=(const SVM &rhs)
string getClassifierType() const