//----------------------------------------------------------------------------
// Copyright (C) 2004-2017 by EMGU Corporation. All rights reserved.
//----------------------------------------------------------------------------
using System;
using Emgu.CV.CvEnum;
using Emgu.CV.ML.MlEnum;
using Emgu.CV.Structure;
using Emgu.Util;
using Emgu.CV.ML.Structure;
using System.Runtime.InteropServices;
namespace Emgu.CV.ML
{
///
/// Support Vector Machine
///
/// <summary>
/// Support Vector Machine.
/// </summary>
public partial class SVM : UnmanagedObject, IStatModel
{
    /// <summary>
    /// Type of SVM
    /// </summary>
    public enum SvmType
    {
        /// <summary>
        /// n-class classification (n &gt;= 2), allows imperfect separation of classes with penalty multiplier C for outliers
        /// </summary>
        CSvc = 100,
        /// <summary>
        /// n-class classification with possible imperfect separation. Parameter nu (in the range 0..1, the larger the value, the smoother the decision boundary) is used instead of C
        /// </summary>
        NuSvc = 101,
        /// <summary>
        /// one-class SVM. All the training data are from the same class, SVM builds a boundary that separates the class from the rest of the feature space
        /// </summary>
        OneClass = 102,
        /// <summary>
        /// Regression. The distance between feature vectors from the training set and the fitting hyper-plane must be less than p. For outliers the penalty multiplier C is used
        /// </summary>
        EpsSvr = 103,
        /// <summary>
        /// Regression; nu is used instead of p.
        /// </summary>
        NuSvr = 104
    }

    /// <summary>
    /// SVM kernel type
    /// </summary>
    public enum SvmKernelType
    {
        /// <summary>
        /// Custom svm kernel type
        /// </summary>
        Custom = -1,
        /// <summary>
        /// No mapping is done, linear discrimination (or regression) is done in the original feature space. It is the fastest option. d(x,y) = x y == (x,y)
        /// </summary>
        Linear = 0,
        /// <summary>
        /// polynomial kernel: d(x,y) = (gamma*(xy)+coef0)^degree
        /// </summary>
        Poly = 1,
        /// <summary>
        /// Radial-basis-function kernel; a good choice in most cases: d(x,y) = exp(-gamma*|x-y|^2)
        /// </summary>
        Rbf = 2,
        /// <summary>
        /// sigmoid function is used as a kernel: d(x,y) = tanh(gamma*(xy)+coef0)
        /// </summary>
        Sigmoid = 3,
        /// <summary>
        /// Exponential Chi2 kernel, similar to the RBF kernel
        /// </summary>
        Chi2 = 4,
        /// <summary>
        /// Histogram intersection kernel. A fast kernel. K(xi,xj)=min(xi,xj).
        /// </summary>
        Inter = 5
    }

    /// <summary>
    /// The type of SVM parameters
    /// </summary>
    public enum ParamType
    {
        /// <summary>
        /// C
        /// </summary>
        C = 0,
        /// <summary>
        /// Gamma
        /// </summary>
        Gamma = 1,
        /// <summary>
        /// P
        /// </summary>
        P = 2,
        /// <summary>
        /// NU
        /// </summary>
        Nu = 3,
        /// <summary>
        /// COEF
        /// </summary>
        Coef = 4,
        /// <summary>
        /// DEGREE
        /// </summary>
        Degree = 5
    }

    // Native pointer to the cv::ml::StatModel view of this object, filled in by the native create call.
    private IntPtr _statModelPtr;
    // Native pointer to the cv::Algorithm view of this object, filled in by the native create call.
    private IntPtr _algorithmPtr;

    /// <summary>
    /// Create a support Vector Machine
    /// </summary>
    public SVM()
    {
        // The native factory also hands back the StatModel / Algorithm pointers for the interface views.
        _ptr = MlInvoke.CvSVMDefaultCreate(ref _statModelPtr, ref _algorithmPtr);
    }

    /// <summary>
    /// Release all the memory associated with the SVM
    /// </summary>
    protected override void DisposeObject()
    {
        MlInvoke.CvSVMRelease(ref _ptr);
        // The view pointers alias the released object; clear them so they cannot be used afterwards.
        _statModelPtr = IntPtr.Zero;
        _algorithmPtr = IntPtr.Zero;
    }

    /// <summary>
    /// Get the default parameter grid for the specific SVM type
    /// </summary>
    /// <param name="type">The SVM type</param>
    /// <returns>The default parameter grid for the specific SVM type</returns>
    public static MCvParamGrid GetDefaultGrid(SVM.ParamType type)
    {
        MCvParamGrid result = new MCvParamGrid();
        MlInvoke.CvSVMGetDefaultGrid(type, ref result);
        return result;
    }

    /// <summary>
    /// The method trains the SVM model automatically by choosing the optimal parameters C, gamma, p, nu, coef0, degree from CvSVMParams. By the optimality one mean that the cross-validation estimate of the test set error is minimal.
    /// </summary>
    /// <param name="trainData">The training data.</param>
    /// <param name="kFold">Cross-validation parameter. The training set is divided into k_fold subsets, one subset being used to train the model, the others forming the test set. So, the SVM algorithm is executed k_fold times</param>
    /// <returns>True if the training is successful</returns>
    public bool TrainAuto(
        TrainData trainData,
        int kFold = 10)
    {
        // Delegate to the full overload, using the library defaults for every parameter grid.
        MCvParamGrid cGrid = GetDefaultGrid(ParamType.C);
        MCvParamGrid gammaGrid = GetDefaultGrid(ParamType.Gamma);
        MCvParamGrid pGrid = GetDefaultGrid(ParamType.P);
        MCvParamGrid nuGrid = GetDefaultGrid(ParamType.Nu);
        MCvParamGrid coefGrid = GetDefaultGrid(ParamType.Coef);
        MCvParamGrid degreeGrid = GetDefaultGrid(ParamType.Degree);
        return TrainAuto(trainData, kFold, cGrid, gammaGrid, pGrid, nuGrid, coefGrid, degreeGrid);
    }

    /// <summary>
    /// The method trains the SVM model automatically by choosing the optimal parameters C, gamma, p, nu, coef0, degree from CvSVMParams. By the optimality one mean that the cross-validation estimate of the test set error is minimal.
    /// </summary>
    /// <param name="trainData">The training data.</param>
    /// <param name="kFold">Cross-validation parameter. The training set is divided into k_fold subsets, one subset being used to train the model, the others forming the test set. So, the SVM algorithm is executed k_fold times</param>
    /// <param name="cGrid">Grid for C</param>
    /// <param name="gammaGrid">Grid for gamma</param>
    /// <param name="pGrid">Grid for p</param>
    /// <param name="nuGrid">Grid for nu</param>
    /// <param name="coefGrid">Grid for coeff</param>
    /// <param name="degreeGrid">Grid for degree</param>
    /// <param name="balanced">If true and the problem is 2-class classification then the method creates more balanced cross-validation subsets that is proportions between classes in subsets are close to such proportion in the whole train dataset.</param>
    /// <returns>True if the training is successful</returns>
    public bool TrainAuto(
        TrainData trainData,
        int kFold,
        MCvParamGrid cGrid,
        MCvParamGrid gammaGrid,
        MCvParamGrid pGrid,
        MCvParamGrid nuGrid,
        MCvParamGrid coefGrid,
        MCvParamGrid degreeGrid,
        bool balanced = false)
    {
        return MlInvoke.CvSVMTrainAuto(
            Ptr,
            trainData.Ptr,
            kFold,
            ref cGrid,
            ref gammaGrid,
            ref pGrid,
            ref nuGrid,
            ref coefGrid,
            ref degreeGrid,
            balanced);
    }

    /// <summary>
    /// Retrieves all the support vectors.
    /// </summary>
    /// <returns>All the support vector as floating-point matrix, where support vectors are stored as matrix rows.</returns>
    public Mat GetSupportVectors()
    {
        Mat supportVectors = new Mat();
        MlInvoke.CvSVMGetSupportVectors(_ptr, supportVectors);
        return supportVectors;
    }

    /// <summary>
    /// Pointer to the native StatModel view of this object.
    /// </summary>
    IntPtr IStatModel.StatModelPtr
    {
        get { return _statModelPtr; }
    }

    /// <summary>
    /// Pointer to the native Algorithm view of this object.
    /// </summary>
    IntPtr IAlgorithm.AlgorithmPtr
    {
        get { return _algorithmPtr; }
    }
}
}