class cv::ml::SVM

Overview

Support Vector Machines.

#include <ml.hpp>

class SVM: public cv::ml::StatModel
{
public:
    // enums

    enum KernelTypes;
    enum ParamTypes;
    enum Types;

    // classes

    class Kernel;

    // methods

    virtual
    double
    getC() const = 0;

    virtual
    cv::Mat
    getClassWeights() const = 0;

    virtual
    double
    getCoef0() const = 0;

    virtual
    double
    getDecisionFunction(
        int i,
        OutputArray alpha,
        OutputArray svidx
        ) const = 0;

    virtual
    double
    getDegree() const = 0;

    virtual
    double
    getGamma() const = 0;

    virtual
    int
    getKernelType() const = 0;

    virtual
    double
    getNu() const = 0;

    virtual
    double
    getP() const = 0;

    virtual
    Mat
    getSupportVectors() const = 0;

    virtual
    cv::TermCriteria
    getTermCriteria() const = 0;

    virtual
    int
    getType() const = 0;

    Mat
    getUncompressedSupportVectors() const;

    virtual
    void
    setC(double val) = 0;

    virtual
    void
    setClassWeights(const cv::Mat& val) = 0;

    virtual
    void
    setCoef0(double val) = 0;

    virtual
    void
    setCustomKernel(const Ptr<Kernel>& _kernel) = 0;

    virtual
    void
    setDegree(double val) = 0;

    virtual
    void
    setGamma(double val) = 0;

    virtual
    void
    setKernel(int kernelType) = 0;

    virtual
    void
    setNu(double val) = 0;

    virtual
    void
    setP(double val) = 0;

    virtual
    void
    setTermCriteria(const cv::TermCriteria& val) = 0;

    virtual
    void
    setType(int val) = 0;

    virtual
    bool
    trainAuto(
        const Ptr<TrainData>& data,
        int kFold = 10,
        ParamGrid Cgrid = getDefaultGrid(C),
        ParamGrid gammaGrid = getDefaultGrid(GAMMA),
        ParamGrid pGrid = getDefaultGrid(P),
        ParamGrid nuGrid = getDefaultGrid(NU),
        ParamGrid coeffGrid = getDefaultGrid(COEF),
        ParamGrid degreeGrid = getDefaultGrid(DEGREE),
        bool balanced = false
        ) = 0;

    bool
    trainAuto(
        InputArray samples,
        int layout,
        InputArray responses,
        int kFold = 10,
        Ptr<ParamGrid> Cgrid = SVM::getDefaultGridPtr(SVM::C),
        Ptr<ParamGrid> gammaGrid = SVM::getDefaultGridPtr(SVM::GAMMA),
        Ptr<ParamGrid> pGrid = SVM::getDefaultGridPtr(SVM::P),
        Ptr<ParamGrid> nuGrid = SVM::getDefaultGridPtr(SVM::NU),
        Ptr<ParamGrid> coeffGrid = SVM::getDefaultGridPtr(SVM::COEF),
        Ptr<ParamGrid> degreeGrid = SVM::getDefaultGridPtr(SVM::DEGREE),
        bool balanced = false
        );

    static
    Ptr<SVM>
    create();

    static
    ParamGrid
    getDefaultGrid(int param_id);

    static
    Ptr<ParamGrid>
    getDefaultGridPtr(int param_id);

    static
    Ptr<SVM>
    load(const String& filepath);
};

Inherited Members

public:
    // enums

    enum Flags;

    // methods

    virtual
    void
    clear();

    virtual
    bool
    empty() const;

    virtual
    String
    getDefaultName() const;

    virtual
    void
    read(const FileNode& fn);

    virtual
    void
    save(const String& filename) const;

    virtual
    void
    write(FileStorage& fs) const;

    template <typename _Tp>
    static
    Ptr<_Tp>
    load(
        const String& filename,
        const String& objname = String()
        );

    template <typename _Tp>
    static
    Ptr<_Tp>
    loadFromString(
        const String& strModel,
        const String& objname = String()
        );

    template <typename _Tp>
    static
    Ptr<_Tp>
    read(const FileNode& fn);

    virtual
    float
    calcError(
        const Ptr<TrainData>& data,
        bool test,
        OutputArray resp
        ) const;

    virtual
    bool
    empty() const;

    virtual
    int
    getVarCount() const = 0;

    virtual
    bool
    isClassifier() const = 0;

    virtual
    bool
    isTrained() const = 0;

    virtual
    float
    predict(
        InputArray samples,
        OutputArray results = noArray(),
        int flags = 0
        ) const = 0;

    virtual
    bool
    train(
        const Ptr<TrainData>& trainData,
        int flags = 0
        );

    virtual
    bool
    train(
        InputArray samples,
        int layout,
        InputArray responses
        );

    template <typename _Tp>
    static
    Ptr<_Tp>
    train(
        const Ptr<TrainData>& data,
        int flags = 0
        );

protected:
    // methods

    void
    writeFormat(FileStorage& fs) const;

Detailed Documentation

Support Vector Machines.

See also:

Support Vector Machines

Methods

virtual
double
getC() const = 0

Parameter C of an SVM optimization problem. For SVM::C_SVC, SVM::EPS_SVR or SVM::NU_SVR. Default value is 0.

See also:

setC

virtual
cv::Mat
getClassWeights() const = 0

Optional weights in the SVM::C_SVC problem, assigned to particular classes. They are multiplied by C so the parameter C of class i becomes classWeights(i) * C. Thus these weights affect the misclassification penalty for different classes. The larger the weight, the larger the penalty on misclassification of data from the corresponding class. Default value is an empty Mat.

See also:

setClassWeights
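
For illustration, a minimal sketch of weighting one class more heavily in a two-class SVM::C_SVC problem; the class count, the weight values and the CV_64F element type are assumptions made for this example:

#include <opencv2/ml.hpp>

using namespace cv;
using namespace cv::ml;

int main()
{
    Ptr<SVM> svm = SVM::create();
    svm->setType(SVM::C_SVC);
    svm->setKernel(SVM::LINEAR);
    svm->setC(1.0);

    // One weight per class (assumed two classes): misclassifying class 1 is
    // penalized five times more heavily than class 0, since the effective C
    // of class i becomes classWeights(i) * C.
    Mat weights = (Mat_<double>(2, 1) << 1.0, 5.0);
    svm->setClassWeights(weights);
    return 0;
}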

virtual
double
getCoef0() const = 0

Parameter coef0 of a kernel function. For SVM::POLY or SVM::SIGMOID. Default value is 0.

See also:

setCoef0

virtual
double
getDecisionFunction(
    int i,
    OutputArray alpha,
    OutputArray svidx
    ) const = 0

Retrieves the decision function.

The method returns rho parameter of the decision function, a scalar subtracted from the weighted sum of kernel responses.

Parameters:

i the index of the decision function. If the problem solved is regression, 1-class or 2-class classification, then there will be just one decision function and the index should always be 0. Otherwise, in the case of N-class classification, there will be \(N(N-1)/2\) decision functions.
alpha the optional output vector for weights, corresponding to different support vectors. In the case of a linear SVM, all the alphas will be 1's.
svidx the optional output vector of indices of support vectors within the matrix of support vectors (which can be retrieved by SVM::getSupportVectors). In the case of a linear SVM, each decision function consists of a single “compressed” support vector.
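
As a hedged illustration, the sketch below trains a two-class linear SVM on made-up points and reads back its single decision function (index 0):

#include <opencv2/ml.hpp>
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Made-up, linearly separable 2-D data: two classes of two points each.
    float samplesData[8] = { 0.f, 0.f,  1.f, 1.f,  9.f, 9.f,  10.f, 10.f };
    int labelsData[4] = { 0, 0, 1, 1 };
    Mat samples(4, 2, CV_32F, samplesData);
    Mat labels(4, 1, CV_32S, labelsData);

    Ptr<SVM> svm = SVM::create();
    svm->setKernel(SVM::LINEAR);
    svm->train(samples, ROW_SAMPLE, labels);

    // Two-class problem: there is exactly one decision function.
    Mat alpha, svidx;
    double rho = svm->getDecisionFunction(0, alpha, svidx);
    std::cout << "rho = " << rho
              << ", referenced support vectors: " << svidx.total() << std::endl;
    return 0;
}
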
virtual
double
getDegree() const = 0

Parameter degree of a kernel function. For SVM::POLY. Default value is 0.

See also:

setDegree

virtual
double
getGamma() const = 0

Parameter \(\gamma\) of a kernel function. For SVM::POLY, SVM::RBF, SVM::SIGMOID or SVM::CHI2. Default value is 1.

See also:

setGamma

virtual
int
getKernelType() const = 0

Type of an SVM kernel. See SVM::KernelTypes. Default value is SVM::RBF.

virtual
double
getNu() const = 0

Parameter \(\nu\) of an SVM optimization problem. For SVM::NU_SVC, SVM::ONE_CLASS or SVM::NU_SVR. Default value is 0.

See also:

setNu

virtual
double
getP() const = 0

Parameter \(\epsilon\) of an SVM optimization problem. For SVM::EPS_SVR. Default value is 0.

See also:

setP

virtual
Mat
getSupportVectors() const = 0

Retrieves all the support vectors.

The method returns all the support vectors as a floating-point matrix, where support vectors are stored as matrix rows.

virtual
cv::TermCriteria
getTermCriteria() const = 0

Termination criteria of the iterative SVM training procedure which solves a partial case of constrained quadratic optimization problem. You can specify tolerance and/or the maximum number of iterations. Default value is TermCriteria( TermCriteria::MAX_ITER + TermCriteria::EPS, 1000, FLT_EPSILON );

See also:

setTermCriteria
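
For instance, a short sketch of overriding the default stopping rule before training; the iteration count and tolerance below are arbitrary illustrative values:

#include <opencv2/ml.hpp>

using namespace cv;
using namespace cv::ml;

int main()
{
    Ptr<SVM> svm = SVM::create();
    // Stop after at most 5000 iterations or once the accuracy reaches 1e-6,
    // whichever comes first.
    svm->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER + TermCriteria::EPS,
                                      5000, 1e-6));
    return 0;
}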

virtual
int
getType() const = 0

Type of an SVM formulation. See SVM::Types. Default value is SVM::C_SVC.

See also:

setType

Mat
getUncompressedSupportVectors() const

Retrieves all the uncompressed support vectors of a linear SVM.

The method returns all the uncompressed support vectors of a linear SVM that the compressed support vector, used for prediction, was derived from. They are returned in a floating-point matrix, where the support vectors are stored as matrix rows.
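
A hedged sketch contrasting the two accessors for a linear SVM trained on made-up data; the compressed set holds the single vector used for prediction, while the uncompressed set holds the support vectors it was derived from:

#include <opencv2/ml.hpp>
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Made-up, linearly separable 2-D data.
    float samplesData[8] = { 0.f, 0.f,  1.f, 1.f,  9.f, 9.f,  10.f, 10.f };
    int labelsData[4] = { 0, 0, 1, 1 };
    Mat samples(4, 2, CV_32F, samplesData);
    Mat labels(4, 1, CV_32S, labelsData);

    Ptr<SVM> svm = SVM::create();
    svm->setKernel(SVM::LINEAR);
    svm->train(samples, ROW_SAMPLE, labels);

    Mat compressed = svm->getSupportVectors();               // one row per (compressed) vector
    Mat uncompressed = svm->getUncompressedSupportVectors(); // original support vectors
    std::cout << "compressed rows: " << compressed.rows
              << ", uncompressed rows: " << uncompressed.rows << std::endl;
    return 0;
}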

virtual
void
setC(double val) = 0

See also:

getC

virtual
void
setClassWeights(const cv::Mat& val) = 0

See also:

getClassWeights

virtual
void
setCoef0(double val) = 0

See also:

getCoef0

virtual
void
setCustomKernel(const Ptr<Kernel>& _kernel) = 0

Initialize with a custom kernel. See the SVM::Kernel class for implementation details.

virtual
void
setDegree(double val) = 0

See also:

getDegree

virtual
void
setGamma(double val) = 0

See also:

getGamma

virtual
void
setKernel(int kernelType) = 0

Initialize with one of the predefined kernels. See SVM::KernelTypes.

virtual
void
setNu(double val) = 0

See also:

getNu

virtual
void
setP(double val) = 0

See also:

getP

virtual
void
setTermCriteria(const cv::TermCriteria& val) = 0

See also:

getTermCriteria

virtual
void
setType(int val) = 0

See also:

getType

virtual
bool
trainAuto(
    const Ptr<TrainData>& data,
    int kFold = 10,
    ParamGrid Cgrid = getDefaultGrid(C),
    ParamGrid gammaGrid = getDefaultGrid(GAMMA),
    ParamGrid pGrid = getDefaultGrid(P),
    ParamGrid nuGrid = getDefaultGrid(NU),
    ParamGrid coeffGrid = getDefaultGrid(COEF),
    ParamGrid degreeGrid = getDefaultGrid(DEGREE),
    bool balanced = false
    ) = 0

Trains an SVM with optimal parameters.

The method trains the SVM model automatically by choosing the optimal parameters C, gamma, p, nu, coef0, degree. Parameters are considered optimal when the cross-validation estimate of the test set error is minimal.

If there is no need to optimize a parameter, the corresponding grid step should be set to any value less than or equal to 1. For example, to avoid optimization in gamma, set gammaGrid.step = 0 and leave gammaGrid.minVal and gammaGrid.maxVal as arbitrary numbers. In this case, the current value of gamma is used.

And, finally, if the optimization in a parameter is required but the corresponding grid is unknown, you may call the function SVM::getDefaultGrid. To generate a grid, for example, for gamma, call SVM::getDefaultGrid(SVM::GAMMA).

This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the regression (SVM::EPS_SVR or SVM::NU_SVR) formulations. For SVM::ONE_CLASS, no optimization is made and the usual SVM with the currently specified parameters is trained.

Parameters:

data the training data that can be constructed using TrainData::create or TrainData::loadFromCSV.
kFold Cross-validation parameter. The training set is divided into kFold subsets. One subset is used to test the model, while the others form the training set. So, the SVM algorithm is executed kFold times.
Cgrid grid for C
gammaGrid grid for gamma
pGrid grid for p
nuGrid grid for nu
coeffGrid grid for coeff
degreeGrid grid for degree
balanced If true and the problem is 2-class classification, then the method creates more balanced cross-validation subsets, that is, the proportions between classes in the subsets are close to the proportions in the whole training dataset.
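
As a hedged sketch, the call below runs a 5-fold parameter search over the default grids on made-up two-class data; the dataset, seed and fold count are assumptions for illustration only:

#include <opencv2/ml.hpp>
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Made-up data: 20 two-dimensional samples forming two clusters.
    Mat samples(20, 2, CV_32F), labels(20, 1, CV_32S);
    RNG rng(12345);
    for (int i = 0; i < 20; i++)
    {
        int cls = i < 10 ? 0 : 1;
        float center = cls == 0 ? 1.f : 8.f;
        samples.at<float>(i, 0) = center + (float)rng.gaussian(0.5);
        samples.at<float>(i, 1) = center + (float)rng.gaussian(0.5);
        labels.at<int>(i) = cls;
    }
    Ptr<TrainData> data = TrainData::create(samples, ROW_SAMPLE, labels);

    Ptr<SVM> svm = SVM::create();
    svm->setType(SVM::C_SVC);
    svm->setKernel(SVM::RBF);

    // 5-fold cross-validation over the default grids for all parameters.
    svm->trainAuto(data, 5);
    std::cout << "chosen C = " << svm->getC()
              << ", gamma = " << svm->getGamma() << std::endl;
    return 0;
}
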
bool
trainAuto(
    InputArray samples,
    int layout,
    InputArray responses,
    int kFold = 10,
    Ptr<ParamGrid> Cgrid = SVM::getDefaultGridPtr(SVM::C),
    Ptr<ParamGrid> gammaGrid = SVM::getDefaultGridPtr(SVM::GAMMA),
    Ptr<ParamGrid> pGrid = SVM::getDefaultGridPtr(SVM::P),
    Ptr<ParamGrid> nuGrid = SVM::getDefaultGridPtr(SVM::NU),
    Ptr<ParamGrid> coeffGrid = SVM::getDefaultGridPtr(SVM::COEF),
    Ptr<ParamGrid> degreeGrid = SVM::getDefaultGridPtr(SVM::DEGREE),
    bool balanced = false
    )

Trains an SVM with optimal parameters.

The method trains the SVM model automatically by choosing the optimal parameters C, gamma, p, nu, coef0, degree. Parameters are considered optimal when the cross-validation estimate of the test set error is minimal.

This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only offers rudimentary parameter options.

This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the regression (SVM::EPS_SVR or SVM::NU_SVR) formulations. For SVM::ONE_CLASS, no optimization is made and the usual SVM with the currently specified parameters is trained.

Parameters:

samples training samples
layout See ml::SampleTypes.
responses vector of responses associated with the training samples.
kFold Cross-validation parameter. The training set is divided into kFold subsets. One subset is used to test the model, while the others form the training set. So, the SVM algorithm is executed kFold times.
Cgrid grid for C
gammaGrid grid for gamma
pGrid grid for p
nuGrid grid for nu
coeffGrid grid for coeff
degreeGrid grid for degree
balanced If true and the problem is 2-class classification, then the method creates more balanced cross-validation subsets, that is, the proportions between classes in the subsets are close to the proportions in the whole training dataset.
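
A sketch of the same search using this overload, passing the samples directly and keeping gamma fixed through a degenerate grid (following the grid-step convention described for the first overload); the data and parameter values are made up:

#include <opencv2/ml.hpp>
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Made-up data: 20 two-dimensional samples forming two clusters.
    Mat samples(20, 2, CV_32F), labels(20, 1, CV_32S);
    RNG rng(42);
    for (int i = 0; i < 20; i++)
    {
        int cls = i < 10 ? 0 : 1;
        float center = cls == 0 ? 2.f : 7.f;
        samples.at<float>(i, 0) = center + (float)rng.gaussian(0.4);
        samples.at<float>(i, 1) = center + (float)rng.gaussian(0.4);
        labels.at<int>(i) = cls;
    }

    Ptr<SVM> svm = SVM::create();
    svm->setKernel(SVM::RBF);
    svm->setGamma(0.5);

    // A grid with log step <= 1 disables the search for that parameter,
    // so the gamma value set above is kept; all other grids use defaults.
    Ptr<ParamGrid> fixedGamma = ParamGrid::create(1, 1, 0);
    svm->trainAuto(samples, ROW_SAMPLE, labels, 5,
                   SVM::getDefaultGridPtr(SVM::C), fixedGamma);
    std::cout << "chosen C = " << svm->getC()
              << ", gamma = " << svm->getGamma() << std::endl;
    return 0;
}
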
static
Ptr<SVM>
create()

Creates an empty model. Use StatModel::train to train the model. Since SVM has several parameters, you may want to find the best parameters for your problem; this can be done with SVM::trainAuto.
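
For context, a minimal end-to-end sketch: create a model, set its parameters, train it, and classify a new sample; the data points are made up for illustration:

#include <opencv2/ml.hpp>
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Made-up training data: four 2-D points, labels +1 and -1.
    float trainArr[8] = { 501.f, 10.f,  255.f, 10.f,  501.f, 255.f,  10.f, 501.f };
    int labelArr[4] = { 1, -1, -1, -1 };
    Mat trainMat(4, 2, CV_32F, trainArr);
    Mat labelsMat(4, 1, CV_32S, labelArr);

    Ptr<SVM> svm = SVM::create();
    svm->setType(SVM::C_SVC);
    svm->setKernel(SVM::LINEAR);
    svm->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER, 100, 1e-6));
    svm->train(trainMat, ROW_SAMPLE, labelsMat);

    // Classify a new point.
    Mat query = (Mat_<float>(1, 2) << 400.f, 20.f);
    std::cout << "predicted label: " << svm->predict(query) << std::endl;
    return 0;
}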

static
ParamGrid
getDefaultGrid(int param_id)

Generates a grid for SVM parameters.

The function generates a grid for the specified parameter of the SVM algorithm. The grid may be passed to the function SVM::trainAuto.

Parameters:

param_id SVM parameter ID that must be one of the SVM::ParamTypes values. The grid is generated for the parameter with this ID.
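
For example, a small sketch that inspects the default C grid and narrows it before handing it to SVM::trainAuto; the narrowed bound is an arbitrary choice:

#include <opencv2/ml.hpp>
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    ParamGrid cGrid = SVM::getDefaultGrid(SVM::C);
    std::cout << "default C grid: [" << cGrid.minVal << ", " << cGrid.maxVal
              << "], log step " << cGrid.logStep << std::endl;

    // Search a smaller range; the grid can then be passed as the Cgrid
    // argument of SVM::trainAuto.
    cGrid.maxVal = 100.0;
    return 0;
}
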
static
Ptr<ParamGrid>
getDefaultGridPtr(int param_id)

Generates a grid for SVM parameters.

The function generates a grid pointer for the specified parameter of the SVM algorithm. The grid may be passed to the function SVM::trainAuto.

Parameters:

param_id SVM parameter ID that must be one of the SVM::ParamTypes values. The grid is generated for the parameter with this ID.

static
Ptr<SVM>
load(const String& filepath)

Loads and creates a serialized SVM from a file.

Use SVM::save to serialize and store an SVM to disk. Load the SVM from this file again by calling this function with the path to the file.

Parameters:

filepath path to the serialized SVM
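
A hedged sketch of the round trip: train a trivial model on made-up data, store it with SVM::save, and restore it with SVM::load; the file name is only an illustrative choice:

#include <opencv2/ml.hpp>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Made-up, linearly separable 2-D data.
    float samplesData[8] = { 0.f, 0.f,  1.f, 1.f,  9.f, 9.f,  10.f, 10.f };
    int labelsData[4] = { 0, 0, 1, 1 };
    Mat samples(4, 2, CV_32F, samplesData);
    Mat labels(4, 1, CV_32S, labelsData);

    Ptr<SVM> svm = SVM::create();
    svm->setKernel(SVM::LINEAR);
    svm->train(samples, ROW_SAMPLE, labels);
    svm->save("svm_model.xml");

    // Recreate the trained model from the file.
    Ptr<SVM> loaded = SVM::load("svm_model.xml");
    CV_Assert(loaded->isTrained());
    return 0;
}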