using OpenCvSharp;
using OpenCvSharp.ML;
namespace demo
{
internal class Program
{
private static int NTRAINING_SAMPLES = 100; // Number of training samples per class
private static double FRAC_LINEAR_SEP = 0.9; // Fraction of samples which compose the linearly separable part
static void Main(string[] args)
{
int WIDTH = 512, HEIGHT = 512;
Mat I = new Mat(HEIGHT, WIDTH, MatType.CV_8UC3); // Image for visual representation
//--------------------- 1. Set up training data randomly ---------------------------------------
Mat trainData = new Mat(2 * NTRAINING_SAMPLES, 2, MatType.CV_32FC1);
Mat labels = new Mat(2 * NTRAINING_SAMPLES, 1, MatType.CV_32SC1);
RNG rng = new RNG(100);// Random value generation class
// Set up the linearly separable part of the training data
int nLinearSamples = (int)(FRAC_LINEAR_SEP * NTRAINING_SAMPLES);
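// With FRAC_LINEAR_SEP = 0.9, 90 of the 100 samples per class lie in a linearly separable region; the remaining 10 per class overlap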
//! [setup1]
// Generate random points for class 1
Mat trainClass = trainData.RowRange(0, nLinearSamples);
// The x coordinate of the points is in [0, 0.4)
Mat c = trainClass.ColRange(0, 1);
rng.Fill(c, DistributionType.Uniform, new Scalar(1), new Scalar(0.4 * WIDTH));
// The y coordinate of the points is in [0, 1)
c = trainClass.ColRange(1, 2);
rng.Fill(c, DistributionType.Uniform, new Scalar(1), new Scalar(HEIGHT));
// Generate random points for class 2
trainClass = trainData.RowRange(2 * NTRAINING_SAMPLES - nLinearSamples, 2 * NTRAINING_SAMPLES);
// The x coordinate of the points is in [0.6, 1)
c = trainClass.ColRange(0, 1);
rng.Fill(c, DistributionType.Uniform, new Scalar(0.6 * WIDTH), new Scalar(WIDTH));
// The y coordinate of the points is in [0, 1)
c = trainClass.ColRange(1, 2);
rng.Fill(c, DistributionType.Uniform, new Scalar(1), new Scalar(HEIGHT));
//! [setup1]
//------------------ Set up the non-linearly separable part of the training data ---------------
//! [setup2]
// Generate random points for classes 1 and 2
trainClass = trainData.RowRange(nLinearSamples, 2 * NTRAINING_SAMPLES - nLinearSamples);
// The x coordinate of the points is in [0.4, 0.6)
c = trainClass.ColRange(0, 1);
rng.Fill(c, DistributionType.Uniform, new Scalar(0.4 * WIDTH), new Scalar(0.6 * WIDTH));
// The y coordinate of the points is in [0, 1)
c = trainClass.ColRange(1, 2);
rng.Fill(c, DistributionType.Uniform, new Scalar(1), new Scalar(HEIGHT));
//! [setup2]
//------------------------- Set up the labels for the classes ---------------------------------
labels.RowRange(0, NTRAINING_SAMPLES).SetTo(1); // Class 1
labels.RowRange(NTRAINING_SAMPLES, 2 * NTRAINING_SAMPLES).SetTo(2); // Class 2
//------------------------ 2. Set up the support vector machines parameters --------------------
//------------------------ 3. Train the svm ----------------------------------------------------
System.Diagnostics.Debug.WriteLine("Starting training process");
//! [init]
SVM svm = SVM.Create(); // Create the classifier and set its parameters
svm.Type = SVM.Types.CSvc;
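// A small C widens the soft margin and tolerates the misclassified points in the overlapping region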
svm.C = 0.1;
svm.KernelType = SVM.KernelTypes.Linear;
svm.TermCriteria = new TermCriteria(CriteriaTypes.MaxIter, (int)1e7, 1e-6);
//! [init]
//! [train]
svm.Train(trainData, SampleTypes.RowSample, labels); // Train the classifier
//! [train]
System.Diagnostics.Debug.WriteLine("Finished training process");
//------------------------ 4. Show the decision regions ----------------------------------------
//! [show]
Vec3b green = new Vec3b(0, 100, 0);
Vec3b blue = new Vec3b(100, 0, 0);
for (int i = 0; i < I.Rows; ++i)
{
for (int j = 0; j < I.Cols; ++j)
{
float[] testFeatureData = { j, i }; // Test sample: x = column j, y = row i
Mat sampleMat = new Mat(1, 2, MatType.CV_32F, testFeatureData);
float response = svm.Predict(sampleMat); // Predict the class label (1 or 2)
if (response == 1)
I.Set(i, j, green);
else
I.Set(i, j, blue);
}
}
//! [show]
//----------------------- 5. Show the training data --------------------------------------------
//! [show_data]
float px, py;
// Class 1
for (int i = 0; i < NTRAINING_SAMPLES; ++i)
{
px = trainData.At<float>(i, 0);
py = trainData.At<float>(i, 1);
Cv2.Circle(I, (int)px, (int)py, 3, new Scalar(0, 255, 0), 1, LineTypes.Link8);
}
// Class 2
for (int i = NTRAINING_SAMPLES; i < 2 * NTRAINING_SAMPLES; ++i)
{
px = trainData.At<float>(i, 0);
py = trainData.At<float>(i, 1);
Cv2.Circle(I, (int)px, (int)py, 3, new Scalar(255, 0, 0), 1, LineTypes.Link8);
}
//! [show_data]
//------------------------- 6. Show support vectors --------------------------------------------
//! [show_vectors]
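// Note: with a linear kernel OpenCV compresses the support vectors used for prediction into a single vector,
// so GetSupportVectors() may return only one row (the C++ sample below calls getUncompressedSupportVectors() instead)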
Mat sv = svm.GetSupportVectors();
for (int i = 0; i < sv.Rows; ++i)
{
unsafe // Unsafe block: access the row through a raw pointer (requires AllowUnsafeBlocks in the project settings)
{
float* v = (float*)sv.Ptr(i).ToPointer(); // Pointer to the first element of row i
Point pt = new Point((int)v[0], (int)v[1]);
Cv2.Circle(I, pt.X, pt.Y, 6, new Scalar(128, 128, 128), 2, LineTypes.Link8);
}
}
//! [show_vectors]
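// A sketch of the same loop without unsafe code, using the generic accessor Mat.At<float>:
//   float vx = sv.At<float>(i, 0);
//   float vy = sv.At<float>(i, 1);
//   Cv2.Circle(I, (int)vx, (int)vy, 6, new Scalar(128, 128, 128), 2, LineTypes.Link8);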
Cv2.ImWrite("result.jpg", I); // save the Image
//pictureBox1.Image = BitmapConverter.ToBitmap(I);
Cv2.ImShow("SVM for Non-Linear Training Data", I); // show it to the user
Cv2.WaitKey();
}
}
}
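For reference, the original C++ sample from the OpenCV tutorial (non_linear_svms) that the C# port above follows: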
#include <iostream>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include "opencv2/imgcodecs.hpp"
#include <opencv2/highgui.hpp>
#include <opencv2/ml.hpp>
#define NTRAINING_SAMPLES 100 // Number of training samples per class
#define FRAC_LINEAR_SEP 0.9f // Fraction of samples which compose the linearly separable part
using namespace cv;
using namespace cv::ml;
using namespace std;
static void help()
{
cout << "\n--------------------------------------------------------------------------" << endl
<< "This program shows Support Vector Machines for Non-Linearly Separable Data. " << endl
<< "Usage:" << endl
<< "./non_linear_svms" << endl
<< "--------------------------------------------------------------------------" << endl
<< endl;
}
int main()
{
help();
// Data for visual representation
const int WIDTH = 512, HEIGHT = 512;
Mat I = Mat::zeros(HEIGHT, WIDTH, CV_8UC3);
//--------------------- 1. Set up training data randomly ---------------------------------------
Mat trainData(2 * NTRAINING_SAMPLES, 2, CV_32FC1);
Mat labels(2 * NTRAINING_SAMPLES, 1, CV_32SC1);
RNG rng(100); // Random value generation class
// Set up the linearly separable part of the training data
int nLinearSamples = (int)(FRAC_LINEAR_SEP * NTRAINING_SAMPLES);
//! [setup1]
// Generate random points for class 1
Mat trainClass = trainData.rowRange(0, nLinearSamples);
// The x coordinate of the points is in [0, 0.4)
Mat c = trainClass.colRange(0, 1);
rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(0.4 * WIDTH));
// The y coordinate of the points is in [0, 1)
c = trainClass.colRange(1, 2);
rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(HEIGHT));
// Generate random points for class 2
trainClass = trainData.rowRange(2 * NTRAINING_SAMPLES - nLinearSamples, 2 * NTRAINING_SAMPLES);
// The x coordinate of the points is in [0.6, 1)
c = trainClass.colRange(0, 1);
rng.fill(c, RNG::UNIFORM, Scalar(0.6 * WIDTH), Scalar(WIDTH));
// The y coordinate of the points is in [0, 1)
c = trainClass.colRange(1, 2);
rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(HEIGHT));
//! [setup1]
//------------------ Set up the non-linearly separable part of the training data ---------------
//! [setup2]
// Generate random points for classes 1 and 2
trainClass = trainData.rowRange(nLinearSamples, 2 * NTRAINING_SAMPLES - nLinearSamples);
// The x coordinate of the points is in [0.4, 0.6)
c = trainClass.colRange(0, 1);
rng.fill(c, RNG::UNIFORM, Scalar(0.4 * WIDTH), Scalar(0.6 * WIDTH));
// The y coordinate of the points is in [0, 1)
c = trainClass.colRange(1, 2);
rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(HEIGHT));
//! [setup2]
//------------------------- Set up the labels for the classes ---------------------------------
labels.rowRange(0, NTRAINING_SAMPLES).setTo(1); // Class 1
labels.rowRange(NTRAINING_SAMPLES, 2 * NTRAINING_SAMPLES).setTo(2); // Class 2
//------------------------ 2. Set up the support vector machines parameters --------------------
//------------------------ 3. Train the svm ----------------------------------------------------
cout << "Starting training process" << endl;
//! [init]
Ptr<SVM> svm = SVM::create();
svm->setType(SVM::C_SVC);
svm->setC(0.1);
svm->setKernel(SVM::LINEAR);
svm->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER, (int)1e7, 1e-6));
//! [init]
//! [train]
svm->train(trainData, ROW_SAMPLE, labels);
//! [train]
cout << "Finished training process" << endl;
//------------------------ 4. Show the decision regions ----------------------------------------
//! [show]
Vec3b green(0, 100, 0), blue(100, 0, 0);
for (int i = 0; i < I.rows; ++i)
for (int j = 0; j < I.cols; ++j)
{
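// Note: the sample is (i, j) and the result is written to I.at<Vec3b>(j, i), so i plays the role of the
// x coordinate and j of the y coordinate, matching the training data layout (x in column 0, y in column 1)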
Mat sampleMat = (Mat_<float>(1, 2) << i, j);
float response = svm->predict(sampleMat);
if (response == 1) I.at<Vec3b>(j, i) = green;
else if (response == 2) I.at<Vec3b>(j, i) = blue;
}
//! [show]
//----------------------- 5. Show the training data --------------------------------------------
//! [show_data]
int thick = -1;
int lineType = 8;
float px, py;
// Class 1
for (int i = 0; i < NTRAINING_SAMPLES; ++i)
{
px = trainData.at<float>(i, 0);
py = trainData.at<float>(i, 1);
circle(I, Point((int)px, (int)py), 3, Scalar(0, 255, 0), thick, lineType);
}
// Class 2
for (int i = NTRAINING_SAMPLES; i < 2 * NTRAINING_SAMPLES; ++i)
{
px = trainData.at<float>(i, 0);
py = trainData.at<float>(i, 1);
circle(I, Point((int)px, (int)py), 3, Scalar(255, 0, 0), thick, lineType);
}
//! [show_data]
//------------------------- 6. Show support vectors --------------------------------------------
//! [show_vectors]
thick = 2;
lineType = 8;
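// For a LINEAR kernel the support vectors used for prediction are compressed into a single vector;
// getUncompressedSupportVectors() returns the original support vectors so they can all be drawn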
Mat sv = svm->getUncompressedSupportVectors();
for (int i = 0; i < sv.rows; ++i)
{
const float* v = sv.ptr<float>(i);
circle(I, Point((int)v[0], (int)v[1]), 6, Scalar(128, 128, 128), thick, lineType);
}
//! [show_vectors]
imwrite("result.png", I); // save the Image
imshow("SVM for Non-Linear Training Data", I); // show it to the user
waitKey(0);
}