using OpenCvSharp;
namespace demo
{
    internal class Program
    {
        /// <summary>
        /// Dense optical flow (Farneback) demo: encodes flow direction as hue and
        /// flow magnitude as value in an HSV image, then displays the BGR conversion.
        /// Port of the OpenCV-Python optical flow tutorial sample.
        /// </summary>
        static void Main(string[] args)
        {
            VideoCapture cap = new VideoCapture("../../../images/vtest.avi");
            if (!cap.IsOpened())
            {
                // BUGFIX: bail out instead of crashing on the first Read of a missing file.
                System.Console.Error.WriteLine("Unable to open file!");
                return;
            }
            Mat frame1 = new Mat();
            cap.Read(frame1);
            Mat prvs = new Mat();
            Cv2.CvtColor(frame1, prvs, ColorConversionCodes.BGR2GRAY);
            Mat hsv = new Mat();
            // Build an HSV image whose saturation channel (index 1) is a constant 255;
            // hue and value are filled in per frame inside the loop.
            Mat[] setChannel = {
                new Mat(frame1.Size(), MatType.CV_8UC1, new Scalar(0)),
                new Mat(frame1.Size(), MatType.CV_8UC1, new Scalar(255)),
                new Mat(frame1.Size(), MatType.CV_8UC1, new Scalar(0))
            };
            Cv2.Merge(setChannel, hsv);
            Mat frame2 = new Mat();
            Mat next = new Mat();
            Mat flow = new Mat();
            Mat mag = new Mat();
            Mat ang = new Mat();
            Mat magn_norm = new Mat();
            Mat bgr = new Mat();
            while (true)
            {
                // BUGFIX: stop at end of stream; CvtColor would otherwise throw on an empty Mat.
                if (!cap.Read(frame2) || frame2.Empty())
                    break;
                Cv2.CvtColor(frame2, next, ColorConversionCodes.BGR2GRAY);
                Cv2.CalcOpticalFlowFarneback(prvs, next, flow, 0.5, 3, 15, 3, 5, 1.2, OpticalFlowFlags.None);
                // Split the 2-channel flow field into x and y components.
                Mat[] channels = new Mat[2];
                Cv2.Split(flow, out channels);
                // angleInDegrees defaults to false here, so ang is in radians.
                Cv2.CartToPolar(channels[0], channels[1], mag, ang);
                Cv2.Normalize(mag, magn_norm, 0, 255, NormTypes.MinMax);
                Mat[] hsvCh = new Mat[3];
                Cv2.Split(hsv, out hsvCh);
                // Radians -> hue in [0,180) for 8-bit HSV, then CV_32FC1 -> CV_8UC1.
                ang = ang * 180 / Cv2.PI / 2;
                ang.ConvertTo(ang, MatType.CV_8UC1);
                magn_norm.ConvertTo(magn_norm, MatType.CV_8UC1);
                hsvCh[1].ConvertTo(hsvCh[1], MatType.CV_8UC1);
                Mat[] _hsv = { ang, hsvCh[1], magn_norm };
                Cv2.Merge(_hsv, hsv);
                Cv2.CvtColor(hsv, bgr, ColorConversionCodes.HSV2BGR);
                Cv2.ImShow("frame2", frame2);
                Cv2.ImShow("flow", bgr);
                if (Cv2.WaitKey(1) == 27)   // Esc quits
                    break;
                next.CopyTo(prvs);
            }
            cap.Release();
            Cv2.DestroyAllWindows();
        }
    }
}
C++版本代码如下:
#include <iostream>
#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/video.hpp>
#include <opencv2/highgui/highgui_c.h>
using namespace cv;
using namespace std;
/// <summary>
/// 《OpenCV-Python-Tutorial-中文版.pdf》 P235
/// </summary>
// Dense optical flow (Farneback) demo keeping the HSV image in CV_32F.
// Direction of flow -> hue, magnitude of flow -> value; saturation is constant.
// 《OpenCV-Python-Tutorial-中文版.pdf》 P235
void demo1()
{
    VideoCapture cap(samples::findFile("../images/vtest.avi"));
    Mat frame1, prvs;
    cap >> frame1;
    cvtColor(frame1, prvs, COLOR_BGR2GRAY);
    // For floating-point HSV, cvtColor expects H in [0,360) and S,V in [0,1].
    Mat hsv;
    Mat t[3];
    t[0] = Mat(frame1.size(), CV_32F, Scalar(0));
    // BUGFIX: saturation plane was Scalar(255); 255 is far outside the [0,1]
    // range of float HSV and produced garbage colors after HSV2BGR.
    t[1] = Mat(frame1.size(), CV_32F, Scalar(1));
    t[2] = Mat(frame1.size(), CV_32F, Scalar(0));
    merge(t, 3, hsv);
    while (true)
    {
        Mat frame2, next;
        cap >> frame2;
        if (frame2.empty())
            break;
        cvtColor(frame2, next, COLOR_BGR2GRAY);
        // 计算光流
        Mat flow(prvs.size(), CV_32FC2);
        calcOpticalFlowFarneback(prvs, next, flow, 0.5, 3, 15, 3, 5, 1.2, 0);
        // 分离通道
        Mat flow_parts[2];
        split(flow, flow_parts);
        Mat mag, ang, magn_norm;
        // angleInDegrees=true: ang already holds degrees in [0,360).
        cartToPolar(flow_parts[0], flow_parts[1], mag, ang, true);
        normalize(mag, magn_norm, 0.0f, 1.0f, NORM_MINMAX);
        Mat hsvCh[3];
        split(hsv, hsvCh);
        Mat _hsv[3], bgr;
        // BUGFIX: the old code applied the radians->hue factor (*180/CV_PI/2)
        // to an angle that is ALREADY in degrees, pushing hue outside [0,360).
        _hsv[0] = ang;
        _hsv[1] = hsvCh[1];
        _hsv[2] = magn_norm;   // BUGFIX: statement was terminated with a comma operator
        merge(_hsv, 3, hsv);
        cvtColor(hsv, bgr, COLOR_HSV2BGR);
        // bgr is CV_32F in [0,1], which imshow displays directly.
        imshow("frame2", frame2);
        imshow("bgr", bgr);
        if (waitKey(1) == 27)
            break;
        prvs = next;
    }
}
/// <summary>
/// 来源:https://blog.csdn.net/ResumeProject/article/details/128507520
/// </summary>
// Dense optical flow (Farneback) demo following the official OpenCV sample:
// builds a float HSV image per frame, then converts to 8-bit for display.
// 来源:https://blog.csdn.net/ResumeProject/article/details/128507520
void demo2()
{
    VideoCapture cap(samples::findFile("../images/vtest.avi"));
    if (!cap.isOpened()) {
        cerr << "Unable to open file!" << endl;
        return;
    }
    Mat frame1, prvs;
    cap >> frame1;
    cvtColor(frame1, prvs, COLOR_BGR2GRAY);
    while (true)
    {
        Mat frame2, next;
        cap >> frame2;
        if (frame2.empty())
            break;
        cvtColor(frame2, next, COLOR_BGR2GRAY);
        // 计算光流
        Mat flow(prvs.size(), CV_32FC2);
        calcOpticalFlowFarneback(prvs, next, flow, 0.5, 3, 15, 3, 5, 1.2, 0);
        // 分离通道
        Mat flow_parts[2];
        split(flow, flow_parts);
        Mat mag, ang, magn_norm;
        // angleInDegrees=true: ang holds degrees in [0,360).
        cartToPolar(flow_parts[0], flow_parts[1], mag, ang, true);
        normalize(mag, magn_norm, 0.0f, 1.0f, NORM_MINMAX);
        //build hsv image
        Mat _hsv[3], hsv, hsv8, bgr;
        // BUGFIX: ang is already in degrees, and convertTo below multiplies every
        // channel by 255. Scale so the 8-bit hue lands in [0,180] (the scaling
        // used by the official OpenCV optical_flow sample); the old
        // ang*180/CV_PI/2 left hue wildly out of range after the *255.
        _hsv[0] = ang * ((1.f / 360.f) * (180.f / 255.f));
        _hsv[1] = Mat::ones(ang.size(), CV_32F);
        _hsv[2] = magn_norm;
        merge(_hsv, 3, hsv);
        hsv.convertTo(hsv8, CV_8U, 255.0);
        cvtColor(hsv8, bgr, COLOR_HSV2BGR);
        imshow("frame2", frame2);
        imshow("bgr", bgr);
        if (waitKey(1) == 27)
            break;
        prvs = next;
    }
}
// Entry point: runs one of the two optical-flow demos.
// demo1 keeps HSV in float throughout; demo2 follows the official
// OpenCV sample and converts HSV to 8-bit before display.
int main()
{
//demo1();
demo2();
}
Python版本代码如下:
import cv2
import numpy as np

# Dense optical flow (Farneback) demo: flow direction -> hue, magnitude -> value.
cap = cv2.VideoCapture("../images/vtest.avi")
ret, frame1 = cap.read()
prvs = cv2.cvtColor(frame1, cv2.COLOR_BGR2GRAY)
hsv = np.zeros_like(frame1)
hsv[..., 1] = 255  # constant full saturation
while True:
    ret, frame2 = cap.read()
    # BUGFIX: cap.read() returns (False, None) at end of stream; the old code
    # never checked ret and crashed inside cvtColor on the None frame.
    if not ret:
        break
    # renamed from `next` to avoid shadowing the builtin
    nxt = cv2.cvtColor(frame2, cv2.COLOR_BGR2GRAY)
    flow = cv2.calcOpticalFlowFarneback(prvs, nxt, None, 0.5, 3, 15, 3, 5, 1.2, 0)
    mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
    hsv[..., 0] = ang * 180 / np.pi / 2  # radians -> 8-bit hue in [0, 180)
    hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
    bgr = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
    cv2.imshow('frame2', frame2)
    cv2.imshow('flow', bgr)
    k = cv2.waitKey(1) & 0xff
    if k == 27:  # Esc quits
        break
    prvs = nxt
cap.release()
cv2.destroyAllWindows()
using OpenCvSharp;
namespace demo
{
    internal class Program
    {
        /// <summary>
        /// Dense optical flow (Farneback) demo: encodes flow direction as hue and
        /// flow magnitude as value in an HSV image, then displays the BGR conversion.
        /// Port of the OpenCV-Python optical flow tutorial sample.
        /// </summary>
        static void Main(string[] args)
        {
            VideoCapture cap = new VideoCapture("../../../images/vtest.avi");
            if (!cap.IsOpened())
            {
                // BUGFIX: bail out instead of crashing on the first Read of a missing file.
                System.Console.Error.WriteLine("Unable to open file!");
                return;
            }
            Mat frame1 = new Mat();
            cap.Read(frame1);
            Mat prvs = new Mat();
            Cv2.CvtColor(frame1, prvs, ColorConversionCodes.BGR2GRAY);
            Mat hsv = new Mat();
            // Build an HSV image whose saturation channel (index 1) is a constant 255;
            // hue and value are filled in per frame inside the loop.
            Mat[] setChannel = {
                new Mat(frame1.Size(), MatType.CV_8UC1, new Scalar(0)),
                new Mat(frame1.Size(), MatType.CV_8UC1, new Scalar(255)),
                new Mat(frame1.Size(), MatType.CV_8UC1, new Scalar(0))
            };
            Cv2.Merge(setChannel, hsv);
            Mat frame2 = new Mat();
            Mat next = new Mat();
            Mat flow = new Mat();
            Mat mag = new Mat();
            Mat ang = new Mat();
            Mat magn_norm = new Mat();
            Mat bgr = new Mat();
            while (true)
            {
                // BUGFIX: stop at end of stream; CvtColor would otherwise throw on an empty Mat.
                if (!cap.Read(frame2) || frame2.Empty())
                    break;
                Cv2.CvtColor(frame2, next, ColorConversionCodes.BGR2GRAY);
                Cv2.CalcOpticalFlowFarneback(prvs, next, flow, 0.5, 3, 15, 3, 5, 1.2, OpticalFlowFlags.None);
                // Split the 2-channel flow field into x and y components.
                Mat[] channels = new Mat[2];
                Cv2.Split(flow, out channels);
                // angleInDegrees defaults to false here, so ang is in radians.
                Cv2.CartToPolar(channels[0], channels[1], mag, ang);
                Cv2.Normalize(mag, magn_norm, 0, 255, NormTypes.MinMax);
                Mat[] hsvCh = new Mat[3];
                Cv2.Split(hsv, out hsvCh);
                // Radians -> hue in [0,180) for 8-bit HSV, then CV_32FC1 -> CV_8UC1.
                ang = ang * 180 / Cv2.PI / 2;
                ang.ConvertTo(ang, MatType.CV_8UC1);
                magn_norm.ConvertTo(magn_norm, MatType.CV_8UC1);
                hsvCh[1].ConvertTo(hsvCh[1], MatType.CV_8UC1);
                Mat[] _hsv = { ang, hsvCh[1], magn_norm };
                Cv2.Merge(_hsv, hsv);
                Cv2.CvtColor(hsv, bgr, ColorConversionCodes.HSV2BGR);
                Cv2.ImShow("frame2", frame2);
                Cv2.ImShow("flow", bgr);
                if (Cv2.WaitKey(1) == 27)   // Esc quits
                    break;
                next.CopyTo(prvs);
            }
            cap.Release();
            Cv2.DestroyAllWindows();
        }
    }
}
C++版本代码如下:
#include <iostream>
#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/video.hpp>
#include <opencv2/highgui/highgui_c.h>
using namespace cv;
using namespace std;
/// <summary>
/// 《OpenCV-Python-Tutorial-中文版.pdf》 P235
/// </summary>
// Dense optical flow (Farneback) demo keeping the HSV image in CV_32F.
// Direction of flow -> hue, magnitude of flow -> value; saturation is constant.
// 《OpenCV-Python-Tutorial-中文版.pdf》 P235
void demo1()
{
    VideoCapture cap(samples::findFile("../images/vtest.avi"));
    Mat frame1, prvs;
    cap >> frame1;
    cvtColor(frame1, prvs, COLOR_BGR2GRAY);
    // For floating-point HSV, cvtColor expects H in [0,360) and S,V in [0,1].
    Mat hsv;
    Mat t[3];
    t[0] = Mat(frame1.size(), CV_32F, Scalar(0));
    // BUGFIX: saturation plane was Scalar(255); 255 is far outside the [0,1]
    // range of float HSV and produced garbage colors after HSV2BGR.
    t[1] = Mat(frame1.size(), CV_32F, Scalar(1));
    t[2] = Mat(frame1.size(), CV_32F, Scalar(0));
    merge(t, 3, hsv);
    while (true)
    {
        Mat frame2, next;
        cap >> frame2;
        if (frame2.empty())
            break;
        cvtColor(frame2, next, COLOR_BGR2GRAY);
        // 计算光流
        Mat flow(prvs.size(), CV_32FC2);
        calcOpticalFlowFarneback(prvs, next, flow, 0.5, 3, 15, 3, 5, 1.2, 0);
        // 分离通道
        Mat flow_parts[2];
        split(flow, flow_parts);
        Mat mag, ang, magn_norm;
        // angleInDegrees=true: ang already holds degrees in [0,360).
        cartToPolar(flow_parts[0], flow_parts[1], mag, ang, true);
        normalize(mag, magn_norm, 0.0f, 1.0f, NORM_MINMAX);
        Mat hsvCh[3];
        split(hsv, hsvCh);
        Mat _hsv[3], bgr;
        // BUGFIX: the old code applied the radians->hue factor (*180/CV_PI/2)
        // to an angle that is ALREADY in degrees, pushing hue outside [0,360).
        _hsv[0] = ang;
        _hsv[1] = hsvCh[1];
        _hsv[2] = magn_norm;   // BUGFIX: statement was terminated with a comma operator
        merge(_hsv, 3, hsv);
        cvtColor(hsv, bgr, COLOR_HSV2BGR);
        // bgr is CV_32F in [0,1], which imshow displays directly.
        imshow("frame2", frame2);
        imshow("bgr", bgr);
        if (waitKey(1) == 27)
            break;
        prvs = next;
    }
}
/// <summary>
/// 来源:https://blog.csdn.net/ResumeProject/article/details/128507520
/// </summary>
// Dense optical flow (Farneback) demo following the official OpenCV sample:
// builds a float HSV image per frame, then converts to 8-bit for display.
// 来源:https://blog.csdn.net/ResumeProject/article/details/128507520
void demo2()
{
    VideoCapture cap(samples::findFile("../images/vtest.avi"));
    if (!cap.isOpened()) {
        cerr << "Unable to open file!" << endl;
        return;
    }
    Mat frame1, prvs;
    cap >> frame1;
    cvtColor(frame1, prvs, COLOR_BGR2GRAY);
    while (true)
    {
        Mat frame2, next;
        cap >> frame2;
        if (frame2.empty())
            break;
        cvtColor(frame2, next, COLOR_BGR2GRAY);
        // 计算光流
        Mat flow(prvs.size(), CV_32FC2);
        calcOpticalFlowFarneback(prvs, next, flow, 0.5, 3, 15, 3, 5, 1.2, 0);
        // 分离通道
        Mat flow_parts[2];
        split(flow, flow_parts);
        Mat mag, ang, magn_norm;
        // angleInDegrees=true: ang holds degrees in [0,360).
        cartToPolar(flow_parts[0], flow_parts[1], mag, ang, true);
        normalize(mag, magn_norm, 0.0f, 1.0f, NORM_MINMAX);
        //build hsv image
        Mat _hsv[3], hsv, hsv8, bgr;
        // BUGFIX: ang is already in degrees, and convertTo below multiplies every
        // channel by 255. Scale so the 8-bit hue lands in [0,180] (the scaling
        // used by the official OpenCV optical_flow sample); the old
        // ang*180/CV_PI/2 left hue wildly out of range after the *255.
        _hsv[0] = ang * ((1.f / 360.f) * (180.f / 255.f));
        _hsv[1] = Mat::ones(ang.size(), CV_32F);
        _hsv[2] = magn_norm;
        merge(_hsv, 3, hsv);
        hsv.convertTo(hsv8, CV_8U, 255.0);
        cvtColor(hsv8, bgr, COLOR_HSV2BGR);
        imshow("frame2", frame2);
        imshow("bgr", bgr);
        if (waitKey(1) == 27)
            break;
        prvs = next;
    }
}
// Entry point: runs one of the two optical-flow demos.
// demo1 keeps HSV in float throughout; demo2 follows the official
// OpenCV sample and converts HSV to 8-bit before display.
int main()
{
//demo1();
demo2();
}
Python版本代码如下:
import cv2
import numpy as np

# Dense optical flow (Farneback) demo: flow direction -> hue, magnitude -> value.
cap = cv2.VideoCapture("../images/vtest.avi")
ret, frame1 = cap.read()
prvs = cv2.cvtColor(frame1, cv2.COLOR_BGR2GRAY)
hsv = np.zeros_like(frame1)
hsv[..., 1] = 255  # constant full saturation
while True:
    ret, frame2 = cap.read()
    # BUGFIX: cap.read() returns (False, None) at end of stream; the old code
    # never checked ret and crashed inside cvtColor on the None frame.
    if not ret:
        break
    # renamed from `next` to avoid shadowing the builtin
    nxt = cv2.cvtColor(frame2, cv2.COLOR_BGR2GRAY)
    flow = cv2.calcOpticalFlowFarneback(prvs, nxt, None, 0.5, 3, 15, 3, 5, 1.2, 0)
    mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
    hsv[..., 0] = ang * 180 / np.pi / 2  # radians -> 8-bit hue in [0, 180)
    hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
    bgr = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
    cv2.imshow('frame2', frame2)
    cv2.imshow('flow', bgr)
    k = cv2.waitKey(1) & 0xff
    if k == 27:  # Esc quits
        break
    prvs = nxt
cap.release()
cv2.destroyAllWindows()