OpenCV People Counting: YOLO + KCF Head Tracking (KCF Object Tracking, YOLO Object Tracking)

Posted by dragon_perfect



This project uses the darknet framework to train a yolo-tiny head-detection model, which is then loaded and run with OpenCV 4.5.1. Tracking uses the KCF algorithm. Three functions are implemented overall:

1. Head counting within a region;

2. Tripwire (line-crossing) detection within a region;

3. Object tracking within a region, with head-count statistics.

1. Repository: https://github.com/AlexeyAB/darknet

2. For training, refer to my other two blog posts:

https://blog.csdn.net/zhulong1984/article/details/82344685

https://blog.csdn.net/zhulong1984/article/details/103499254
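
For reference, detection with the trained model can be run through OpenCV's DNN module. The sketch below is a minimal, hedged example of loading a darknet yolo-tiny model with OpenCV 4.5.1; the file names "head-tiny.cfg" / "head-tiny.weights", the 416x416 input size and the 0.5 confidence threshold are placeholders for illustration, not values taken from this project.

// Minimal sketch: run a darknet yolo-tiny head model with OpenCV's DNN module.
#include <opencv2/opencv.hpp>
#include <opencv2/dnn.hpp>

int main()
{
	// Placeholder file names; replace with the trained head model.
	cv::dnn::Net net = cv::dnn::readNetFromDarknet("head-tiny.cfg", "head-tiny.weights");
	net.setPreferableBackend(cv::dnn::DNN_BACKEND_OPENCV);
	net.setPreferableTarget(cv::dnn::DNN_TARGET_CPU);

	cv::Mat frame = cv::imread("test.jpg");

	// Darknet models expect RGB input scaled to [0,1]; 416x416 is a common tiny input size.
	cv::Mat blob = cv::dnn::blobFromImage(frame, 1.0 / 255.0, cv::Size(416, 416),
	                                      cv::Scalar(), true, false);
	net.setInput(blob);

	std::vector<cv::Mat> outs;
	net.forward(outs, net.getUnconnectedOutLayersNames());

	// Each output row is [cx, cy, w, h, objectness, class scores...] in relative coordinates.
	for (const cv::Mat &out : outs)
	{
		for (int r = 0; r < out.rows; r++)
		{
			if (out.at<float>(r, 4) > 0.5f) // assumed confidence threshold
			{
				int w = (int)(out.at<float>(r, 2) * frame.cols);
				int h = (int)(out.at<float>(r, 3) * frame.rows);
				int x = (int)(out.at<float>(r, 0) * frame.cols) - w / 2;
				int y = (int)(out.at<float>(r, 1) * frame.rows) - h / 2;
				cv::rectangle(frame, cv::Rect(x, y, w, h), cv::Scalar(0, 255, 0), 2);
			}
		}
	}
	cv::imwrite("result.jpg", frame);
	return 0;
}

Boxes produced this way would then be converted to HDRect entries in an HDBox_Container before being handed to the tracker below.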

The tracking code (header and implementation) is as follows:

// HeadTrack.h -- tracker declarations
#pragma once

#include "HeadDetect.h"
#include "opencv2/opencv.hpp"
#include "KCFLib/kcftracker.hpp"

using namespace cv;
using namespace std;

#define MAX_TRACK_COUNT		    100   // maximum number of targets
#define MAX_TRAJECTORY_COUNT    200   // maximum number of trajectory points per target
#define MAX_MISS_FRAME			10    // maximum number of missed frames before a track is dropped
#define MAX_MATCH_FRAME			3     // maximum number of frames used for match confirmation
#define MAX_TRACK_DIST          40000 // maximum (squared) matching distance in pixels

struct ZTracker 
{
	int nID;
	int nMissTimes;
	bool bMatchID;
	bool bTrack;
	bool bStatOK;
	bool bInitDirection;
	int nInitDirection;
	cv::KalmanFilter *kf;
	cv::Point predPoint;
	cv::Point curCentre;
	cv::Rect curRect;
	int nTrajCount;
	cv::Point centres[MAX_TRAJECTORY_COUNT];
	cv::Rect blobs[MAX_TRAJECTORY_COUNT];
};

class ZHeadTrack
{
public:
	ZHeadTrack();
	~ZHeadTrack();

	int m_InCount;
	int m_OutCount;
	int m_RegionCount;
	ZTracker *m_pTrackers;
	KCFTracker m_pKCF[MAX_TRACK_COUNT];

	void trackInitial(cv::Point arrowStart, cv::Point arrowEnd, cv::Point lineStart, cv::Point lineEnd, vector<Point> regionPonits,int width,int height);

	void trackProcess(HDBox_Container *pHDBox_Container, HDRect *rect,bool bMask);

	void kcfProcess(HDBox_Container *pHDBox_Container, HDRect *rect, cv::Mat image);

	void trackInOutStatistics();

	void trackRegionStatistics();

	int pointInLineSide(cv::Point point, cv::Point lineStart, cv::Point lineEnd);

	float CalculateIou(cv::Rect& det, cv::Rect& track);

public:

	int m_trackID;
	int m_nArrowStart;
	int	m_nArrowEnd;
	cv::Point m_lineStart;
	cv::Point m_lineEnd;
	cv::Mat m_detectRegion;
	HDBox_Container m_boxContainer;
	HDTracker_Region m_trackeRegion;
};
#include "HeadTrack.h"

ZHeadTrack::ZHeadTrack()
{
	m_trackID = 0;
	m_InCount = 0;
	m_OutCount = 0;
	m_RegionCount = 0;
	m_nArrowStart = 0;
	m_nArrowEnd = 0;

	m_pTrackers = new ZTracker[MAX_TRACK_COUNT];
	memset(m_pTrackers, 0, sizeof(ZTracker) * MAX_TRACK_COUNT); // zero the whole tracker array
}

ZHeadTrack::~ZHeadTrack()
{
	delete[] m_pTrackers;
}

void ZHeadTrack::trackInitial(cv::Point arrowStart, cv::Point arrowEnd, cv::Point lineStart, cv::Point lineEnd, vector<Point> regionPonits, int width, int height)
{
	m_trackID = 0;
	m_InCount = 0;
	m_OutCount = 0;
	m_nArrowStart = pointInLineSide(arrowStart, lineStart, lineEnd);
	m_nArrowEnd = pointInLineSide(arrowEnd, lineStart, lineEnd);
	m_lineStart = lineStart;
	m_lineEnd = lineEnd;

	vector<vector<Point>> vpts;
	vpts.push_back(regionPonits);

	cv::Mat detectRegion = cv::Mat::zeros(height, width, CV_8UC3);

	cv::fillPoly(detectRegion, vpts, cv::Scalar(255, 255, 255));
	cv::cvtColor(detectRegion, m_detectRegion, COLOR_BGR2GRAY);
	//m_detectRegion = detectRegion.clone();

	//cv::imshow("detectRegion", m_detectRegion);
	//cv::waitKey(0);
}

void ZHeadTrack::trackProcess(HDBox_Container *pHDBox_Container, HDRect *rect, bool bMask)
{
	int i = 0, j = 0;

	// discard rects that are too small or whose centre lies outside the detection region
	m_boxContainer.headCount = 0;
	int width = m_detectRegion.cols;
	uchar *pDetectData = m_detectRegion.data;

	int step1 = m_detectRegion.step1();
	//cv::imshow("m_detectRegion", m_detectRegion);
	//cv::waitKey(10);

	for (i = 0;i< pHDBox_Container->headCount;i++)
	{
		HDRect *pHDRect = &pHDBox_Container->candidates[i];
		int nx = (pHDRect->right + pHDRect->left) / 2;
		int ny = (pHDRect->top + pHDRect->bottom) / 2;
		int rectW = pHDRect->right - pHDRect->left;
		int rectH = pHDRect->bottom - pHDRect->top;
		if (bMask)
		{
			if (rectW > 60 && rectH > 60 && (*(pDetectData + ny*step1 + nx) == 255)) // centre must fall inside the mask
			{
				m_boxContainer.candidates[m_boxContainer.headCount++] = pHDBox_Container->candidates[i];
			}
		}
		else
		{
			if (rectW > 40 && rectH > 40 && nx > rect->left && nx < rect->right && ny > rect->top && ny < rect->bottom)
			{
				m_boxContainer.candidates[m_boxContainer.headCount++] = pHDBox_Container->candidates[i];
			}
		}
	}

	bool bMatch[HD_MAX_HEADS] = { false };
	for (i = 0;i< m_boxContainer.headCount;i++)
	{
		bMatch[i] = false;
	}

	for (i = 0; i < MAX_TRACK_COUNT; i++)
	{
		ZTracker *pTracker = &m_pTrackers[i];
		if (pTracker->bTrack)
		{
			bool bMinst = false;
			int nMatchID = -1;
			int maxDist = MAX_TRACK_DIST;
			for (j = 0; j < m_boxContainer.headCount; j++)
			{
				if (!bMatch[j])
				{
					HDRect *pHDRect = &m_boxContainer.candidates[j];
					cv::Rect curRect;
					curRect.x = pHDRect->left;
					curRect.y = pHDRect->top;
					curRect.width = pHDRect->right - pHDRect->left;
					curRect.height = pHDRect->bottom - pHDRect->top;
					int nx = (pHDRect->left + pHDRect->right) / 2;
					int ny = (pHDRect->top + pHDRect->bottom) / 2;

					int dist = (pTracker->predPoint.x - nx)*(pTracker->predPoint.x - nx) + (pTracker->predPoint.y - ny)*(pTracker->predPoint.y - ny);
					if (dist < maxDist)
					{
						maxDist = dist;
						pTracker->curRect = curRect; // kept for the update below
						pTracker->curCentre.x = nx;
						pTracker->curCentre.y = ny;
						nMatchID = j;
						bMinst = true;
					}
				}
			}

			// a matching blob was found
			if (bMinst)
			{
				bMatch[nMatchID] = true;

				HDRect *pHDRect = &m_boxContainer.candidates[nMatchID];
				cv::Rect curRect;
				curRect.x = pHDRect->left;
				curRect.y = pHDRect->top;
				curRect.width = pHDRect->right - pHDRect->left;
				curRect.height = pHDRect->bottom - pHDRect->top;
				int nx = (pHDRect->left + pHDRect->right) / 2;
				int ny = (pHDRect->top + pHDRect->bottom) / 2;

				pTracker->bMatchID = true;
				pTracker->nMissTimes = 0;
				pTracker->curCentre.x = nx;
				pTracker->curCentre.y = ny;
				pTracker->curRect = curRect;

				// correct the Kalman filter with the measurement, then predict
				Mat measurement = Mat::zeros(2, 1, CV_32F);
				measurement.at<float>(0) = (float)nx;
				measurement.at<float>(1) = (float)ny;
				pTracker->kf->correct(measurement);

				Mat prediction = pTracker->kf->predict();
				pTracker->predPoint = Point(prediction.at<float>(0), prediction.at<float>(1)); // predicted point (x', y')

				cv::Point centre = pTracker->centres[pTracker->nTrajCount - 1];
				if ((centre.x - nx)*(centre.x - nx) + (centre.y - ny)*(centre.y - ny) > 30)
				{
					pTracker->centres[pTracker->nTrajCount].x = nx;
					pTracker->centres[pTracker->nTrajCount].y = ny;
					pTracker->blobs[pTracker->nTrajCount] = curRect;
					pTracker->nTrajCount++;
					if (pTracker->nTrajCount >= MAX_TRAJECTORY_COUNT - 1)
					{
						pTracker->nTrajCount = MAX_TRAJECTORY_COUNT - 1;
						for (int k = 1; k < pTracker->nTrajCount; k++)
						{
							pTracker->centres[k - 1] = pTracker->centres[k];
							pTracker->blobs[k - 1] = pTracker->blobs[k];
						}
					}
				}
			}	
			else // no matching blob was found
			{
				pTracker->nMissTimes++;
				//Mat prediction = pTracker->kf->predict();
				//pTracker->predPoint = Point(prediction.at<float>(0), prediction.at<float>(1)); // predicted point (x', y')
				// feed the last known centre back as the measurement, then predict
				Mat measurement = Mat::zeros(2, 1, CV_32F);
				measurement.at<float>(0) = (float)pTracker->curCentre.x;
				measurement.at<float>(1) = (float)pTracker->curCentre.y;
				pTracker->kf->correct(measurement);

				Mat prediction = pTracker->kf->predict();
				pTracker->predPoint = Point(prediction.at<float>(0), prediction.at<float>(1)); // predicted point (x', y')

				if (pTracker->nMissTimes > MAX_MISS_FRAME)
				{
					pTracker->bTrack = false;
					delete pTracker->kf;
					pTracker->kf = nullptr; // the slot may be reused, so avoid a dangling pointer
				}
			}
		}
	}

	// unmatched detections: create new trackers for them
	for (i = 0; i < m_boxContainer.headCount; i++)
	{
		HDRect *pHDRect = &m_boxContainer.candidates[i];
		cv::Rect curRect;
		curRect.x = pHDRect->left;
		curRect.y = pHDRect->top;
		curRect.width = pHDRect->right - pHDRect->left;
		curRect.height = pHDRect->bottom - pHDRect->top;
		int nx = (pHDRect->left + pHDRect->right) / 2;
		int ny = (pHDRect->top + pHDRect->bottom) / 2;
		if (!bMatch[i])
		{
			for (j = 0; j < MAX_TRACK_COUNT; j++)
			{
				ZTracker *pTracker = &m_pTrackers[j];
				if (!pTracker->bTrack)
				{
					pTracker->bTrack = true;
					pTracker->bMatchID = true;
					pTracker->bStatOK = false;
					pTracker->bInitDirection = false;
					pTracker->nID = ++m_trackID;

					pTracker->curCentre.x = nx;
					pTracker->curCentre.y = ny;
					pTracker->curRect = curRect;
					pTracker->nMissTimes = 0;
					pTracker->predPoint.x = nx;
					pTracker->predPoint.y = ny;

					pTracker->kf = new cv::KalmanFilter(4, 2, 0);
					pTracker->kf->transitionMatrix = (Mat_<float>(4, 4) << 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1); // transition matrix A (constant velocity)
					cv::setIdentity(pTracker->kf->measurementMatrix);                     // measurement matrix H
					cv::setIdentity(pTracker->kf->processNoiseCov, Scalar::all(1e-5));    // process noise covariance Q
					cv::setIdentity(pTracker->kf->measurementNoiseCov, Scalar::all(1e-1));// measurement noise covariance R
					cv::setIdentity(pTracker->kf->errorCovPost, Scalar::all(1));          // posterior error covariance P
					pTracker->kf->statePost = (Mat_<float>(4, 1) << nx, ny, 0, 0);

					Mat prediction = pTracker->kf->predict();
					pTracker->predPoint = Point(prediction.at<float>(0), prediction.at<float>(1)); // predicted point (x', y')

					pTracker->centres[0].x = nx;
					pTracker->centres[0].y = ny;
					pTracker->blobs[0] = curRect;
					pTracker->nTrajCount = 1;
					break;
				}
			}
		}
	}
}

float ZHeadTrack::CalculateIou(cv::Rect& det, cv::Rect& track) 
{
	// get min/max points
	auto xx1 = std::max(det.tl().x, track.tl().x);
	auto yy1 = std::max(det.tl().y, track.tl().y);
	auto xx2 = std::min(det.br().x, track.br().x);
	auto yy2 = std::min(det.br().y, track.br().y);
	auto w = std::max(0, xx2 - xx1);
	auto h = std::max(0, yy2 - yy1);

	// calculate area of intersection and union
	float det_area = det.area();
	float trk_area = track.area();
	auto intersection_area = w * h;
	float union_area = det_area + trk_area - intersection_area;
	auto iou = intersection_area / union_area;
	return iou;
}

void ZHeadTrack::kcfProcess(HDBox_Container *pHDBox_Container, HDRect *rect,cv::Mat image)
{
	int i = 0, j = 0;
	//HDBox_Container container;

	m_boxContainer.headCount = 0;
	for (i = 0; i < pHDBox_Container->headCount; i++)
	{
		HDRect *pHDRect = &pHDBox_Container->candidates[i];
		int nx = (pHDRect->right + pHDRect->left) / 2;
		int ny = (pHDRect->top + pHDRect->bottom) / 2;
		int rectW = pHDRect->right - pHDRect->left;
		int rectH = pHDRect->bottom - pHDRect->top;

		if (rectW > 20 && rectH > 20 && nx > rect->left && nx < rect->right && ny > rect->top && ny < rect->bottom)
		{
			m_boxContainer.candidates[m_boxContainer.headCount++] = pHDBox_Container->candidates[i];
		}
	}

	bool bMatch[HD_MAX_HEADS] = { false };
	for (i = 0; i < m_boxContainer.headCount; i++)
	{
		bMatch[i] = false;
	}

	for (i = 0; i < MAX_TRACK_COUNT; i++)
	{
		KCFTracker *pKCF = &m_pKCF[i];
		if (pKCF->bTrack)
		{
			cv::Rect tRect = pKCF->update(image);

			pKCF->nTrackTimes++;
			pKCF->curRect = tRect;
			int nx = tRect.x + tRect.width / 2;
			int ny = tRect.y + tRect.height / 2;
			int dist = 10000000;
			float dfiou = 0.4;
			cv::Rect roiRect;
			int nFlag = 0;
			bool bOK = false;
			int nlimit = 0.6*std::sqrt(tRect.width*tRect.width + tRect.height*tRect.height);
			for (j = 0; j < m_boxContainer.headCount; j++)
			{
				HDRect *pHDRect = &m_boxContainer.candidates[j];
				int x0 = (pHDRect->left + pHDRect->right) / 2;
				int y0 = (pHDRect->top + pHDRect->bottom) / 2;
				int tempDist = std::sqrt((nx - x0)*(nx - x0) + (ny - y0)*(ny - y0));
				cv::Rect trackR;
				trackR.x = pHDRect->left;
				trackR.y = pHDRect->top;
				trackR.width = pHDRect->right - pHDRect->left;
				trackR.height = pHDRect->bottom - pHDRect->top;
				float fiou = CalculateIou(tRect, trackR);
				if (fiou > dfiou)
				{
					dfiou = fiou;
					nFlag = j;
					bOK = true;
					roiRect = trackR;
				}
			}

			if (bOK)
			{
				bMatch[nFlag] = true;
				pKCF->nMissFrame = 0;
				pKCF->init(roiRect, image);
				pKCF->curRect = roiRect;
				pKCF->nMatchTimes++;
				if (!pKCF->bValid)
				{
					if (/*pKCF->nMatchTimes > MAX_MATCH_FRAME && */pKCF->nTrackTimes == pKCF->nMatchTimes)
					{
						pKCF->bValid = true;
						pKCF->nID = ++m_trackID;
					}
					else
					{
						pKCF->bTrack = false;
						pKCF->bValid = false;
					}
				}
			}
			else
			{
				pKCF->nMissFrame++;
				if (pKCF->nMissFrame > MAX_MISS_FRAME)
				{
					pKCF->bTrack = false;
					pKCF->bValid = false;
				}
			}
		}
	}

	for (j = 0; j < m_boxContainer.headCount; j++)
	{
		if (!bMatch[j])
		{
			HDRect *pHDRect = &m_boxContainer.candidates[j];
			for (i = 0; i < MAX_TRACK_COUNT; i++)
			{
				KCFTracker *pKCF = &m_pKCF[i];
				if (!pKCF->bTrack)
				{
					pKCF->nID = -1;// ++m_trackID;
					pKCF->nMissFrame = 0;
					pKCF->nTrackTimes = 0;
					pKCF->nMatchTimes = 0;
					pKCF->bValid = false;
					cv::Rect roiRect;
					roiRect.x = pHDRect->left;
					roiRect.y = pHDRect->top;
					roiRect.width = pHDRect->right - pHDRect->left;
					roiRect.height = pHDRect->bottom - pHDRect->top;
					pKCF->bTrack = true;
					pKCF->init(roiRect, image);
					break;
				}
			}
		}
	}
}

int ZHeadTrack::pointInLineSide(cv::Point point, cv::Point lineStart, cv::Point lineEnd)
{
	int x0 = 0, x1 = 0;
	int y0 = 0, y1 = 0;
	int v0 = 0, v1 = 0;
	bool bFlag = false;
	bool bFlagX = false;
	bool bFlagY = false;

	x0 = lineStart.x;
	x1 = lineEnd.x;
	y0 = lineStart.y;
	y1 = lineEnd.y;

	// first make sure the point lies within the segment's bounding range
	if (x0 > x1)
	{
		bFlagX = point.x > x1 && point.x < x0;
	}
	else
	{
		bFlagX = point.x <x1 && point.x >x0;
	}

	if (y0 > y1)
	{
		bFlagY = point.y > y1 && point.y < y0;
	}
	else
	{
		bFlagY = point.y <y1 && point.y >y0;
	}

	bFlag = bFlagX || bFlagY;
	if (!bFlag)
	{
		return 0;
	}

	v0 = (point.x - x0)*(y1 - y0) - (point.y - y0)*(x1 - x0);
	v1 = x1 - x0;

	if (x1 - x0 == 0)
	{
		if (v0 < 0)
		{
			return -1;
		}
		else
		{
			return 1;
		}
	}
	else
	{
		if (v0*v1 < 0)
		{
			return -1;
		}
		else
		{
			return 1;
		}
	}

	return 0;
}

void ZHeadTrack::trackInOutStatistics()
{
	int i = 0, j = 0;

	for (i = 0; i < MAX_TRACK_COUNT; i++)
	{
		ZTracker *pTracker = &m_pTrackers[i];
		if (pTracker->bTrack && pTracker->nTrajCount > 20 && !pTracker->bStatOK)
		{
			// confirm the initial side of the line from the first trajectory points
			if (!pTracker->bInitDirection)
			{
				int count0 = 0;
				for (j = 0; j < 10; j++)
				{
					int flag = pointInLineSide(pTracker->centres[j], m_lineStart, m_lineEnd);
					count0 += flag;
				}
				if (count0 > 0)
				{
					pTracker->nInitDirection = 1;
				}
				else
				{
					pTracker->nInitDirection = -1;
				}
			}

			// a line crossing must be confirmed over several consecutive points
			int count1 = 0;
			for (j = pTracker->nTrajCount - 10; j < pTracker->nTrajCount - 1; j++)
			{
				int flag = pointInLineSide(pTracker->centres[j], m_lineStart, m_lineEnd);
				if (flag != 0 && pTracker->nInitDirection != flag)
				{
					count1++;
				}
			}
			
			if (count1 > 6)
			{
				if (pTracker->nInitDirection == m_nArrowStart)
				{
					m_InCount++;
				}
				else
				{
					m_OutCount++;
				}
				pTracker->bStatOK = true;
			}
		}
	}
}

void ZHeadTrack::trackRegionStatistics()
{
	int i = 0, j = 0;

	m_trackeRegion.currentCount = 0;
	for (i = 0; i < MAX_TRACK_COUNT; i++)
	{
		ZTracker *pTracker = &m_pTrackers[i];
		if (pTracker->bTrack && pTracker->nTrajCount > 5 && !pTracker->bStatOK)
		{
			m_trackeRegion.rect[m_trackeRegion.currentCount].id = pTracker->nID;
			m_trackeRegion.rect[m_trackeRegion.currentCount].left = pTracker->curRect.x;
			m_trackeRegion.rect[m_trackeRegion.currentCount].top = pTracker->curRect.y;
			m_trackeRegion.rect[m_trackeRegion.currentCount].right = pTracker->curRect.x + pTracker->curRect.width;
			m_trackeRegion.rect[m_trackeRegion.currentCount].bottom = pTracker->curRect.y + pTracker->curRect.height;
			m_trackeRegion.currentCount++;
			m_RegionCount++;
			pTracker->bStatOK = true;
		}
	}
}

Demo results:

How the tracking works:

The detections produced by yolo-tiny are fed into KCF trackers. Once a detection is matched on two consecutive frames it is confirmed as a target, after which head tracking proceeds in a tracking-by-detection manner.
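
As a rough illustration of this tracking-by-detection scheme, the sketch below uses OpenCV's own cv::TrackerKCF from the opencv_contrib tracking module rather than the KCFLib implementation shipped with this project; the Track struct, the 0.4 IoU gate and the two-frame confirmation counter are assumptions made for the example.

// Tracking-by-detection sketch with cv::TrackerKCF (requires the opencv_contrib "tracking" module, OpenCV 4.5.x API).
// A track only receives an ID after being confirmed by a detection on two consecutive frames.
#include <opencv2/opencv.hpp>
#include <opencv2/tracking.hpp>
#include <algorithm>
#include <vector>

struct Track
{
	cv::Ptr<cv::TrackerKCF> kcf;
	cv::Rect box;
	int matches = 0;   // consecutive detector confirmations
	int misses  = 0;   // consecutive frames without a matching detection
	int id      = -1;  // assigned once the track is confirmed
};

static float iou(const cv::Rect &a, const cv::Rect &b)
{
	float inter = (float)(a & b).area();
	return inter / (a.area() + b.area() - inter);
}

void step(std::vector<Track> &tracks, const std::vector<cv::Rect> &detections,
          const cv::Mat &frame, int &nextId)
{
	std::vector<bool> used(detections.size(), false);

	for (Track &t : tracks)
	{
		t.kcf->update(frame, t.box);           // KCF prediction for this frame
		int best = -1;
		float bestIou = 0.4f;                  // assumed minimum overlap to accept a match
		for (size_t j = 0; j < detections.size(); j++)
		{
			float f = iou(t.box, detections[j]);
			if (!used[j] && f > bestIou) { bestIou = f; best = (int)j; }
		}

		if (best >= 0)
		{
			used[best] = true;
			t.box = detections[best];
			t.kcf = cv::TrackerKCF::create(); // re-initialise the tracker on the detection
			t.kcf->init(frame, t.box);
			t.matches++;
			t.misses = 0;
			if (t.matches >= 2 && t.id < 0) t.id = nextId++; // confirmed after two matches
		}
		else
		{
			t.misses++;
		}
	}

	// Drop tracks that have been missing for too long.
	tracks.erase(std::remove_if(tracks.begin(), tracks.end(),
	             [](const Track &t) { return t.misses > 10; }), tracks.end());

	// Unmatched detections start new, unconfirmed tracks.
	for (size_t j = 0; j < detections.size(); j++)
	{
		if (used[j]) continue;
		Track t;
		t.box = detections[j];
		t.kcf = cv::TrackerKCF::create();
		t.kcf->init(frame, t.box);
		t.matches = 1;
		tracks.push_back(t);
	}
}

Calling step() once per frame with the yolo-tiny detections keeps confirmed IDs stable while letting KCF bridge short detector gaps, which is the same idea implemented in kcfProcess() above.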

Head-tracking demo (includes the KCF tracking code):

Link: https://pan.baidu.com/s/1iCbTtIeSQQ84zuzaceejNw
Extraction code: vvar
For questions, contact QQ: 187100248
