1、说明
文章详细叙述了海康摄像头的两种实时显示方法——基于SDK解码显示和基于数据流回调显示,并且讲述了在这两种显示方法下如何往画面添加字符和图像,最后比较了这两种方法的优劣。文章全程给出详细的程序说明,供各位开发者参考。
2 实时预览
2.1 实时预览模块流程
图中虚线框部分的模块不是必须部分,是与预览模块相关,必须在启动预览后才能调用,这些模块之间是并列的关系,各自完成相应的功能。
2.2 SDK 解码显示
在预览接口 NET_DVR_RealPlay_V40 中预览参数的播放窗口句柄赋成有效句柄,则由 SDK 实现解码功能。在初始化 SDK 和注册设备两步骤后,直接调用启动预览和停止预览接口。
SDK 直接解码显示代码:
#include <stdio.h>
#include <iostream>
#include “Windows.h”
#include “HCNetSDK.h”
#include <time.h>
using namespace std;
typedef HWND (WINAPI *PROCGETCONSOLEWINDOW)();
PROCGETCONSOLEWINDOW GetConsoleWindow;
// Exception callback registered with NET_DVR_SetExceptionCallBack_V30.
// Only logs reconnect events; all other exception types are ignored.
void CALLBACK g_ExceptionCallBack(DWORD dwType, LONG lUserID, LONG lHandle, void *pUser)
{
    switch (dwType)
    {
    case EXCEPTION_RECONNECT:
        // BUG FIX: original used smart quotes (won't compile) and printed a
        // time_t with %d; time_t may be wider than int, so cast to long.
        printf("----------reconnect--------%ld\n", (long)time(NULL));
        break;
    default:
        break;
    }
}
void main() {
NET_DVR_Init();
NET_DVR_SetConnectTime(2000, 1);
NET_DVR_SetReconnect(10000, true);
LONG lUserID;
NET_DVR_DEVICEINFO_V30 struDeviceInfo;
lUserID = NET_DVR_Login_V30(“192.0.0.64”, 8000, “admin”, “12345”, &struDeviceInfo);
if (lUserID < 0)
{
printf(“Login error, %d\n”, NET_DVR_GetLastError());
NET_DVR_Cleanup();
return;
}
NET_DVR_SetExceptionCallBack_V30(0, NULL,g_ExceptionCallBack, NULL);
LONG lRealPlayHandle;
HWND hWnd = GetDlgItem(hWnd, IDC_PIC);
NET_DVR_PREVIEWINFO struPlayInfo = {0};
struPlayInfo.hPlayWnd = hWnd;
struPlayInfo.lChannel = 1;
struPlayInfo.dwStreamType = 0;
struPlayInfo.dwLinkMode = 0;
struPlayInfo.bBlocked = 1;
lRealPlayHandle = NET_DVR_RealPlay_V40(lUserID, &struPlayInfo, NULL, NULL);
if (lRealPlayHandle < 0)
{
printf(“NET_DVR_RealPlay_V40 error\n”);
NET_DVR_Logout(lUserID);
NET_DVR_Cleanup();
return;
}
Sleep(10000);
NET_DVR_StopRealPlay(lRealPlayHandle);
NET_DVR_Logout(lUserID);
NET_DVR_Cleanup();
return;
}
2.3 实时流数据回调显示
实时流数据回调,用户需要在回调函数中自行处理码流数据。用户可以通过设置预览接口 NET_DVR_RealPlay_V40 中预览参数的播放窗口句柄为空值,并通过调用捕获数据的接口(即设置NET_DVR_RealPlay_V40 接口中的回调函数或调用NET_DVR_SetRealDataCallBack、NET_DVR_SetStandardDataCallBack 接口),获取码流数据进行后续解码播放处理。
实时流数据回调代码:
#include <stdio.h>
#include <iostream>
#include “Windows.h”
#include “HCNetSDK.h”
#include “plaympeg4.h”
#include <time.h>
using namespace std;
typedef HWND (WINAPI *PROCGETCONSOLEWINDOW)();
PROCGETCONSOLEWINDOW GetConsoleWindow;
LONG lPort;
HANDLE MainVectorhMutex;
vector<IplImage*> MainImageVector;
// Convert one YV12 frame (planar: full-size Y plane, then V plane, then U
// plane; each chroma plane is width/2 x height/2) into packed 3-bytes-per-
// pixel YUV suitable for an IplImage.
//
// outYuv    destination buffer, widthStep bytes per row, pixel layout Y,U,V
// inYv12    source YV12 buffer (width*height*3/2 bytes)
// width     frame width in pixels (assumed even)
// height    frame height in pixels (assumed even)
// widthStep destination row stride in bytes (must be >= width*3)
void yv12toYUV(char *outYuv, char *inYv12, int width, int height, int widthStep)
{
    const int lumaSize = width * height;
    for (int row = 0; row < height; row++)
    {
        const int idx = row * widthStep;
        for (int col = 0; col < width; col++)
        {
            // Each 2x2 luma block shares one U and one V chroma sample.
            const int chroma = (row / 2) * (width / 2) + (col / 2);
            unsigned int Y = (unsigned int)inYv12[row * width + col];
            unsigned int U = (unsigned int)inYv12[lumaSize + lumaSize / 4 + chroma];
            unsigned int V = (unsigned int)inYv12[lumaSize + chroma];
            outYuv[idx + col * 3]     = Y;
            outYuv[idx + col * 3 + 1] = U;
            outYuv[idx + col * 3 + 2] = V;
        }
    }
    // (The original also computed an unused per-row offset 'rowptr'; removed.)
}
// Decode callback registered with PlayM4_SetDecCallBack: invoked by the player
// once per decoded frame. YV12 frames are converted to an IplImage and queued
// on MainImageVector (bounded to 5 entries) for the display thread to render.
// Compile-time switch USECOLOR selects color (YCrCb->RGB) vs. grayscale output.
void CALLBACK DecCBFunMain(long nPort, char * pBuf, long nSize, FRAME_INFO * pFrameInfo, long nReserved1, long nReserved2)
{
long lFrameType = pFrameInfo->nType;
if (lFrameType == T_YV12)
{
#if USECOLOR
// Color path: repack planar YV12 into an interleaved 3-channel image, then
// convert to RGB for display.
IplImage* pImgYCrCb = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 3);
yv12toYUV(pImgYCrCb->imageData, pBuf, pFrameInfo->nWidth, pFrameInfo->nHeight, pImgYCrCb->widthStep);
IplImage* pImg = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 3);
cvCvtColor(pImgYCrCb, pImg, CV_YCrCb2RGB);
#else
// Grayscale path: the leading Y plane alone is the single-channel image.
// NOTE(review): assumes pImg->widthStep == nWidth (no row padding) — confirm.
IplImage* pImg = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 1);
memcpy(pImg->imageData, pBuf, pFrameInfo->nWidth*pFrameInfo->nHeight);
#endif
// Queue the frame under the mutex; when more than 5 frames are pending, drop
// (and free) the oldest so a slow consumer cannot grow the queue unboundedly.
WaitForSingleObject(MainVectorhMutex, INFINITE);
MainImageVector.push_back(pImg);
if (MainImageVector.size() > 5)
{
IplImage *data = *MainImageVector.begin();
MainImageVector.erase(MainImageVector.begin());
cvReleaseImage(&data);
}
ReleaseMutex(MainVectorhMutex);
#if USECOLOR
// The intermediate YCrCb image is no longer needed; pImg owns its own pixels.
cvReleaseImage(&pImgYCrCb);
#else
#endif
}
}
// Real-time stream callback passed to NET_DVR_RealPlay_V40.
//   NET_DVR_SYSHEAD:    open a PlayM4 decode port, install the decode
//                       callback, and start decoding.
//   NET_DVR_STREAMDATA: feed stream packets into the decoder.
void CALLBACK fRealDataCallBackMain(LONG lRealHandle, DWORD dwDataType, BYTE *pBuffer, DWORD dwBufSize, void *pUser)
{
    DWORD dRet; // last PlayM4 error code (kept for debugging)
    switch (dwDataType)
    {
    case NET_DVR_SYSHEAD:
        // BUG FIX: the original used an undeclared 'nPort'; the global decode
        // port declared above is 'lPort'.
        if (!PlayM4_GetPort(&lPort))
        {
            break;
        }
        if (dwBufSize > 0)
        {
            if (!PlayM4_OpenStream(lPort, pBuffer, dwBufSize, 1024 * 1024))
            {
                dRet = PlayM4_GetLastError(lPort);
                break;
            }
            if (!PlayM4_SetDecCallBack(lPort, DecCBFunMain))
            {
                dRet = PlayM4_GetLastError(lPort);
                break;
            }
            // BUG FIX: the original passed an undeclared 'hWnd'. With a decode
            // callback installed we only need decoding, so play without a
            // window (NULL) and render ourselves in the display thread.
            if (!PlayM4_Play(lPort, NULL))
            {
                dRet = PlayM4_GetLastError(lPort);
                break;
            }
        }
        break;
    case NET_DVR_STREAMDATA:
        if (dwBufSize > 0 && lPort != -1)
        {
            // Retry until the decoder accepts the packet (its internal buffer
            // may be momentarily full).
            BOOL inData = PlayM4_InputData(lPort, pBuffer, dwBufSize);
            while (!inData)
            {
                Sleep(10);
                inData = PlayM4_InputData(lPort, pBuffer, dwBufSize);
                OutputDebugString(L"PlayM4_InputData failed \n");
            }
        }
        break;
    }
}
// Exception callback registered with NET_DVR_SetExceptionCallBack_V30.
// Only logs reconnect events; all other exception types are ignored.
void CALLBACK g_ExceptionCallBack(DWORD dwType, LONG lUserID, LONG lHandle, void *pUser)
{
    switch (dwType)
    {
    case EXCEPTION_RECONNECT:
        // BUG FIX: original used smart quotes (won't compile) and printed a
        // time_t with %d; time_t may be wider than int, so cast to long.
        printf("----------reconnect--------%ld\n", (long)time(NULL));
        break;
    default:
        break;
    }
}
// Display thread: pops decoded frames from MainImageVector and blits them
// into the IDC_PIC picture control of the application's main dialog.
// lParam is unused. Runs until the queue is drained by a racing consumer.
UINT ThreadProc_MainPlay(LPVOID lParam)
{
    while (1)
    {
        // Busy-wait (20 ms polls) until at least one frame is queued.
        while (MainImageVector.size() <= 0)
        {
            Sleep(20);
        }

        WaitForSingleObject(MainVectorhMutex, INFINITE);
        if (MainImageVector.size() == 0)
        {
            // Raced with another consumer: nothing left to show.
            // BUG FIX: the original returned here WITHOUT releasing the mutex,
            // deadlocking every other user of MainVectorhMutex.
            ReleaseMutex(MainVectorhMutex);
            return 0;
        }
        IplImage *pImg = *(MainImageVector.begin());
        MainImageVector.erase(MainImageVector.begin());
        ReleaseMutex(MainVectorhMutex);

        hWnd = AfxGetApp()->GetMainWnd()->GetSafeHwnd();
        HWND hPic = GetDlgItem(hWnd, IDC_PIC);
        HDC hDC = GetDC(hPic);
        CRect rect;
        GetClientRect(hPic, &rect);
        CvvImage cvvimg;
        cvvimg.CopyOf(pImg);
        cvvimg.DrawToHDC(hDC, &rect);
        // BUG FIX: the original leaked the window DC on every frame.
        ReleaseDC(hPic, hDC);
        cvReleaseImage(&pImg);
    }
    return 0;
}
void main() {
NET_DVR_Init();
NET_DVR_SetConnectTime(2000, 1);
NET_DVR_SetReconnect(10000, true);
LONG lUserID;
NET_DVR_DEVICEINFO_V30 struDeviceInfo;
lUserID = NET_DVR_Login_V30(“172.0.0.100”, 8000, “admin”, “12345”, &struDeviceInfo);
if (lUserID < 0)
{
printf(“Login error, %d\n”, NET_DVR_GetLastError());
NET_DVR_Cleanup();
return;
}
NET_DVR_SetExceptionCallBack_V30(0, NULL,g_ExceptionCallBack, NULL);
LONG lRealPlayHandle;
NET_DVR_PREVIEWINFO struPlayInfo = { 0 };
struPlayInfo.hPlayWnd = GetDlgItem(hWnd, IDC_PIC);
struPlayInfo.lChannel = 1;
struPlayInfo.dwStreamType = 0;
struPlayInfo.dwLinkMode = 1;
struPlayInfo.bBlocked = 0;
lRealPlayHandle = NET_DVR_RealPlay_V40(lUserID, &struPlayInfo, fRealDataCallBackMain, NULL);
if (lRealPlayHandle < 0)
{
AfxMessageBox(_T("获取失败"));
}
ProcThread_MainPlay = AfxBeginThread(ThreadProc_MainPlay, this, THREAD_PRIORITY_NORMAL);
Sleep(10000);
NET_DVR_StopRealPlay(lRealPlayHandle);
NET_DVR_Logout(lUserID);
NET_DVR_Cleanup();
return;
}
程序主要思路是,从视频流中解码出图像数据pImg,然后存入图像容器MainImageVector中。在显示线程ThreadProc_MainPlay中再从图像容器MainImageVector中读取出图像数据,往MFC图片控件中显示;为了防止读写冲突,需要设立一读写互斥量MainVectorhMutex来保护MainImageVector。
3 图像字符叠加
很多时候,我们不仅要实时显示监控画面,还想在画面上显示一些字符、图像信息,比如在进行行人识别、跟踪时,我们往往需要在画面上框出目标,因此图像字符有很大的应用。
3.1 基于实时流数据的字符叠加
基于实时流数据回调显示方法的字符叠加方法比较简单,直接在显示前往图像中绘制字符、图像即可(用opencv实现),即:
// Display thread with graphics overlay: identical to the plain display thread
// except that a rectangle is drawn on each frame (via OpenCV) before it is
// blitted to the IDC_PIC control. lParam is unused.
UINT ThreadProc_MainPlay(LPVOID lParam)
{
    while (1)
    {
        // Busy-wait (20 ms polls) until at least one frame is queued.
        while (MainImageVector.size() <= 0)
        {
            Sleep(20);
        }

        WaitForSingleObject(MainVectorhMutex, INFINITE);
        if (MainImageVector.size() == 0)
        {
            // BUG FIX: the original returned here WITHOUT releasing the mutex,
            // deadlocking every other user of MainVectorhMutex.
            ReleaseMutex(MainVectorhMutex);
            return 0;
        }
        IplImage *pImg = *(MainImageVector.begin());
        MainImageVector.erase(MainImageVector.begin());
        ReleaseMutex(MainVectorhMutex);

        // Overlay: draw a 60x60 yellow square in the top-left corner.
        cv::rectangle(Mat(pImg), CvRect(0,0,60,60), CV_RGB(255, 255, 0), 4, 8);

        hWnd = AfxGetApp()->GetMainWnd()->GetSafeHwnd();
        HWND hPic = GetDlgItem(hWnd, IDC_PIC);
        HDC hDC = GetDC(hPic);
        CRect rect;
        GetClientRect(hPic, &rect);
        CvvImage cvvimg;
        cvvimg.CopyOf(pImg);
        cvvimg.DrawToHDC(hDC, &rect);
        // BUG FIX: the original leaked the window DC on every frame.
        ReleaseDC(hPic, hDC);
        cvReleaseImage(&pImg);
    }
    return 0;
}
3.2 基于SDK解码的字符叠加
而基于SDK解码的方式,由于我们没有直接操作图像,所以不能往图像上绘制东西。实际上海康的开发人员早就意识到了这一点,在SDK中留有叠加字符图像的接口 NET_DVR_RigisterDrawFun。该接口主要完成注册回调函数,获得当前表面的设备上下文(Device Context,DC)。用户可以在这个 DC 上画图或写字,就好像在窗口的客户区 DC 上绘图,但这个 DC 不是窗口客户区的 DC,而是播放器 DirectDraw 里 Off-Screen 表面的 DC。
我们在NET_DVR_RealPlay_V40函数后面添加NET_DVR_RigisterDrawFun接口的实现即可:
// Illustrative skeleton (not complete code): shows where the drawing callback
// is registered relative to NET_DVR_RealPlay_V40; "..." elides the steps
// already shown in the full listing above.
void main() {
//---------------------------------------
// Initialization
NET_DVR_Init();
...
lRealPlayHandle = NET_DVR_RealPlay_V40(lUserID, &struPlayInfo, fRealDataCallBackMain, NULL);
if (lRealPlayHandle < 0)
{
AfxMessageBox(_T("获取失败"));
}
// Register the drawing callback.
// NOTE: "RigisterDrawFun" (sic) is the SDK's actual exported spelling.
if (!NET_DVR_RigisterDrawFun(lRealPlayHandle, fDrawFun, NULL))
{
AfxMessageBox(_T("绘图失败"));
}
...
}
// Drawing callback registered via NET_DVR_RigisterDrawFun: called by the
// player with the DC of its DirectDraw off-screen surface. Anything drawn
// here is composited over the video frame.
void CALLBACK fDrawFun(LONG lRealHandle, HDC hDc, DWORD dwUser)
{
    CDC dc;
    dc.Attach(hDc);
    CPen pen; // solid yellow pen, 2 px wide
    pen.CreatePen(PS_SOLID, 2, RGB(255, 255, 0));
    // BUG FIX: 'dc' is a CDC object, not a pointer — the original used 'dc->'.
    CPen* pOldPen = dc.SelectObject(&pen);
    dc.SelectStockObject(NULL_BRUSH);  // hollow brush: outline only
    dc.Rectangle(CRect(0, 0, 60, 60)); // 60x60 yellow square, top-left corner
    // Restore the previous pen and detach: the HDC belongs to the player, so
    // CDC's destructor must not delete it (the original never detached,
    // destroying the player's surface DC on every callback).
    dc.SelectObject(pOldPen);
    dc.Detach();
}
4 总结
在我们的实验过程中,发现基于数据流回调的方法由于需要用户自行解码、绘制、显示,会占用电脑太多CPU,造成很严重的卡帧,并且由于读取不及时,造成显示延后,不顺畅等问题。而基于SDK解码的方式,毕竟这是人家公司提供的开发包,优化做得非常好,显示得非常顺畅和及时,所以建议大家用SDK解码的方式。
经过测试,终于知道基于数据流的方法延迟问题出现在哪里了,其实因为用的是debug版本,把项目改成release版本就流畅好多了。
参考资料
1、海康摄像头 设备(IPC)网络 SDK 编程指南:http://www.hikvision.com/cn/download_more_570.html
2、OpenCV+海康威视摄像头的实时读取:http://blog.csdn.net/lonelyrains/article/details/50350052
本文内容由网友自发贡献,版权归原作者所有,本站不承担相应法律责任。如您发现有涉嫌抄袭侵权的内容,请联系:hwhale#tublm.com(使用前将#替换为@)