#include "cameraplayer.h" #include #include #include "LHQLogAPI.h" CameraPlayer::CameraPlayer(QObject *parent) : QObject(parent) { // qDebug() << "主线程ID:" << QThread::currentThreadId(); m_yuvQueue = new RingQueue(10); m_yuvQueue->setDefaultValue(nullptr); m_player = new PlayerGLWidget(); /* 连接信号和槽 */ connect(&m_frameTimer, &QTimer::timeout, this, &CameraPlayer::do_updateFrame); } CameraPlayer::~CameraPlayer() { stopRealPlay(); /* 退出登录 */ NET_DVR_Logout(m_loginID); /* 释放SDK资源,重复清除资源会怎么样 */ NET_DVR_Cleanup(); } /* 设置摄像机信息 */ bool CameraPlayer::initCamera(QString cameraIP, int cameraPort, QString cameraUser, QString cameraPwd) { /* 初始化海康摄像头SDK */ if(!NET_DVR_Init()) { LH_WRITE_ERROR("NET_DVR_Init 失败!"); return false; } /* 设置超时时间 */ NET_DVR_SetConnectTime(3000,3); NET_DVR_SetReconnect(); /* 设置异常消息回调函数 */ #ifdef Q_OS_LINUX NET_DVR_SetExceptionCallBack_V30(0, nullptr, exceptionCallBack, nullptr); #endif /* 用户登录,同步登陆 */ NET_DVR_USER_LOGIN_INFO loginInfo = {0}; /* 返回的设备信息,包括序列号、设备类型、通道号等 */ NET_DVR_DEVICEINFO_V40 deviceInfo = {0}; strncpy(loginInfo.sDeviceAddress, cameraIP.toStdString().c_str(), cameraIP.count()); strncpy(loginInfo.sUserName, cameraUser.toStdString().c_str(), cameraUser.count()); strncpy(loginInfo.sPassword, cameraPwd.toStdString().c_str(), cameraPwd.count()); loginInfo.wPort = cameraPort; /* 端口 */ loginInfo.bUseAsynLogin = 0; /* 0是同步登陆, 1是异步登录 */ LH_WRITE_LOG_DEBUG(QString("摄像机登录信息: %1:%2:%3:%4") .arg(loginInfo.sDeviceAddress) .arg(loginInfo.wPort) .arg(loginInfo.sUserName) .arg(loginInfo.sPassword)); /* 这里是同步登陆,返回值就是设备操作ID,异步登录需要在回调函数中取出返回的ID */ m_loginID = NET_DVR_Login_V40(&loginInfo, &deviceInfo); if(m_loginID < 0) { LH_WRITE_ERROR(QString("登录摄像机 %1:%2 失败,错误代码: %3").arg(cameraIP).arg(cameraPort).arg(NET_DVR_GetLastError())); NET_DVR_Cleanup(); /* 需不需要在这里清除资源? */ return false; } /* 取出通道号 */ m_camInfo.AChannelNum = 0; m_camInfo.AChannelStart = 0; m_camInfo.DChannelNum = 0; m_camInfo.DChannelStart = 0; m_camInfo.AChannelNum = deviceInfo.struDeviceV30.byChanNum; /* 模拟通道个数 */ m_camInfo.AChannelStart = deviceInfo.struDeviceV30.byStartChan; /* 模拟通道的起始通道号 */ unsigned char h8 = deviceInfo.struDeviceV30.byIPChanNum; /* 数字通道的最大个数需要组合起来 */ unsigned char l8 = deviceInfo.struDeviceV30.byHighDChanNum; m_camInfo.DChannelNum = h8 * 256 + l8; m_camInfo.DChannelStart = deviceInfo.struDeviceV30.byStartDChan;/* 数字通道起始号 */ LH_WRITE_LOG_DEBUG(QString("最大模拟通道号数:%1,起始号:%2").arg(m_camInfo.AChannelNum).arg(m_camInfo.AChannelStart)); LH_WRITE_LOG_DEBUG(QString("最大数字通道号数:%1,起始号:%2").arg(m_camInfo.DChannelNum).arg(m_camInfo.DChannelStart)); LH_WRITE_LOG("****** HK SDK初始化完成! 
******"); return true; } /** * @brief 实时播放监控,视频流在回调函数中 * * @param Channel * @return true * @return false */ bool CameraPlayer::realPlay(int channel) { /* 启动预览并设置回调数据流 */ NET_DVR_PREVIEWINFO playInfo = {0}; playInfo.hPlayWnd = 0; //需要SDK解码时句柄设为有效值,仅取流不解码时可设为空 playInfo.lChannel = channel; //预览通道号 playInfo.dwStreamType = 0; //0-主码流,1-子码流,2-码流3,3-码流4,以此类推 playInfo.dwLinkMode = 0; //0- TCP方式,1- UDP方式,2- 多播方式,3- RTP方式,4-RTP/RTSP,5-RSTP/HTTP playInfo.bBlocked = 1; //0- 非阻塞取流,1- 阻塞取流 /* 设置回调函数 */ m_realPlayHandle = NET_DVR_RealPlay_V40(m_loginID, &playInfo, realDataCallBack, this); if (m_realPlayHandle < 0) { LH_WRITE_ERROR(QString("NET_DVR_RealPlay_V40()调用错误,错误代码:%1").arg(NET_DVR_GetLastError())); return false; } /* 设置回调函数 */ // NET_DVR_SetStandardDataCallBack(m_realPlayHandle, realDataCallBackStandard, 0); // NET_DVR_SetRealDataCallBackEx(m_realPlayHandle, realDataCallBack, nullptr); return true; } /* 关闭预览 */ void CameraPlayer::stopRealPlay() { //关闭预览 NET_DVR_StopRealPlay(m_realPlayHandle); LH_WRITE_LOG("实时获取数据结束"); } /* 开始播放预览 */ void CameraPlayer::startPlay() { if(m_frameRate == 0) { LH_WRITE_ERROR("帧率为0,无法播放"); return; } /* 开启定时器 */ m_frameTimer.setTimerType(Qt::PreciseTimer); m_frameTimer.setInterval(1000 / m_frameRate); m_frameTimer.start(); } /* 设置播放窗口父指针 */ void CameraPlayer::setPlayerParent(QWidget* playWnd) { m_player->setParent(playWnd); } /* 设置播放窗口大小 */ void CameraPlayer::setPlayWndSize(int width, int height) { m_player->resize(width, height); } /* 更新一帧数据 */ void CameraPlayer::do_updateFrame() { /* 获取一帧 */ if(m_yuvQueue->isEmpty()) { LH_WRITE_LOG("环形队列为空,无法更新一帧数据"); return; } /* 以非阻塞的方式先获取一帧数据,获取成功后再出队 */ auto one = m_yuvQueue->front_pop_NoBlock(); if(one != nullptr) { m_player->updateFrame(*one); delete one; } } /** * @brief 异常回调函数 * * @param type 异常类型,目前就知道以下几个 * @param userID * @param handle * @param user */ void CameraPlayer::exceptionCallBack(unsigned int type, int userID,int handle,void* user) { /* 异常代码是16进制的 */ LH_WRITE_ERROR(QString("调用了异常回调函数,异常代码: %1").arg(type)); switch(type) { /* 报警上传时网络异常 */ case EXCEPTION_ALARM: LH_WRITE_ERROR("报警上传时网络异常!!!"); //TODO: 关闭报警上传 break; /* 网络预览时异常 */ case EXCEPTION_PREVIEW: LH_WRITE_ERROR("网络预览时网络异常!!!"); //TODO: 关闭网络预览 break; /* 预览时重连 */ case EXCEPTION_RECONNECT: break; case EXCEPTION_PLAYBACK: LH_WRITE_ERROR("回放异常!"); break; default: break; } } /** * @brief 实时预览回调函数,没有解码的数据流,这函数会运行在子线程中 * * @param realHandle 操作句柄 * @param dataType 数据类型 * @param pBuffer 数据指针 * @param bufSize 数据大小 * @param user */ void CameraPlayer::realDataCallBack(LONG realHandle, DWORD dataType, BYTE *pBuffer,DWORD bufSize,void* user) { // LH_WRITE_LOG_DEBUG(QString("realDataCallBack接收到了数据,数据大小为:%1").arg(bufSize)); // LH_WRITE_LOG_DEBUG(QString("realDataCallBack所在的线程: %1").arg(QThread::currentThreadId())); // qDebug() << "realDataCallBack所在的线程:" << QThread::currentThreadId(); /* 转换传入的指针,是这个类本身 */ auto cameraPlayer = static_cast(user); LONG playPort = cameraPlayer->m_playPort; PlayM4_GetPort(&realHandle); switch (dataType) { /* 系统头数据 */ case NET_DVR_SYSHEAD: LH_WRITE_LOG_DEBUG("系统头数据"); /* 获取播放库未使用的通道号,最多貌似可以有500个 */ if(!PlayM4_GetPort(&playPort)) { LH_WRITE_ERROR("获取播放库未使用的通道号失败"); break; } /* 打开流 */ if(bufSize > 0) { cameraPlayer->m_playPort = playPort; /* 最后一个参数是 设置播放器中存放数据流的缓冲区大小,不能太大也不能太小 */ if(!PlayM4_OpenStream(playPort, pBuffer, bufSize, 1024 * 1024)) { auto ret = PlayM4_GetLastError(playPort); LH_WRITE_ERROR(QString("打开流失败,错误代码: %1").arg(ret)); break; } /* 设置解码回调函数 */ bool ret2 = PlayM4_SetDecCallBackMend(playPort, DecCallBack, cameraPlayer); if(!ret2) { auto ret3 
                auto ret3 = PlayM4_GetLastError(playPort);
                LH_WRITE_ERROR(QString("Failed to set the decode callback, error code: %1").arg(ret3));
                break;
            }
            /* Start video decoding without passing a playback window */
            if(!PlayM4_Play(playPort, 0))
            {
                auto ret4 = PlayM4_GetLastError(playPort);
                LH_WRITE_ERROR(QString("Failed to start video decoding, error code: %1").arg(ret4));
                break;
            }
            /* Start audio decoding; requires a composite stream */
            if(!PlayM4_PlaySound(playPort))
            {
                auto ret5 = PlayM4_GetLastError(playPort);
                LH_WRITE_ERROR(QString("Failed to start audio decoding, error code: %1").arg(ret5));
                break;
            }
        }
        else
        {
            LH_WRITE_ERROR("System header data size is 0");
        }
        break;
    /* Video stream data (composite streams as well as video-only streams) */
    case NET_DVR_STREAMDATA:
        // LH_WRITE_LOG_DEBUG("Composite audio/video stream data");
        /* Feed the data to the decoder */
        if(bufSize > 0 && playPort != -1)
        {
            auto ret = PlayM4_InputData(playPort, pBuffer, bufSize);
            if(!ret)
            {
                LH_WRITE_ERROR("Failed to feed data to the decoder");
                break;
            }
        }
        break;
    /* Other data */
    case NET_DVR_AUDIOSTREAMDATA:
        LH_WRITE_LOG_DEBUG("Audio stream data");
        break;
    case NET_DVR_PRIVATE_DATA:
        LH_WRITE_LOG_DEBUG("Private data");
        break;
    default:
        LH_WRITE_LOG_DEBUG("Other data");
        break;
    }
}

/* Preview callback for the standard data stream */
void CameraPlayer::realDataCallBackStandard(LONG realHandle, DWORD dataType, BYTE *pBuffer, DWORD bufSize, DWORD user)
{
    LH_WRITE_LOG_DEBUG(QString("realDataCallBackStandard received data, size: %1").arg(bufSize));
    switch(dataType)
    {
    case NET_DVR_SYSHEAD:
        LH_WRITE_LOG("System header data");
        break;
    case NET_DVR_STREAMDATA:
        LH_WRITE_LOG("Composite audio/video stream data");
        break;
    case NET_DVR_STD_VIDEODATA:
        LH_WRITE_LOG("Standard video stream data");
        break;
    case NET_DVR_STD_AUDIODATA:
        LH_WRITE_LOG("Standard audio stream data");
        break;
    default:
        LH_WRITE_LOG("Other data");
        break;
    }
}

/**
 * @brief Decode callback; this function runs in a new worker thread
 *
 * @param nPort      Playback port number
 * @param pBuf       Data to decode
 * @param nSize      Data size
 * @param pFrameInfo Stream format
 * @param nUser      User data pointer
 * @param nReserved2
 */
void CameraPlayer::DecCallBack(int nPort, char *pBuf, int nSize, FRAME_INFO *pFrameInfo, void* nUser, int nReserved2)
{
    auto player = static_cast<CameraPlayer*>(nUser);
    // LH_WRITE_LOG(QString("Decode callback, decode port: %1 %2").arg(nPort).arg(player->m_playPort));
    // qDebug() << "DecCallBack thread:" << QThread::currentThreadId();
    /* Frame rate of the video; for audio this field is the sample rate */
    int frameRate = pFrameInfo->nFrameRate;
    /* Video data is T_YV12, audio data is T_AUDIO16 */
    if(pFrameInfo->nType == T_YV12)
    {
        /* Record the video frame rate */
        if(player->m_frameRate == 0)
        {
            player->m_frameRate = frameRate;
        }
        int width = pFrameInfo->nWidth;
        int height = pFrameInfo->nHeight;
        LH_WRITE_LOG_DEBUG(QString("Video size: %1 x %2, frame rate: %3").arg(width).arg(height).arg(frameRate));
        /* Screenshot flag: save the picture */
        /* Convert to YUV and store it in the ring queue */
        Image_YUV420* image = new Image_YUV420();
        player->YV12ToYUV420((unsigned char*)pBuf, *image, width, height);
        /* If the ring queue is full, drop the oldest frame */
        if(player->m_yuvQueue->isFull())
        {
            LH_WRITE_LOG_DEBUG("Ring queue is full, dropping one frame");
            auto one = player->m_yuvQueue->front_pop();
            delete one;
        }
        player->m_yuvQueue->push_NoBlock(image);
    }
    else if(pFrameInfo->nType == T_AUDIO16)
    {
        // LH_WRITE_LOG("Audio data");
    }
    else
    {
        LH_WRITE_LOG("Other data");
    }
}

/* Convert YV12 to RGB888 */
void CameraPlayer::YV12ToRGB888(unsigned char *pYV12Data, unsigned char *pRGB24Data, int width, int height)
{
    int ySize = width * height;
    int uvSize = ySize / 4;

    /* In YV12 the planes are laid out as Y, then V, then U */
    const uint8_t* yData = pYV12Data;
    const uint8_t* vData = pYV12Data + ySize;
    const uint8_t* uData = pYV12Data + ySize + uvSize;

    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            int yIndex = y * width + x;
            int uvIndex = (y / 2) * (width / 2) + (x / 2);

            uint8_t Y = yData[yIndex];
            uint8_t U = uData[uvIndex];
            uint8_t V = vData[uvIndex];

            // YUV to RGB
            int R = Y + 1.403 * (V - 128);
            int G = Y - 0.344 * (U - 128) - 0.714 * (V - 128);
            int B = Y + 1.770 * (U - 128);

            // Clamp to [0, 255]
            R = std::min(std::max(R, 0), 255);
            G = std::min(std::max(G, 0), 255);
            B = std::min(std::max(B, 0), 255);

            // Store the RGB data
            int rgbIndex = yIndex * 3;
            pRGB24Data[rgbIndex] = static_cast<unsigned char>(R);
            pRGB24Data[rgbIndex + 1] = static_cast<unsigned char>(G);
            pRGB24Data[rgbIndex + 2] = static_cast<unsigned char>(B);
        }
    }
}

/* Convert YV12 to YUV420 (planar, with separate Y/U/V buffers) */
void CameraPlayer::YV12ToYUV420(unsigned char *pYV12Data, Image_YUV420& yuvData, int width, int height)
{
    int y_size = width * height;
    int uv_size = y_size / 4;

    /* In YV12 the planes are laid out as Y, then V, then U */
    const uint8_t* y = pYV12Data;
    const uint8_t* v = pYV12Data + y_size;
    const uint8_t* u = pYV12Data + y_size + uv_size;

    /* Copy the Y component */
    yuvData.yData = QByteArray((const char*)y, y_size);
    /* Copy the U component */
    yuvData.uData = QByteArray((const char*)u, uv_size);
    /* Copy the V component */
    yuvData.vData = QByteArray((const char*)v, uv_size);

    /* Store the width and height */
    yuvData.width = width;
    yuvData.height = height;
}
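/*
 * Usage sketch (illustrative only): the call order implied by this class is
 * initCamera() -> setPlayerParent()/setPlayWndSize() -> realPlay() -> startPlay().
 * The address, port, credentials and channel below are placeholder values, and
 * parentWidget is a hypothetical QWidget owned by the caller.
 *
 *   CameraPlayer* player = new CameraPlayer(parentWidget);
 *   if (player->initCamera("192.168.1.64", 8000, "admin", "password"))
 *   {
 *       player->setPlayerParent(parentWidget);   // embed the OpenGL widget
 *       player->setPlayWndSize(640, 480);
 *       if (player->realPlay(1))                 // pull the main stream of channel 1
 *       {
 *           // startPlay() only starts the frame timer once the decode callback
 *           // has reported a non-zero frame rate, so call it after frames arrive.
 *           player->startPlay();
 *       }
 *   }
 */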