#include "QVideoPlayer.h"
#include "VideoStream.h"
#include "RawFileStream.h"
#include "AviFileStream.h"
#include "GDFileStream.h"
#include "QRawFileConfig.h"
#include "Arith_Tracker.h"
#include <fstream>
#include <QHeaderView>
#include <debugExport.h>
#include <qmessagebox.h>
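// Map a 16-bit grayscale buffer to 8 bits for display: build a histogram,
// clip roughly 5% of the darkest and brightest pixels to find the mapping
// range, then apply a linear gain/offset tuned around a target mean brightness.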
static void QPMap16BitTo8Bit(unsigned short* psh16BitData, long lDataLen, BYTE* pby8BitData)
{
if (psh16BitData == NULL || pby8BitData == NULL || lDataLen <= 0)
{
return;
}
// Pointer to the histogram data
int* pHist = new int[65536];
memset(pHist, 0, 65536 * sizeof(int));
int i = 0;
for (i = 0; i < lDataLen; i++)
{
pHist[psh16BitData[i]]++;
}
// Set the clipping threshold to: AreaSigma * image size / 100
int nSigma = int(0.05 * lDataLen);
int nSum = 0;
int nMin = 0;
int nMax = 0;
// Find the minimum and maximum values for the mapping
for (i = 0; i < 65536; i++)
{
nSum += pHist[i];
if (nSum >= nSigma)
{
nMin = i;
break;
}
}
nSum = 0;
for (i = 65535; i >= 0; i--)
{
nSum += pHist[i];
if (nSum >= nSigma)
{
nMax = i;
break;
}
}
// Compute contrast and brightness
float K = (float)(256.0 / (nMax - nMin + 1));
if (K > 1)
{
K = 1;
}
float C = (float)(128 - K * (nMax + nMin) / 2);
// 20190601: brightness adjustment
SINT32 nBrightMean = 118;
SINT32 nB = 50;
K = (FLOAT32)(200.f / (nMax - nMin + nB));
C = (FLOAT32)(nBrightMean - K * (nMin + nMax) / 2);
//FLOAT32 g_iContrast = 10; // adjustable 0~100
//FLOAT32 g_iBright = 10; // adjustable 0~100
K = float(fmax(0.0f, K));
//K = max(0.0f, min(K, 3.0f));
// Map the image
for (i = 0; i < lDataLen; i++)
{
int nValue = (int)(K * psh16BitData[i] + C);
if (nValue < 0)
{
pby8BitData[i] = 0;
}
else if (nValue > 255)
{
pby8BitData[i] = 255;
}
else
{
pby8BitData[i] = nValue;
}
}
delete[]pHist;
}
QVideoPlayer::QVideoPlayer(QObject *parent)
: QObject(parent)
{
m_VideoStream = NULL;
}
QVideoPlayer::QVideoPlayer(QObject *parent, vector<QImageViewer*> ImageViwerVec,QPlayerControl* pPlayControl, QArithModule* pArithModule)
: QObject(parent)
{
m_ImageViewerVec = ImageViwerVec;
this->m_ImageViewer = ImageViwerVec[0]; // by convention, viewer 0 is the primary display
this->m_PlayControl = pPlayControl;
this->timer = new QTimer(this);
this->config = new QRawFileConfig();
// Auto-play timer
connect(timer,&QTimer::timeout,this,&QVideoPlayer::Forward);
// Algorithm module
m_ArithRunner = pArithModule;
mArithBusy = false;
// Playback controls
connect(m_PlayControl,&QPlayerControl::sig_Play,this,&QVideoPlayer::Play);
connect(m_PlayControl,&QPlayerControl::sig_Pause,this,&QVideoPlayer::Pause);
connect(m_PlayControl,&QPlayerControl::sig_Stop,this,&QVideoPlayer::Stop);
connect(m_PlayControl,&QPlayerControl::sig_Next,this,&QVideoPlayer::NextFrame);
connect(m_PlayControl,&QPlayerControl::sig_Pre,this,&QVideoPlayer::LastFrame);
connect(m_PlayControl,&QPlayerControl::sig_JumpToFrame,this,&QVideoPlayer::Jump);
connect(m_PlayControl, &QPlayerControl::sig_Recycle, this, &QVideoPlayer::SetRecyle);
connect(m_PlayControl, &QPlayerControl::sig_SkipForward, this, &QVideoPlayer::SkipForward);
connect(m_PlayControl, &QPlayerControl::sig_SkipBackward, this, &QVideoPlayer::SkipBackward);
// Tie frame playback to processing on the algorithm thread
connect(this, &QVideoPlayer::sig_PlayOneFrame, m_ArithRunner, &QArithModule::ArithRun);
// When the algorithm thread finishes, it emits a signal; handle the result here
connect(m_ArithRunner, &QArithModule::resultReady, this, &QVideoPlayer::HandleArithDone); // the player checks whether processing has completed
// Bind mouse selection: click starts tracking, right-click cancels it
connect(m_ImageViewer, &QImageViewer::sig_clcik, m_ArithRunner, &QArithModule::startTrack);
connect(m_ImageViewer, &QImageViewer::sig_rightClick, m_ArithRunner, &QArithModule::cancelTrack);
//GenColorTable(1000);
bRecordingFlag = false;
}
QVideoPlayer::~QVideoPlayer()
{
}
void QVideoPlayer::UpdateInfo()
{
// Draw the current frame
DrawCurrentFrame();
// Draw the algorithm results
DrawArithResult();
// Draw other player markers
DrawPlayerInfo();
// Recording
if (bRecordingFlag)
{
addScenToVideo();
}
}
void QVideoPlayer::HandleArithDone()
{
// Mark the algorithm as idle
mArithBusy = false;
// Refresh the UI
UpdateInfo();
}
void QVideoPlayer::SetTableWidget(QTableWidget* pTableWidget)
{
m_TableWidget = pTableWidget;
}
void QVideoPlayer::SetPlotView(QPlotView* pPlotView)
{
m_PlotView = pPlotView;
}
void QVideoPlayer::SetArithResourceView(QResourceView* pResourceView)
{
m_ResourceView = pResourceView;
}
void QVideoPlayer::SetArithInfoTextWindow(QTextBrowser* textWin)
{
m_TextInfo = textWin;
}
void QVideoPlayer::SetCompassInfoWidget(QCompassWidget* compassView)
{
m_CompassWidget = compassView;
}
void QVideoPlayer::SetPanImageWidget(QPanView* panViewer)
{
m_panViewer = panViewer;
}
// Start playback
void QVideoPlayer::Play()
{
for(int i = 0;i<m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
if (p_VideoStream)
{
p_VideoStream->Play();
}
}
timer->start(1);
this->m_ImageViewerVec[0]->setFocus();
}
void QVideoPlayer::SetRecyle(bool state)
{
for(int i = 0;i<m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
if (p_VideoStream)
{
p_VideoStream->EnableCyclePlay(state);
}
}
}
// Pause playback
void QVideoPlayer::Pause()
{
for(int i = 0;i<m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
if (p_VideoStream)
{
p_VideoStream->Pause();
}
}
timer->stop();
}
// Toggle between play and pause
void QVideoPlayer::PlayorPause()
{
for(int i = 0;i<m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
if (p_VideoStream->GetPlayState() == PS_PLAY)
{
Pause();
}
else if (p_VideoStream->GetPlayState() == PS_PAUSE)
{
Play();
}
}
}
// Stop playback
void QVideoPlayer::Stop()
{
for(int i = 0;i < m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
if(p_VideoStream != nullptr)
p_VideoStream->Stop();
}
timer->stop();
}
// Step forward
void QVideoPlayer::Forward()
{
// If the algorithm is still busy, do not advance; skip this tick and wait until processing completes.
// The main thread must not block (e.g. on a QMutex), otherwise the UI stutters (lesson learned).
if (mArithBusy)
{
return;
}
for(int i = 0;i<m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
if(p_VideoStream != nullptr)
p_VideoStream->Forward();
}
ProcessFrame();
}
// Step backward
void QVideoPlayer::Backward()
{
// If the algorithm is still busy, do not advance; skip this tick and wait until processing completes.
if (mArithBusy)
{
return;
}
for(int i = 0;i<m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
if (p_VideoStream)
{
p_VideoStream->Backward();
}
}
ProcessFrame();
}
// Jump to a frame
void QVideoPlayer::Jump(int ind)
{
for(int i = 0;i<m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
if (p_VideoStream)
{
p_VideoStream->Goto(ind);
}
}
// Stop auto-play
Pause();
// When dragging the progress bar, only play the video; do not run the algorithm
//DrawCurrentFrame();
ProcessFrame();
}
// Next frame
void QVideoPlayer::NextFrame()
{
if (!m_VideoStream)
{
return;
}
// Stop auto-play
Pause();
Forward();
}
// Previous frame
void QVideoPlayer::LastFrame()
{
if (!m_VideoStream)
{
return;
}
// Stop auto-play
Pause();
Backward();
}
// Increase the fast-forward skip factor
void QVideoPlayer::SkipForward()
{
if (!m_VideoStream)
{
return;
}
for(int i = 0;i<m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
p_VideoStream->m_skipFrameCount++;
}
//PrintText(m_ImageViewer, 20, 100, QString::number(m_VideoStream->m_skipFrameCount));
}
// Decrease the skip factor (fast rewind)
void QVideoPlayer::SkipBackward()
{
if (!m_VideoStream)
{
return;
}
for(int i = 0;i<m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
p_VideoStream->m_skipFrameCount--;
}
//PrintText(m_ImageViewer, 20, 100, QString::number(m_VideoStream->m_skipFrameCount));
}
void QVideoPlayer::SetStreams(std::vector<VideoStream*> pStreamList)
{
m_VideoStreamVec = pStreamList;
}
void QVideoPlayer::SetImageViewers(std::vector<QImageViewer*> pImageViwerList)
{
m_ImageViewerVec = pImageViwerList;
}
void QVideoPlayer::Open(QString lpszFileName)
{
// Close any already-open video streams
for(int i = 0;i<m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
if (p_VideoStream == nullptr)
{
continue;
}
if(p_VideoStream->IsOpen())
{
delete p_VideoStream; // release the video stream
p_VideoStream = nullptr;
}
}
m_ArithRunner->setEnable(false); // stop the algorithm
Stop(); // stop the player
m_PlayControl->setStream(NULL);
m_ArithRunner->Log_Off(); // close the log
m_VideoStreamVec.clear();
// Open one stream and add it to the stream group
m_VideoStreamVec.push_back(OpenOneStream(lpszFileName));
m_VideoStream = m_VideoStreamVec[0];
// Save the file name
m_FileFullPath = lpszFileName;
// Playback control: bind the progress bar to the first stream
m_PlayControl->setStream(m_VideoStreamVec[0]);
m_PlayControl->setUpdatesEnabled(true);
// Clear the view
m_ImageViewer->InitView();
// Initialize logging
m_ArithRunner->Log_On(m_FileFullPath);
// Start playback
Play();
m_ArithRunner->setEnable(true); // start running the algorithm
m_ArithRunner->m_streamVec = m_VideoStreamVec; // set the data streams the algorithm runs on
if (m_VideoStreamVec[0] != nullptr)
{
m_ArithRunner->ArithInit(m_VideoStreamVec); // initialize the algorithm
}
}
VideoStream* QVideoPlayer::OpenOneStream(QString lpszFileName)
{
// Open a single stream
string extName = GetVideoFileExt(lpszFileName.toStdString());
VideoStream* pVid = NULL;
if(extName == "avi" || extName == "mp4" || extName == "MP4" )
{
pVid = new AviFileStream();
}
else if (extName == "GD")
{
pVid = new GDFileStream();
}
else if(extName == "raw" || extName == "xraw" || extName == "rawx" || extName == "yuv")
{
// Look for the default raw-file configuration in the current directory
int first = lpszFileName.lastIndexOf("/");
QString fileDir = lpszFileName.left(first);
config->readDefaultSetings(fileDir);
bool reply = config->exec(); // blocks until the dialog closes
if(reply == false)
{
return nullptr;
}
pVid = new RawFileStream(config->GetParaConfig());
// Special case
if (extName == "xraw" || extName == "rawx")
{
pVid->SetStreamType(eDSST_XRAW);
}
config->saveDefaultSetings(fileDir);
}
else
{
return nullptr;
}
// Open the video
//m_VideoStream->Open(lpszFileName.toStdString());
pVid->Open(std::string(lpszFileName.toLocal8Bit()));
return pVid;
}
void QVideoPlayer::ProcessFrame()
{
// Mark the algorithm as busy
mArithBusy = true;
// Emit the play-one-frame signal to notify the algorithm to run
emit sig_PlayOneFrame();
}
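// Wrap the stream's current frame buffer in a cv::Mat matching its pixel type.
// Most branches alias the raw buffer without copying; the YUV formats are
// converted to BGR into a new matrix.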
cv::Mat QVideoPlayer::getFrame(VideoStream* pStream)
{
cv::Mat varImg;
if (!pStream) {
return varImg;
}
BYTE* pData = pStream->GetFrameImageBuffer();
if (pData == nullptr)
{
return varImg;
}
int nWidth = pStream->GetImageWidth();
int nHeight = pStream->GetImageHeight();
PIXEL_TYPE type = pStream->GetPixelType();
if (type == PT_RGB8)
{
varImg = cv::Mat(nHeight, nWidth, CV_8UC1, pData);
}
else if (type == PT_RGB24)
{
varImg = cv::Mat(nHeight, nWidth, CV_8UC3, pData);
if (pStream->GetStreamType() == eDSST_RAW)
{
cv::cvtColor(varImg, varImg, COLOR_BGR2RGB);
}
}
else if (type == PT_Y16DATA)
{
varImg = cv::Mat(nHeight, nWidth, CV_16UC1, pData);
///*> Custom dimming: display as 8-bit */
//cv::Mat varImg1(nHeight, nWidth, CV_8UC1);
//QPMap16BitTo8Bit((unsigned short*)pData, nHeight * nWidth, varImg1.data);
//varImg = varImg1.clone();
}
else if (type == PT_UY16DATA)
{
varImg = cv::Mat(nHeight, nWidth, CV_16UC1, pData);
}
else if (type == PT_RGB32)
{
varImg = cv::Mat(nHeight, nWidth, CV_8UC4, pData);
}
else if (type == PT_YUV_UYVY)
{
cv::Mat src(nHeight, nWidth, CV_8UC2, pData);
cv::cvtColor(src, varImg, cv::COLOR_YUV2BGR_UYVY);
//cv::Mat varImg1(nHeight, nWidth, CV_8UC1);
//for (int i = 0; i < nHeight * nWidth; i++)
//{
// varImg1.data[i] = pData[2 * i + 1];
//}
//varImg = varImg1.clone();
}
else if (type == PT_YUV_NV12)
{
cv::Mat src(nHeight * 1.5, nWidth, CV_8UC1, pData);
cv::cvtColor(src, varImg, cv::COLOR_YUV2BGR_NV12);
}
return varImg;
}
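// Fetch the current frame from each open stream and draw it in the matching viewer.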
void QVideoPlayer::DrawCurrentFrame()
{
for(int i = 0;i<m_VideoStreamVec.size();i++)
{
auto p_VideoStream = m_VideoStreamVec[i];
auto p_ImageViewer = m_ImageViewerVec[i];
if (p_VideoStream == nullptr)
{
continue;
}
BYTE* pData = p_VideoStream->GetFrameImageBuffer();
if (pData == nullptr)
{
return;
}
int nWidth = p_VideoStream->GetImageWidth();
int nHeight = p_VideoStream->GetImageHeight();
PIXEL_TYPE type = p_VideoStream->GetPixelType();
cv::Mat mat = getFrame(p_VideoStream);
p_ImageViewer->DrawCVImage(mat);
}
}
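// Overlay all algorithm results on the main viewer: compass, text/table info,
// guide boxes, single-frame detections, alarm targets, tracker gates,
// pipe trajectories and the AI tracker output.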
void QVideoPlayer::DrawArithResult()
{
// No algorithm enabled, skip drawing
if(nArithVer == 0 || m_VideoStream == nullptr)
{
return;
}
// Update the compass
UpdateCompass(m_stInputPara.stServoInfo.fServoAz, m_stInputPara.stServoInfo.fServoPt);
// Print info
PrintInfo();
// Table
PrintTableInfo();
// Guide info
DrawGuideRect();
// Draw single-frame detections (supplied externally)
if (bEnableDrawFrameDetect)
{
DrawFrameDetectObjs();
// Draw small-target extreme points
//DrawDSTPoint(m_ImageViewer);
}
// Draw single-frame detections during tracking (internal to the tracking module)
//if (bEnableDrawTrackDet)
//{
// //DrawFrameRegionDetectObjs();
//}
// Draw pipe alarm targets
DrawAlarmTarget();
// Draw pipe tracking info
DrawTrackersInfo();
// Draw pipe trajectories
DrawPipeAglInfo();
// Draw guidance and out-of-FOV lock info
DrawGuideRect();
// Update the panorama in scan mode
if (m_stOutput.nSysMode == GLB_SYS_FSCAN)
{
UpdatePanImage();
}
// Draw siamRPN
DrawAITrackerInfo();
}
void QVideoPlayer::DrawDSPResult()
{
}
void QVideoPlayer::DrawPlayerInfo()
{
// Recording indicator
QGraphicsScene* scene = m_ImageViewer->imgScene;
if (bRecordingFlag)
{
if(m_VideoStream->GetCurrentFrameIndex() % 10 < 5)
scene->addEllipse(QRectF(-20, 0, 10, 10), QPen(QColor(255, 0, 0)),QBrush(QColor(255, 0, 0),Qt::SolidPattern));
}
}
void QVideoPlayer::PrintText(QImageViewer* view, int x, int y, QString str)
{
QGraphicsScene* scene = view->imgScene;
auto text = scene->addSimpleText(str);
text->setPos(x, y);
QFont ft; ft.setPointSize(4); ft.setBold(1);
text->setBrush(QBrush(QColor(255,255,0)));
text->setFont(ft);
}
void QVideoPlayer::PrintText(QImageViewer* view, int x, int y, QString str, QBrush brush)
{
QGraphicsScene* scene = view->imgScene;
auto text = scene->addSimpleText(str);
text->setPos(x, y);
QFont ft; ft.setPointSize(7); ft.setBold(1);
text->setBrush(brush);
text->setFont(ft);
}
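// Draw a reticle-style rectangle whose edges are broken at their midpoints,
// enforcing a minimum size, with a text label above the top-left corner.
// (The bCenter flag is currently unused.)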
void QVideoPlayer::DrawArtRect(QImageViewer* view, QPen pen, QRect rect, QString str, bool bCenter,int minSize = 10,QColor fontColor = QColor(255,0,0))
{
QGraphicsScene* scene = view->imgScene;
int cx = rect.x() + rect.width() * 0.5;
int cy = rect.y() + rect.height() * 0.5;
int w = MAX(rect.width(), minSize);
int h = MAX(rect.height(), minSize);
scene->addLine(cx - w * 0.5, cy - h * 0.5, cx - w * 0.25, cy - h * 0.5, pen);
scene->addLine(cx + w * 0.25, cy - h * 0.5, cx + w * 0.5, cy - h * 0.5, pen);
scene->addLine(cx - w * 0.5, cy + h * 0.5, cx - w * 0.25, cy + h * 0.5, pen);
scene->addLine(cx + w * 0.25, cy + h * 0.5, cx + w * 0.5, cy + h * 0.5, pen);
scene->addLine(cx - w * 0.5, cy - h * 0.5, cx - w * 0.5, cy - h * 0.25, pen);
scene->addLine(cx - w * 0.5, cy + h * 0.25, cx - w * 0.5, cy + h * 0.5, pen);
scene->addLine(cx + w * 0.5, cy - h * 0.5, cx + w * 0.5, cy - h * 0.25, pen);
scene->addLine(cx + w * 0.5, cy + h * 0.25, cx + w * 0.5, cy + h * 0.5, pen);
auto text = scene->addSimpleText(str);
text->setPos(QPoint(rect.x() - 10, rect.y() - 10));
QFont ft; ft.setPointSize(8); ft.setBold(0);
text->setBrush(QBrush(fontColor));
text->setFont(ft);
}
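// Draw every active tracker: the main tracking bracket, and in debug mode the
// SA/KCF/TLD search gates, the AID monitor/recapture regions and status text.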
void QVideoPlayer::DrawTrackersInfo()
{
QGraphicsScene* scene = m_ImageViewer->imgScene;
int num = m_stOutput.nTrackObjCnts;
for (size_t i = 0; i < num; i++)
{
auto obj = &m_stOutput.stTrackers[i];
auto SA_SizeType = m_stOutput.stTrackers[i].SA_SizeType;
auto fconf = m_stOutput.stTrackers[i].fConf;
auto src = m_stOutput.stTrackers[i].ArithSrc;
auto status = m_stOutput.stTrackers[i].unTrackingStatus;
auto lostCnt = m_stOutput.stTrackers[i].nPipeLostCnt;
auto nInPipesID = m_stOutput.stTrackers[i].nInPipesID;
GLB_SCEN_MODE m_type = ARIDLL_ExportTrkSceneMode(pEOTracker, obj->nInPipesID);
// Draw the TLD detection
RECT32S RC = TLD_GetBestNNRect(pEOTracker, obj->nInPipesID);
//Tracker_Ptr pTracker = ARIDLL_ExportTrackerPtr(pEOTracker, obj->nInPipesID);
if (m_type == GLB_SCEN_GROUND && true == bShowDebugMode)
{
//RECT32S RC = pTracker->pGroundTracker->m_pTLDTracker->TLD_GetBestNNRect();
//SINT32 nTLDClusterNum = pTracker->pGroundTracker->m_pTLDTracker->TLD_GetPara()->nClusterNum;
//for (size_t i = 0; i < nTLDClusterNum; i++)
{
//QRectF clusterRect(RC[i].x, RC[i].y, RC[i].w, RC[i].h);
QRectF clusterRect(RC.x, RC.y, RC.w, RC.h);
scene->addRect(clusterRect, QPen(QColor(255, 255, 255), 1));
}
// Draw the AID monitor box info
SINT32 nNeighborCnt = 0;
RECT16S* RM = ARIDLL_AID_GetObservInfo(pEOTracker, obj->nInPipesID, nNeighborCnt);
if (NULL != RM && RM->w > 0 && RM->h > 0)
{
QRectF AIDMonitorBox(RM->x, RM->y, RM->w, RM->h);
scene->addRect(AIDMonitorBox, QPen(QColor(255, 255, 255), 0.5));
QString str = QString(_S("观测区_%1")).arg(nNeighborCnt);
PrintText(m_ImageViewer, RM->x, RM->y - 10, str, QBrush(Qt::white));
}
// Draw the AID recapture box info
if (MEM_TRACKING == status)
{
RECT16S* RC = ARIDLL_AID_GetRecapRect(pEOTracker, obj->nInPipesID);
if (NULL != RC && RC->w > 0 && RC->h > 0)
{
QRectF AIDRecapBox(RC->x, RC->y, RC->w, RC->h);
scene->addRect(AIDRecapBox, QPen(QColor(0, 0, 0), 0.8));
QString str = _S("重捕区");
PrintText(m_ImageViewer, RC->x, RC->y - 10, str, QBrush(Qt::white));
}
}
auto tldBox = m_stOutput.stTrackers[i].TLD_SrBox;
QRectF qtldBox(tldBox.x, tldBox.y, tldBox.w, tldBox.h);
// Draw the TLD gate
scene->addRect(qtldBox, QPen(QColor(0, 0, 255), 0.5));
}
else
{
if (GLB_SCEN_MODE::GLB_SCEN_SKY == m_type)
{
DrawTrackSegResults(m_ImageViewerVec[5], pEOTracker, obj->nInPipesID);
}
}
// Draw the current tracking box
int cx = obj->nX;
int cy = obj->nY;
int w = obj->nObjW;
int h = obj->nObjH;
QRect bbox(cx - w / 2, cy - h / 2, w, h);
QString str;
QString str1;
if (true == bShowDebugMode)
{
str = QString::number(obj->nOutputID);
}
DrawArtRect(m_ImageViewer, QPen(QColor(255, 0, 0)), bbox, str, false,15, QColor(255,0,0));
// Draw the tracking gates
if (true == bShowDebugMode)
{
auto sabox = m_stOutput.stTrackers[i].SA_SrBox;
auto kcfBox = m_stOutput.stTrackers[i].KCF_SrBox;
auto tldBox = m_stOutput.stTrackers[i].TLD_SrBox;
QRectF qsabox(sabox.x, sabox.y, sabox.w, sabox.h);
QRectF qkcfBox(kcfBox.x, kcfBox.y, kcfBox.w, kcfBox.h);
QRectF qtldBox(tldBox.x, tldBox.y, tldBox.w, tldBox.h);
// Draw the SA gate
if (SA_SizeType == SizeType::AreaTarget)
{
scene->addRect(qsabox, QPen(QBrush(QColor(255, 255, 255)), 0.3, Qt::DashLine));
}
else if (SA_SizeType == SizeType::SmallTarget)
{
scene->addRect(qsabox, QPen(QColor(0, 0, 255), 0.3, Qt::DashLine));
}
else if (SA_SizeType == SizeType::MiddleTarget)
{
scene->addRect(qsabox, QPen(QColor(25, 25, 205), 0.3, Qt::DashLine));
}
else if (SA_SizeType == SizeType::DimTarget)
{
scene->addRect(qsabox, QPen(QColor(68, 68, 205), 0.3, Qt::DashLine));
}
// Draw the KCF gate (yellow)
scene->addRect(qkcfBox, QPen(QColor(255, 255, 0), 0.5));
// Draw the TLD gate
scene->addRect(qtldBox, QPen(QColor(0, 0, 255), 0.5));
// Show status text
str = QString(_S("ID:%1 conf%2,Src:%3,mem:%4")).arg(obj->nOutputID).arg(QString::number(fconf, 'f', 3)).arg(src).arg(lostCnt);
str1 = QString(_S("type:%1")).arg(obj->unClsType);
if (obj->nOutputID >= 2)
{
continue;
}
auto text = scene->addSimpleText(str);
auto text1 = scene->addSimpleText(str1);
if (sabox.w == 0 || sabox.h == 0)
{
text->setPos(QPoint(kcfBox.x - 10, kcfBox.y - 10));
text1->setPos(QPoint(kcfBox.x - 10, kcfBox.y + kcfBox.h + 10));
}
else
{
text->setPos(QPoint(sabox.x - 10, sabox.y - 10));
text1->setPos(QPoint(sabox.x - 10, sabox.y + sabox.h + 10));
}
QFont ft; ft.setPointSize(8);
text->setBrush(QBrush(QColor(Qt::red)));
text->setFont(ft);
}
if (bEnableDrawTrackDet)
{
// Query the target list during the tracking stage
DrawFrameRegionDetectObjs();
// Draw small-target extreme points
//POINT16S* pList = ARIDLL_GetSATracker_DSTPoint(m_ArithRunner->pEOTracker, nInPipesID);
//for (size_t i = 0; i < (640 / 16 * 512 / 16) * 2; i++)
//{
// auto p = pList[i];
// if (p.x > 0 && p.y > 0)
// {
// DrawCrossInImage(m_ImageViewer, QRect(p.x - 2, p.y - 2, 4, 4), QPen(QColor(100, 100, 34)));
// }
//}
}
if (bEnableDrawFastMatcher)
{
DrawFrameMatcherDetectObjs();
}
}
return;
}
void QVideoPlayer::DrawLostInfo()
{
return;
}
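// Draw single-frame detections: SA detector outputs in sky scenes, plus the
// AI detections passed in through m_Dll_RecordInput.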
void QVideoPlayer::DrawFrameDetectObjs()
{
QGraphicsScene* scene = m_ImageViewer->imgScene;
GLB_SCEN_MODE m_SceneType = ARIDLL_ExportSceneMode(pEOTracker);
if (GLB_SCEN_MODE::GLB_SCEN_SKY == m_SceneType)
{
int SANum = 0;
TARGET_OBJECT* pFrameTargetArray = ARIDLL_GetFullSATarget(pEOTracker, SANum);
for (size_t i = 0; i < SANum; ++i)
{
auto obj = &pFrameTargetArray[i];
int cx = obj->pfCenPos.x;
int cy = obj->pfCenPos.y;
int w = MAX(10, obj->snSize.w);
int h = MAX(10, obj->snSize.h);
//int w = obj->snSize.w;
//int h = obj->snSize.h;
QRectF bbox(cx - w / 2, cy - h / 2, w, h);
scene->addRect(bbox, QPen(QColor(255, 20, 147), 0.3));
}
}
int num = m_Dll_RecordInput.nInputAINum;
for (size_t i = 0; i < num; ++i)
{
auto obj = &m_Dll_RecordInput.stInputAI[i];
//int w = MAX(15, obj->snSize.w);
//int h = MAX(15, obj->snSize.h);
int w = obj->x2 - obj->x1;
int h = obj->y2 - obj->y1;
QRectF bbox(obj->x1, obj->y1, w, h);
scene->addRect(bbox, QPen(QColor(255, 147, 20), 0.3));
}
}
void QVideoPlayer::DrawFrameRegionDetectObjs()
{
QGraphicsScene* scene = m_ImageViewer->imgScene;
auto obj = &m_stOutput.stTrackers[0];
//Tracker_Ptr pTracker = ARIDLL_ExportTrackerPtr(pEOTracker, obj->nInPipesID);
//if (NULL == pTracker)
//{
// return;
//}
TSky_Output* pSky_Output = GetSkyTrackerObjectStatus(pEOTracker, obj->nInPipesID);
// If this is a sky (air) tracking scene
if (NULL != pSky_Output)
{
QString str;
for (size_t i = 0; i < pSky_Output->m_nTargetNum; i++)
{
auto obj = &pSky_Output->mTarget_Array[i];
int cx = obj->pfCenPos.x;
int cy = obj->pfCenPos.y;
//int w = MAX(15, obj->snSize.w);
//int h = MAX(15, obj->snSize.h);
int w = obj->snSize.w;
int h = obj->snSize.h;
QRectF bbox(cx - w / 2, cy - h / 2, w, h);
scene->addRect(bbox, QPen(QColor(20, 255, 147), 0.3));
str = QString::number(obj->fMatchConf, 'f', 2);
auto text = scene->addSimpleText(str);
text->setPos(QPoint(bbox.x() - 3, bbox.y() - 3));
QFont ft; ft.setPointSize(5);
text->setBrush(QBrush(QColor(Qt::black)));
text->setFont(ft);
}
}
}
void QVideoPlayer::DrawFrameMatcherDetectObjs()
{
QGraphicsScene* scene = m_ImageViewer->imgScene;
auto obj = &m_stOutput.stTrackers[0];
TSky_Output* pSky_Output = GetSkyTrackerObjectStatus(pEOTracker, obj->nInPipesID);
// If this is a sky (air) tracking scene
if (NULL != pSky_Output)
{
QString str;
for (size_t i = 0; i < pSky_Output->m_nMatcherNum; i++)
{
MATCHER_TARGET *obj = &pSky_Output->mMatcher_Array[i];
int cx = obj->pfCenPos.x;
int cy = obj->pfCenPos.y;
//int w = MAX(15, obj->snSize.w);
//int h = MAX(15, obj->snSize.h);
int w = obj->snSize.w;
int h = obj->snSize.h;
QRectF bbox(cx - w / 2, cy - h / 2, w, h);
scene->addRect(bbox, QPen(QColor(0, 255, 255), 0.3));
str = QString::number(obj->fDetConf, 'f', 2);
auto text = scene->addSimpleText(str);
text->setPos(QPoint(bbox.x() - 3, bbox.y() - 3));
QFont ft; ft.setPointSize(5);
text->setBrush(QBrush(QColor(Qt::white)));
text->setFont(ft);
}
}
}
void QVideoPlayer::UpdateCompass(float fAz, float fPt)
{
m_CompassWidget->updateDir(fAz, fPt);
}
void QVideoPlayer::DrawArithResourceOccupy()
{
}
void QVideoPlayer::DrawDetectionsInfo()
{
}
void QVideoPlayer::PrintTableInfo()
{
// Initialize the table header
QStringList headerText;
headerText << _S("批号") << _S("方位")<< _S("俯仰") << _S("预测方位") << _S("预测俯仰") << _S("管道Ind") << _S("检测情况D/InFOV/ALL")<< _S("ClassID");
m_TableWidget->setColumnCount(headerText.count());
QTableWidgetItem* headerItem;
for (int i = 0; i < m_TableWidget->columnCount(); i++)
{
headerItem = new QTableWidgetItem(headerText.at(i));
QFont font = headerItem->font();
font.setBold(true);
font.setPointSize(12);
//headerItem->setTextColor(Qt::blue);
headerItem->setFont(font);
m_TableWidget->setHorizontalHeaderItem(i, headerItem);
}
//m_TableWidget->horizontalHeader()->setSectionResizeMode(0, QHeaderView::Stretch);
//m_TableWidget->horizontalHeader()->setSectionResizeMode(1, QHeaderView::Stretch);
ARIDLL_OBJINFO* stAlarmObjs = m_stOutput.stAlarmObjs;
auto* stTrackObjs = m_stOutput.stTrackers;
// Pipe info
auto pPipeArray = ARIDLL_ExportPipeArrayPtr(pEOTracker);
//auto PipeNum = m_ArithRunner->m_stOutput.stDebugInfo.nMaxPipeNum;
int validObjNum = 0;
int index = 0;
// Output tracked targets
for (size_t i = 0; i < 10; i++)
{
auto obj = &stTrackObjs[i];
if (obj->nObjW == 0)
{
continue;
}
index = 0;
// The target sits in a pipe; output additional info
PIPE* pPipe = &pPipeArray[obj->nInPipesID];
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString::number(obj->nOutputID)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString("%1").arg(obj->fAz)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString("%1").arg(obj->fPt)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString("%1").arg(pPipe->afCurrentAgl.fAz)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString("%1").arg(pPipe->afCurrentAgl.fPt)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString::number(obj->nInPipesID)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString("[%1 / %2 / %3]").
arg(pPipe->unExistCnt).arg(pPipe->unInsideFOVCnt).arg(pPipe->unTotalCnt)));
validObjNum++;
}
// Output alarm targets
for (size_t i = 0; i < 50; i++)
{
auto obj = &stAlarmObjs[i];
if (stAlarmObjs[i].nObjW == 0)
{
continue;
}
index = 0;
// The target sits in a pipe; output additional info
PIPE* pPipe = &pPipeArray[obj->nInPipesID];
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString::number(obj->nOutputID)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString("%1").arg(obj->fAz)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString("%1").arg(obj->fPt)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString("%1").arg(pPipe->afCurrentAgl.fAz)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString("%1").arg(pPipe->afCurrentAgl.fPt)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString::number(obj->nInPipesID)));
m_TableWidget->setItem(validObjNum, index++, new QTableWidgetItem(QString("[%1 / %2 / %3]").
arg(pPipe->unExistCnt).arg(pPipe->unInsideFOVCnt).arg(pPipe->unTotalCnt)));
validObjNum++;
}
m_TableWidget->setRowCount(validObjNum);
}
void QVideoPlayer::PrintSkyInfo(QString str, QString strShow, ArithHandle hArithSrc, ARIDLL_OBJINFO* obj, GLB_OUTPUT out, ARIDLL_DEBUG_OUTPUT g_DbugOut)
{
////QString str1 = "<font color=\"#FF0000\">" + "1111" + "</font>";
//GLB_OUTPUT out = getGLBoutput();
//str = QString(_S("干扰弹:%1")).arg(out.stEvent.JMBMonitor.bJammingBombLaunch);
//strShow = "<font color=\"#FF0000\">" + str + "</font>";
//m_TextInfo->append(strShow);
//// 在图像上打印,用于录像
////int y0 = 20;
////if (!out.stEvent.JMBMonitor.bJammingBombLaunch)
////{
//// str = _S("抗干扰:无");
//// PrintText(m_ImageViewer, 0, y0, str,QBrush(Qt::green));
//// y0 += 20;
////}
////else
////{
//// str = _S("抗干扰:干扰弹事件");
//// if (out.stEvent.JMBMonitor.bReFindFlag)
//// {
//// str = _S("抗干扰:重捕模块激活……");
//// }
//// PrintText(m_ImageViewer, 0, y0, str, QBrush(Qt::red));
//// y0 += 20;
////}
//int y0 = 20;
//if (out.stEvent.JMBMonitor.bJammingBombLaunch)
//{
// str = _S("抗干扰:干扰弹事件");
// if (out.stEvent.JMBMonitor.bReFindFlag)
// {
// str = _S("抗干扰:重捕");
// }
// PrintText(m_ImageViewer, 0, y0, str, QBrush(Qt::red));
// y0 += 20;
//}
//if (out.stEvent.HLMonitor.bExplode)
//{
// str = _S("抗干扰:爆炸");
// PrintText(m_ImageViewer, 0, y0, str, QBrush(Qt::red));
// y0 += 20;
//}
//if(out.stEvent.HLMonitor.bJammingInterface)
//{
// str = _S("抗干扰:交错干扰");
// PrintText(m_ImageViewer, 0, y0, str, QBrush(Qt::red));
// y0 += 20;
//}
//if (out.stEvent.bSplitJamming)
//{
// str = _S("抗干扰:分裂");
// PrintText(m_ImageViewer, 0, y0, str, QBrush(Qt::red));
// y0 += 20;
//}
//
//str = QString("%1").arg(m_ArithRunner->m_stInputPara.unFrmId);
//PrintText(m_ImageViewer, 0, y0, str, QBrush(Qt::red));
//y0 += 20;
//if (out.stEvent.JMBMonitor.bJammingBombLaunch)
//{
// str = QString(_S("干扰计数:%1")).arg(out.stEvent.JMBMonitor.nJammingAllCnt);
// PrintText(m_ImageViewer, 0, y0, str, QBrush(Qt::red));
// y0 += 20;
// str = QString(_S("投放计数:%1pos%2%3")).arg(out.stEvent.JMBMonitor.nLaunchBombs)
// .arg(out.stEvent.JMBMonitor.ptBombPoint.x).arg(out.stEvent.JMBMonitor.ptBombPoint.y);
// PrintText(m_ImageViewer, 0, y0, str, QBrush(Qt::red));
// y0 += 20;
//}
if (NOT_TRACKING == obj->unTrackingStatus)
{
DAT_PARAMETERS* pDAT_stPara = ARIDLL_GetDAT_stPara(hArithSrc);
str = _S("检测类型:") + QString("DetectGrayType:%1").arg(pDAT_stPara->nDetectGrayType);
strShow += "<font color=\"#FFF\">" + str + "</font><br />";
//str = _S("信噪比阈值") + QString("fgdk:%1, fDimGdk:%2").arg(param_SkyTracker->prmTSkyDet.fSmallDetectGDK).arg(param_SkyTracker->prmTSkyDet.fDimGdk);
//strShow += "<font color=\"#FFF\">" + str + "</font><br />";
str = _S("分割阈值") + QString("Grad:%1, GrayBright:%2, GrayDark:%3").arg(pDAT_stPara->nGradThresMin).arg(pDAT_stPara->nGrayThresMinBright).
arg(pDAT_stPara->nGrayThresMinDark);
strShow += "<font color=\"#FFF\">" + str + "</font><br />";
str = _S("降采样信息") + QString("DetRatio: %1").arg(pDAT_stPara->nDSmpScale);
strShow += "<font color=\"#FFF\">" + str + "</font>";
m_TextInfo->append(strShow);
}
else if (STABLE_TRACKING == obj->unTrackingStatus || MEM_TRACKING == obj->unTrackingStatus)
{
FLOAT32 pdObj8BkgStdArray[80] = { 0 };
BBOOL pbObj8BkgStatus[80] = { 0 };
MINMAXRECT32S mrnBkgBlks[8] = { 0 };
UBYTE8 ubBkgIndex = 0;
BBOOL bComplexEnv = FALSE;
BBOOL bInterferenceMem = FALSE;
UBYTE8 bInfrnDirection = 0;
getBKMInform(hArithSrc, obj->nInPipesID, pdObj8BkgStdArray, pbObj8BkgStatus, &ubBkgIndex,
&bComplexEnv, &bInterferenceMem, &bInfrnDirection, mrnBkgBlks);
QGraphicsScene* scene = m_ImageViewer->imgScene;
for (size_t i = 0; i < 8; i++)
{
// Eight background regions: 0 top, 1 top-right, 2 right, 3 bottom-right, 4 bottom, 5 bottom-left, 6 left, 7 top-left
auto obj = &mrnBkgBlks[i];
QRectF bbox(obj->minX, obj->minY, obj->maxX - obj->minX, obj->maxY - obj->minY);
scene->addRect(bbox, QPen(QColor(147, 20, 255), 0.3));
}
TSky_Output* pSky_Output = GetSkyTrackerObjectStatus(pEOTracker, obj->nInPipesID);
str = _S("------------------检测信息------------------");
m_TextInfo->append(WHITE(str));
str = _S(" 复杂背景:") + QString("%1").arg(bComplexEnv);
strShow += "<font color=\"#FFF\">" + str + "</font> ";
str = _S(" 纵向杆遮挡:") + QString("%1").arg(bInterferenceMem);
strShow += "<font color=\"#FFF\">" + str + "</font><br />";
Param_SkyTracker* param_SkyTracker = &pSky_Output->mTrakingPara_Output;
str = _S("检测类型:") + QString("DetectGrayType:%1").arg(param_SkyTracker->prmTSkyDet.nDetectGrayType);
strShow += "<font color=\"#FFF\">" + str + "</font><br />";
str = _S("信噪比阈值") + QString("fgdk:%1, fDimGdk:%2").arg(param_SkyTracker->prmTSkyDet.fSmallDetectGDK).arg(param_SkyTracker->prmTSkyDet.fDimGdk);
strShow += "<font color=\"#FFF\">" + str + "</font><br />";
str = _S("分割阈值") + QString("Grad:%1, GrayBright:%2, GrayDark:%3").
arg(pSky_Output->mTrakingPara_Output.prmTSkyDet.fAreaDetectGradDiffThre).arg(pSky_Output->mTrakingPara_Output.prmTSkyDet.nGrayThresMinBright).
arg(pSky_Output->mTrakingPara_Output.prmTSkyDet.nGrayThresMinDark);
strShow += "<font color=\"#FFF\">" + str + "</font><br />";
DAT_PARAMETERS* pDAT_Trk_stPara = ARIDLL_GetTrkDAT_stPara(hArithSrc, obj->nInPipesID);
DAT_PARAMETERS* pDAT_stPara = ARIDLL_GetDAT_stPara(hArithSrc);
str = _S("降采样信息") + QString("DetRatio: %1, TrkRatio: %2").arg(pDAT_stPara->nDSmpScale).arg(pDAT_Trk_stPara->nDSmpScale);
strShow += "<font color=\"#FFF\">" + str + "</font>";
m_TextInfo->append(strShow);
str = _S("------------------跟踪信息------------------");
m_TextInfo->append(WHITE(str));
str = QString(_S("TST跟踪器——————————"));
m_TextInfo->append(WHITE(str));
str = QString(_S("X:%1,Y=%2,W=%3,H=%4,fObjPxlsCnt=%5").arg(QString::number(pSky_Output->ObjectStatusTST.ptPos.x, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusTST.ptPos.y, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusTST.sfSize.w, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusTST.sfSize.h, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusTST.fObjPxlsCnt, 'f', 2)));
m_TextInfo->append(WHITE(str));
str = QString(_S("unTotalCnt:%1,unContiLostCnt:%2,conf:%3").arg(pSky_Output->ObjectStatusTST.unTotalCnt).arg(pSky_Output->ObjectStatusTST.unContiLostCnt)
.arg(QString::number(pSky_Output->ObjectStatusTST.fConfidence, 'f', 3)));
m_TextInfo->append(str);
str = QString(_S("CEND跟踪器——————————"));
m_TextInfo->append(WHITE(str));
str = QString(_S("X:%1,Y=%2,W=%3,H=%4,fObjPxlsCnt=%5").arg(QString::number(pSky_Output->ObjectStatusCEND.ptPos.x, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusCEND.ptPos.y, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusCEND.sfSize.w, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusCEND.sfSize.h, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusCEND.fObjPxlsCnt, 'f', 2)));
m_TextInfo->append(WHITE(str));
str = QString(_S("unTotalCnt:%1,unContiLostCnt:%2,conf:%3").arg(pSky_Output->ObjectStatusCEND.unTotalCnt).arg(pSky_Output->ObjectStatusCEND.unContiLostCnt)
.arg(QString::number(pSky_Output->ObjectStatusCEND.fConfidence, 'f', 3)));
m_TextInfo->append(WHITE(str));
str = QString(_S("KCF跟踪器——————————"));
m_TextInfo->append(WHITE(str));
str = QString(_S("X:%1,Y=%2,W=%3,H=%4").arg(QString::number(pSky_Output->ObjectStatusKCF.ptPos.x, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusKCF.ptPos.y, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusKCF.sfSize.w, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusKCF.sfSize.h, 'f', 2)));
m_TextInfo->append(WHITE(str));
str = QString(_S("unTotalCnt:%1,unContiLostCnt:%2,conf:%3").arg(pSky_Output->ObjectStatusKCF.unTotalCnt).arg(pSky_Output->ObjectStatusKCF.unContiLostCnt)
.arg(QString::number(pSky_Output->ObjectStatusKCF.fConfidence, 'f', 3)));
m_TextInfo->append(WHITE(str));
str = QString(_S("决策跟踪器——————————"));
str = "<font color=\"#FF0000\">" + str + "</font>";
m_TextInfo->append(WHITE(str));
str = QString(_S("X:%1,Y=%2,W=%3,H=%4,fObjPxlsCnt=%5").arg(QString::number(pSky_Output->ObjectStatusDesc.ptPos.x, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusDesc.ptPos.y, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusDesc.sfSize.w, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusDesc.sfSize.h, 'f', 2))
.arg(QString::number(pSky_Output->ObjectStatusDesc.fObjPxlsCnt, 'f', 2)));
m_TextInfo->append(WHITE(str));
str = QString(_S("unTotalCnt:%1,unContiLostCnt:%2,conf:%3").arg(pSky_Output->ObjectStatusDesc.unTotalCnt).arg(pSky_Output->ObjectStatusDesc.unContiLostCnt)
.arg(QString::number(pSky_Output->ObjectStatusDesc.fConfidence, 'f', 3)));
m_TextInfo->append(WHITE(str));
}
//if (out.stScene.bComplexBKG)
//{
// str = QString(_S("进入复杂场景"));
// strShow = "<font color=\"#FF0000\">" + str + "</font>";
// m_TextInfo->append(strShow);
//}
//else
//{
// str = QString(_S("退出复杂场景"));
// strShow = "<font color=\"#FF0000\">" + str + "</font>";
// m_TextInfo->append(strShow);
//}
// ALARM_TARGET* TARGETLIST = ARIDLL_GetAlarmArray(m_ArithRunner->pEOTracker);
// BBOOL* idoccp = ARIDLL_GetIDArray(m_ArithRunner->pEOTracker);
// str = QString();
// for (size_t i = 0; i < GLB_ALARM_NUM; i++)
// {
// auto AlarmT = &TARGETLIST[i];
// str = QString("%1:ID:%2,PipesID:%3,(%4,%5),idOcc:%6").arg(i).arg(AlarmT->nBatchID_1).arg(AlarmT->nInPipesID_1)
// .arg(AlarmT->centerPTx).arg(AlarmT->centerPTy).arg(idoccp[i]);
// strShow = "<font color=\"#FF0000\">" + str + "</font>";
// m_TextInfo->append(strShow);
// }
}
void QVideoPlayer::PrintGroundInfo(QString str, GLB_OUTPUT out, ARIDLL_DEBUG_OUTPUT g_DbugOut)
{
str = QString(_S("对地跟踪器——————————"));
m_TextInfo->append(WHITE(str));
str = QString(_S("DeciStatus:%1").arg(g_DbugOut.nDecisionStatus));
m_TextInfo->append(WHITE(str));
str = QString(_S("[Desc输出]:XYWH(%1,%2,%3,%4)").arg(g_DbugOut.nX).arg(g_DbugOut.nY).arg(g_DbugOut.nW).arg(g_DbugOut.nH));
m_TextInfo->append(WHITE(str));
str = QString(_S("kcfStatus:%1,kcfRes:%2,kcfTH:%3").arg(g_DbugOut.nKcfStatus).arg(g_DbugOut.fKCFRes).arg(g_DbugOut.fLargeResTH));
m_TextInfo->append(WHITE(str));
str = QString(_S("kcfOcc:%1,kcfArrest:%2").arg(g_DbugOut.nOccKCFStatus).arg(g_DbugOut.nArrestKCFStatus));
m_TextInfo->append(WHITE(str));
str = QString(_S("AIStatus:%1,AIRes:%2,AINum:%3,AIDetTH:%4").arg(g_DbugOut.nAIStatus).arg(g_DbugOut.fAITRes).arg(g_DbugOut.nAITNum).arg(g_DbugOut.nObjNumArrestTH));
m_TextInfo->append(WHITE(str));
str = QString(_S("AIJam:%1,AIChange:%2").arg(g_DbugOut.nAIJamCnt).arg(g_DbugOut.nAIChangCnt));
m_TextInfo->append(WHITE(str));
str = QString(_S("AIOcc:%1,AIArrest:%2").arg(g_DbugOut.nOccAIStatus).arg(g_DbugOut.nArrestAIStatus));
m_TextInfo->append(WHITE(str));
str = QString(_S("TLDNum:%1,TLDRes:%2, LearnNum:%3").arg(g_DbugOut.nTLDNum).arg(g_DbugOut.fMaxNNConf).arg(g_DbugOut.nLearnCnt));
m_TextInfo->append(WHITE(str));
str = QString(_S("决策跟踪器——————————"));
m_TextInfo->append(WHITE(str));
str = QString(_S("X:%1Y=%2").arg(out.ObjectStatus.ptPos.x).arg(out.ObjectStatus.ptPos.y));
m_TextInfo->append(WHITE(str));
str = QString(_S("unTotalCnt:%1").arg(out.ObjectStatus.unTotalCnt));
m_TextInfo->append(WHITE(str));
str = QString(_S("unContiLostCnt:%1").arg(out.ObjectStatus.unContiLostCnt));
m_TextInfo->append(WHITE(str));
}
void QVideoPlayer::PrintInfo()
{
//QGraphicsScene* scene = m_ImageViwerListVec[2]->imgScene;
//scene->clear();
GLB_OUTPUT out = ARIDLL_Export_GLB_OUTPUT(pEOTracker);
ARIDLL_INPUTPARA *pStInputPara = &m_stInputPara;
m_TextInfo->clear();
//状态
QString str;
QString strShow;
str = QString(_S("帧编号:%1,帧频:%2")).arg(pStInputPara->unFrmId).arg(pStInputPara->unFreq);
m_TextInfo->append(WHITE(str));
str = QString(_S("方位角:%1,俯仰角:%2")).arg(m_stInputPara.stServoInfo.fServoAz).arg(m_stInputPara.stServoInfo.fServoPt);
m_TextInfo->append(WHITE(str));
float fAglReso = pStInputPara->stCameraInfo.fAglReso < 1e-6 ?
FLOAT32(ANGLE(pStInputPara->stCameraInfo.fPixelSize / pStInputPara->stCameraInfo.nFocus / 1000.0f)) : pStInputPara->stCameraInfo.fAglReso;
str = QString(_S("焦距:%1,像元尺寸:%2,视频源:%3,角分辨率:%4")).arg(m_stInputPara.stCameraInfo.nFocus).arg(m_stInputPara.stCameraInfo.fPixelSize)
.arg(m_stInputPara.stCameraInfo.unVideoType).arg(fAglReso);
m_TextInfo->append(WHITE(str));
str = QString(_S("Yaw:%1,Pitch:%2,Roll:%3")).arg(m_stInputPara.stAirCraftInfo.stAtt.fYaw)
.arg(m_stInputPara.stAirCraftInfo.stAtt.fPitch)
.arg(m_stInputPara.stAirCraftInfo.stAtt.fRoll);
m_TextInfo->append(WHITE(str));
if (m_stOutput.nSysMode == GLB_SYS_WAIT)
str = QString(_S("系统状态:空闲 "));
else if (m_stOutput.nSysMode == GLB_SYS_FSCAN)
str = QString(_S("系统状态:扫描 "));
else if (m_stOutput.nSysMode == GLB_SYS_AUTO)
str = QString(_S("系统状态:无人值守 "));
else if (m_stOutput.nSysMode == GLB_SYS_STARE)
str = QString(_S("系统状态:凝视"));
m_TextInfo->append(WHITE(str));
if (m_stOutput.nStatus == GLB_STATUS_WAIT)
str = QString(_S("算法状态:待命 "));
if (m_stOutput.nStatus == GLB_STATUS_SEARCH)
str = QString(_S("算法状态:搜索 "));
else if (m_stOutput.nStatus == GLB_STATUS_TRACK)
str = QString(_S("算法状态:单目标跟踪 "));
else if (m_stOutput.nStatus == GLB_STATUS_LOST)
str = QString(_S("算法状态:丢失 "));
else if (m_stOutput.nStatus == GLB_STATUS_MOTRACK)
str = QString(_S("状态:多目标跟踪 "));
else if (m_stOutput.nStatus == GLB_STATUS_FSCAN)
str = QString(_S("状态:扇扫 "));
else if (m_stOutput.nStatus == 11)
str = QString(_S("状态:自动截获 "));
else if (m_stOutput.nStatus == 12)
str = QString(_S("状态:半自动截获 "));
else
str = QString(_S("状态:未知 "));
m_TextInfo->append(WHITE(str));
str = QString(_S("输入单帧目标:%1个")).arg(m_stDebugOutPut.nDetectObjsNum);
m_TextInfo->append(WHITE(str));
str = QString(_S("输出告警目标:%1个")).arg(m_stOutput.nAlarmObjCnts);
m_TextInfo->append(WHITE(str));
str = QString(_S("算法耗时:%1ms")).arg(m_stDebugOutPut.Arith_time);
m_TextInfo->append(WHITE(str));
// Get the main tracker's target object and show debug info according to its bound tracking scene
ARIDLL_OBJINFO* obj = &m_stOutput.stTrackers[0];
GLB_SCEN_MODE m_type = GLB_SCEN_NONE;
if (GLB_STATUS_SEARCH == m_stOutput.nStatus)
{
m_type = ARIDLL_ExportSceneMode(pEOTracker);
}
else if (GLB_STATUS_TRACK == m_stOutput.nStatus)
{
m_type = ARIDLL_ExportTrkSceneMode(pEOTracker, obj->nInPipesID);
}
if (GLB_SCEN_MODE::GLB_SCEN_GROUND == m_type)
{
PrintGroundInfo(str, out, m_stDebugOutPut);
}
else if (GLB_SCEN_MODE::GLB_SCEN_SKY == m_type)
{
PrintSkyInfo(str, strShow, pEOTracker, obj, out, m_stDebugOutPut);
}
}
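// Draw a polyline through the points in pSet; optionally mark each vertex
// with a small square.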
void QVideoPlayer::DrawLineInImage(QImageViewer* view, QVector<QPoint> pSet, QPen pen, bool bRectBlock)
{
if (pSet.size() == 0)
{
return;
}
QRect viewRect = view->geometry();
QGraphicsScene* scene = view->imgScene;
QPainterPath path;
path.moveTo(pSet[0]);
for (size_t i = 0; i < pSet.size(); i++)
{
path.lineTo(pSet[i]);
// Mark each vertex with a small square
QRect rc(pSet[i].x() - 3, pSet[i].y() - 3, 6, 6);
if (bRectBlock)
{
scene->addRect(rc, pen);
}
}
scene->addPath(path, pen);
}
void QVideoPlayer::DrawCrossInImage(QImageViewer* view, QRect rect, QPen pen)
{
QRect viewRect = view->geometry();
QGraphicsScene* scene = view->imgScene;
QPoint left1(rect.x(), rect.y() + rect.height() * 0.5);
QPoint right1(rect.x() + rect.width() * 0.3, rect.y() + rect.height() * 0.5);
QPoint left2(rect.x() + rect.width() * 0.7, rect.y() + rect.height() * 0.5);
QPoint right2(rect.x() + rect.width(), rect.y() + rect.height() * 0.5);
QPoint up1(rect.x() + rect.width() * 0.5, rect.y());
QPoint down1(rect.x() + rect.width() * 0.5, rect.y() + rect.height() * 0.3);
QPoint up2(rect.x() + rect.width() * 0.5, rect.y() + rect.height() * 0.7);
QPoint down2(rect.x() + rect.width() * 0.5, rect.y() + rect.height());
scene->addLine(QLine(left1, right1),pen);
scene->addLine(QLine(left2, right2), pen);
scene->addLine(QLine(up1, down1), pen);
scene->addLine(QLine(up2, down2), pen);
}
void QVideoPlayer::DrawSkyLine(QImageViewer* view,int y)
{
}
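// Draw externally guided targets (100x100 boxes labelled with their IDs)
// and the current capture gate, if one is set.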
void QVideoPlayer::DrawGuideRect()
{
auto guide_Targets = ARIDLL_GetTargetGuide(pEOTracker);
QGraphicsScene* scene = m_ImageViewer->imgScene;
for (size_t i = 0; i < GUIDE_TARGET_NUM; i++)
{
auto t = &guide_Targets[i];
if (t->ID)
{
QString str = "Guide:" + QString::number(t->ID);
auto text = scene->addSimpleText(str);
text->setPos(QPoint(t->ptGuidePOS.x - 50, t->ptGuidePOS.y - 50 - 9));
QFont ft; ft.setPointSize(3);
text->setBrush(QBrush(QColor(Qt::green)));
text->setFont(ft);
scene->addRect(QRectF(t->ptGuidePOS.x - 50, t->ptGuidePOS.y - 50, 100, 100), QPen(QColor(255, 255, 0)));
}
}
auto capturebox = m_stOutput.CaptureBox;
if (capturebox.w && capturebox.h)
{
QRectF qcapbox(capturebox.cx - capturebox.w / 2, capturebox.cy - capturebox.h / 2, capturebox.w, capturebox.h);
// Draw the capture gate
scene->addRect(qcapbox, QPen(QColor(0, 255, 0), 0.5, Qt::DashDotLine));
}
}
void QVideoPlayer::DrawTrackListInfo(QImageViewer* view)
{
//DrawAglList(view, &ArithOutput.ObjAglListsNear, QPen(QColor(255, 255, 255), 0.5, Qt::DotLine));
//DrawAglList(view, &ArithOutput.ObjAglListsLong, QPen(QColor(255, 100, 100), 0.5, Qt::DotLine));
// Draw the predicted trajectory position
//POINT32F pt;
//ARIDLL_EORadarCalcObjX_Y(ArithOutput.ObjAglListsNear.arfPredict.afAngle.fAz, ArithOutput.ObjAglListsNear.arfPredict.afAngle.fPt, &pt);
//QRect rc(pt.x - 10, pt.y - 10, 20, 20);
//DrawCrossInImage(m_ImageViewer, rc, QPen(QColor(255, 255, 255), 1, Qt::DotLine));
//ARIDLL_EORadarCalcObjX_Y(ArithOutput.ObjAglListsLong.arfPredict.afAngle.fAz, ArithOutput.ObjAglListsLong.arfPredict.afAngle.fPt, &pt);
//QRect rc1(pt.x - 30, pt.y - 30, 60, 60);
//DrawCrossInImage(m_ImageViewer, rc1, QPen(QColor(255, 100, 100), 1, Qt::DotLine));
}
void QVideoPlayer::DrawDSTPoint(QImageViewer * view)
{
auto pList = ARIDLL_GetMaxPoint(pEOTracker);
for (size_t i = 0; i < (640/16 * 512/16) * 2; i++)
{
auto p = pList[i];
if(p.x > 0 && p.y > 0)
{
DrawCrossInImage(view,QRect(p.x - 2,p.y -2 ,4,4),QPen(QColor(100,100,34)));
}
}
}
// void QVideoPlayer::GenColorTable(int nLen)
// {
// // for (size_t i = 0; i < nLen; i++)
// // {
// // int r = qrand() % 200;
// // int g = qrand() % 200;
// // int b = qrand() % 200;
// // m_colorTable[i] = QColor(r, g, b);
// // }
// }
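// Render the current viewer scene into an off-screen pixmap and append it
// as one frame to the open cv::VideoWriter.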
void QVideoPlayer::addScenToVideo()
{
// Grab the scene
QPixmap pixSaveImage(nVideoWidth, nVideoHeight);
pixSaveImage.fill(QColor(0, 0, 0, 0)); // fill with transparent color
QPainter painterTanns(&pixSaveImage);
painterTanns.setRenderHint(QPainter::Antialiasing, true);
painterTanns.setRenderHint(QPainter::TextAntialiasing, true);
painterTanns.setRenderHint(QPainter::SmoothPixmapTransform, true);
auto imgScene = m_ImageViewer->scene();
//imgScene->render(&painterTanns);
imgScene->render(&painterTanns);
//m_ImageViewer->render(&painterTanns);
QImage Image = pixSaveImage.toImage();
cv::Mat src = m_ImageViewer->QImage2cvMat(Image);
cv::cvtColor(src, src, cv::COLOR_RGBA2RGB);
cvVideoWriter << src;
}
void QVideoPlayer::OnStopRecord()
{
if (!bRecordingFlag)
{
return;
}
if (!cvVideoWriter.isOpened())
{
return;
}
cvVideoWriter.release();
bRecordingFlag = false;
}
void QVideoPlayer::OnGrabImage()
{
if (m_VideoStream == nullptr)
{
return;
}
// Determine the snapshot size; make it a bit larger for clarity
int nImageWidth = m_VideoStream->GetImageWidth();
int nImageHeight = m_VideoStream->GetImageHeight();
if (nImageWidth < 1024)
{
float r = 1024.0 / nImageWidth;
nImageWidth *= r;
nImageHeight *= r;
}
if (nImageHeight < 768)
{
float r = 768.0 / nImageHeight;
nImageWidth *= r;
nImageHeight *= r;
}
QDateTime current_date_time = QDateTime::currentDateTime();
QString current_date = current_date_time.toString("yyyyMMddhhmm");
int ind = m_FileFullPath.lastIndexOf('.');
QString FrameID = "_" + QString::number(m_VideoStream->GetCurrentFrameIndex());
QString recordPath = m_FileFullPath.left(ind) + FrameID + ".jpg";
QPixmap pixSaveImage(nImageWidth, nImageHeight);
pixSaveImage.fill(QColor(0, 0, 0, 0)); // fill with transparent color
QPainter painterTanns(&pixSaveImage);
painterTanns.setRenderHint(QPainter::Antialiasing, true);
painterTanns.setRenderHint(QPainter::TextAntialiasing, true);
painterTanns.setRenderHint(QPainter::SmoothPixmapTransform, true);
auto imgScene = m_ImageViewer->scene();
//imgScene->render(&painterTanns);
imgScene->render(&painterTanns);
QImage Image = pixSaveImage.toImage();
Image.save(recordPath);
}
void QVideoPlayer::OnStartRecord()
{
if (cvVideoWriter.isOpened())
{
cvVideoWriter.release();
}
if (m_VideoStream == nullptr)
{
return;
}
// Determine the recording size; make it a bit larger for clarity
nVideoWidth = m_VideoStream->GetImageWidth();
nVideoHeight = m_VideoStream->GetImageHeight();
if (nVideoWidth < 1024 || nVideoHeight < 768)
{
float r = 1024.0 /nVideoWidth;
nVideoWidth *= r;
nVideoHeight *= r;
}
if (nVideoHeight < 768)
{
float r = 768.0 / nVideoHeight;
nVideoWidth *= r;
nVideoHeight *= r;
}
QDateTime current_date_time = QDateTime::currentDateTime();
QString current_date = current_date_time.toString("yyyyMMddhhmm");
int ind = m_FileFullPath.lastIndexOf('.');
string recordPath = m_FileFullPath.left(ind).toStdString() + current_date.toStdString() + ".avi";
cvVideoWriter.open(recordPath, cv::VideoWriter::fourcc('M', 'J', 'P', 'G'),25, cv::Size(nVideoWidth, nVideoHeight),1);
if (!cvVideoWriter.isOpened())
{
QMessageBox::critical(NULL, _S("错误"), _S("无法创建录像文件"));
return;
}
// Mark recording as started
bRecordingFlag = true;
}
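// Draw the AI tracker (siamRPN / NanoTrack) output box when it reports a valid size.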
void QVideoPlayer::DrawAITrackerInfo()
{
auto p_GLB_AITrackOutput = &g_GLB_AITrackOutput;
if (p_GLB_AITrackOutput->fObjW * p_GLB_AITrackOutput->fObjH == 0)
{
return;
}
// Draw the current tracking box
int cx = p_GLB_AITrackOutput->fX;
int cy = p_GLB_AITrackOutput->fY;
int w = MAX(15, p_GLB_AITrackOutput->fObjW);
int h = MAX(15, p_GLB_AITrackOutput->fObjH);
QRect bbox(cx - w / 2, cy - h / 2, w, h);
QString str;
if (g_GLB_AITrackOutput.type == AITrackerType::DaSaimRPN)
{
str = "siamRPN";
}
else if(g_GLB_AITrackOutput.type == AITrackerType::NanoTrack)
{
str = "NANO";
}
DrawArtRect(m_ImageViewer, QPen(QBrush(QColor(255,255,0)), 2), bbox, str, false);
}
void QVideoPlayer::UpdatePanImage()
{
if (!m_VideoStream) {
return;
}
//auto panImage = m_ArithRunner->m_panParam;
//cv::Mat image(panImage.PanHeight, panImage.PanWidth, CV_8UC1, panImage.pPanImage);
//m_panViewer->showPanImage(image);
}
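// Draw per-pipe trajectory information: for the tracking pipe, the long/short
// term history lists and predicted positions; for other alarm pipes inside the
// FOV, the detection history converted from angles back to image coordinates.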
void QVideoPlayer::DrawPipeAglInfo()
{
if (!bEnableDrawFlightPath)
{
return;
}
QGraphicsScene* scene = m_ImageViewer->imgScene;
auto pPipeArray = ARIDLL_ExportPipeArrayPtr(pEOTracker);
auto PipeNum = 99;//m_stDebugOutPut.nMaxPipeNum;
// Color table indexed by pipe number, by wcw04046 @ 2019/01/21
UBYTE8 ubClorR[10] = { 200,35,50,75,100,125,155,170,190,200 };
UBYTE8 ubClorG[10] = { 120,170,190,200,10,35,50,75,100,125 };
UBYTE8 ubClorB[10] = { 120,200,10,35,50,155,170,75,100,125 };
PIPE* pPipe = NULL;
// Iterate over pipes
for (int P = 0; P < PipeNum; P++)
{
pPipe = &pPipeArray[P];
if (!pPipe->bOccupy)
{
continue;
}
// Skip non-alarm pipes
if (!pPipe->bAlarm)
{
continue;
}
if (!pPipe->bInsideFOV)
{
continue;
}
if (pPipe->bTrackingPipe)
{
////// Main tracked target: draw long- and short-term predictions
auto TrackListInfo = ARIDLL_GetTrackerHistInfo(pEOTracker, P);
DrawAglList(m_ImageViewer, &TrackListInfo->ObjAglListsNear, QPen(QColor(255, 255, 255), 0.5, Qt::DotLine));
DrawAglList(m_ImageViewer, &TrackListInfo->ObjAglListsLong, QPen(QColor(255, 100, 100), 0.5, Qt::DotLine));
QPen pen1(QPen(QColor(255, 255, 255), 0.5, Qt::SolidLine));
DrawCrossInImage(m_ImageViewer, QRect(pPipe->stMotionMod_mean.crnObjPrediRtLong.cx - 40, pPipe->stMotionMod_mean.crnObjPrediRtLong.cy - 40, 80, 80), pen1);
QPen pen2(QPen(QColor(255, 20, 20), 0.5, Qt::SolidLine));
DrawCrossInImage(m_ImageViewer, QRect(pPipe->stMotionMod_mean.crnObjPrediRtNear.cx - 25, pPipe->stMotionMod_mean.crnObjPrediRtNear.cy - 25, 50, 50), pen2);
}
else if(bEnableDrawFrameDetect)
{
int m = pPipe->ubEnd;
int nTargetCnt = MIN(GLB_PIPE_DEPTH_MAX, (SINT32)(pPipe->unExistCnt));
// Convert angles to coordinates and store them in a queue
QVector<QPoint>pSet;
// Iterate over the targets in the pipe
for (int i = 0; i < nTargetCnt; i++)
{
TARGET_OBJECT* pTargetObj = &pPipe->objHistoryList[m];
m = (m + GLB_PIPE_DEPTH_MAX - 1) % GLB_PIPE_DEPTH_MAX;
float fAz = pTargetObj->afAngle.fAz;
float fPt = pTargetObj->afAngle.fPt;
if (pTargetObj->bObject)
{
// Get the pipe target's position in the image
POINT32F pt;
pt = ARIDLL_GetImagePosFromStablePole(pEOTracker, pTargetObj->afAngle);
pSet.push_back(QPoint(pt.x, pt.y));
}
}
if (pSet.size() == 0)
{
continue;
}
QPen pen(QColor(ubClorR[P], ubClorG[P], ubClorB[P]));
// Draw the predicted position of a lost pipe
if (pPipe->bLost)
{
DrawCrossInImage(m_ImageViewer, QRect(pPipe->ptCurrentPnt.x - 3, pPipe->ptCurrentPnt.y - 3, 9, 9), pen);
}
DrawLineInImage(m_ImageViewer, pSet, pen);
if (pPipe->bAlarm)
{
QString str = QString(_S("ID:%1,P:%2 D:%3/%4")).arg(pPipe->nAlarmBatchID_1).arg(P).arg(pPipe->unExistCnt).arg(pPipe->unTotalCnt);
PrintText(m_ImageViewer, pSet[0].x(), pSet[0].y() + 10, str);
}
}
}
}
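// Draw the alarm targets reported by the pipes (bracket plus ID/type label),
// the detector segmentation results, and the current frame ID in the corner.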
void QVideoPlayer::DrawAlarmTarget()
{
// Skip drawing when the pipe alarm overlay is disabled
if (!bEnableDrawPipeInfo)
{
return;
}
QGraphicsScene* scene = m_ImageViewer->imgScene;
for (size_t i = 0; i < m_stOutput.nAlarmObjCnts; i++)
{
auto obj = m_stOutput.stAlarmObjs[i];
QString str = QString(_S("ID:%1,Type:%2")).arg(obj.nOutputID).arg(obj.unClsType);
auto cloor = QColor(0, 255, 0);
if(obj.nPipeLostCnt > 0)
{
cloor = QColor(20, 20, 20);
}
DrawArtRect(m_ImageViewer, QPen(cloor),
QRect(obj.nX - obj.nObjW / 2, obj.nY - obj.nObjH / 2, obj.nObjW, obj.nObjH), str, FALSE, 15, QColor(25,255,255));
}
auto obj = &m_stOutput.stTrackers[0];
//if (GLB_SCEN_MODE::GLB_SCEN_SKY == pTracker->m_type)
{
DrawDetectSegResults(m_ImageViewerVec[5], pEOTracker);
}
QString str = QString("%1").arg(m_stInputPara.unFrmId);
auto text = scene->addSimpleText(str);
text->setPos(QPoint(10, 10));
QFont ft; ft.setPointSize(4); ft.setBold(1);
text->setBrush(QBrush(QColor(Qt::blue)));
text->setFont(ft);
}
/*************************************************
// Method: convertTo3Channels
// Description: Convert a single-channel image into a three-channel image
// Returns: cv::Mat
// Parameter: binImg single-channel image
*************************************************/
cv::Mat convertTo3Channels(const cv::Mat& binImg)
{
cv::Mat three_channel = cv::Mat::zeros(binImg.rows, binImg.cols, CV_8UC3);
vector<cv::Mat> channels;
for (int i = 0; i < 3; i++)
{
channels.push_back(binImg);
}
merge(channels, three_channel);
return three_channel;
}
void QVideoPlayer::DrawTrackSegResults(QImageViewer* view, ArithHandle hArithSrc, int pipeID)
{
DAT_PARAMETERS* pDAT_Trk_stPara = ARIDLL_GetTrkDAT_stPara(hArithSrc, pipeID);
DAT_OUTPUT* pDAT_Trk_stOutput = ARIDLL_GetTrkDAT_stOutput(hArithSrc, pipeID);
//DAT_OUTPUT* pDAT_stOutput = pTracker->pSkyTracker->pSATracker->pDAT_Module->getDAT_stOutput();
if (NULL == pDAT_Trk_stOutput->DAT_pBinary_Gray || NULL == pDAT_Trk_stOutput->DAT_pBinary_Mag)
{
return;
}
cv::Mat TrkGrayBinary(pDAT_Trk_stOutput->snSrDSmp.h, pDAT_Trk_stOutput->snSrDSmp.w, CV_8UC1, pDAT_Trk_stOutput->DAT_pBinary_Gray);
cv::Mat TrkGradBinary(pDAT_Trk_stOutput->snSrDSmp.h, pDAT_Trk_stOutput->snSrDSmp.w, CV_8UC1, pDAT_Trk_stOutput->DAT_pBinary_Mag);
TrkGrayBinary = TrkGrayBinary * 255;
TrkGradBinary = TrkGradBinary * 255;
cv::resize(TrkGrayBinary, TrkGrayBinary, cv::Size(), pDAT_Trk_stPara->nDSmpScale, pDAT_Trk_stPara->nDSmpScale);
cv::resize(TrkGradBinary, TrkGradBinary, cv::Size(), pDAT_Trk_stPara->nDSmpScale, pDAT_Trk_stPara->nDSmpScale);
cv::Mat FuseImg(pDAT_Trk_stPara->nDSmpScale * pDAT_Trk_stOutput->snSrDSmp.h, pDAT_Trk_stPara->nDSmpScale * pDAT_Trk_stOutput->snSrDSmp.w, CV_8UC3);
std::vector<cv::Mat> planes;
cv::split(FuseImg, planes);
planes.at(0) = TrkGrayBinary;
planes.at(2) = TrkGradBinary;
planes.at(1) = 0;
cv::merge(planes, FuseImg);
//for (int nIndex = 0; nIndex < nObjsCnt; nIndex++)
//{
// cv::Rect ret(ptTargetArray[nIndex].mrnRect.minX, ptTargetArray[nIndex].mrnRect.minY, (ptTargetArray[nIndex].mrnRect.maxX - ptTargetArray[nIndex].mrnRect.minX), (ptTargetArray[nIndex].mrnRect.maxY - ptTargetArray[nIndex].mrnRect.minY));
// cv::rectangle(FuseImg, ret, cv::Scalar(0, 255, 0));
//}
//cv::imshow("分割结果:灰度(B)_梯度(R)", FuseImg);
//cv::imshow("灰度分割结果", TmpGrayBinary);
//cv::imshow("梯度分割结果", TmpGradBinary);
//cv::waitKey(1);
cv::Mat TmpGrayBinary3C = convertTo3Channels(TrkGrayBinary);
cv::Mat TmpGradBinary3C = convertTo3Channels(TrkGradBinary);
cv::Mat lineImage = cv::Mat(TmpGradBinary3C.rows, 6, CV_8UC3, cv::Scalar(255, 255, 255));
//cv::line(TmpGradBinary3C, cv::Point(0, 0), cv::Point(0, TmpGradBinary3C.rows - 1), Scalar(255, 255, 255), 2, LINE_8);
//cv::line(TmpGradBinary3C, cv::Point(TmpGradBinary3C.cols - 1, 0), cv::Point(TmpGradBinary3C.cols - 1, TmpGradBinary3C.rows - 1),
// Scalar(255, 255, 255), 2, LINE_8);
std::vector<cv::Mat> matrices = { TmpGrayBinary3C, lineImage,
TmpGradBinary3C, lineImage, FuseImg };
cv::Mat out;
cv::hconcat(matrices, out);
view->DrawCVImage(out);
}
void QVideoPlayer::DrawDetectSegResults(QImageViewer* view, ArithHandle hArithSrc)
{
//Detectors* pDetectors = ARIDLL_ExportDetectorsPtr(pEOTracker);
//DAT_PARAMETERS* pDAT_stPara = pDetectors->pDAT_Module->GetDatParm();
//DAT_OUTPUT* pDAT_stOutput = pDetectors->pDAT_Module->getDAT_stOutput();
DAT_PARAMETERS* pDAT_stPara = ARIDLL_GetDAT_stPara(hArithSrc);
DAT_OUTPUT* pDAT_stOutput = ARIDLL_GetDAT_stOutput(hArithSrc);
if (NULL == pDAT_stOutput ||
NULL == pDAT_stOutput->DAT_pBinary_Gray || NULL == pDAT_stOutput->DAT_pBinary_Mag)
{
return;
}
cv::Mat DetGrayBinary(pDAT_stOutput->snSrDSmp.h, pDAT_stOutput->snSrDSmp.w, CV_8UC1, pDAT_stOutput->DAT_pBinary_Gray);
cv::Mat DetGradBinary(pDAT_stOutput->snSrDSmp.h, pDAT_stOutput->snSrDSmp.w, CV_8UC1, pDAT_stOutput->DAT_pBinary_Mag);
DetGrayBinary = DetGrayBinary * 255;
DetGradBinary = DetGradBinary * 255;
cv::resize(DetGrayBinary, DetGrayBinary, cv::Size(), pDAT_stPara->nDSmpScale, pDAT_stPara->nDSmpScale);
cv::resize(DetGradBinary, DetGradBinary, cv::Size(), pDAT_stPara->nDSmpScale, pDAT_stPara->nDSmpScale);
cv::Mat FuseImg(pDAT_stPara->nDSmpScale * pDAT_stOutput->snSrDSmp.h, pDAT_stPara->nDSmpScale * pDAT_stOutput->snSrDSmp.w, CV_8UC3);
std::vector<cv::Mat> planes;
cv::split(FuseImg, planes);
planes.at(0) = DetGrayBinary;
planes.at(2) = DetGradBinary;
planes.at(1) = 0;
cv::merge(planes, FuseImg);
cv::Mat DetGrayBinary3C = convertTo3Channels(DetGrayBinary);
cv::Mat DetGradBinary3C = convertTo3Channels(DetGradBinary);
cv::Mat lineImage = cv::Mat(DetGradBinary3C.rows, 3, CV_8UC3, cv::Scalar(255, 255, 255));
//cv::line(TmpGradBinary3C, cv::Point(0, 0), cv::Point(0, TmpGradBinary3C.rows - 1), Scalar(255, 255, 255), 1, LINE_8);
//cv::line(TmpGradBinary3C, cv::Point(TmpGradBinary3C.cols - 1, 0), cv::Point(TmpGradBinary3C.cols - 1, TmpGradBinary3C.rows - 1),
// Scalar(255, 255, 255), 1, LINE_8);
std::vector<cv::Mat> matrices = { DetGrayBinary3C, lineImage,
DetGradBinary3C, lineImage, FuseImg };
cv::Mat out;
cv::hconcat(matrices, out);
view->DrawCVImage(out);
}
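// Draw a target's angular history as an image-space polyline: walk the circular
// history buffer from oldest to newest, convert each angle to image coordinates,
// and keep only points that fall inside the scene rect.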
void QVideoPlayer::DrawAglList(QImageViewer* view, OBJ_ANGLE_R* pObjAglLists, QPen pen)
{
QRect viewRect = view->geometry();
QGraphicsScene* scene = view->imgScene;
int nEnd = pObjAglLists->nEnd;
ANGLE_R* parHistoryList = (ANGLE_R*)pObjAglLists->parHistoryList;
int m = nEnd = pObjAglLists->nEnd;
int nListSize = pObjAglLists->nListSize;
if (nListSize == 0)
return;
int nStep = 1;
int nViewCnt = MAX(1, MIN(pObjAglLists->nCnt / nStep, nListSize));
FLOAT32 fAz, fPt;
//MSSu, 20160409: draw the target trajectory from oldest to newest
if (pObjAglLists->nCnt >= nListSize)
{
m = (nEnd + 1) % nListSize;
}
else
{
m = 0;
}
// Convert angles to coordinates and store them in a queue
QVector<QPoint>pSet;
for (int i = 0; i < nViewCnt; i += nStep)
{
fAz = parHistoryList[m].afAngle.fAz;
fPt = parHistoryList[m].afAngle.fPt;
POINT32F pt = ARIDLL_GetImagePosFromStablePole(pEOTracker, parHistoryList[m].afAngle);
//m = (m + nListSize + nStep) % nListSize;
m = (m + nListSize + 1) % nListSize;
//POINT32F pt;
//ARIDLL_EORadarCalcObjX_Y(fAz, fPt, &pt);
//if (i % 2)
{
QRect label(QPoint(pt.x - 1, pt.y - 1), QPoint(pt.x + 1, pt.y + 1));
auto a = scene->sceneRect();
if (a.contains(label)) // do not draw outside the scene
{
pSet.push_back(QPoint(pt.x, pt.y));
//scene->addRect(label, QPen(QColor(212, 55, 141), 0.3));
}
}
}
DrawLineInImage(view, pSet, pen);
}