1. S3315 field debugging: add raw-data parsing; the protocol parsing still needs to be completed.

2. Fix the range limit for targets leaving the search area.
main
11566hyc 7 months ago
parent b9cebc5aa3
commit 3b7caaddb9

@@ -319,7 +319,7 @@ int main()
if (2 == bcmd)
{
// Unlock the conventional tracker
ARIDLL_unLockCommand(pTracker, SelectCX, SelectCY, 0);
ARIDLL_unLockCommand(pTracker);
#if TEST_WITH_AIT
// Unlock the AI tracker
g_GLB_AITracker->stopTrack();

@@ -36,7 +36,7 @@ BBOOL Arith_EOController::Arith_Controller(GD_VIDEO_FRAME_S img)
BBOOL ubSuccessFlag = FALSE;
//XLOG_DEBUG("XLOGGER IS RUNNIG!");
LOG_DEBUG("Arith_Controller!:g_GLB_stPara.nStatus:{},g_GLB_stCommand.ubCmd", g_GLB_stPara.nStatus, g_GLB_stCommand.ubCmd);
LOG_DEBUG("Arith_Controller!:g_GLB_stPara.nStatus:{},g_GLB_stCommand.ubCmd:{}", g_GLB_stPara.nStatus, g_GLB_stCommand.ubCmd);
//Fetch and respond to mission-computer commands
CMD_GetCommand();
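
The one-character fix above adds the missing {} placeholder for g_GLB_stCommand.ubCmd. Assuming LOG_DEBUG forwards to an fmt-style formatter (which the {} syntax suggests), surplus arguments are silently ignored rather than rejected, so the command value simply never appeared in the log. A minimal sketch with C++20 std::format showing the same behavior:

#include <format>
#include <iostream>

int main()
{
    int nStatus = 3;
    int ubCmd = 2;

    // Missing placeholder: ubCmd is accepted but ignored, so its value is never printed.
    std::cout << std::format("nStatus:{},ubCmd", nStatus, ubCmd) << '\n';

    // Fixed format string: both values appear.
    std::cout << std::format("nStatus:{},ubCmd:{}", nStatus, ubCmd) << '\n';
    return 0;
}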

@@ -214,7 +214,7 @@ void Arith_EOController::CMD_RespondCommand(void)
g_GLB_stPara.bSelectObjManual = true;
// For automatic lock, clear the lock flag
if (g_GLB_stPara.stLockCtrl = LockMode::LOCK_AUTO)
if (g_GLB_stPara.stLockCtrl == LockMode::LOCK_AUTO)
{
g_GLB_stPara.stLockCtrl = LockMode::LOCK_NONE;
}
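
The hunk above turns an assignment into a comparison: the old condition overwrote stLockCtrl with LOCK_AUTO and, as long as that enumerator is nonzero, always took the branch. The real LockMode type is not shown in this diff; the sketch below assumes a plain unscoped enum (the buggy form would not even compile for an enum class) to reproduce the failure mode that -Wall/-Wparentheses warns about:

#include <iostream>

// Hypothetical stand-in for the project's LockMode type, which is not part of this diff.
enum LockMode { LOCK_NONE = 0, LOCK_AUTO = 1, LOCK_MANUAL = 2 };

int main()
{
    LockMode stLockCtrl = LOCK_MANUAL;

    // Bug pattern: '=' assigns LOCK_AUTO, the nonzero result converts to true,
    // so the branch is always taken and the previous state is clobbered.
    if (stLockCtrl = LOCK_AUTO)
    {
        std::cout << "always reached, stLockCtrl clobbered to " << stLockCtrl << '\n';
    }

    stLockCtrl = LOCK_MANUAL;

    // Fixed pattern: '==' only compares and leaves the state untouched.
    if (stLockCtrl == LOCK_AUTO)
    {
        std::cout << "not reached while stLockCtrl is LOCK_MANUAL\n";
    }
    return 0;
}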

@@ -1992,10 +1992,14 @@ BBOOL DetectSmallObj::DST_PipeTargetReDetect(GD_VIDEO_FRAME_S img, SINT32 nWidth
FLOAT32 fgdk = MAX(m_DST_stPara.fgdk / 2.0f,2.0f);
//If the target moves outside the search area, do not lower the threshold to search again; let the pipe disappear
if (mrnSrRect.minX < pnPipeSearchPoint.x
&& pnPipeSearchPoint.x < mrnSrRect.maxX
&& mrnSrRect.minY < pnPipeSearchPoint.y
&& pnPipeSearchPoint.y < mrnSrRect.maxY)
//if (mrnSrRect.minX < pnPipeSearchPoint.x
// && pnPipeSearchPoint.x < mrnSrRect.maxX
// && mrnSrRect.minY < pnPipeSearchPoint.y
// && pnPipeSearchPoint.y < mrnSrRect.maxY)
if (mrnRect.minX < pnPipeSearchPoint.x
&& pnPipeSearchPoint.x < mrnRect.maxX
&& mrnRect.minY < pnPipeSearchPoint.y
&& pnPipeSearchPoint.y < mrnRect.maxY)
{
//If a target is detected, reset the pipe's pending-deletion flag by clearing pPipe->bLost
memset(ptTarget, 0, sizeof(TARGET_OBJECT));
@@ -2258,10 +2262,10 @@ BBOOL DetectSmallObj::DST_PipeTargetRReDetect(GD_VIDEO_FRAME_S img, SINT32 nWidt
FLOAT32 fgdk = MIN(m_DST_stPara.fgdk / 2.0f, 2.0f);
//If the target moves outside the search area, do not lower the threshold to search again; let the pipe disappear
if (mrnSrRect.minX < pnPipeSearchPoint.x
&& pnPipeSearchPoint.x < mrnSrRect.maxX
&& mrnSrRect.minY < pnPipeSearchPoint.y
&& pnPipeSearchPoint.y < mrnSrRect.maxY)
if (mrnRect.minX < pnPipeSearchPoint.x
&& pnPipeSearchPoint.x < mrnRect.maxX
&& mrnRect.minY < pnPipeSearchPoint.y
&& pnPipeSearchPoint.y < mrnRect.maxY)
{
//If a target is detected, reset the pipe's pending-deletion flag by clearing pPipe->bLost
memset(ptTarget, 0, sizeof(TARGET_OBJECT));
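
Both re-detect paths above repeat the same four exclusive comparisons, now against mrnRect instead of mrnSrRect. Purely as a refactoring sketch (the rectangle and point types below are hypothetical stand-ins, not the project's structures), the test can live in one helper so the two call sites cannot drift apart again:

#include <cassert>

// Hypothetical stand-ins for the project's min/max rectangle and point types.
struct RectMinMax { int minX, minY, maxX, maxY; };
struct Point2D { int x, y; };

// True when the point lies strictly inside the rectangle, matching the
// exclusive '<' comparisons used in DST_PipeTargetReDetect / DST_PipeTargetRReDetect.
static bool ContainsStrict(const RectMinMax& r, const Point2D& p)
{
    return r.minX < p.x && p.x < r.maxX
        && r.minY < p.y && p.y < r.maxY;
}

int main()
{
    RectMinMax mrnRect{ 0, 0, 640, 512 };
    assert(ContainsStrict(mrnRect, Point2D{ 320, 256 }));   // inside: re-detect with the lowered threshold
    assert(!ContainsStrict(mrnRect, Point2D{ 640, 256 }));  // outside or on the edge: let the pipe disappear
    return 0;
}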

@@ -48,63 +48,39 @@ void ARIDLL_EOArithInitWithMode(ArithHandle hArith, int nWidth, int nHeight, GD_
{
return ((Arith_EOController*)hArith)->Arith_SystemInit(nWidth, nHeight, nSysMode, nScenMode);
}
int save_Y16(ArithHandle h,GD_VIDEO_FRAME_S img, const int width, const int height, const int unFrmId)
{
const std::string path = "./Neolog/Ori_Y16_";
// Save the Y16 data as a binary file
const std::string filePath = path + std::to_string(unFrmId) + ".yuv";
std::ofstream outFile(filePath, std::ios::binary);
if (!outFile.is_open())
{
LOG_DEBUG("无法创建文件");
std::cerr << "无法创建文件: " << filePath << std::endl;
return -1;
}
outFile.write(reinterpret_cast<const char*>(img.u64VirAddr[0]), width * height * sizeof(uint16_t));
outFile.close();
LOG_DEBUG("Y16图像数据已保存到:{}", filePath);
std::cout << "Y16图像数据已保存到: " << filePath << std::endl;
return 0;
}
int read_y16(Arith_EOController* pArith, GD_VIDEO_FRAME_S img, const int width, const int height, const int unFrmId)
{
// Create an OpenCV Mat to hold the Y16 data
cv::Mat y16Image(height, width, CV_16UC1, img.u64VirAddr[0]);
// Normalize the Y16 data to the 0-255 range and convert to 8-bit
cv::Mat y8Image;
double minVal, maxVal;
cv::minMaxLoc(y16Image, &minVal, &maxVal); // find the minimum and maximum values
y16Image.convertTo(y8Image, CV_8UC1, 255.0 / maxVal); // normalize and convert to 8-bit
return 0;
}
bool read_config(Arith_EOController* pArith, const char* path)
{
//Read the config file
char resPath[256] = { 0 };
static std::string name = "init";
snprintf(resPath, 256, "%s/%s", path, name.c_str());
std::string configpath = std::string(path) + "/ArithPara.json";
bool read_state = false;
if (ACCESS(resPath, 0) == 0 && "reset" == name)
{
std::cout << "name = " << name << std::endl;
LOG_DEBUG("name = :{}", name);
read_state = ARIDLL_ReadSetParamFile(pArith, configpath.c_str());
name = "init";
}
if (ACCESS(resPath, 0) == 0 && "init" == name)
{
std::cout << "name = " << name << std::endl;
LOG_DEBUG("name = :{}", name);
read_state = ARIDLL_ReadSetParamFile(pArith, configpath.c_str());
name = "reset";
}
return read_state;
}
//int save_Y16(ArithHandle h,GD_VIDEO_FRAME_S img, const int width, const int height, const int unFrmId)
//{
// const std::string path = "./Neolog/Ori_Y16_";
// // Save the Y16 data as a binary file
// const std::string filePath = path + std::to_string(unFrmId) + ".yuv";
//
// std::ofstream outFile(filePath, std::ios::binary);
// if (!outFile.is_open())
// {
// LOG_DEBUG("无法创建文件");
// std::cerr << "无法创建文件: " << filePath << std::endl;
// return -1;
// }
//
// outFile.write(reinterpret_cast<const char*>(img.u64VirAddr[0]), width * height * sizeof(uint16_t));
// outFile.close();
// LOG_DEBUG("Y16图像数据已保存到:{}", filePath);
// std::cout << "Y16图像数据已保存到: " << filePath << std::endl;
//
// return 0;
//}
//int read_y16(Arith_EOController* pArith, GD_VIDEO_FRAME_S img, const int width, const int height, const int unFrmId)
//{
// // Create an OpenCV Mat to hold the Y16 data
// cv::Mat y16Image(height, width, CV_16UC1, img.u64VirAddr[0]);
//
// // Normalize the Y16 data to the 0-255 range and convert to 8-bit
// cv::Mat y8Image;
// double minVal, maxVal;
// cv::minMaxLoc(y16Image, &minVal, &maxVal); // find the minimum and maximum values
// y16Image.convertTo(y8Image, CV_8UC1, 255.0 / maxVal); // normalize and convert to 8-bit
// return 0;
//}
int ARIDLL_RunController(ArithHandle hArithSrc, GD_VIDEO_FRAME_S img, ARIDLL_INPUTPARA stInputPara, ARIDLL_OUTPUT* pstOutput)
{
//Arrange the algorithm input information [debug]
@@ -121,14 +97,6 @@ int ARIDLL_RunController(ArithHandle hArithSrc, GD_VIDEO_FRAME_S img, ARIDLL_INP
// Get the algorithm pointer
Arith_EOController* pArith = (Arith_EOController*)hArithSrc;
/* Reads the config file on its own based on the state of the files under the path; on the 3588, reading the config file takes about 2 ms, while skipping it costs almost nothing */
// auto start = std::chrono::steady_clock::now();
//bool state = read_config(pArith, "/nfsroot/hyc/config");
// Record the end time
//auto end = std::chrono::steady_clock::now();
// Compute the elapsed time (milliseconds)
//auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
//std::cout << "耗时: " << duration << " 毫秒"<<" state = "<< state << std::endl;
//Save one Y16 frame per second ---- 3315 test
/*if (0 == stInputPara.unFrmId % GLB_FRM_FREQ)
{
@@ -448,7 +416,8 @@ void ARIDLL_Output(Arith_EOController* pArith, ARIDLL_OUTPUT* pstOutput)
// Return the tracker status
pstOutput->nStatus = pArith->g_GLB_stPara.nStatus;
LOG_DEBUG("pstOutput:pstOutput->nFrmNum:{},pstOutput->nSysMode:{}, pstOutput->nStatus:{}",
pstOutput->nFrmNum,pstOutput->nSysMode,pstOutput->nStatus);
// Target output, uniformly driven by the pipes
ARIDLL_OutputPipeTarget(pArith, pstOutput);
}
@@ -680,10 +649,10 @@ void ARIDLL_OutputPipeTarget(Arith_EOController* pArith, ARIDLL_OUTPUT* pstOutpu
{
memcpy(pt_detObj, &obj, sizeof(ARIDLL_OBJINFO));
int nAlaNum = pstOutput->nAlarmObjCnts;
LOG_DEBUG("pstOutput->stAlarmObjs:{},nOutputID:{}, XYWH:[{} {} {} {}],nPipeLostCnt:{}, fAz:{},fPt:{},unClsType:{}",
/*LOG_DEBUG("pstOutput->stAlarmObjs:{},nOutputID:{}, XYWH:[{} {} {} {}],nPipeLostCnt:{}, fAz:{},fPt:{},unClsType:{}",
pstOutput->nAlarmObjCnts, pstOutput->stAlarmObjs[nAlaNum].nOutputID, (int)pstOutput->stAlarmObjs[nAlaNum].nX, (int)pstOutput->stAlarmObjs[nAlaNum].nY,
(int)pstOutput->stAlarmObjs[nAlaNum].nObjW, (int)pstOutput->stAlarmObjs[nAlaNum].nObjH, pstOutput->stAlarmObjs[nAlaNum].nPipeLostCnt,
pstOutput->stAlarmObjs[nAlaNum].fAz, pstOutput->stAlarmObjs[nAlaNum].fPt, pstOutput->stAlarmObjs[nAlaNum].unClsType);
pstOutput->stAlarmObjs[nAlaNum].fAz, pstOutput->stAlarmObjs[nAlaNum].fPt, pstOutput->stAlarmObjs[nAlaNum].unClsType);*/
pstOutput->nAlarmObjCnts++;
}
}
@@ -888,10 +857,4 @@ STD_TRACKER_API int ARIDLL_Sort_PipeByDistance(ArithHandle hArithSrc, ARIDLL_OUT
}
}
return Index;
}
STD_TRACKER_API bool ARIDLL_Read_Config(ArithHandle hArithSrc, const char* ConfigPath)
{
Arith_EOController* pArith = (Arith_EOController*)hArithSrc;
bool read_state = read_config(pArith, ConfigPath);
return read_state;
}
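
The save_Y16/read_y16 helpers removed (and kept as commented-out copies) earlier in this file dumped one raw Y16 frame per second and min/max-normalized it to 8-bit for inspection. A standalone sketch of those two steps, assuming OpenCV and a contiguous 16-bit buffer; the output paths, the 50-frame interval, and the PNG export are illustrative choices, not the project's values:

#include <cstdint>
#include <fstream>
#include <string>
#include <opencv2/opencv.hpp>

// Illustrative interval: roughly one dump per second at 50 fps.
// The removed code gated on GLB_FRM_FREQ instead.
static const int kDumpEveryNFrames = 50;

void DumpY16Frame(const uint16_t* pY16, int width, int height, int frmId)
{
    if (frmId % kDumpEveryNFrames != 0)
        return;

    // 1) Raw dump, same layout the removed save_Y16 wrote: width * height * 2 bytes.
    std::ofstream out("./Neolog/Ori_Y16_" + std::to_string(frmId) + ".yuv", std::ios::binary);
    if (out.is_open())
        out.write(reinterpret_cast<const char*>(pY16),
                  static_cast<std::streamsize>(width) * height * 2);

    // 2) Min/max normalization to 8-bit, mirroring the removed read_y16 helper;
    //    writing a PNG is an added convenience for quick visual checks.
    cv::Mat y16(height, width, CV_16UC1, const_cast<uint16_t*>(pY16));
    double minVal = 0.0, maxVal = 0.0;
    cv::minMaxLoc(y16, &minVal, &maxVal);
    cv::Mat y8;
    y16.convertTo(y8, CV_8UC1, maxVal > 0.0 ? 255.0 / maxVal : 1.0);
    cv::imwrite("./Neolog/Ori_Y8_" + std::to_string(frmId) + ".png", y8);
}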

@@ -122,9 +122,26 @@ void RunNeoTracker(std::vector<VideoStream*> streamVec)
int frameID = (int)m_stream->GetCurrentFrameIndex();
short frameRate = (short)m_stream->GetFrameRate();
unsigned char* pImage = m_stream->GetFrameImageBuffer();
unsigned char* pArithInPut = m_stream->GetParamDataBuffer();
unsigned char* pArithOut = m_stream->GetResultDataBuffer();
//for (std::size_t i = 0; i < 1280; ++i)
//{
// // line break every 16 bytes
// if (i % 16 == 0)
// {
// if (i != 0)
// {
// std::cout << std::endl;
// }
// // print the address
// std::cout << std::hex << std::setw(8) << std::setfill('0')
// << reinterpret_cast<std::uintptr_t>(pArithInPut + i) << ": ";
// }
// // print the current byte
// std::cout << std::hex << std::setw(2) << std::setfill('0')
// << static_cast<int>(pArithInPut[i]) << " ";
//}
// Pixel type
GD_PIXEL_FORMAT_E nPixelType = GD_PIXEL_FORMAT_GRAY_Y16;
if (m_stream->GetPixelType() == PT_RGB24)
@@ -276,6 +293,16 @@ void RunNeoTracker(std::vector<VideoStream*> streamVec)
memcpy(&m_stOutput, &m_pParamData[5 * 2880], sizeof(ARIDLL_OUTPUT));//read the algorithm input from the visible-light recording
}
if (m_stream->GetProjectName() == PROJECT::S3315_IR)
{
ARIDLL_INPUTPARA stInputPara = { 0 };
BYTE* m_pParamData = m_stream->GetParamDataBuffer();
memset(&m_stInputPara, 0, sizeof(ARIDLL_INPUTPARA));
memset(&m_stOutput, 0, sizeof(ARIDLL_OUTPUT));
S3315GetImagePara(m_pParamData, &stInputPara);
//memcpy(&m_stInputPara, &stInputPara, sizeof(ARIDLL_INPUTPARA));
//memcpy(&m_stInputPara, m_pParamData, sizeof(ARIDLL_INPUTPARA));//read the algorithm input from the visible-light recording
}
// If it is the standard GD format, cast to the subclass directly to read the parameters
if (m_stream->GetStreamType() == eDSST_GDFILE)
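
S3315GetImagePara above fills the algorithm input from the 1280-byte parameter line, and the commit message notes the protocol parsing is still to be completed. Purely as a sketch of the parsing pattern — every struct member, field name, and offset below is an invented placeholder, not the S3315 protocol, and the struct is a stand-in rather than ARIDLL_INPUTPARA — copying fixed-offset fields out with memcpy keeps the code safe for unaligned data, unlike casting the raw buffer pointer:

#include <cstddef>
#include <cstdint>
#include <cstring>

// Placeholder output struct; the real target is ARIDLL_INPUTPARA.
struct DemoImagePara
{
    uint32_t frameId;     // placeholder field
    int16_t servoAzRaw;   // placeholder field, units unknown
    int16_t servoPtRaw;   // placeholder field, units unknown
};

// Copy sizeof(T) bytes from a fixed offset; assumes the recording and the
// host share byte order (little-endian on the 3588 target).
template <typename T>
T ReadField(const uint8_t* buf, std::size_t offset)
{
    T v{};
    std::memcpy(&v, buf + offset, sizeof(T));
    return v;
}

// Sketch only: offsets 0/4/6 are invented for illustration and do NOT
// describe the real S3315 parameter-line layout.
void DemoGetImagePara(const uint8_t* pParamLine /* 1280 bytes */, DemoImagePara* pOut)
{
    pOut->frameId = ReadField<uint32_t>(pParamLine, 0);
    pOut->servoAzRaw = ReadField<int16_t>(pParamLine, 4);
    pOut->servoPtRaw = ReadField<int16_t>(pParamLine, 6);
}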

@@ -8,6 +8,7 @@
#include "Project/S338.h"
#include "Project/S335.h"
#include "Project/S731V2.h"
#include "Project/S3315.h"
#include "GDFileStream.h"
#include "debugExport.h"
// Logging

@@ -117,7 +117,7 @@ void QArithModule::ArithRun()
}
else if(cmd == 2)
{
ARIDLL_unLockCommand_3315(pEOTracker,x,y,0);//point-select unlock
ARIDLL_unLockCommand(pEOTracker);//point-select unlock
}
setting.endArray();

@@ -23,6 +23,7 @@ QRawFileConfig::QRawFileConfig(QWidget *parent) :
pButtonGroup->addButton(ui->rbProj_S3312_VL, PROJECT::S3312_VL);
pButtonGroup->addButton(ui->rbProj_S285_IR, PROJECT::S285_IR);
pButtonGroup->addButton(ui->rbProj_S285_VL, PROJECT::S285_VL);
pButtonGroup->addButton(ui->rbProj_S3315_IR, PROJECT::S3315_IR);
// Bind data-type switching
auto rbList = this->findChildren<QRadioButton*>();
for (int i = 0; i < rbList.size(); i++)
@@ -94,6 +95,14 @@ void QRawFileConfig::switchProj()
ui->VideoFPS->setText("50");
ui->comboBoxPXtype->setCurrentIndex(5);
break;
case PROJECT::S3315_IR:
ui->Edit_ParaLineCnt->setText("1");
ui->ImageWidth->setText("640");
ui->ImageHeight->setText("512");
ui->ArithResutLine->setText("0");
ui->VideoFPS->setText("50");
//ui->comboBoxPXtype->setCurrentIndex(5);
break;
default:
break;
}
@@ -144,7 +153,10 @@ void QRawFileConfig::readPara()
{
m_rawPara.cProjName = PROJECT::S285_VL;
}
if (ui->rbProj_S3315_IR->isChecked())
{
m_rawPara.cProjName = PROJECT::S3315_IR;
}
m_rawPara.nParamLineCnt = ui->Edit_ParaLineCnt->text().toInt();
m_rawPara.ushWidth = ui->ImageWidth->text().toInt();
m_rawPara.ushHeight = ui->ImageHeight->text().toInt();
@@ -216,7 +228,10 @@ void QRawFileConfig::readDefaultSetings(QString path)
{
ui->rbProj_S285_VL->setChecked(1);
}
if (pro == "S3315_IR")
{
ui->rbProj_S3315_IR->setChecked(1);
}
ui->Edit_ParaLineCnt->setText(setting.value("ParaLineCnt").toString());
ui->ImageWidth->setText(setting.value("ImageWidth").toString());
ui->ImageHeight->setText(setting.value("ImageHeight").toString());
@@ -285,6 +300,10 @@ void QRawFileConfig::saveDefaultSetings(QString path)
{
setting.setValue("ProjName", "S285_VL");
}
else if (ui->rbProj_S3315_IR->isChecked())
{
setting.setValue("ProjName", "S3315_IR");
}
else
{
setting.setValue("ProjName", "None");

@@ -241,6 +241,13 @@
</property>
</widget>
</item>
<item row="4" column="1">
<widget class="QRadioButton" name="rbProj_S3315_IR">
<property name="text">
<string>S3315_IR</string>
</property>
</widget>
</item>
<item row="5" column="0">
<widget class="QRadioButton" name="rbProj_S731_V2">
<property name="text">

@@ -98,7 +98,10 @@ bool RawFileStream::Open(const string & strStreamName)
}
m_lFrameDataSize = m_lParamDataSize + m_lImageDataSize + m_lArithReslutDataSize;
//if (m_ptPixelType == PT_Y16DATA && PROJECT::S3315_IR == m_ProjectName)
//{
// m_lFrameDataSize = m_lParamDataSize + m_lImageDataSize + m_lArithReslutDataSize + 640 * 510;
//}
m_lFrameCount = long(m_binFile.tellg() / m_lFrameDataSize);
@@ -242,7 +245,13 @@ bool RawFileStream::ReadFrame(BYTE* pFrameData, BYTE* pImageData, BYTE* pParamDa
{
}
if (m_ptPixelType == PT_Y16DATA && PROJECT::S3315_IR == m_ProjectName)
{
//S3315 stored frame format: 640*512*2 -- Y16 + 1280 -- parameter line + 640 * 510 -- Y8
memcpy(pParamData, pFrameData + m_lArithReslutDataSize + m_lImageDataSize, m_lParamDataSize);
memcpy(pImageData, pFrameData + m_lArithReslutDataSize, m_lImageDataSize);
//memcpy(pResultData, pFrameData + m_lArithReslutDataSize + m_lImageDataSize, m_lParamDataSize);
}
unsigned short* pSrc = (unsigned short*)pImageData;
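
The ReadFrame branch above, together with its format comment, implies a per-frame layout for the S3315 raw recordings of a 640*512*2-byte Y16 image, a 1280-byte parameter line, and a trailing 640*510 Y8 block that is currently not read back (the commented-out size adjustment in Open() would account for it). A small sketch of the size/offset arithmetic, assuming the arith-result segment is empty for this project as the dialog default ArithResutLine = 0 suggests:

#include <cstddef>
#include <cstdio>

int main()
{
    const std::size_t kResultBytes = 0;             // ArithResutLine defaults to 0 for S3315_IR
    const std::size_t kImageBytes = 640 * 512 * 2;  // Y16: two bytes per pixel
    const std::size_t kParamBytes = 1280;           // one parameter line
    const std::size_t kY8TailBytes = 640 * 510;     // trailing Y8 block, ignored by ReadFrame

    // Offsets as used by the memcpy calls above: result segment first, then image, then params.
    const std::size_t imageOffset = kResultBytes;
    const std::size_t paramOffset = kResultBytes + kImageBytes;

    // m_lFrameDataSize as computed in Open() stops here; the commented-out
    // adjustment (+ 640 * 510) would include the Y8 tail as well.
    const std::size_t frameBytes = kResultBytes + kImageBytes + kParamBytes;

    std::printf("imageOffset=%zu paramOffset=%zu frameBytes=%zu (with Y8 tail: %zu)\n",
                imageOffset, paramOffset, frameBytes, frameBytes + kY8TailBytes);
    return 0;
}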

@@ -57,6 +57,7 @@ typedef enum tagPROJECT
S3312_VL = 6,
S285_IR = 7,
S285_VL = 8,
S3315_IR = 9,
}PROJECT;
// Get the file name extension
