// Single-target ground-tracking pipeline test: moving TLD out of the algorithm into an external
// module changed the way the API is called.
// Reads an AVI video recording for the test.
#include "NeoArithStandardDll.h"
#include "S3312.h" // required to parse the S3312 parameter-line data
#include <tchar.h>
#include <windows.h>
#include <iostream>
#include <fstream>
#include <memory>
#include <string.h>
#include <algorithm>
#include <thread>
#include "opencv2/opencv.hpp"
using namespace std;
using namespace cv;
#define TEST_WITH_AID 0 // whether to use the AI detector
#define TEST_WITH_AIT 0 // whether to use the AI tracker
#if TEST_WITH_AID
#include "Arith_YOLO_Detect.h"
#endif
#if TEST_WITH_AIT
#include "Arith_AITracker.h"
#endif
unsigned short bcmd = 0;
short SelectCX = 0;
short SelectCY = 0;
unsigned short setLockBoxW = 0;
unsigned short setLockBoxH = 0;
bool draw = false;
cv::Point cursor = cv::Point(0, 0);
cv::Rect rect;
int key;
cv::Mat frame, dstImage;
// Video playback control
VideoCapture capture;
const char* windowName = "test";       // display window name
const char* trackBarName = "Progress"; // trackbar name
double totalFrame = 1.0;               // total number of frames in the video
double currentFrame = 1.0;             // current playback frame
int trackbarValue = 1;                 // trackbar position
int trackbarMax = 255;                 // trackbar maximum value
double frameRate = 1.0;                // video frame rate
double controlRate = 0.1;
int frameValue = 0;
// Algorithm input
ARIDLL_INPUTPARA stInputPara = { 0 };
// Algorithm output
ARIDLL_OUTPUT stOutput = { 0 };
// Image frame handed to the algorithm
GD_VIDEO_FRAME_S img = { 0 };
// Structure for parsed S3312 parameter-line data
Commdef::TParaWriteBackToFpga g_S3312_Para = { 0 }; // temporary holder for the S3312 recording parameters
// AI detection results
#if TEST_WITH_AID
obj_res* g_pGLB_AIDetOutput = NULL;
int g_GLB_AIDetNum = 0;
#endif
// AI tracker results
#if TEST_WITH_AIT
API_AI_Tracker* g_GLB_AITracker = NULL;
AIT_OUTPUT g_GLB_AITrackOutput = { 0 };
#endif
// Trackbar callback
static void TrackBarFunc(int, void*)
{
    controlRate = (double)trackbarValue / trackbarMax * totalFrame; // map the trackbar position to a playback position
    capture.set(CAP_PROP_POS_FRAMES, controlRate);                  // seek to that frame
    frameValue = controlRate;
}
static void onMouse(int event, int x, int y, int flags, void* param)
{
    // flags: CV_EVENT_FLAG_LBUTTON (1) indicates dragging with the left button held down
    switch (event)
    {
    case cv::EVENT_LBUTTONDOWN:   // left button pressed
        SelectCX = x;
        SelectCY = y;
        cursor = cv::Point(x, y);
        rect = cv::Rect(x, y, 0, 0);
        draw = true;
        break;
    case cv::EVENT_LBUTTONUP:     // left button released
        setLockBoxW = rect.width;
        setLockBoxH = rect.height;
        SelectCX = rect.x + rect.width / 2;
        SelectCY = rect.y + rect.height / 2;
        bcmd = 1;
        draw = false;
        break;
    case cv::EVENT_RBUTTONDOWN:   // right button pressed
        bcmd = 2;
        break;
    case cv::EVENT_RBUTTONUP:     // right button released
        break;
    case cv::EVENT_MBUTTONUP:     // middle button released: also treated as a lock command
        bcmd = 1;
        break;
    case cv::EVENT_MOUSEMOVE:     // mouse moved
        if (draw)
        {
            cv::Mat dstImage1 = frame.clone();
            rect.x = MIN(x, cursor.x);
            rect.y = MIN(y, cursor.y);
            rect.width = abs(cursor.x - x);
            rect.height = abs(cursor.y - y);
            rectangle(dstImage1, rect, cv::Scalar(0, 0, 255), 1);
            cv::imshow(windowName, dstImage1);
        }
        break;
    }
}
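// Interaction summary (descriptive note): dragging with the left button draws a lock box and issues
// a lock command (bcmd = 1); a right-click issues an unlock command (bcmd = 2). The command and box
// are consumed once per frame in main() and then cleared.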
#if TEST_WITH_AID
static void AIDetRun(ArithHandle pTracker, GD_VIDEO_FRAME_S img, int frameID)
{
    // Asynchronous call to account for transfer latency on the target hardware;
    // note that with the async interface the result is inherently one frame behind.
    Async_YOLO_DetectTarget(img.u64VirAddr[0], img.u32Width, img.u32Height, frameID);
    g_pGLB_AIDetOutput = Async_YOLO_GetTargetArray(g_GLB_AIDetNum);
    int targetNum = 0;
    TARGET_OBJECT* pArray = ARIDLL_SearchFrameTargets(pTracker, img, &targetNum);
    int mergeNum = ARIDLL_MergeAITargets(pTracker, pArray, targetNum, g_pGLB_AIDetOutput, g_GLB_AIDetNum);
    stInputPara.nInputTargetNum = mergeNum;
    memcpy(stInputPara.stInputTarget, pArray, sizeof(TARGET_OBJECT) * mergeNum);
}
#endif
#if TEST_WITH_AIT
static int AITrackerRun(GD_VIDEO_FRAME_S img, int frameID)
{
    // Get the AI tracker's control commands from the conventional algorithm's output
    CENTERRECT32F InitBox = stOutput.stAI_TkCmd.InitBox;
    CENTERRECT32F TargetBox = stOutput.stAI_TkCmd.TargetBox;
    if (InitBox.w > 0 && InitBox.h > 0)
    {
        g_GLB_AITracker->init(img, InitBox);
        return 0;
    }
    if (!stOutput.stAI_TkCmd.bTrack)
    {
        g_GLB_AITracker->stopTrack();
        memset(&stInputPara.stAITrackerInfo, 0, sizeof(AIT_OUTPUT));
        memset(&g_GLB_AITrackOutput, 0, sizeof(AIT_OUTPUT));
        return 0;
    }
    g_GLB_AITracker->Track(img, TargetBox);
    g_GLB_AITracker->getTrackResult_Async(&g_GLB_AITrackOutput);
    // Pass the result back to the conventional algorithm
    memcpy(&stInputPara.stAITrackerInfo, &g_GLB_AITrackOutput, sizeof(AIT_OUTPUT));
    return 0;
}
#endif
static void RunProcess(ArithHandle pTracker, GD_VIDEO_FRAME_S img)
{
#if TEST_WITH_AID
    // Run the AI detection algorithm
    AIDetRun(pTracker, img, stInputPara.unFrmId);
#endif
#if TEST_WITH_AIT
    // Run the SiamRPN tracking algorithm
    AITrackerRun(img, stInputPara.unFrmId);
#endif
    // Run small / area-target detection on the frame
    int targetNum = ARIDLL_SearchFrameTargets(pTracker, img);
    // Run the algorithm's main control-logic API
    ARIDLL_RunController(pTracker, img, stInputPara, &stOutput);
}
int main()
{
    string video_fileName = "F:\\Datasets\\20240327_S3312\\20240605_114245_可见光面目标\\0_1920x1080_ParamLine3_Fps25_UYVY422.yuv";
    ifstream inFile(video_fileName, ios::in | ios::binary);
    int nParamLineCnt2 = 3;                                        // number of parameter lines
    int nImageWidth = 1920;                                        // image width
    int nImageHeight = 1080;                                       // image height
    int nHeightWithParm = nImageHeight + nParamLineCnt2;           // image height including the parameter lines
    int m_lImageDataSize = nImageWidth * nImageHeight;             // image size, excluding the parameter lines
    int m_lArithReslutDataSize = nImageWidth * 2 * nParamLineCnt2; // size of the data after the frame (algorithm results plus recording parameter lines)
    cv::Mat yuvWithparm, yuv, m_gray, dst_down;
    yuvWithparm = cv::Mat(nHeightWithParm, nImageWidth, CV_8UC2);
    yuv = cv::Mat(nImageHeight, nImageWidth, CV_8UC2);
    // Create the algorithm handle
    ArithHandle pTracker = STD_CreatEOArithHandle();
#if TEST_WITH_AID
    // AI detector initialization
    //YOLO_Init();
    Async_YOLO_Init();
#endif
#if TEST_WITH_AIT
    // AI tracker initialization
    //CreatSiamRPN();
    //Async_CreatSiamRPN();
    g_GLB_AITracker = API_AI_Tracker::Create(AITrackerType::DaSaimRPN);
    g_GLB_AITracker->loadModel();
    memset(&g_GLB_AITrackOutput, 0, sizeof(AIT_OUTPUT));
#endif
    // Initialize in stare mode (note: the scene mode passed below is GLB_SCEN_SKY)
    ARIDLL_EOArithInitWithMode(pTracker, 640, 512, GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y8,
                               GLB_SYS_MODE::GLB_SYS_STARE, GLB_SCEN_MODE::GLB_SCEN_SKY);
    std::string configFilePath = std::string(SOURCE_PATH) + "/NeoTracker/vot_test/ArithParaVL.json";
    ARIDLL_ReadSetParamFile(pTracker, configFilePath.c_str());
    stInputPara.unFreq = frameRate;
    stInputPara.stAirCraftInfo.stAtt.fYaw = 0;
    stInputPara.stAirCraftInfo.stAtt.fRoll = 0;
    stInputPara.stAirCraftInfo.stAtt.fPitch = 0;
    stInputPara.stServoInfo.fServoAz = 0;
    stInputPara.stServoInfo.fServoPt = 0;
    stInputPara.stCameraInfo.fPixelSize = 15;
    stInputPara.stCameraInfo.nFocus = 600;
    stInputPara.unFrmId = 0;
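    // The VideoCapture / trackbar globals declared above belong to the AVI-playback variant mentioned
    // in the file header; this main() reads the raw UYVY file directly via ifstream, so only the mouse
    // callback is needed for click-to-lock. The registration below is assumed, as it does not appear
    // in this listing.
    cv::namedWindow(windowName);
    cv::setMouseCallback(windowName, onMouse, nullptr);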
    while (1)
    {
        stInputPara.unFrmId++;
        inFile.read((char*)yuvWithparm.data, 2 * nHeightWithParm * nImageWidth); // one complete frame: image data plus parameter lines
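        // Assumed guard (not in the original listing): stop once the file is exhausted,
        // otherwise the loop keeps re-processing the last buffered frame.
        if (!inFile)
            break;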
        memcpy(yuv.data, yuvWithparm.data, m_lImageDataSize * 2);                          // image data only
        cv::Mat image_part = yuv(cv::Rect(320, 28, 1280, 1024));                           // cropped region of interest
        cv::cvtColor(image_part, m_gray, cv::COLOR_YUV2GRAY_UYVY);                         // convert UYVY to GRAY
        cv::pyrDown(m_gray, dst_down, cv::Size(image_part.cols / 2, image_part.rows / 2)); // 2x downsampling
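        // After the 1280x1024 crop and one pyrDown level, dst_down is 640x512, matching the size
        // passed to ARIDLL_EOArithInitWithMode above.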
        // Build the algorithm's image frame
        img.enPixelFormat = GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y8;
        img.u32Width = dst_down.cols;
        img.u32Height = dst_down.rows;
        img.u32Stride[0] = img.u32Width;
        img.u64VirAddr[0] = dst_down.data;
        memcpy(&g_S3312_Para, yuvWithparm.data + m_lImageDataSize * 2, m_lArithReslutDataSize); // parameter-line data
        Commdef::TServoInfoOutput* tServoInfo = &g_S3312_Para.stTrackResultInfo.tServoAndPosInfo.tServoInfo;
        stInputPara.unFreq = 50;
        stInputPara.stAirCraftInfo.stAtt.fYaw = 0;
        stInputPara.stAirCraftInfo.stAtt.fRoll = 0;
        stInputPara.stAirCraftInfo.stAtt.fPitch = 0;
        stInputPara.stCameraInfo.unVideoType = GLB_VIDEO_TYPE::GLB_VIDEO_VL;
        stInputPara.stCameraInfo.fPixelSize = 10;
        stInputPara.stCameraInfo.nFocus = g_S3312_Para.stTrackResultInfo.usFocus;
        stInputPara.stCameraInfo.fAglReso = 2 * 0.0001f * g_S3312_Para.stTrackResultInfo.usResol; // angular resolution
        stInputPara.stServoInfo.fServoAz = tServoInfo->fServoPtAngle;
        stInputPara.stServoInfo.fServoPt = -tServoInfo->fServoAzAngle;
        stInputPara.stServoInfo.fServoAzSpeed = 0;
        stInputPara.stServoInfo.fServoPtSpeed = 0;
        // Click to switch into tracking
        if (1 == bcmd)
        {
            /* setLockBoxW = 65;
            setLockBoxH = 31;
            SelectCX = 1525 + setLockBoxW / 2;
            SelectCY = 354 + setLockBoxH / 2;*/
            //ARIDLL_LockCommand(pTracker, SelectCX, SelectCY, setLockBoxW, setLockBoxH);
            // This interface completes the lock response within the current frame, so the result is not
            // delayed by one frame, which helps when locking onto moving targets; it also returns the lock
            // decision so an external tracker can be locked in sync.
            // The main reason: a click-to-lock command carries no width/height, so the gate size and
            // related settings come from the conventional tracker's decision.
            ARIDLL_OBJINFO obj = { 0 };
            obj = ARIDLL_LockTarget(pTracker, img, SelectCX, SelectCY, setLockBoxW, setLockBoxH);
#if TEST_WITH_AIT
            if (obj.nObjW > 0)
            {
                // Initialize the AI tracker with the EOTracker lock decision
                CENTERRECT32F initBox = { obj.nX, obj.nY, obj.nObjW, obj.nObjH };
                g_GLB_AITracker->init(img, initBox);
                // Fetch the tracking result
                g_GLB_AITracker->getTrackResult_Async(&g_GLB_AITrackOutput);
            }
#endif
        }
        // Right-click to unlock and leave the tracking state
        if (2 == bcmd)
        {
            // Unlock the conventional tracker
            ARIDLL_unLockCommand(pTracker);
#if TEST_WITH_AIT
            // Unlock the AI tracker
            g_GLB_AITracker->stopTrack();
#endif
        }
        RunProcess(pTracker, img);
        dstImage = dst_down.clone();
        auto trackerOut = stOutput.stTrackers[0];
        cv::Rect outRect;
        outRect.width = (int)trackerOut.nObjW;
        outRect.height = (int)trackerOut.nObjH;
        outRect.x = (int)trackerOut.nX - outRect.width / 2;
        outRect.y = (int)trackerOut.nY - outRect.height / 2;
        cv::rectangle(dstImage, outRect, cv::Scalar(0, 0, 255));
        //cv::rectangle(dstImage, cv::Rect(g_GLB_AITrackOutput.fX - g_GLB_AITrackOutput.nObjW / 2,
        //    g_GLB_AITrackOutput.fY - g_GLB_AITrackOutput.nObjH / 2,
        //    g_GLB_AITrackOutput.nObjW,
        //    g_GLB_AITrackOutput.nObjH), cv::Scalar(0, 255, 255));
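        // Clear the one-shot mouse command and click geometry so a lock/unlock request is acted on
        // only once per click.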
        bcmd = 0;
        SelectCX = 0;
        SelectCY = 0;
        setLockBoxW = 0;
        setLockBoxH = 0;
        cv::imshow(windowName, dstImage);
        key = cv::waitKey(10);
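        // Assumed convenience (not in the original listing): press ESC to leave the playback loop.
        if (27 == key)
            break;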
    }
    return 0;
}