// Single-target ground tracking pipeline test: moving TLD out of the core algorithm into an external module changed the API call pattern.
// Reads recorded video frames (fed through the VOT wrapper) for testing.
#include "NeoArithStandardDll.h"
#include "S3312.h" // 解析3312参数行数据需要包含
#include <iostream>
#include <fstream> // std::ifstream for reading raw frame files
#include <string>
#include <memory>
#include <string.h>
#include <algorithm>
#include <thread>
#include "opencv2/opencv.hpp"
#define TEST_WITH_AID 0 // whether to use the AI detector
#define TEST_WITH_AIT 0 // whether to use the AI tracker
#define VOT_RECTANGLE
#define VOT_IR
#include "vot.h"
#if TEST_WITH_AID
#include "Arith_YOLO_Detect.h"
#endif
#if TEST_WITH_AIT
#include "Arith_AITracker.h"
#endif
using std::cout;
using std::endl;
short SelectCX = 0;
short SelectCY = 0;
unsigned short setLockBoxW = 0;
unsigned short setLockBoxH = 0;
int m_ArithName = 3312; // test data comes from the S3312 visible-light camera
// Algorithm input
ARIDLL_INPUTPARA stInputPara = { 0 };
// Algorithm output
ARIDLL_OUTPUT stOutput = { 0 };
// Image input to the algorithm
GD_VIDEO_FRAME_S img = { 0 };
// Structure for the parsed S3312 parameter lines
Commdef::TParaWriteBackToFpga g_S3312_Para = { 0 }; // temporary copy of the S3312 recording parameters
// AI detector results
#if TEST_WITH_AID
obj_res* g_pGLB_AIDetOutput = NULL;
int g_GLB_AIDetNum = 0;
#endif
// AI tracker results
#if TEST_WITH_AIT
API_AI_Tracker* g_GLB_AITracker = NULL;
AIT_OUTPUT g_GLB_AITrackOutput = { 0 };
#endif
#if TEST_WITH_AID
static void AIDetRun(ArithHandle pTracker, GD_VIDEO_FRAME_S img, int frameID)
{
// Asynchronous call to hide the on-board transfer latency; note that the asynchronous result naturally lags by one frame.
Async_YOLO_DetectTarget(img.u64VirAddr[0], img.u32Width, img.u32Height, frameID);
g_pGLB_AIDetOutput = Async_YOLO_GetTargetArray(g_GLB_AIDetNum);
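// Merge the traditional frame-search candidates with the asynchronous AI detections
// and hand the merged list to the controller as this frame's input targets.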
int targetNum = 0;
TARGET_OBJECT* pArray = ARIDLL_SearchFrameTargets(pTracker, img, &targetNum);
int mergeNum = ARIDLL_MergeAITargets(pTracker, pArray, targetNum, g_pGLB_AIDetOutput, g_GLB_AIDetNum);
stInputPara.nInputTargetNum = mergeNum;
memcpy(stInputPara.stInputTarget, pArray, sizeof(TARGET_OBJECT) * mergeNum);
}
#endif
#if TEST_WITH_AIT
static int AITrackerRun(GD_VIDEO_FRAME_S img, int frameID)
{
// Fetch the AI tracker control commands produced by the traditional algorithm
CENTERRECT32F InitBox = stOutput.stAI_TkCmd.InitBox;
CENTERRECT32F TargetBox = stOutput.stAI_TkCmd.TargetBox;
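// A non-empty InitBox means the controller requested (re)initialization of the AI tracker on this frame.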
if (InitBox.w > 0 && InitBox.h > 0)
{
g_GLB_AITracker->init(img, InitBox);
return 0;
}
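// Tracking disabled by the controller: stop the AI tracker and clear both the cached
// output and the tracker info fed back to the traditional algorithm.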
if (!stOutput.stAI_TkCmd.bTrack)
{
g_GLB_AITracker->stopTrack();
memset(&stInputPara.stAITrackerInfo, 0, sizeof(AIT_OUTPUT));
memset(&g_GLB_AITrackOutput, 0, sizeof(AIT_OUTPUT));
return 0;
}
g_GLB_AITracker->Track(img, TargetBox);
// Fetch the (asynchronous) tracking result
g_GLB_AITracker->getTrackResult_Async(&g_GLB_AITrackOutput);
// Pass the result back to the traditional algorithm
memcpy(&stInputPara.stAITrackerInfo, &g_GLB_AITrackOutput, sizeof(AIT_OUTPUT));
return 0;
}
#endif
static void RunProcess(ArithHandle pTracker, GD_VIDEO_FRAME_S img)
{
#if TEST_WITH_AID
// Run the AI detection algorithm
AIDetRun(pTracker, img, stInputPara.unFrmId);
#endif
#if TEST_WITH_AIT
// Run the SiamRPN-based AI tracker
AITrackerRun(img, stInputPara.unFrmId);
#endif
// Invoke small-area target detection
int targetNum = ARIDLL_SearchFrameTargets(pTracker, img);
// Run the algorithm's main control-logic API
ARIDLL_RunController(pTracker, img, stInputPara, &stOutput);
}
int main()
{
VOT vot;
cv::Rect initialization;
int nParamLineCnt2 = 3; // number of parameter lines
int nImageWidth = 1920; // image width
int nImageHeight = 1080; // image height
int nHeightWithParm = nImageHeight + nParamLineCnt2; // image height including the appended parameter lines
int m_lImageDataSize = nImageWidth * nImageHeight; // image size in pixels, excluding the parameter lines
int m_lArithReslutDataSize = nImageWidth * 2 * nParamLineCnt2; // bytes occupied by the trailing parameter lines (algorithm results plus recording parameters)
/////////////////// Input image handling //////////////////////////////
// The algorithm library expects its input pixel type (PIXELTYPE) to be defined as unsigned short.
cv::Mat yuvWithparm, yuv, m_gray, dst_down;
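// Frame buffer layout (assumed from the sizes used below): 1080 UYVY image rows followed
// by 3 parameter-line rows, 2 bytes per pixel at a width of 1920.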
cv::Mat parm = cv::Mat(3, 1920, CV_8UC2);
yuvWithparm = cv::Mat(1080 + 3, 1920, CV_8UC2);
yuv = cv::Mat(1080, 1920, CV_8UC2);
// Create the algorithm handle
ArithHandle pTracker = STD_CreatEOArithHandle();
#if TEST_WITH_AID
// AI detector initialization
//YOLO_Init();
Async_YOLO_Init();
#endif
#if TEST_WITH_AIT
// AI tracker initialization
g_GLB_AITracker = API_AI_Tracker::Create(AITrackerType::DaSaimRPN);
g_GLB_AITracker->loadModel();
memset(&g_GLB_AITrackOutput, 0, sizeof(AIT_OUTPUT));
#endif
// Initialize in stare mode with the sky scene profile
ARIDLL_EOArithInitWithMode(pTracker, 640, 512, GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y8,
GLB_SYS_MODE::GLB_SYS_STARE, GLB_SCEN_MODE::GLB_SCEN_SKY);
std::string configFilePath = std::string(SOURCE_PATH) + "/NeoTracker/vot_test/ArithParaVL.json";
ARIDLL_ReadSetParamFile(pTracker, configFilePath.c_str());
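// Default platform, servo and camera parameters; for S3312 data these are overwritten
// every frame from the parsed parameter lines inside the loop below.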
stInputPara.unFreq = 50;
stInputPara.stAirCraftInfo.stAtt.fYaw = 0;
stInputPara.stAirCraftInfo.stAtt.fRoll = 0;
stInputPara.stAirCraftInfo.stAtt.fPitch = 0;
stInputPara.stServoInfo.fServoAz = 0;
stInputPara.stServoInfo.fServoPt = 0;
stInputPara.stCameraInfo.fPixelSize = 15;
stInputPara.stCameraInfo.nFocus = 600;
stInputPara.unFrmId = 0;
// Simulated algorithm execution loop
while(!vot.end())
{
std::string imagepath = vot.frame();
if (imagepath.empty())
{
break;
}
std::ifstream inFile(imagepath, std::ios::in | std::ios::binary);
inFile.read((char*)yuvWithparm.data, 2 * nHeightWithParm * nImageWidth); // whole frame (image plus parameter lines)
memcpy(yuv.data, yuvWithparm.data, m_lImageDataSize * 2); // image data only
cv::Mat image_part = yuv(cv::Rect(320, 28, 1280, 1024)); // cropped region of interest
cv::cvtColor(image_part, m_gray, cv::COLOR_YUV2GRAY_UYVY); // convert to grayscale
cv::pyrDown(m_gray, dst_down, cv::Size(image_part.cols / 2, image_part.rows / 2)); // 2x downsample
// Build the image descriptor for the algorithm
img.enPixelFormat = GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y8;
img.u32Width = dst_down.cols;
img.u32Height = dst_down.rows;
img.u32Stride[0] = img.u32Width;
img.u64VirAddr[0] = dst_down.data;
if (3312 == m_ArithName)
{
memcpy(&g_S3312_Para, yuvWithparm.data + 1080 * 1920 * 2, m_lArithReslutDataSize); // parameter-line data
Commdef::TServoInfoOutput* tServoInfo = &g_S3312_Para.stTrackResultInfo.tServoAndPosInfo.tServoInfo;
stInputPara.unFreq = 50;
stInputPara.stAirCraftInfo.stAtt.fYaw = 0;
stInputPara.stAirCraftInfo.stAtt.fRoll = 0;
stInputPara.stAirCraftInfo.stAtt.fPitch = 0;
stInputPara.stCameraInfo.unVideoType = GLB_VIDEO_TYPE::GLB_VIDEO_VL;
stInputPara.stCameraInfo.fPixelSize = 10;
stInputPara.stCameraInfo.nFocus = g_S3312_Para.stTrackResultInfo.usFocus;
stInputPara.stCameraInfo.fAglReso = 2 * 0.0001f * g_S3312_Para.stTrackResultInfo.usResol; // angular resolution
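// NOTE: the S3312 servo azimuth/pitch fields are swapped (and the pitch value negated) when
// mapped to fServoAz/fServoPt below; this is assumed to match the platform's axis convention.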
stInputPara.stServoInfo.fServoAz = tServoInfo->fServoPtAngle;
stInputPara.stServoInfo.fServoPt = -tServoInfo->fServoAzAngle;
stInputPara.stServoInfo.fServoAzSpeed = 0;
stInputPara.stServoInfo.fServoPtSpeed = 0;
}
if (6 == stInputPara.unFrmId)
{
initialization << vot.region();
// Issue the lock command
SelectCX = initialization.x + initialization.width / 2;
SelectCY = initialization.y + initialization.height / 2;
setLockBoxW = initialization.width;
setLockBoxH = initialization.height;
ARIDLL_OBJINFO obj = ARIDLL_LockTarget(pTracker, img, SelectCX, SelectCY, setLockBoxW, setLockBoxH);
}
stInputPara.unFrmId++;
RunProcess(pTracker, img);
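// Report the primary tracker's output: convert from the center-based (nX, nY, nObjW, nObjH)
// representation to a top-left cv::Rect for the VOT report.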
auto trackerOut = stOutput.stTrackers[0];
cv::Rect outRect;
outRect.width = (int)trackerOut.nObjW;
outRect.height = (int)trackerOut.nObjH;
outRect.x = (int)trackerOut.nX - outRect.width / 2;
outRect.y = (int)trackerOut.nY - outRect.height / 2;
vot.report(outRect, trackerOut.fConf);
}
return 0;
}