You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

320 lines
9.9 KiB

This file contains ambiguous Unicode characters!

This file contains ambiguous Unicode characters that may be confused with others in your current locale. If your use case is intentional and legitimate, you can safely ignore this warning. Use the Escape button to highlight these characters.

// 单目标对地跟踪流程测试:将TLD从算法中剥离到外部导致API调用形式调整
// 读取avi视频进行测试
#include "NeoArithStandardDll.h"
#include "S3312.h" // 解析3312参数行数据需要包含
#include "S338.h" // 解析338参数行数据需要包含
#include <iostream>
#include <memory>
#include <string.h>
#include <algorithm>
#include <thread>
#include "opencv2/opencv.hpp"
#define TEST_WITH_AID 0 // 是否使用AI Detect
#define TEST_WITH_AIT 0 // 是否使用AI Tracker
#define VOT_RECTANGLE
#define VOT_IR
#include "vot.h"
#if TEST_WITH_AID
#include "Arith_YOLO_Detect.h"
#endif
#if TEST_WITH_AIT
#include "Arith_AITracker.h"
#endif
using std::cout;
using std::endl;
// Lock command parameters derived from the VOT initialization region:
// target center (SelectCX/SelectCY) and lock-box size (setLockBoxW/H).
short SelectCX = 0;
short SelectCY = 0;
unsigned short setLockBoxW = 0;
unsigned short setLockBoxH = 0;
int m_ArithName = 338; // project id for the IR test recordings: 338 or 3312
// Algorithm input block
ARIDLL_INPUTPARA stInputPara = { 0 };
// Algorithm output block
ARIDLL_OUTPUT stOutput = { 0 };
// Frame descriptor handed to the algorithm (filled per frame in main's loop)
GD_VIDEO_FRAME_S img = { 0 };
// Parsed S3312 parameter-line structure
Commdef::TParaWriteBackToFpga g_S3312_Para = { 0 }; // scratch copy of the S3312 recording parameters
// AI Detect results
#if TEST_WITH_AID
obj_res* g_pGLB_AIDetOutput = NULL;
int g_GLB_AIDetNum = 0;
#endif
// AI Tracker handle and latest result
#if TEST_WITH_AIT
API_AI_Tracker* g_GLB_AITracker = NULL;
AIT_OUTPUT g_GLB_AITrackOutput = { 0 };
#endif
#if TEST_WITH_AID
// Run the asynchronous AI (YOLO) detector on the current frame, merge its
// detections with the classical frame-target search, and publish the merged
// target list into the global algorithm input (stInputPara).
// pTracker: algorithm handle; img: current Y16 frame; frameID: frame counter
// forwarded to the async detector.
static void AIDetRun(ArithHandle pTracker, GD_VIDEO_FRAME_S img, int frameID)
{
    // Asynchronous call to tolerate on-board transfer latency; by design the
    // asynchronous result naturally lags the input by one frame.
    Async_YOLO_DetectTarget(img.u64VirAddr[0], img.u32Width, img.u32Height, frameID);
    g_pGLB_AIDetOutput = Async_YOLO_GetTargetArray(g_GLB_AIDetNum);
    int targetNum = 0;
    TARGET_OBJECT* pArray = ARIDLL_SearchFrameTargets(pTracker, img, &targetNum);
    int mergeNum = ARIDLL_MergeAITargets(pTracker, pArray, targetNum, g_pGLB_AIDetOutput, g_GLB_AIDetNum);
    stInputPara.nInputTargetNum = mergeNum;
    // NOTE(review): no bound check against the capacity of
    // stInputPara.stInputTarget — confirm mergeNum can never exceed it.
    memcpy(stInputPara.stInputTarget, pArray, sizeof(TARGET_OBJECT) * mergeNum);
}
#endif
#if TEST_WITH_AIT
// Drive the AI (SiamRPN-style) tracker from the control command emitted by the
// classical algorithm (stOutput.stAI_TkCmd) and feed the tracking result back
// into the algorithm input (stInputPara.stAITrackerInfo).
// img: current frame; frameID: frame counter (currently unused in this body).
// Returns 0 on all paths.
static int AITrackerRun(GD_VIDEO_FRAME_S img, int frameID)
{
    // Fetch the AI-tracker control command produced by the classical algorithm.
    CENTERRECT32F InitBox = stOutput.stAI_TkCmd.InitBox;
    CENTERRECT32F TargetBox = stOutput.stAI_TkCmd.TargetBox;
    if (InitBox.w > 0 && InitBox.h > 0)
    {
        // A non-empty init box requests (re)initialization of the tracker.
        g_GLB_AITracker->init(img, InitBox);
        return 0;
    }
    if (!stOutput.stAI_TkCmd.bTrack)
    {
        // Tracking disabled: stop the tracker and clear any stale results so
        // the classical algorithm does not consume outdated data.
        g_GLB_AITracker->stopTrack();
        memset(&stInputPara.stAITrackerInfo, 0, sizeof(AIT_OUTPUT));
        memset(&g_GLB_AITrackOutput, 0, sizeof(AIT_OUTPUT));
        return 0;
    }
    g_GLB_AITracker->Track(img, TargetBox);
    // Fetch the (asynchronous) tracking result.
    g_GLB_AITracker->getTrackResult_Async(&g_GLB_AITrackOutput);
    // Hand the result back to the classical algorithm.
    memcpy(&stInputPara.stAITrackerInfo, &g_GLB_AITrackOutput, sizeof(AIT_OUTPUT));
    return 0;
}
#endif
// Per-frame processing pipeline: optionally run the AI detector and AI
// tracker, then the classical small-target search and the main controller.
// pTracker: algorithm handle; img: current Y16 frame.
// Inputs are read from and results written to the globals stInputPara/stOutput.
static void RunProcess(ArithHandle pTracker, GD_VIDEO_FRAME_S img)
{
#if TEST_WITH_AID
    // Run the AI detection stage (merges detections into stInputPara).
    AIDetRun(pTracker, img, stInputPara.unFrmId);
#endif
#if TEST_WITH_AIT
    // Run the SiamRPN AI tracking stage.
    AITrackerRun(img, stInputPara.unFrmId);
#endif
    // Classical small-target search. The call updates the tracker's internal
    // state; its return value was stored in an unused local before, so it is
    // now discarded explicitly.
    (void)ARIDLL_SearchFrameTargets(pTracker, img);
    // Run the algorithm's main control logic.
    ARIDLL_RunController(pTracker, img, stInputPara, &stOutput);
}
#ifdef _WIN32 // Windows platform
#elif __linux__ // Linux platform
#include <unistd.h>
// Check whether a path (file of any type) exists.
// Returns 1 when the path exists, 0 when it does not.
int IsFileExist(const char* path)
{
    return access(path, F_OK) == 0 ? 1 : 0;
}
#else
#error Unsupported platform
#endif
int main()
{
    VOT vot;
    cv::Rect initialization;
    int nParamLineCnt2 = 4; // number of parameter lines appended to each frame
    int nImageWidth = 512; // image width
    int nImageHeight = 640; // image height
    if (338 == m_ArithName)
    {
        nParamLineCnt2 = 4;
        nImageWidth = 512;
        nImageHeight = 640;
    }
    else if (3312 == m_ArithName)
    {
        nParamLineCnt2 = 9;
        nImageWidth = 640;
        nImageHeight = 512;
    }
    int nHeightWithParm = nImageHeight + nParamLineCnt2; // image height with the parameter lines appended
    int m_lImageDataSize = nImageWidth * nImageHeight; // image size in pixels, parameter lines excluded
    int m_lArithReslutDataSize = nImageWidth * 2 * nParamLineCnt2; // bytes of trailing per-frame parameters (algorithm results plus recording parameter lines)
    ///////////////// input image handling /////////////////
    // The algorithm library's input pixel type (PIXELTYPE) must be unsigned short.
    cv::Mat srcImageWithPara(nHeightWithParm, nImageWidth, CV_16UC1);
    cv::Mat srcImage(nImageHeight, nImageWidth, CV_16UC1);
    // Create the algorithm handle.
    ArithHandle pTracker = STD_CreatEOArithHandle();
#if TEST_WITH_AID
    // AI detection initialization.
    //YOLO_Init();
    Async_YOLO_Init();
#endif
#if TEST_WITH_AIT
    // AI tracker initialization.
    g_GLB_AITracker = API_AI_Tracker::Create(AITrackerType::DaSaimRPN);
    g_GLB_AITracker->loadModel();
    memset(&g_GLB_AITrackOutput, 0, sizeof(AIT_OUTPUT));
#endif
    // Initialize in stare mode with the sky scene.
    // NOTE(review): the file header mentions ground tracking, yet GLB_SCEN_SKY
    // is used here — confirm which scene mode is intended.
    ARIDLL_EOArithInitWithMode(pTracker, nImageWidth, nImageHeight, GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y16,
        GLB_SYS_MODE::GLB_SYS_STARE, GLB_SCEN_MODE::GLB_SCEN_SKY);
    // The loaded parameter file must be updated together with the algorithm library.
#ifdef _WIN32
    std::string configFilePath = std::string(SOURCE_PATH) + "/NeoTracker/vot_test/ArithParaIR.json";
#else
    std::string configFilePath = std::string(SOURCE_PATH) + "/NeoTracker/vot_test/ArithParaIR.json";
    int isexist = IsFileExist(configFilePath.c_str());
    if (0 == isexist)
    {
        // Fallback path for automated testing (model path on the 192.168.1.11
        // automated-test server).
        configFilePath = "/mnt/dataD/VotToolkit/trackers/Native/NeoTracker/ArithParaIR.json";
    }
#endif
    ARIDLL_ReadSetParamFile(pTracker, configFilePath.c_str());
    // Static platform/camera defaults; the per-frame parameter lines below may
    // overwrite them.
    stInputPara.unFreq = 50;
    stInputPara.stAirCraftInfo.stAtt.fYaw = 0;
    stInputPara.stAirCraftInfo.stAtt.fRoll = 0;
    stInputPara.stAirCraftInfo.stAtt.fPitch = 0;
    stInputPara.stServoInfo.fServoAz = 0;
    stInputPara.stServoInfo.fServoPt = 0;
    stInputPara.stCameraInfo.fPixelSize = 15;
    stInputPara.stCameraInfo.nFocus = 600;
    stInputPara.unFrmId = 0;
    // Simulated per-frame algorithm execution loop driven by the VOT toolkit.
    while(!vot.end())
    {
        string imagepath = vot.frame();
        if (imagepath.empty())
        {
            break;
        }
        ifstream inFile(imagepath, ios::in | ios::binary);
        // NOTE(review): neither the open nor the read is checked for failure —
        // a missing file or short read leaves stale data in the buffer.
        inFile.read((char*)srcImageWithPara.data, sizeof(short) * nHeightWithParm * nImageWidth);// whole single-frame payload (image plus parameter lines)
        memcpy(srcImage.data, srcImageWithPara.data, m_lImageDataSize * sizeof(short)); // image pixels only
        if (3312 == m_ArithName)
        {
            // The parameter lines sit at the tail of the image data
            // (byte offset into the uchar* Mat buffer).
            memcpy(&g_S3312_Para, &srcImageWithPara.data[m_lImageDataSize * sizeof(short)], m_lArithReslutDataSize); // parameter-line data
            Commdef::TServoInfoOutput* tServoInfo = &g_S3312_Para.stTrackResultInfo.tServoAndPosInfo.tServoInfo;
            stInputPara.unFreq = 50;
            stInputPara.stAirCraftInfo.stAtt.fYaw = 0;
            stInputPara.stAirCraftInfo.stAtt.fRoll = 0;
            stInputPara.stAirCraftInfo.stAtt.fPitch = 0;
            stInputPara.stCameraInfo.unVideoType = GLB_VIDEO_TYPE::GLB_VIDEO_IR_MW;
            stInputPara.stCameraInfo.fPixelSize = 25;
            stInputPara.stCameraInfo.nFocus = 400;// almost always the narrow field of view
            stInputPara.stCameraInfo.fAglReso = 0.0001f * g_S3312_Para.stTrackResultInfo.usResol; // angular resolution
            // NOTE(review): Az is fed from the negated pitch angle and Pt from
            // the azimuth angle — looks like a deliberate axis remap for this
            // platform, but confirm against the S3312 parameter-line spec.
            stInputPara.stServoInfo.fServoAz = -tServoInfo->fServoPtAngle;
            stInputPara.stServoInfo.fServoPt = tServoInfo->fServoAzAngle;
            stInputPara.stServoInfo.fServoAzSpeed = 0;
            stInputPara.stServoInfo.fServoPtSpeed = 0;
        }
        else if (338 == m_ArithName)
        {
            EORADERINPUT g_S338_Para = { 0 };
            // Second of the parameter lines holds the recorded algorithm input.
            memcpy(&g_S338_Para, &srcImageWithPara.data[m_lImageDataSize * sizeof(short) + nImageWidth * sizeof(short)], sizeof(EORADERINPUT));// read the algorithm input stored in the recording
            stInputPara.stCameraInfo.fPixelSize = 25;
            stInputPara.stCameraInfo.nFocus = 400;// almost always the narrow field of view
            stInputPara.stServoInfo.fServoAz = g_S338_Para.ServoAngel.fOriAngle;
            stInputPara.stServoInfo.fServoPt = g_S338_Para.ServoAngel.fPitAngle;
            stInputPara.stServoInfo.fServoAzSpeed = g_S338_Para.ServoAngel.fOriAngleSpeed;
            stInputPara.stServoInfo.fServoPtSpeed = g_S338_Para.ServoAngel.fPitAngleSpeed;
        }
        if (6 == stInputPara.unFrmId)
        {
            initialization << vot.region();
            // Issue the lock command from the VOT init region (center + box size).
            SelectCX = initialization.x + initialization.width / 2;
            SelectCY = initialization.y + initialization.height / 2;
            setLockBoxW = initialization.width;
            setLockBoxH = initialization.height;
            ARIDLL_OBJINFO obj = { 0 };
            // NOTE(review): the global img descriptor is only (re)configured
            // further below, so this lock uses the descriptor left over from
            // the previous iteration. It works because the pixel buffer is
            // shared and the lock fires at frame 6 (never the first frame),
            // but the ordering is fragile.
            obj = ARIDLL_LockTarget(pTracker, img, SelectCX, SelectCY, setLockBoxW, setLockBoxH);
        }
        stInputPara.unFrmId++;
        // Build the frame descriptor handed to the algorithm.
        img.enPixelFormat = GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y16;
        img.u32Width = nImageWidth;
        img.u32Height = nImageHeight;
        img.u32Stride[0] = img.u32Width * 2;
        img.u64VirAddr[0] = srcImage.data;
        RunProcess(pTracker, img);
        // Convert tracker 0's output (center/size) to a top-left rect and
        // report it to the VOT toolkit together with the confidence.
        auto trackerOut = stOutput.stTrackers[0];
        cv::Rect outRect;
        outRect.width = (int)trackerOut.nObjW;
        outRect.height= (int)trackerOut.nObjH;
        outRect.x = (int)trackerOut.nX - outRect.width / 2;
        outRect.y = (int)trackerOut.nY - outRect.height / 2;
        vot.report(outRect, trackerOut.fConf);
    }
    return 0;
}