// Single-target ground tracking pipeline test: the TLD tracker has been split out of the
// core algorithm and is driven externally, so the API call sequence changed accordingly.
// Reads an AVI video for testing.
#include "NeoArithStandardDll.h"

#include <string>
#include <cstring>
#include <cstdlib>
#include <iostream>

#ifdef _WIN32
#include <windows.h>
#include <tchar.h>
#include <commdlg.h>
#endif

#include "opencv2/opencv.hpp"

using namespace std;
using namespace cv;

#define TEST_WITH_AID 0 // whether to use AI detection
#define TEST_WITH_AIT 0 // whether to use the AI tracker; if set to 1, the top-level CMakeLists.txt must set(BUILD_AI_TRACK TRUE)

#if TEST_WITH_AID
#include "Arith_YOLO_Detect.h"
#endif

#if TEST_WITH_AIT
#include "Arith_AITracker.h"
#endif

unsigned short bcmd = 0;
short SelectCX = 0;
short SelectCY = 0;
unsigned short setLockBoxW = 0;
unsigned short setLockBoxH = 0;

bool draw = false;
cv::Point cursor = cv::Point(0, 0);
cv::Rect rect;
int key;
cv::Mat frame, dstImage;

// video stream control
VideoCapture capture;
const char* windowName = "test";        // display window name
const char* trackBarName = "Progress";  // trackbar name
double totalFrame = 1.0;                // total number of frames
double currentFrame = 1.0;              // current playback frame
int trackbarValue = 1;                  // trackbar value
int trackbarMax = 255;                  // trackbar maximum
double frameRate = 1.0;                 // video frame rate
double controlRate = 0.1;
int frameValue = 0;

// algorithm input
ARIDLL_INPUTPARA stInputPara = { 0 };

// algorithm output
ARIDLL_OUTPUT stOutput = { 0 };

// AI detection results
#if TEST_WITH_AID
obj_res* g_pGLB_AIDetOutput = NULL;
int g_GLB_AIDetNum = 0;
#endif

// AI tracker results
#if TEST_WITH_AIT
API_AI_Tracker* g_GLB_AITracker = NULL;
AIT_OUTPUT g_GLB_AITrackOutput = { 0 };
#endif

// trackbar callback
static void TrackBarFunc(int, void*)
{
    // map the trackbar position to a playback position
    controlRate = (double)trackbarValue / trackbarMax * totalFrame;
    capture.set(CAP_PROP_POS_FRAMES, controlRate);  // seek to that frame
    frameValue = (int)controlRate;
}

static void onMouse(int event, int x, int y, int flags, void* param)
{
    // flags: CV_EVENT_FLAG_LBUTTON (1) indicates a left-button drag
    switch (event)
    {
    case cv::EVENT_LBUTTONDOWN:  // left button pressed
        SelectCX = x;
        SelectCY = y;
        cursor = cv::Point(x, y);
        rect = cv::Rect(x, y, 0, 0);
        draw = true;
        break;
    case cv::EVENT_LBUTTONUP:    // left button released
        setLockBoxW = rect.width;
        setLockBoxH = rect.height;
        SelectCX = rect.x + rect.width / 2;
        SelectCY = rect.y + rect.height / 2;
        bcmd = 1;
        draw = false;
        break;
    case cv::EVENT_RBUTTONDOWN:  // right button pressed
        bcmd = 2;
        break;
    case cv::EVENT_RBUTTONUP:    // right button released
        break;
    case cv::EVENT_MBUTTONUP:    // middle button released: also issues a lock command
        bcmd = 1;
        break;
    case cv::EVENT_MOUSEMOVE:    // cursor moved
        if (draw)
        {
            cv::Mat dstImage1 = frame.clone();
            rect.x = MIN(x, cursor.x);
            rect.y = MIN(y, cursor.y);
            rect.width = abs(cursor.x - x);
            rect.height = abs(cursor.y - y);
            rectangle(dstImage1, rect, cv::Scalar(0, 0, 255), 1);
            cv::imshow("test", dstImage1);
        }
        break;
    }
}

#if TEST_WITH_AID
static void AIDetRun(ArithHandle pTracker, GD_VIDEO_FRAME_S img, int frameID)
{
    // Asynchronous call; given the transfer latency on the device, the async result
    // inherently lags by one frame.
    Async_YOLO_DetectTarget(img.u64VirAddr[0], img.u32Width, img.u32Height, frameID);
    g_pGLB_AIDetOutput = Async_YOLO_GetTargetArray(g_GLB_AIDetNum);

    int targetNum = 0;
    TARGET_OBJECT* pArray = ARIDLL_SearchFrameTargets(pTracker, img, &targetNum);

    int mergeNum = ARIDLL_MergeAITargets(pTracker, pArray, targetNum, g_pGLB_AIDetOutput, g_GLB_AIDetNum);

    stInputPara.nInputTargetNum = mergeNum;
    memcpy(stInputPara.stInputTarget, pArray, sizeof(TARGET_OBJECT) * mergeNum);
}
#endif

#if TEST_WITH_AIT
static int AITrackerRun(GD_VIDEO_FRAME_S img, int frameID)
{
    // fetch the AI tracker control commands from the conventional algorithm's output
    CENTERRECT32F InitBox = stOutput.stAI_TkCmd.InitBox;
    CENTERRECT32F TargetBox = stOutput.stAI_TkCmd.TargetBox;

    if (InitBox.w > 0 && InitBox.h > 0)
    {
        g_GLB_AITracker->init(img, InitBox);
        return 0;
    }

    if (!stOutput.stAI_TkCmd.bTrack)
    {
        g_GLB_AITracker->stopTrack();
        memset(&stInputPara.stAITrackerInfo, 0, sizeof(AIT_OUTPUT));
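        // also clear the cached async result so a stale box is not fed back on the next frame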
        memset(&g_GLB_AITrackOutput, 0, sizeof(AIT_OUTPUT));
        return 0;
    }

    g_GLB_AITracker->Track(img, TargetBox);
    g_GLB_AITracker->getTrackResult_Async(&g_GLB_AITrackOutput);

    // pass the result back to the conventional algorithm
    memcpy(&stInputPara.stAITrackerInfo, &g_GLB_AITrackOutput, sizeof(AIT_OUTPUT));

    return 0;
}
#endif

static void RunProcess(ArithHandle pTracker, GD_VIDEO_FRAME_S img)
{
#if TEST_WITH_AID
    // run the AI detection algorithm
    AIDetRun(pTracker, img, stInputPara.unFrmId);
#endif

#if TEST_WITH_AIT
    // run the SiamRPN tracking algorithm
    AITrackerRun(img, stInputPara.unFrmId);
#endif

    // the TLD pipeline is now driven externally
    //ARIDLL_RunTLDTracker(pTracker, img);

    // run the algorithm's main control logic
    ARIDLL_RunController(pTracker, img, stInputPara, &stOutput);
}

int main()
{
#ifdef _WIN32
    char message[128];
    TCHAR szBuffer[MAX_PATH] = { 0 };
    OPENFILENAME ofn = { 0 };
    ofn.lStructSize = sizeof(ofn);
    //ofn.hwndOwner = m_hWnd;
    ofn.lpstrFilter = _T("所有文件(*.*)\0*.*\0Exe文件(*.mp4)\0*.avi\0");  // file extensions offered by the dialog
    ofn.lpstrInitialDir = _T("F:\\Datasets\\20240219_AutoTest");           // default directory
    ofn.lpstrFile = szBuffer;                                              // buffer receiving the selected file
    ofn.nMaxFile = sizeof(szBuffer) / sizeof(*szBuffer);
    ofn.nFilterIndex = 0;
    ofn.Flags = OFN_PATHMUSTEXIST | OFN_FILEMUSTEXIST | OFN_EXPLORER;      // add OFN_ALLOWMULTISELECT to allow multi-select

    bool bSel = GetOpenFileName(&ofn);
    string video_fileName = ofn.lpstrFile;

    capture.open(video_fileName);
    if (!capture.isOpened())
        return 0;
#elif __linux__
    capture.open("/home/temp/VL_公司门口车辆3.avi");
    if (!capture.isOpened())
        return 0;
#endif

    int nWidth = int(capture.get(cv::CAP_PROP_FRAME_WIDTH));
    int nHeight = int(capture.get(cv::CAP_PROP_FRAME_HEIGHT));

    // playback progress control
    totalFrame = capture.get(CAP_PROP_FRAME_COUNT);  // total frame count
    frameRate = capture.get(CAP_PROP_FPS);           // frame rate
    trackbarMax = (int)totalFrame;

    namedWindow(windowName, 0);
    //resizeWindow(windowName, 960, 540);

    // create the trackbar on the image window
    createTrackbar(trackBarName, windowName, &trackbarValue, trackbarMax, TrackBarFunc);
    TrackBarFunc(0, 0);

    // create the algorithm handle
    ArithHandle pTracker = STD_CreatEOArithHandle();

#if TEST_WITH_AID
    // AI detection init
    //YOLO_Init();
    Async_YOLO_Init();
#endif

#if TEST_WITH_AIT
    // AI tracker init
    //CreatSiamRPN();
    //Async_CreatSiamRPN();
    g_GLB_AITracker = API_AI_Tracker::Create(AITrackerType::DaSaimRPN);
    g_GLB_AITracker->loadModel();
    memset(&g_GLB_AITrackOutput, 0, sizeof(AIT_OUTPUT));
#endif

    // initialize in stare / ground mode
    ARIDLL_EOArithInitWithMode(pTracker, nWidth, nHeight, GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_RGB_PACKED,
        GLB_SYS_MODE::GLB_SYS_STARE, GLB_SCEN_MODE::GLB_SCEN_GROUND);

    stInputPara.unFreq = frameRate;
    stInputPara.stAirCraftInfo.stAtt.fYaw = 0;
    stInputPara.stAirCraftInfo.stAtt.fRoll = 0;
    stInputPara.stAirCraftInfo.stAtt.fPitch = 0;
    stInputPara.stServoInfo.fServoAz = 0;
    stInputPara.stServoInfo.fServoPt = 0;
    stInputPara.stCameraInfo.fPixelSize = 15;
    stInputPara.stCameraInfo.nFocus = 600;
    stInputPara.unFrmId = 0;
    //stInputPara.nInputTargetNum = 0;

    while (1)
    {
        stInputPara.unFrmId++;
        //stInputPara.nInputTargetNum = 0;

        capture.read(frame);
        if (frame.empty())
        {
            /* capture.set(CAP_PROP_POS_FRAMES, 0);
            frameValue = 0;
            capture >> frame;*/
            break;
        }
        dstImage = frame.clone();

        /// interactive debugging ----------------------------------
        //cv::namedWindow("SOT_TRACKING", cv::WINDOW_NORMAL);
        cv::setMouseCallback(windowName, onMouse, NULL);

        // the algorithm uses the first frame for internal initialization, so do not lock on it;
        // pause on frame 2 (or whenever space is pressed) so a target can be selected
        if (key == 32 || stInputPara.unFrmId == 2)
        {
            imshow("test", frame);
            waitKey(0);
        }

        // build the image descriptor
        GD_VIDEO_FRAME_S img = { 0 };
        img.enPixelFormat = GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_RGB_PACKED;
        img.u32Width = nWidth;
        img.u32Height = nHeight;
        img.u32Stride[0] = img.u32Width * 3;
        img.u64VirAddr[0] = frame.data;

        // click-to-track lock
        if (1 == bcmd)
        {
            /* setLockBoxW = 65;
            setLockBoxH = 31;
            SelectCX = 1525 + setLockBoxW / 2;
            SelectCY = 354 + setLockBoxH / 2;*/
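            // A plain click (no drag) leaves setLockBoxW/setLockBoxH at 0; in that case the
            // gate size comes from the tracker's own lock decision (see the note below).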
            //ARIDLL_LockCommand(pTracker, SelectCX, SelectCY, setLockBoxW, setLockBoxH);

            // ARIDLL_LockTarget completes the lock response within the current frame instead of
            // one frame later, which helps when locking onto moving targets; it also returns the
            // lock decision so an external tracker can be locked synchronously. This matters
            // mainly because a point-select lock carries no width/height: the gate settings come
            // from the conventional tracker's decision.
            ARIDLL_OBJINFO obj = { 0 };
            obj = ARIDLL_LockTarget(pTracker, img, SelectCX, SelectCY, setLockBoxW, setLockBoxH);

#if TEST_WITH_AIT
            if (obj.nObjW > 0)
            {
                // initialize the AI tracker with the EOTracker lock decision
                CENTERRECT32F initBox = { obj.nX, obj.nY, obj.nObjW, obj.nObjH };
                g_GLB_AITracker->init(img, initBox);

                // fetch the tracking result
                g_GLB_AITracker->getTrackResult_Async(&g_GLB_AITrackOutput);
            }
#endif
        }

        // right-click: unlock and leave the tracking state
        if (2 == bcmd)
        {
            // unlock the conventional tracker
            ARIDLL_unLockCommand(pTracker);

#if TEST_WITH_AIT
            // unlock the AI tracker
            g_GLB_AITracker->stopTrack();
#endif
        }

        RunProcess(pTracker, img);

        auto trackerOut = stOutput.stTrackers[0];

        cv::Rect outRect;
        outRect.width = (int)trackerOut.nObjW;
        outRect.height = (int)trackerOut.nObjH;
        outRect.x = (int)trackerOut.nX - outRect.width / 2;
        outRect.y = (int)trackerOut.nY - outRect.height / 2;

        cv::rectangle(dstImage, outRect, cv::Scalar(0, 0, 255));

        //cv::rectangle(dstImage, cv::Rect(g_GLB_AITrackOutput.fX - g_GLB_AITrackOutput.nObjW / 2,
        //    g_GLB_AITrackOutput.fY - g_GLB_AITrackOutput.nObjH / 2,
        //    g_GLB_AITrackOutput.nObjW,
        //    g_GLB_AITrackOutput.nObjH), cv::Scalar(0, 255, 255));

        bcmd = 0;
        SelectCX = 0;
        SelectCY = 0;
        setLockBoxW = 0;
        setLockBoxH = 0;

        cv::imshow("test", dstImage);
        key = cv::waitKey(10);
    }

    return 0;
}
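
// Controls: drag with the left mouse button to draw a lock box (releasing the middle
// button also issues a lock), right-click to unlock, drag the trackbar to seek, and
// press space to pause on the current frame for target selection. On Windows the video
// is chosen through a file dialog; on Linux the path near the top of main() is hard-coded.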