Compare commits

..

No commits in common. '04dd7d3816b06daf2d071a12d96948068bd53d1b' and 'b57ad52a0d0fd0298414fc27ed6b7284b0f8e1e2' have entirely different histories.

@ -61,7 +61,7 @@ ENABLE_TESTING()
# opencv + TensorRT # opencv + TensorRT
IF(WIN32) IF(WIN32)
set(OpenCV_DIR "C:/opencv/build/x64/vc15/lib") set(OpenCV_DIR "D:/Opencv_v/4.8.1/build/install/x64/vc17/lib")
set(TRT_DIR "D:/TensorRT/TensorRT7") set(TRT_DIR "D:/TensorRT/TensorRT7")
ELSE(WIN32) ELSE(WIN32)
set(OpenCV_DIR "/home/wcw/opencv-3.4.16/install/share/OpenCV") set(OpenCV_DIR "/home/wcw/opencv-3.4.16/install/share/OpenCV")

@ -3,7 +3,7 @@ SET(ArithTrkPubInc ${CMAKE_SOURCE_DIR}/public_include)
include_directories(${ArithTrkPubInc}) # include_directories(${ArithTrkPubInc}) #
set(CMAKE_CXX_STANDARD 17) set(CMAKE_CXX_STANDARD 17)
# #
add_subdirectory(profile) add_subdirectory(tests)
IF(WIN32) IF(WIN32)
# vot_test # vot_test

@ -1,36 +0,0 @@
message(STATUS "++++++++add Profile Test DIR++++++++")
# OpenCV link/include paths (variables provided by the top-level CMakeLists)
link_directories(${OpenCV_LIBS_DIR})
include_directories(${OpenCV_INCLUDE_DIRS})
# Public API headers of the tracking SDK
SET(ArithTrkPubInc ${CMAKE_SOURCE_DIR}/public_include)
include_directories(${ArithTrkPubInc})
set(LIB_SRC_DIR ${CMAKE_SOURCE_DIR}/NeoTracker/src)
include_directories(${LIB_SRC_DIR})
link_directories(${CMAKE_SOURCE_DIR}/Bin)
set(EXECUTABLE_OUTPUT_PATH ${CMAKE_SOURCE_DIR}/Bin)
# Profiling driver: a single executable bundling every scenario test.
# (The original comment here was mojibake; presumably it noted that the tests
# use the built-in target simulator instead of external videos — confirm.)
add_executable(TestAPI_Profile TestAPI_Profile.cpp
Profile_SOT_Ground_NV12.cpp
Profile_SOT_Ground_Y16.cpp
Profile_SOT_Ground_Y8.cpp
Profile_SOT_Ground_RGB.cpp
Profile_SOT_Sky_Y16.cpp
Profile_SOT_LockUnlockStress.cpp
utils.cpp
)
target_link_libraries(TestAPI_Profile PRIVATE
${LIB_TRACKER}
${LIB_DETECTOR} # detector library
${LIB_GDKCF} # KCF tracker library
${LIB_GDTLD} # TLD tracker library
${LIB_PIPE}
${OpenCV_LIBS})

@ -1,128 +0,0 @@
// NV12 对地跟踪性能测试
#include "NeoArithStandardDll.h"
#include "utils.h"
#include <iostream>
#include <memory>
#include <string.h>
#include <algorithm>
#include <thread>
#include "opencv2/opencv.hpp"
#include "TestAPI_Profile.h"
using std::cout;
using std::endl;
using std::string;
#define Test_Len 1000
// Profiling test: single-target ground-scene tracking on simulated NV12 frames.
// Renders Test_Len synthetic 1920x1080 frames containing one textured moving
// target plus a static occluder, times each ARIDLL_RunController call, and
// counts frames where the reported track stays within 5 px of ground truth.
// Always returns 0; results are reported via printf.
int TestAPI_SOT_Ground_NV12()
{
// Simulated image source; frames are fetched per pixel format below.
int nWidth = 1920;
int nHeight = 1080;
SimTargetImage factory(nWidth, nHeight);
factory.setBackGround(128, 10); // presumably mean gray 128, noise 10 — confirm SimTargetImage API
// Initial target: 30x30 box drifting 1 px/frame in x and y, skinned with the
// embedded cow PNG texture.
Target t;
t.x = 100;
t.y = 100;
t.width = 30;
t.height = 30;
t.vw = 0;
t.vh = 0;
t.vx = 1;
t.vy = 1;
t.addTexture(cow_png,cow_png_len);
factory.addTarget(t);
// Static dark 100x100 occluder the target can pass behind.
t.x = 300;
t.y = 250;
t.width = 100;
t.height = 100;
t.vw = 0;
t.vh = 0;
t.vx = 0;
t.vy = 0;
t.color = cv::Scalar(20,20,20);
factory.addOcc(t);
// Create the algorithm handle.
ArithHandle pTracker = STD_CreatEOArithHandle();
// Initialize in stare + ground-scene mode for NV12 input.
ARIDLL_EOArithInitWithMode(pTracker,nWidth,nHeight,GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_NV12,
GLB_SYS_MODE::GLB_SYS_STARE,GLB_SCEN_MODE::GLB_SCEN_GROUND);
// Per-frame input parameters; camera intrinsics stay constant.
ARIDLL_INPUTPARA stInputPara = { 0 };
stInputPara.unFrmId++;
stInputPara.stCameraInfo.fPixelSize = 15;
stInputPara.stCameraInfo.nFocus = 300;
// Algorithm output.
ARIDLL_OUTPUT stOutput = { 0 };
// Main simulation loop.
int nTrackSuc = 0;
cv::Mat frame;
for(int i = 0; i < Test_Len; i++)
{
stInputPara.unFrmId++;
factory.update();
cv::Mat src = factory.getImageNV12();
Target* gt = factory.getTarget(0);
// Wrap the frame buffer in the SDK's frame descriptor.
GD_VIDEO_FRAME_S img = { 0 };
img.enPixelFormat = GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_NV12;
img.u32Width = nWidth;
img.u32Height = nHeight;
img.u32Stride[0] = img.u32Width * 1; // NV12 luma plane: 1 byte per pixel
img.u64VirAddr[0] = (unsigned char*)src.data;
// Issue the area-lock command once, on frame 3, at the ground-truth box.
if (stInputPara.unFrmId == 3)
{
//ARIDLL_LockCommand(pTracker, gt->x,gt->y,gt->width,gt->height);
ARIDLL_LockCommand(pTracker, gt->x, gt->y, gt->width, gt->height);
}
cv::TickMeter tm;
tm.start();
// The timed section: the tracker's main controller API.
ARIDLL_RunController(pTracker, img, stInputPara, &stOutput);
tm.stop();
printf("time:%.2f\n",tm.getTimeMilli());
#ifdef SHOW
// Visualization skews the timing; undefine SHOW for clean measurements.
cv::Mat rgb = factory.getImageRGB();
showArithInfo(rgb,&stOutput);
imshow("res",rgb);
cv::waitKey(1);
#endif
// Count a success when exactly one track is reported within 5 px of truth.
if (stOutput.nStatus == GLB_STATUS_TRACK && stOutput.nTrackObjCnts == 1)
{
if (abs(stOutput.stTrackers[0].nX - gt->x) < 5 &&
abs(stOutput.stTrackers[0].nY - gt->y) < 5)
{
nTrackSuc++;
}
}
}
printf("Suc:%d/A:%d\n",nTrackSuc,Test_Len);
return 0;
}

@ -1,135 +0,0 @@
// 单目标对地跟踪流程测试:将TLD从算法中剥离到外部导致API调用形式调整
// 读取avi视频进行测试
#include "NeoArithStandardDll.h"
#include "utils.h"
#include <iostream>
#include <memory>
#include <string.h>
#include <algorithm>
#include <thread>
#include "opencv2/opencv.hpp"
#include "TestAPI_Profile.h"
using std::cout;
using std::endl;
using std::string;
#define Test_Len 1000
// Profiling test: single-target ground-scene tracking on simulated packed-RGB
// frames (1280x1024). Same scenario as the NV12 variant: one textured moving
// target, one static occluder, per-frame controller timing, 5 px success check.
// Always returns 0; results are reported via printf.
int TestAPI_SOT_Ground_RGB()
{
// Simulated image source.
int nWidth = 1280;
int nHeight = 1024;
SimTargetImage factory(nWidth, nHeight);
factory.setBackGround(128, 10); // presumably mean gray 128, noise 10 — confirm SimTargetImage API
// Initial target: 30x30 box drifting 1 px/frame, cow PNG texture.
Target t;
t.x = 100;
t.y = 100;
t.width = 30;
t.height = 30;
t.vw = 0;
t.vh = 0;
t.vx = 1;
t.vy = 1;
t.addTexture(cow_png,cow_png_len);
factory.addTarget(t);
// Static dark 100x100 occluder.
t.x = 300;
t.y = 250;
t.width = 100;
t.height = 100;
t.vw = 0;
t.vh = 0;
t.vx = 0;
t.vy = 0;
t.color = cv::Scalar(20,20,20);
factory.addOcc(t);
// Create the algorithm handle.
ArithHandle pTracker = STD_CreatEOArithHandle();
// Initialize in stare + ground-scene mode for packed RGB input.
ARIDLL_EOArithInitWithMode(pTracker,nWidth,nHeight,GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_RGB_PACKED,
GLB_SYS_MODE::GLB_SYS_STARE,GLB_SCEN_MODE::GLB_SCEN_GROUND);
// Per-frame input parameters; camera intrinsics stay constant.
ARIDLL_INPUTPARA stInputPara = { 0 };
stInputPara.unFrmId++;
stInputPara.stCameraInfo.fPixelSize = 15;
stInputPara.stCameraInfo.nFocus = 300;
// Algorithm output.
ARIDLL_OUTPUT stOutput = { 0 };
// Main simulation loop.
int nTrackSuc = 0;
cv::Mat frame;
for(int i = 0; i < Test_Len; i++)
{
stInputPara.unFrmId++;
factory.update();
cv::Mat src = factory.getImageRGB();
Target* gt = factory.getTarget(0);
// Wrap the frame buffer in the SDK's frame descriptor.
GD_VIDEO_FRAME_S img = { 0 };
img.enPixelFormat = GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_RGB_PACKED;
img.u32Width = nWidth;
img.u32Height = nHeight;
img.u32Stride[0] = img.u32Width * 3; // packed RGB: 3 bytes per pixel
img.u64VirAddr[0] = (unsigned char*)src.data;
// Issue the area-lock command once, on frame 3, at the ground-truth box.
if (stInputPara.unFrmId == 3)
{
ARIDLL_LockCommand(pTracker, gt->x,gt->y,gt->width,gt->height);
}
cv::TickMeter tm;
tm.start();
// The timed section: the tracker's main controller API.
ARIDLL_RunController(pTracker, img, stInputPara, &stOutput);
tm.stop();
printf("time:%.2f\n",tm.getTimeMilli());
#ifdef SHOW
// Visualization skews the timing; undefine SHOW for clean measurements.
cv::Mat rgb = factory.getImageRGB();
showArithInfo(rgb,&stOutput);
imshow("res",rgb);
cv::waitKey(1);
#endif
// Count a success when exactly one track is reported within 5 px of truth.
if (stOutput.nStatus == GLB_STATUS_TRACK && stOutput.nTrackObjCnts == 1)
{
if (abs(stOutput.stTrackers[0].nX - gt->x) < 5 &&
abs(stOutput.stTrackers[0].nY - gt->y) < 5)
{
nTrackSuc++;
}
}
}
printf("Suc:%d/A:%d\n",nTrackSuc,Test_Len);
return 0;
}

@ -1,135 +0,0 @@
// 单目标对地跟踪流程测试:将TLD从算法中剥离到外部导致API调用形式调整
// 读取avi视频进行测试
#include "NeoArithStandardDll.h"
#include "utils.h"
#include <iostream>
#include <memory>
#include <string.h>
#include <algorithm>
#include <thread>
#include "opencv2/opencv.hpp"
#include "TestAPI_Profile.h"
using std::cout;
using std::endl;
using std::string;
#define Test_Len 1000
// Profiling test: single-target ground-scene tracking on simulated 16-bit gray
// (Y16) frames at 1280x1024. Same scenario as the other ground variants: one
// textured moving target, one static occluder, per-frame controller timing,
// 5 px success check. Always returns 0; results are reported via printf.
int TestAPI_SOT_Ground_Y16()
{
// Simulated image source.
int nWidth = 1280;
int nHeight = 1024;
SimTargetImage factory(nWidth, nHeight);
factory.setBackGround(128, 10); // presumably mean gray 128, noise 10 — confirm SimTargetImage API
// Initial target: 30x30 box drifting 1 px/frame, cow PNG texture.
Target t;
t.x = 100;
t.y = 100;
t.width = 30;
t.height = 30;
t.vw = 0;
t.vh = 0;
t.vx = 1;
t.vy = 1;
t.addTexture(cow_png,cow_png_len);
factory.addTarget(t);
// Static dark 100x100 occluder.
t.x = 300;
t.y = 250;
t.width = 100;
t.height = 100;
t.vw = 0;
t.vh = 0;
t.vx = 0;
t.vy = 0;
t.color = cv::Scalar(20,20,20);
factory.addOcc(t);
// Create the algorithm handle.
ArithHandle pTracker = STD_CreatEOArithHandle();
// Initialize in stare + ground-scene mode for Y16 input.
ARIDLL_EOArithInitWithMode(pTracker,nWidth,nHeight,GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y16,
GLB_SYS_MODE::GLB_SYS_STARE,GLB_SCEN_MODE::GLB_SCEN_GROUND);
// Per-frame input parameters; camera intrinsics stay constant.
ARIDLL_INPUTPARA stInputPara = { 0 };
stInputPara.unFrmId++;
stInputPara.stCameraInfo.fPixelSize = 15;
stInputPara.stCameraInfo.nFocus = 300;
// Algorithm output.
ARIDLL_OUTPUT stOutput = { 0 };
// Main simulation loop.
int nTrackSuc = 0;
cv::Mat frame;
for(int i = 0; i < Test_Len; i++)
{
stInputPara.unFrmId++;
factory.update();
cv::Mat src = factory.getImageY16();
Target* gt = factory.getTarget(0);
// Wrap the frame buffer in the SDK's frame descriptor.
GD_VIDEO_FRAME_S img = { 0 };
img.enPixelFormat = GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y16;
img.u32Width = nWidth;
img.u32Height = nHeight;
img.u32Stride[0] = img.u32Width * 2; // Y16: 2 bytes per pixel
img.u64VirAddr[0] = (unsigned char*)src.data;
// Issue the area-lock command once, on frame 3, at the ground-truth box.
if (stInputPara.unFrmId == 3)
{
//ARIDLL_LockCommand(pTracker, gt->x,gt->y,gt->width,gt->height);
ARIDLL_LockCommand(pTracker, gt->x, gt->y, gt->width, gt->height);
}
cv::TickMeter tm;
tm.start();
// The timed section: the tracker's main controller API.
ARIDLL_RunController(pTracker, img, stInputPara, &stOutput);
tm.stop();
printf("time:%.2f\n",tm.getTimeMilli());
#ifdef SHOW
// Visualization skews the timing; undefine SHOW for clean measurements.
cv::Mat rgb = factory.getImageRGB();
showArithInfo(rgb,&stOutput);
imshow("res",rgb);
cv::waitKey(1);
#endif
// Count a success when exactly one track is reported within 5 px of truth.
if (stOutput.nStatus == GLB_STATUS_TRACK && stOutput.nTrackObjCnts == 1)
{
if (abs(stOutput.stTrackers[0].nX - gt->x) < 5 &&
abs(stOutput.stTrackers[0].nY - gt->y) < 5)
{
nTrackSuc++;
}
}
}
printf("Suc:%d/A:%d\n",nTrackSuc,Test_Len);
return 0;
}

@ -1,137 +0,0 @@
// 单目标对地跟踪流程测试:将TLD从算法中剥离到外部导致API调用形式调整
// 读取avi视频进行测试
#include "NeoArithStandardDll.h"
#include "utils.h"
#include <iostream>
#include <memory>
#include <string.h>
#include <algorithm>
#include <thread>
#include "opencv2/opencv.hpp"
#include "TestAPI_Profile.h"
using std::cout;
using std::endl;
using std::string;
#define Test_Len 1000
// Profiling test: single-target ground-scene tracking on simulated 8-bit gray
// (Y8) frames at 1280x1024. Uses a slower, rectangular (50x30) target than the
// other ground variants; otherwise the same flow: moving target + occluder,
// per-frame controller timing, 5 px success check. Always returns 0.
int TestAPI_SOT_Ground_Y8()
{
// Simulated image source.
int nWidth = 1280;
int nHeight = 1024;
SimTargetImage factory(nWidth, nHeight);
factory.setBackGround(128, 10); // presumably mean gray 128, noise 10 — confirm SimTargetImage API
// Initial target: 50x30 box moving at (0.3, 0.2) px/frame, cow PNG texture.
Target t;
t.x = 100;
t.y = 100;
t.width = 50;
t.height = 30;
t.vw = 0;
t.vh = 0;
t.vx = 0.3;
t.vy = 0.2;
t.color = cv::Scalar(25,255,1);
t.addTexture(cow_png,cow_png_len);
factory.addTarget(t);
// Static dark 100x100 occluder.
t.x = 300;
t.y = 250;
t.width = 100;
t.height = 100;
t.vw = 0;
t.vh = 0;
t.vx = 0;
t.vy = 0;
t.color = cv::Scalar(20,20,20);
factory.addOcc(t);
// Create the algorithm handle.
ArithHandle pTracker = STD_CreatEOArithHandle();
// Initialize in stare + ground-scene mode for Y8 input.
ARIDLL_EOArithInitWithMode(pTracker,nWidth,nHeight,GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y8,
GLB_SYS_MODE::GLB_SYS_STARE,GLB_SCEN_MODE::GLB_SCEN_GROUND);
// Per-frame input parameters; camera intrinsics stay constant.
ARIDLL_INPUTPARA stInputPara = { 0 };
stInputPara.unFrmId++;
stInputPara.stCameraInfo.fPixelSize = 15;
stInputPara.stCameraInfo.nFocus = 300;
// Algorithm output.
ARIDLL_OUTPUT stOutput = { 0 };
// Main simulation loop.
int nTrackSuc = 0;
cv::Mat frame;
for(int i = 0; i < Test_Len; i++)
{
stInputPara.unFrmId++;
factory.update();
cv::Mat src = factory.getImageY8();
Target* gt = factory.getTarget(0);
// Wrap the frame buffer in the SDK's frame descriptor.
GD_VIDEO_FRAME_S img = { 0 };
img.enPixelFormat = GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y8;
img.u32Width = nWidth;
img.u32Height = nHeight;
img.u32Stride[0] = img.u32Width * 1; // Y8: 1 byte per pixel
img.u64VirAddr[0] = (unsigned char*)src.data;
// Issue the area-lock command once, on frame 3, at the ground-truth box.
if (stInputPara.unFrmId == 3)
{
ARIDLL_LockCommand(pTracker, gt->x,gt->y,gt->width,gt->height);
}
cv::TickMeter tm;
tm.start();
// The timed section: the tracker's main controller API.
ARIDLL_RunController(pTracker, img, stInputPara, &stOutput);
tm.stop();
printf("time:%.2f\n",tm.getTimeMilli());
#ifdef SHOW
// Visualization skews the timing; undefine SHOW for clean measurements.
cv::Mat rgb = factory.getImageRGB();
showArithInfo(rgb,&stOutput);
imshow("res",rgb);
cv::waitKey(1);
#endif
// Count a success when exactly one track is reported within 5 px of truth.
if (stOutput.nStatus == GLB_STATUS_TRACK && stOutput.nTrackObjCnts == 1)
{
if (abs(stOutput.stTrackers[0].nX - gt->x) < 5 &&
abs(stOutput.stTrackers[0].nY - gt->y) < 5)
{
nTrackSuc++;
}
}
}
printf("Suc:%d/A:%d\n",nTrackSuc,Test_Len);
return 0;
}

@ -1,118 +0,0 @@
// 暴力锁定解锁鲁棒性测试
#include "NeoArithStandardDll.h"
#include "utils.h"
#include <iostream>
#include <memory>
#include <string.h>
#include <algorithm>
#include <thread>
#include "opencv2/opencv.hpp"
#include "TestAPI_Profile.h"
#include <random>
using std::cout;
using std::endl;
// Stress test: drive the lock/unlock command path with a random command
// sequence over 100000 simulated NV12 frames and verify the controller
// survives. Robustness/timing only — no success criterion is evaluated.
// Always returns 0.
int TestAPI_SOT_LockUnlock_Stress()
{
// Simulated 1920x1080 source with a checkerboard background.
int nWidth = 1920;
int nHeight = 1080;
const int Test_Len = 100000;
// RNG for the random lock-point offset.
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> offsetDist(-2000, 2000); // NOTE(review): +/-2000 px around frame center — wider than the frame itself, so many lock points land outside the image (the original comment claimed +/-5 px; assumed intentional for stress — confirm)
SimTargetImage factory(nWidth, nHeight);
factory.setCheckerboardBackGround(120, 0, 80);
// Create the algorithm handle.
ArithHandle pTracker = STD_CreatEOArithHandle();
// Initialize in stare + GROUND scene mode (the original comment said "sky",
// but the code passes GLB_SCEN_GROUND).
ARIDLL_EOArithInitWithMode(pTracker, nWidth, nHeight, GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_NV12,
GLB_SYS_MODE::GLB_SYS_STARE, GLB_SCEN_MODE::GLB_SCEN_GROUND);
ARIDLL_INPUTPARA stInputPara = { 0 };
stInputPara.unFrmId++;
stInputPara.unFreq = 50;
stInputPara.stCameraInfo.fPixelSize = 15;
stInputPara.stCameraInfo.nFocus = 300;
stInputPara.stCameraInfo.unVideoType = GLB_VIDEO_IR_MW;
ARIDLL_OUTPUT stOutput = { 0 };
// Random per-frame command sequence: 1 = lock, 2 = unlock, other = no-op.
std::vector<int> lockUnlockSequence = generateRandomLockUnlockSequence(Test_Len);
int nTrackSuc = 0;
int nLock = 0, nUnlock = 0, nLost = 0; // counters (nTrackSuc/nLost currently unused)
bool locked = false;
for (int i = 0; i < Test_Len; i++)
{
stInputPara.unFrmId++;
factory.update();
cv::Mat src = factory.getImageNV12();
GD_VIDEO_FRAME_S img = { 0 };
img.enPixelFormat = GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_NV12;
img.u32Width = nWidth;
img.u32Height = nHeight;
img.u32Stride[0] = img.u32Width;
img.u64VirAddr[0] = (UBYTE8*)src.data;
int targetNum = 0;
cv::TickMeter tm1;
tm1.start();
{
//targetNum = ARIDLL_SearchFrameTargets(pTracker, img);
}
tm1.stop();
printf("det time:%.2f", tm1.getTimeMilli()); // detection is disabled above, so this times an empty block
// Execute this frame's lock/unlock command.
switch (lockUnlockSequence[i])
{
case 1: // lock
{
// Random lock point near the frame center. The actual lock calls are
// currently commented out, so only the RNG and counters are exercised.
int offsetX = offsetDist(gen);
int offsetY = offsetDist(gen);
//ARIDLL_LockCommand_DefaultSize(pTracker, (int)nWidth / 2 + offsetX, (int)nHeight / 2 + offsetY, 30, 30);
//ARIDLL_LockTarget_DefaultSize(pTracker, img,(int)nWidth / 2 + offsetX, (int)nHeight / 2 + offsetY, 30, 30);
locked = true;
nLock++;
}
break;
case 2: // unlock
ARIDLL_unLockCommand(pTracker);
locked = false;
nUnlock++;
break;
default: // ordinary frame, no command
break;
}
cv::TickMeter tm2;
tm2.start();
ARIDLL_RunController(pTracker, img, stInputPara, &stOutput);
tm2.stop();
printf(" trk time:%.2f\n", tm2.getTimeMilli());
#ifdef SHOW
cv::Mat rgb = factory.getImageRGB();
showArithInfo(rgb, &stOutput);
imshow("res", rgb);
cv::waitKey(1);
#endif
printf("nStatus:%d,Proc:%d", stOutput.nStatus,stInputPara.unFrmId);
}
// TODO(review): report nLock/nUnlock statistics before returning.
return 0;
}

@ -1,148 +0,0 @@
// 单目标对空跟踪流程测试关注跟踪器FPS
#include "NeoArithStandardDll.h"
#include "utils.h"
#include <iostream>
#include <memory>
#include <string.h>
#include <algorithm>
#include <thread>
#include "opencv2/opencv.hpp"
#include "TestAPI_Profile.h"
using std::cout;
using std::endl;
#define Test_Len 1000
// Profiling test: single-target SKY-scene tracking on simulated Y16 frames
// (640x512), focused on tracker FPS. A small 4x4 bright point target moves at
// 1 px/frame; per-frame detection and controller calls are timed separately.
// Prints "pass" when more than 90% of frames track within 5 px of truth.
// Always returns 0.
int TestAPI_SOT_Sky_Y16()
{
// Simulated image source with a flat, noise-free background.
int nWidth = 640;
int nHeight = 512;
SimTargetImage factory(nWidth, nHeight);
factory.setBackGround(120, 0);
// Initial target: 4x4 bright point, drifting 1 px/frame in x and y.
Target t;
t.x = 100;
t.y = 100;
t.width = 4;
t.height = 4;
t.vw = 0;
t.vh = 0;
t.vx = 1;
t.vy = 1;
t.color = cv::Scalar(255,255,255);
t.vc = 10;
//t.addTexture(cow_png, cow_png_len);
//t.bGrayComplex = true;
//t.color2 = cv::Scalar(10, 10, 10);
factory.addTarget(t);
// Create the algorithm handle.
ArithHandle pTracker = STD_CreatEOArithHandle();
// Initialize in stare + sky-scene mode for Y16 input.
ARIDLL_EOArithInitWithMode(pTracker,nWidth,nHeight,GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y16,
GLB_SYS_MODE::GLB_SYS_STARE,GLB_SCEN_MODE::GLB_SCEN_SKY);
// Per-frame input parameters; camera intrinsics stay constant.
ARIDLL_INPUTPARA stInputPara = { 0 };
stInputPara.unFrmId++;
stInputPara.unFreq = 50;
stInputPara.stCameraInfo.fPixelSize = 15;
stInputPara.stCameraInfo.nFocus = 300;
stInputPara.stCameraInfo.unVideoType = GLB_VIDEO_IR_MW;
// Algorithm output.
ARIDLL_OUTPUT stOutput = { 0 };
// Main simulation loop.
int nTrackSuc = 0;
for (int i = 0; i < Test_Len; i++)
{
stInputPara.unFrmId++;
factory.update();
cv::Mat src = factory.getImageY16();
Target* gt = factory.getTarget(0);
// Issue a point-lock command (zero-size box) once, on iteration 50.
if (i == 50)
{
ARIDLL_LockCommand(pTracker, (int)gt->x, (int)gt->y,0,0);
}
// Wrap the frame buffer in the SDK's frame descriptor.
GD_VIDEO_FRAME_S img = { 0 };
img.enPixelFormat = GD_VIDEO_FRAME_S_FMT;
img.u32Width = nWidth;
img.u32Height = nHeight;
img.u32Stride[0] = img.u32Width * 2;
img.u64VirAddr[0] = (UBYTE8*)src.data;
// IR target detection API call.
int targetNum = 0;
// The classic sky algorithm ran the search only in SEARCH state; it is run
// every frame here so timings are comparable with the previous version.
// In the new tracker the search runs in its own thread; this demo only
// shows the call, it does not demonstrate the parallel setup.
//if (stOutput.nStatus == GLB_STATUS_SEARCH)
cv::TickMeter tm1;
tm1.start();
{
targetNum = ARIDLL_SearchFrameTargets(pTracker, img);
}
tm1.stop();
printf("det time:%.2f", tm1.getTimeMilli());
cv::TickMeter tm2;
tm2.start();
// The timed section: the tracker's main controller API.
ARIDLL_RunController(pTracker, img, stInputPara, &stOutput);
tm2.stop();
printf(" trk time:%.2f\n", tm2.getTimeMilli());
//std::cout << gt->x << " " << gt->y << std::endl;
#ifdef SHOW
// Visualization skews the timing; undefine SHOW for clean measurements.
cv::Mat rgb = factory.getImageRGB();
showArithInfo(rgb,&stOutput);
imshow("res",rgb);
cv::waitKey(1);
#endif
// Count a success when exactly one track is reported within 5 px of truth.
if (stOutput.nStatus == GLB_STATUS_TRACK && stOutput.nTrackObjCnts == 1)
{
if (abs(stOutput.stTrackers[0].nX - gt->x) < 5 &&
abs(stOutput.stTrackers[0].nY - gt->y) < 5)
{
nTrackSuc++;
}
}
}
// Pass criterion: >90% of all frames tracked successfully.
if (nTrackSuc > Test_Len * 0.9)
{
cout << "pass" << endl;
}
printf("Suc:%d/A:%d\n",nTrackSuc,Test_Len);
return 0;
}

@ -1,17 +0,0 @@
# 单元测试为基础的性能和功能测试
test 中包含的内容为基准性能测试和最简单的功能测试。主要用于不同平台下耗时统计。
使用方式:
./TestAPI_Profile -s ground -d NV12
- 使用了cmdline进行参数解析
- 使用lambda进行参数与测试接口绑定减少可执行程序个数。
- 内置一个功能较为完整的目标图像仿真模块,可以制作出贴图目标的遮挡、放缩、运动等效果而不需要依赖外部视频或者贴图。
- 根据项目和硬件需要自由增加测试项。
- test项不再执行其他性能测试。

@ -1,55 +0,0 @@
#include "cmdline.h"//命令行解析
#include <functional>
#include <unordered_map>
#include <iostream>
#include "image_data.h"
#include "TestAPI_Profile.h"
using namespace std;
int main(int argc, char *argv[])
{
TestAPI_SOT_Sky_Y16();
return 0;
// 映射表
std::unordered_map<std::string, std::function<void()>> handlerMap = {
{"sky:Y16", TestAPI_SOT_Sky_Y16},
{"ground:Y8", TestAPI_SOT_Ground_Y8},
{"ground:RGB", TestAPI_SOT_Ground_RGB},
{"ground:Y16", TestAPI_SOT_Ground_Y16},
{"ground:NV12", TestAPI_SOT_Ground_NV12},
{"ground:NV12:LOCK_STRESS", TestAPI_SOT_LockUnlock_Stress},
};
cmdline::parser a;
a.add<string>("scen",'s',"sky or ground",true,"");
a.add<string>("dataType",'d',"Y8/Y16/RGB/NV12",true,"");
a.add<string>("testItem",'t',"other item",false,"");
a.parse_check(argc, argv);
cout << "scen:" << a.get<string>("scen")
<< "datatype:" << a.get<string>("dataType") << endl;
cout << "------------------ " << endl;
std::string scen = a.get<std::string>("scen");
std::string dataType = a.get<std::string>("dataType");
std::string testItem = a.get<std::string>("testItem");
std::string key;
if (!testItem.empty()) {
key = scen + ":" + dataType + ":" + testItem;
} else {
key = scen + ":" + dataType;
}
// 查找并执行逻辑
if (handlerMap.find(key) != handlerMap.end()) {
handlerMap[key]();
} else {
std::cout << "Invalid combination of scen and dataType: " << key << std::endl;
}
return 0;
}

@ -1,13 +0,0 @@
#include "utils.h"
// When defined, every test renders each processed frame with the tracker
// overlay via imshow/waitKey; comment out for clean timing measurements.
#define SHOW
// Embedded PNG texture (array linked in from generated data) used to skin
// simulated targets.
extern unsigned char cow_png[];
extern unsigned int cow_png_len;
// Profiling test entry points, one per scenario/pixel-format combination.
// Each runs self-contained on simulated imagery and returns 0.
int TestAPI_SOT_Ground_NV12();
int TestAPI_SOT_Ground_RGB();
int TestAPI_SOT_Ground_Y8();
int TestAPI_SOT_Ground_Y16();
int TestAPI_SOT_Sky_Y16();
int TestAPI_SOT_LockUnlock_Stress();

@ -1,821 +0,0 @@
/*
Copyright (c) 2009, Hideyuki Tanaka
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY <copyright holder> ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <copyright holder> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <iostream>
#include <sstream>
#include <vector>
#include <map>
#include <string>
#include <stdexcept>
#include <typeinfo>
#include <cstring>
#include <algorithm>
#ifdef __GNUC__
#include <cxxabi.h>
#endif
#include <cstdlib>
namespace cmdline{
namespace detail{

// lexical_cast machinery: generic Source -> Target conversion routed through a
// stringstream, with partial specializations that avoid the stream when it is
// unnecessary. Every failing conversion throws std::bad_cast.
template <typename Target, typename Source, bool Same>
class lexical_cast_t{
public:
  static Target cast(const Source &arg){
    Target ret;
    std::stringstream ss;
    // Require the whole input to be consumed (eof) so trailing junk like
    // "12x" -> int is rejected.
    if (!(ss<<arg && ss>>ret && ss.eof()))
      throw std::bad_cast();
    return ret;
  }
};

// Same-type "conversion": pass the value through untouched.
template <typename Target, typename Source>
class lexical_cast_t<Target, Source, true>{
public:
  static Target cast(const Source &arg){
    return arg;
  }
};

// Anything -> string: format once; no round-trip or eof check needed.
template <typename Source>
class lexical_cast_t<std::string, Source, false>{
public:
  static std::string cast(const Source &arg){
    std::ostringstream ss;
    ss<<arg;
    return ss.str();
  }
};

// string -> anything: parse and require the whole string to be consumed.
template <typename Target>
class lexical_cast_t<Target, std::string, false>{
public:
  static Target cast(const std::string &arg){
    Target ret;
    std::istringstream ss(arg);
    if (!(ss>>ret && ss.eof()))
      throw std::bad_cast();
    return ret;
  }
};

// Minimal is_same trait (the header predates relying on <type_traits>).
template <typename T1, typename T2>
struct is_same {
  static const bool value = false;
};

template <typename T>
struct is_same<T, T>{
  static const bool value = true;
};

// Public entry point: dispatch to the appropriate lexical_cast_t specialization.
template<typename Target, typename Source>
Target lexical_cast(const Source &arg)
{
  return lexical_cast_t<Target, Source, detail::is_same<Target, Source>::value>::cast(arg);
}

// Turn a mangled type name into a readable one. MSVC's typeid names are
// already readable; GCC/Clang go through abi::__cxa_demangle.
static inline std::string demangle(const std::string &name)
{
#ifdef _MSC_VER
  return name;
#elif defined(__GNUC__)
  int status = 0;
  char* p = abi::__cxa_demangle(name.c_str(), 0, 0, &status);
  // Fix: __cxa_demangle returns NULL on failure (status != 0); constructing
  // std::string from a null pointer is undefined behavior, so fall back to
  // the raw mangled name instead.
  if (p == NULL)
    return name;
  std::string ret(p);
  free(p);
  return ret;
#else
  // Other compilers must supply their own demangle implementation.
#error unexpected c complier (msc/gcc), Need to implement this method for demangle
#endif
}

// Human-readable name of T, used in usage/help text.
template <class T>
std::string readable_typename()
{
  return demangle(typeid(T).name());
}

// Render a default option value for display in the option description.
template <class T>
std::string default_value(T def)
{
  return detail::lexical_cast<std::string>(def);
}

// std::string is the common case; print it as plain "string".
template <>
inline std::string readable_typename<std::string>()
{
  return "string";
}

} // detail
//-----
// Exception type for every error reported by the cmdline library
// (duplicate option definitions, unknown flags, type mismatches, ...).
class cmdline_error : public std::exception {
  std::string m_message; // owned storage keeps the what() pointer valid for the object's lifetime
public:
  cmdline_error(const std::string &msg) : m_message(msg) {}
  ~cmdline_error() throw() {}
  // std::exception interface: NUL-terminated description of the failure.
  const char *what() const throw() { return m_message.c_str(); }
};
// Default string -> T converter used when an option declares no custom reader.
template <class T>
struct default_reader{
// Throws std::bad_cast (from detail::lexical_cast) if str does not parse as T.
T operator()(const std::string &str){
return detail::lexical_cast<T>(str);
}
};
// Reader that additionally enforces an inclusive [low, high] bound on the
// parsed value.
template <class T>
struct range_reader{
range_reader(const T &low, const T &high): low(low), high(high) {}
// Parse with the default reader, then reject out-of-range values.
T operator()(const std::string &s) const {
T ret=default_reader<T>()(s);
if (!(ret>=low && ret<=high)) throw cmdline::cmdline_error("range_error");
return ret;
}
private:
T low, high;
};
// Convenience factory: range(low, high) builds a bounded reader for add<T>().
template <class T>
range_reader<T> range(const T &low, const T &high)
{
return range_reader<T>(low, high);
}
// Reader that restricts an option's value to an explicit set of alternatives.
template <class T>
struct oneof_reader{
  // Parse s with the default reader, then reject values outside the set.
  T operator()(const std::string &s){
    T ret=default_reader<T>()(s);
    if (std::find(alt.begin(), alt.end(), ret)==alt.end())
      throw cmdline_error("");
    return ret;
  }
  // Register an allowed value.
  void add(const T &v){ alt.push_back(v); }
private:
  std::vector<T> alt;
};

// Build a oneof_reader from any number of allowed values (at least one).
// Replaces the ten hand-written fixed-arity overloads (1..10 arguments) with
// one variadic template: every existing call site compiles unchanged, and
// more than ten alternatives are now accepted as well. (The project builds
// with CMAKE_CXX_STANDARD 17, so variadic templates are available.)
template <class T, class... Rest>
oneof_reader<T> oneof(T a1, Rest... rest)
{
  oneof_reader<T> ret;
  // Collect the arguments in declaration order, converting each trailing
  // argument to T exactly as the old overloads did via their parameter types.
  const T vals[] = { a1, static_cast<T>(rest)... };
  for (std::size_t i = 0; i < sizeof...(Rest) + 1; ++i)
    ret.add(vals[i]);
  return ret;
}
//-----
class parser{
public:
parser(){
}
~parser(){
for (std::map<std::string, option_base*>::iterator p=options.begin();
p!=options.end(); p++)
delete p->second;
}
void add(const std::string &name,
char short_name=0,
const std::string &desc=""){
if (options.count(name)) throw cmdline_error("multiple definition: "+name);
options[name]=new option_without_value(name, short_name, desc);
ordered.push_back(options[name]);
}
template <class T>
void add(const std::string &name,
char short_name=0,
const std::string &desc="",
bool need=true,
const T def=T()){
add(name, short_name, desc, need, def, default_reader<T>());
}
template <class T, class F>
void add(const std::string &name,
char short_name=0,
const std::string &desc="",
bool need=true,
const T def=T(),
F reader=F()){
if (options.count(name)) throw cmdline_error("multiple definition: "+name);
options[name]=new option_with_value_with_reader<T, F>(name, short_name, need, def, desc, reader);
ordered.push_back(options[name]);
}
void footer(const std::string &f){
ftr=f;
}
void set_program_name(const std::string &name){
prog_name=name;
}
bool exist(const std::string &name) const {
if (options.count(name)==0) throw cmdline_error("there is no flag: --"+name);
return options.find(name)->second->has_set();
}
template <class T>
const T &get(const std::string &name) const {
if (options.count(name)==0) throw cmdline_error("there is no flag: --"+name);
const option_with_value<T> *p=dynamic_cast<const option_with_value<T>*>(options.find(name)->second);
if (p==NULL) throw cmdline_error("type mismatch flag '"+name+"'");
return p->get();
}
const std::vector<std::string> &rest() const {
return others;
}
bool parse(const std::string &arg){
std::vector<std::string> args;
std::string buf;
bool in_quote=false;
for (std::string::size_type i=0; i<arg.length(); i++){
if (arg[i]=='\"'){
in_quote=!in_quote;
continue;
}
if (arg[i]==' ' && !in_quote){
args.push_back(buf);
buf="";
continue;
}
if (arg[i]=='\\'){
i++;
if (i>=arg.length()){
errors.push_back("unexpected occurrence of '\\' at end of string");
return false;
}
}
buf+=arg[i];
}
if (in_quote){
errors.push_back("quote is not closed");
return false;
}
if (buf.length()>0)
args.push_back(buf);
for (size_t i=0; i<args.size(); i++)
std::cout<<"\""<<args[i]<<"\""<<std::endl;
return parse(args);
}
bool parse(const std::vector<std::string> &args){
int argc=static_cast<int>(args.size());
std::vector<const char*> argv(argc);
for (int i=0; i<argc; i++)
argv[i]=args[i].c_str();
return parse(argc, &argv[0]);
}
bool parse(int argc, const char * const argv[]){
errors.clear();
others.clear();
if (argc<1){
errors.push_back("argument number must be longer than 0");
return false;
}
if (prog_name=="")
prog_name=argv[0];
std::map<char, std::string> lookup;
for (std::map<std::string, option_base*>::iterator p=options.begin();
p!=options.end(); p++){
if (p->first.length()==0) continue;
char initial=p->second->short_name();
if (initial){
if (lookup.count(initial)>0){
lookup[initial]="";
errors.push_back(std::string("short option '")+initial+"' is ambiguous");
return false;
}
else lookup[initial]=p->first;
}
}
for (int i=1; i<argc; i++){
if (strncmp(argv[i], "--", 2)==0){
const char *p=strchr(argv[i]+2, '=');
if (p){
std::string name(argv[i]+2, p);
std::string val(p+1);
set_option(name, val);
}
else{
std::string name(argv[i]+2);
if (options.count(name)==0){
errors.push_back("undefined option: --"+name);
continue;
}
if (options[name]->has_value()){
if (i+1>=argc){
errors.push_back("option needs value: --"+name);
continue;
}
else{
i++;
set_option(name, argv[i]);
}
}
else{
set_option(name);
}
}
}
else if (strncmp(argv[i], "-", 1)==0){
if (!argv[i][1]) continue;
char last=argv[i][1];
for (int j=2; argv[i][j]; j++){
last=argv[i][j];
if (lookup.count(argv[i][j-1])==0){
errors.push_back(std::string("undefined short option: -")+argv[i][j-1]);
continue;
}
if (lookup[argv[i][j-1]]==""){
errors.push_back(std::string("ambiguous short option: -")+argv[i][j-1]);
continue;
}
set_option(lookup[argv[i][j-1]]);
}
if (lookup.count(last)==0){
errors.push_back(std::string("undefined short option: -")+last);
continue;
}
if (lookup[last]==""){
errors.push_back(std::string("ambiguous short option: -")+last);
continue;
}
if (i+1<argc && options[lookup[last]]->has_value()){
set_option(lookup[last], argv[i+1]);
i++;
}
else{
set_option(lookup[last]);
}
}
else{
others.push_back(argv[i]);
}
}
for (std::map<std::string, option_base*>::iterator p=options.begin();
p!=options.end(); p++)
if (!p->second->valid())
errors.push_back("need option: --"+std::string(p->first));
return errors.size()==0;
}
void parse_check(const std::string &arg){
if (!options.count("help"))
add("help", '?', "print this message");
check(0, parse(arg));
}
void parse_check(const std::vector<std::string> &args){
if (!options.count("help"))
add("help", '?', "print this message");
check(args.size(), parse(args));
}
void parse_check(int argc, char *argv[]){
if (!options.count("help"))
add("help", '?', "print this message");
check(argc, parse(argc, argv));
}
std::string error() const{
return errors.size()>0?errors[0]:"";
}
std::string error_full() const{
std::ostringstream oss;
for (size_t i=0; i<errors.size(); i++)
oss<<errors[i]<<std::endl;
return oss.str();
}
std::string usage() const {
std::ostringstream oss;
oss<<"usage: "<<prog_name<<" ";
for (size_t i=0; i<ordered.size(); i++){
if (ordered[i]->must())
oss<<ordered[i]->short_description()<<" ";
}
oss<<"[options] ... "<<ftr<<std::endl;
oss<<"options:"<<std::endl;
size_t max_width=0;
for (size_t i=0; i<ordered.size(); i++){
max_width=std::max(max_width, ordered[i]->name().length());
}
for (size_t i=0; i<ordered.size(); i++){
if (ordered[i]->short_name()){
oss<<" -"<<ordered[i]->short_name()<<", ";
}
else{
oss<<" ";
}
oss<<"--"<<ordered[i]->name();
for (size_t j=ordered[i]->name().length(); j<max_width+4; j++)
oss<<' ';
oss<<ordered[i]->description()<<std::endl;
}
return oss.str();
}
private:
void check(int argc, bool ok){
if ((argc==1 && !ok) || exist("help")){
std::cerr<<usage();
exit(0);
}
if (!ok){
std::cerr<<error()<<std::endl<<usage();
exit(1);
}
}
void set_option(const std::string &name){
if (options.count(name)==0){
errors.push_back("undefined option: --"+name);
return;
}
if (!options[name]->set()){
errors.push_back("option needs value: --"+name);
return;
}
}
void set_option(const std::string &name, const std::string &value){
if (options.count(name)==0){
errors.push_back("undefined option: --"+name);
return;
}
if (!options[name]->set(value)){
errors.push_back("option value is invalid: --"+name+"="+value);
return;
}
}
class option_base{
public:
virtual ~option_base(){}
virtual bool has_value() const=0;
virtual bool set()=0;
virtual bool set(const std::string &value)=0;
virtual bool has_set() const=0;
virtual bool valid() const=0;
virtual bool must() const=0;
virtual const std::string &name() const=0;
virtual char short_name() const=0;
virtual const std::string &description() const=0;
virtual std::string short_description() const=0;
};
// Boolean flag option: takes no argument, is never mandatory, and is simply
// recorded as "seen" when present on the command line.
class option_without_value : public option_base {
public:
    option_without_value(const std::string &name,
                         char short_name,
                         const std::string &desc)
        :nam(name), snam(short_name), desc(desc), has(false){
    }
    ~option_without_value(){}

    bool has_value() const { return false; }

    // Mark the flag as present; always succeeds.
    bool set(){
        has = true;
        return true;
    }

    // A flag cannot accept a value.
    bool set(const std::string &){ return false; }

    bool has_set() const { return has; }

    // Flags are always valid and never required.
    bool valid() const { return true; }
    bool must() const { return false; }

    const std::string &name() const { return nam; }
    char short_name() const { return snam; }
    const std::string &description() const { return desc; }
    std::string short_description() const { return "--"+nam; }

private:
    std::string nam;   // long option name
    char snam;         // single-character alias, '\0' if none
    std::string desc;  // help text
    bool has;          // set once the flag is seen
};
// Option carrying a typed argument: stores a default, remembers whether the
// user supplied a value, and converts the raw string through the virtual
// read() hook implemented by subclasses.
template <class T>
class option_with_value : public option_base {
public:
    option_with_value(const std::string &name,
                      char short_name,
                      bool need,
                      const T &def,
                      const std::string &desc)
        : nam(name), snam(short_name), need(need), has(false)
        , def(def), actual(def) {
        this->desc=full_description(desc);
    }
    ~option_with_value(){}

    // Effective value: the parsed one if supplied, otherwise the default.
    const T &get() const { return actual; }

    bool has_value() const { return true; }

    // The valueless form is rejected; this option requires an argument.
    bool set(){ return false; }

    // Parse and store the value; a conversion failure reports invalid input.
    bool set(const std::string &value){
        try{
            actual=read(value);
            has=true;
            return true;
        }
        catch(const std::exception &){
            return false;
        }
    }

    bool has_set() const { return has; }

    // A mandatory option is only valid once a value has been supplied.
    bool valid() const { return !need || has; }
    bool must() const { return need; }

    const std::string &name() const { return nam; }
    char short_name() const { return snam; }
    const std::string &description() const { return desc; }
    std::string short_description() const {
        return "--"+nam+"="+detail::readable_typename<T>();
    }

protected:
    // Help text augmented with the value type and, for optional options,
    // the printable default value.
    std::string full_description(const std::string &desc){
        return
            desc+" ("+detail::readable_typename<T>()+
            (need?"":" [="+detail::default_value<T>(def)+"]")
            +")";
    }

    // String-to-T conversion supplied by the concrete subclass.
    virtual T read(const std::string &s)=0;

    std::string nam;   // long option name
    char snam;         // single-character alias, '\0' if none
    bool need;         // mandatory?
    std::string desc;  // full help text
    bool has;          // user supplied a value
    T def;             // default value
    T actual;          // effective value
};
// Concrete typed option whose string-to-T conversion is delegated to a
// caller-provided reader functor.
template <class T, class F>
class option_with_value_with_reader : public option_with_value<T> {
public:
    option_with_value_with_reader(const std::string &name,
                                  char short_name,
                                  bool need,
                                  const T def,
                                  const std::string &desc,
                                  F reader)
        : option_with_value<T>(name, short_name, need, def, desc)
        , reader(reader){
    }

private:
    // Convert the raw argument using the supplied functor.
    T read(const std::string &s){ return reader(s); }

    F reader;
};
std::map<std::string, option_base*> options;  // registered options, keyed by long name
std::vector<option_base*> ordered;            // registration order (drives usage() layout)
std::string ftr;                              // footer text shown after "[options] ..."
std::string prog_name;                        // program name used in the usage line
std::vector<std::string> others;              // positional (non-option) arguments
std::vector<std::string> errors;              // accumulated parse-error messages
};
} // cmdline

File diff suppressed because it is too large Load Diff

@ -1 +0,0 @@
./TestAPI_Profile "-s" "sky" "-d" "Y16"

@ -1,398 +0,0 @@
#include "utils.h"

#include <algorithm>
#include <cstdio>
#include <cstring>
#include <memory>
#include <random>  // random lock/unlock sequence generation
#include <vector>

#include "opencv2/opencv.hpp"
using namespace std;
using namespace cv;
// Allocates a w x h 16-bit image buffer plus an 8-bit scratch Mat used when
// converting to a displayable image.
SimTargetImage_Y16::SimTargetImage_Y16(int w, int h)
{
    nImageWidth = w;
    nImageHeight = h;
    pSrc = new unsigned short[w * h];
    Y8Mat = cv::Mat(h, w, CV_8UC1);
}
// Releases the 16-bit buffer (delete[] is a no-op on nullptr); the Mat
// member releases itself.
SimTargetImage_Y16::~SimTargetImage_Y16()
{
    delete[] pSrc;
    pSrc = nullptr;
}
// Fills the whole frame with pseudo-random noise in [gray, gray + std).
// Fix: a non-positive `std` now produces a flat background at `gray`
// instead of the undefined behaviour of rand() % 0.
void SimTargetImage_Y16::setBackGround(int gray, int std)
{
    const size_t total = static_cast<size_t>(nImageWidth) * nImageHeight;
    for (size_t i = 0; i < total; i++)
    {
        pSrc[i] = static_cast<unsigned short>((std > 0) ? (rand() % std + gray) : gray);
    }
}
void SimTargetImage_Y16::addTarget(int x, int y, int w, int h, int gray)
{
for (size_t i = y - h/2; i < y + h/2; i++)
{
for (size_t j = x - w/2; j < x+w/2; j++)
{
pSrc[i * nImageWidth + j] = gray;
}
}
}
// Raw pointer to the Y16 pixel buffer (row-major, nImageWidth * nImageHeight
// elements, owned by this object — do not free).
unsigned short* SimTargetImage_Y16::getImageData()
{
    return pSrc;
}
// Tone-maps the 16-bit buffer down to 8 bits and expands it to a 3-channel
// RGB Mat. Returns a deep copy, safe to keep after this object changes.
cv::Mat SimTargetImage_Y16::getMatRGB()
{
    Map16BitTo8Bit(pSrc, nImageHeight * nImageWidth, Y8Mat.data);
    cv::Mat colorFrame;
    cv::cvtColor(Y8Mat, colorFrame, COLOR_GRAY2RGB);
    return colorFrame.clone();
}
// Creates a w x h RGB simulation canvas; the current frame starts as a copy
// of the (still uninitialized) base frame.
SimTargetImage::SimTargetImage(int w, int h)
{
    m_imgW = w;
    m_imgH = h;
    m_BaseMat = cv::Mat(h, w, CV_8UC3);
    m_CurMat = m_BaseMat.clone();
}
void SimTargetImage::update()
{
m_CurMat = m_BaseMat.clone();
// 绘制目标
for (int i = 0; i < m_targetList.size(); ++i)
{
Target* target = &m_targetList[i];
// 更新目标位置
target->updatePosition(m_imgW, m_imgH);
target->updateTexture();
//target->color = cv::Scalar(target->color[0] + target->vc, target->color[0] + target->vc, target->color[0] + target->vc);
drawObj(*target);
}
// 绘制遮挡
for (int i = 0; i < m_OccList.size(); ++i)
{
Target* target = &m_OccList[i];
// 绘制目标
drawOcc(*target);
}
}
void SimTargetImage::setBackGround(int gray, int std)
{
cv::Mat noise(m_BaseMat.size(), CV_8UC3);
cv::randn(noise, cv::Scalar::all(gray), cv::Scalar::all(std)); // 高斯噪声
m_BaseMat = noise;
m_CurMat = noise.clone();
}
// Registers a moving target that update() will animate and draw each frame.
void SimTargetImage::addTarget(Target t)
{
    m_targetList.push_back(t);
}
// Registers an occluder rectangle; drawn over the targets each frame.
void SimTargetImage::addOcc(Target t)
{
    m_OccList.push_back(t);
}
// Current RGB frame. NOTE: this is a shallow Mat header — the caller shares
// pixel data with this object, and the content changes on the next update().
cv::Mat SimTargetImage::getImageRGB()
{
    return m_CurMat;
}
// Synthesizes a Y16 frame from the current RGB frame: grayscale, widen to
// 16 bit, then apply a fixed gain/offset (x3 + 4000) so the values resemble
// raw sensor counts.
cv::Mat SimTargetImage::getImageY16()
{
    cvtColor(m_CurMat, matY8, COLOR_BGR2GRAY);
    matY8.convertTo(matY16, CV_16UC1);
    cv::Mat scaled = matY16 * 3 + 4000;
    return scaled;
}
// Grayscale (Y8) view of the current frame; backed by the member matY8, so
// the data is overwritten by the next conversion call.
cv::Mat SimTargetImage::getImageY8()
{
    cvtColor(m_CurMat,matY8,COLOR_BGR2GRAY);
    return matY8;
}
// Converts the current RGB frame to NV12: a full-resolution Y plane followed
// by one interleaved UV plane at quarter resolution. Goes through planar
// I420 first, then interleaves the U and V planes.
cv::Mat SimTargetImage::getImageNV12()
{
    const int lumaLen = m_imgW * m_imgH;
    const int chromaLen = lumaLen / 4;  // size of one I420 chroma plane

    cv::Mat yuvI420(m_imgH * 1.5, m_imgW, CV_8UC1);
    cvtColor(m_CurMat, yuvI420, COLOR_BGR2YUV_I420);

    matNV12 = cv::Mat(m_imgH * 1.5, m_imgW, CV_8UC1);

    // Y plane is identical in I420 and NV12.
    memcpy(matNV12.data, yuvI420.data, lumaLen);

    // NV12 stores chroma as interleaved U,V byte pairs.
    const uchar* srcU = yuvI420.data + lumaLen;
    const uchar* srcV = srcU + chromaLen;
    uchar* dstUV = matNV12.data + lumaLen;
    for (int i = 0; i < chromaLen; i++)
    {
        dstUV[2 * i] = srcU[i];
        dstUV[2 * i + 1] = srcV[i];
    }
    return matNV12;
}
// Returns a pointer to target `id`, or nullptr when the index is invalid.
// Fix: the old code only checked that the list was non-empty, so any
// id >= size() (or a negative id) indexed out of bounds.
Target* SimTargetImage::getTarget(int id)
{
    if (id >= 0 && id < static_cast<int>(m_targetList.size()))
    {
        return &m_targetList[id];
    }
    return nullptr;
}
// Renders one target into the current frame.
// - Textured targets: alpha-blit the (pre-scaled) sprite centred on (x, y).
//   Fix: skip the blit when the sprite would extend past the frame, because
//   cv::Mat(roi) throws on an out-of-bounds rectangle.
// - "Complex" gray targets: two stacked half-height rectangles with
//   different intensities.
// - Plain targets: a single filled rectangle (cv::rectangle clips itself).
void SimTargetImage::drawObj(Target t)
{
    if (t.useTexture)
    {
        const int offsetX = t.x - t.width / 2;
        const int offsetY = t.y - t.height / 2;
        cv::Rect roi(offsetX, offsetY, t.width, t.height);
        // Only draw when the ROI is fully inside the frame.
        if ((roi & cv::Rect(0, 0, m_CurMat.cols, m_CurMat.rows)) == roi)
        {
            // Overlay the sprite using its alpha channel as the mask.
            t.texture_cur.copyTo(m_CurMat(roi), t.alaph_cur);
        }
        return;
    }
    if (t.bGrayComplex)
    {
        cv::Rect upper(t.x - t.width / 2, t.y - t.height / 2, t.width, t.height / 2);
        cv::rectangle(m_CurMat, upper, cv::Scalar(t.color), cv::FILLED);
        cv::Rect lower(t.x - t.width / 2, t.y - t.height / 4, t.width, t.height / 2);
        cv::rectangle(m_CurMat, lower, cv::Scalar(t.color2), cv::FILLED);
    }
    else
    {
        cv::Rect rect(t.x - t.width / 2, t.y - t.height / 2, t.width, t.height);
        cv::rectangle(m_CurMat, rect, cv::Scalar(t.color), cv::FILLED);
    }
}
// Paints an occluder as a filled rectangle centred on (t.x, t.y).
void SimTargetImage::drawOcc(Target t)
{
    const cv::Rect area(t.x - t.width / 2, t.y - t.height / 2, t.width, t.height);
    cv::rectangle(m_CurMat, area, cv::Scalar(t.color), cv::FILLED);
}
// Fills the base frame with a gray1/gray2 checkerboard of blockSize-pixel
// squares and resets the current frame to match.
// Fix: a non-positive blockSize is clamped to 1 (the old code divided by it
// unchecked, so blockSize == 0 was a division by zero).
void SimTargetImage::setCheckerboardBackGround(int gray1, int gray2, int blockSize)
{
    const int step = std::max(1, blockSize);
    m_BaseMat = cv::Mat(m_imgH, m_imgW, CV_8UC3);
    for (int y = 0; y < m_imgH; ++y)
    {
        for (int x = 0; x < m_imgW; ++x)
        {
            // Alternate colours on the parity of the block coordinates.
            const bool useFirst = ((x / step) + (y / step)) % 2 == 0;
            const int gray = useFirst ? gray1 : gray2;
            m_BaseMat.at<cv::Vec3b>(y, x) = cv::Vec3b(gray, gray, gray);
        }
    }
    m_CurMat = m_BaseMat.clone();
}
// Tone-maps 16-bit data to 8 bits with a percentile stretch ("auto gain"):
//  1. Build a histogram from every 10th sample (for speed).
//  2. Find nMin/nMax so ~3% of the total length lies below/above them.
//  3. Linearly map [nMin, nMax] to roughly [0, 120] and clamp to [0, 255].
// No-ops on null pointers or a non-positive length.
// Fix: the histogram now lives in a std::vector instead of new[]/delete[],
// so no code path can leak it; loop indices are `long` to match lDataLen.
void Map16BitTo8Bit(unsigned short* psh16BitData, long lDataLen, unsigned char* pby8BitData)
{
    if (psh16BitData == nullptr || pby8BitData == nullptr || lDataLen <= 0)
    {
        return;
    }
    // Histogram over the full 16-bit range, sampled every 10th pixel.
    std::vector<int> hist(65536, 0);
    for (long i = 0; i < lDataLen; i += 10)
    {
        hist[psh16BitData[i]]++;
    }
    // Tail threshold: 3% of the full length.
    const int nSigma = (int)(0.03 * lDataLen);
    // Lowest bin where the cumulative count reaches the threshold.
    int nMin = 0;
    int nSum = 0;
    for (int i = 0; i < 65536; i++)
    {
        nSum += hist[i];
        if (nSum >= nSigma)
        {
            nMin = i;
            break;
        }
    }
    // Highest bin where the cumulative count (from the top) reaches it.
    int nMax = 0;
    nSum = 0;
    for (int i = 65535; i >= 0; i--)
    {
        nSum += hist[i];
        if (nSum >= nSigma)
        {
            nMax = i;
            break;
        }
    }
    // Linear contrast/brightness mapping with clamping to [0, 255].
    const float K = (float)(120.0 / (nMax - nMin + 1));
    const float C = (float)(-K * nMin);
    for (long i = 0; i < lDataLen; i++)
    {
        const int nValue = (int)(K * psh16BitData[i] + C);
        if (nValue < 0)
        {
            pby8BitData[i] = 0;
        }
        else if (nValue > 255)
        {
            pby8BitData[i] = 255;
        }
        else
        {
            pby8BitData[i] = (unsigned char)nValue;
        }
    }
}
// Draws the algorithm outputs onto `src` for debugging:
//  - every alarm/detection object as a blue box labelled with its output id,
//  - the first tracker's box in red, labelled "x,y,w,h".
// Fix: snprintf replaces sprintf so the fixed label buffer cannot overflow,
// and the detection loop index is signed to match nAlarmObjCnts.
void showArithInfo(cv::Mat src, ARIDLL_OUTPUT * stOutput)
{
    const auto detNum = stOutput->nAlarmObjCnts;
    auto detObjs = stOutput->stAlarmObjs;
    for (int i = 0; i < detNum; i++)
    {
        cv::Rect outRect;
        // Enforce a minimum box size so tiny detections remain visible.
        outRect.width = MAX(15, int(detObjs[i].nObjW));
        outRect.height = MAX(15, int(detObjs[i].nObjH));
        outRect.x = detObjs[i].nX - outRect.width / 2.0;
        outRect.y = detObjs[i].nY - outRect.height / 2.0;
        cv::rectangle(src, outRect, cv::Scalar(255, 0, 0), 2);
        cv::putText(src, to_string(detObjs[i].nOutputID), cv::Point(outRect.x - 10, outRect.y), 1, 2, cv::Scalar(255, 255, 0));
    }
    auto trackerOut = stOutput->stTrackers[0];
    cv::Rect outRect;
    outRect.width = MAX(25, int(trackerOut.nObjW));
    outRect.height = MAX(25, int(trackerOut.nObjH));
    outRect.x = trackerOut.nX - outRect.width / 2.0;
    outRect.y = trackerOut.nY - outRect.height / 2.0;
    cv::rectangle(src, outRect, cv::Scalar(0, 0, 255), 2);
    char str[100];
    snprintf(str, sizeof(str), "%d,%d,%d,%d", int(trackerOut.nX), int(trackerOut.nY), int(trackerOut.nObjW), int(trackerOut.nObjH));
    cv::putText(src, cv::String(str), cv::Point(outRect.x - 10, outRect.y), 1, 2, cv::Scalar(255, 255, 0));
}
// Generates a random per-frame command sequence for lock/unlock stress tests.
// Codes: 0 = plain frame, 1 = lock command, 2 = unlock command.
// An unlock (2) is only ever emitted while locked, i.e. after an earlier 1.
// Fix: returns an empty vector for totalFrames <= 0 (the old code passed a
// negative count straight to the std::vector constructor).
// NOTE(review): minLockDuration/maxLockDuration are currently unused — the
// burst length is hard-coded to 1..5 below; confirm intent before wiring
// them in, as using them would change the generated distributions.
std::vector<int> generateRandomLockUnlockSequence(int totalFrames, int minLockDuration, int maxLockDuration)
{
    if (totalFrames <= 0)
    {
        return std::vector<int>();
    }
    std::vector<int> sequence(totalFrames, 0);
    std::random_device rd;
    std::mt19937 gen(rd());
    std::uniform_int_distribution<> actionDist(0, 1);       // 0: plain frame, 1: lock
    std::uniform_int_distribution<> durationDist(1, 5);     // length of a burst of identical actions
    std::uniform_int_distribution<> unlockProbDist(0, 100); // chance to unlock while locked
    std::uniform_int_distribution<> waitFramesDist(1, 3);   // idle frames inserted after a lock
    std::uniform_int_distribution<> waitProbDist(0, 100);   // chance to insert those idle frames
    int currentFrame = 0;
    bool isLocked = false;   // currently in the locked state
    bool justLocked = false; // the previous burst issued a lock
    while (currentFrame < totalFrames)
    {
        // While locked there is a 20% chance per iteration to unlock.
        if (isLocked && unlockProbDist(gen) < 20)
        {
            sequence[currentFrame] = 2;
            isLocked = false;
            currentFrame++;
            continue;
        }
        // Right after a lock, 50% chance to idle for 1-3 frames.
        if (justLocked && waitProbDist(gen) < 50)
        {
            int waitFrames = waitFramesDist(gen);
            for (int i = 0; i < waitFrames && currentFrame < totalFrames; i++)
            {
                sequence[currentFrame] = 0;
                currentFrame++;
            }
        }
        justLocked = false;
        // Emit a burst of a random action with a random duration,
        // truncated so it never runs past the end of the sequence.
        int action = actionDist(gen);
        int duration = durationDist(gen);
        if (currentFrame + duration > totalFrames)
        {
            duration = totalFrames - currentFrame;
        }
        for (int i = 0; i < duration && currentFrame < totalFrames; i++)
        {
            sequence[currentFrame] = action;
            if (action == 1)
            {
                isLocked = true;
                justLocked = true;
            }
            currentFrame++;
        }
    }
    return sequence;
}

@ -1,177 +0,0 @@
#pragma once
#include <opencv2/opencv.hpp>
#include "NeoArithStandardDll.h"
// Simple Y16 (16-bit grayscale) synthetic test image: a noisy background with
// solid rectangular targets painted on top.
class SimTargetImage_Y16
{
public:
    SimTargetImage_Y16(int w,int h);
    ~SimTargetImage_Y16();
public:
    // Fill the frame with pseudo-random noise around `gray` with spread `std`.
    void setBackGround(int gray,int std);
    // Paint a solid w x h rectangle of intensity `gray` centred on (x, y).
    void addTarget(int x, int y, int w, int h, int gray);
    // Raw Y16 pixel buffer (row-major, owned by this object).
    unsigned short* getImageData();
    // Tone-mapped 8-bit RGB copy for display.
    cv::Mat getMatRGB();
private:
    cv::Mat Y8Mat;          // 8-bit scratch image used for display conversion
    unsigned short* pSrc;   // Y16 pixel buffer
    int nImageWidth;
    int nImageHeight;
};
// 目标结构体
// One simulated object: position/size/velocity state plus either a flat (or
// two-tone) colour or an RGBA sprite ("texture") used when drawing.
// Fixes: numeric/flag members now have initializers (they were read while
// uninitialized by code that never called addTexture), and updateTexture()
// no longer swaps width/height in its size check.
struct Target
{
    float x = 0.0f, y = 0.0f;           // centre position (pixels)
    float width = 0.0f, height = 0.0f;  // size (pixels)
    float vx = 0.0f, vy = 0.0f;         // velocity (pixels/frame)
    float vw = 0.0f, vh = 0.0f;         // size growth (pixels/frame)
    cv::Scalar color;                   // fill colour (RGB targets)
    float vc = 0.0f;                    // colour change rate
    bool useTexture = false;            // draw the sprite instead of a rectangle
    bool bGrayComplex = false;          // two-tone ("complex") target
    cv::Scalar color2;                  // second tone for complex targets
    cv::Mat texture_cur;                // sprite scaled to the current size
    cv::Mat alaph_cur;                  // its alpha mask, same size
    // Original (unscaled) sprite state.
    cv::Mat texture;
    cv::Mat alaph;
    Target()
    {
        useTexture = false;
    };
    // Advance one frame: integrate velocity, bounce at the frame borders and
    // clamp the centre back inside the image.
    void updatePosition(int frameWidth, int frameHeight)
    {
        x += vx;
        y += vy;
        width += vw;
        height += vh;
        // Bounce off the borders.
        if (x <= width || x + width >= frameWidth) vx = -vx;
        if (y <= height || y + height >= frameHeight) vy = -vy;
        // Keep the target inside the image.
        x = std::max(0.0f, std::min(x, frameWidth - width));
        y = std::max(0.0f, std::min(y, frameHeight - height));
    }
    // Rescale the sprite when the target size has changed.
    // Bug fix: the old check compared cols against height and rows against
    // width (swapped), which re-resized every frame for non-square sprites.
    void updateTexture()
    {
        if (useTexture && (texture_cur.cols != (int)width || texture_cur.rows != (int)height))
        {
            cv::resize(texture, texture_cur, cv::Size(width, height));
            cv::resize(alaph, alaph_cur, cv::Size(width, height));
        }
    }
    // Decode an RGBA PNG from memory and split it into the colour sprite and
    // its alpha mask; enables textured drawing.
    void addTexture(unsigned char* png_data, int Len)
    {
        std::vector<uchar> img_data(png_data, png_data + Len);
        cv::Mat pic = cv::imdecode(img_data, cv::IMREAD_UNCHANGED);
        // Separate the BGR colour planes from the alpha channel.
        cv::Mat bgr;
        std::vector<cv::Mat> channels(4);
        cv::split(pic, channels);
        cv::merge(std::vector<cv::Mat>{channels[0], channels[1], channels[2]}, bgr);
        useTexture = true;
        alaph = channels[3];   // alpha channel
        texture = bgr;
        alaph_cur = channels[3];
        texture_cur = bgr;
    }
};
// RGB synthetic test scene: a background plus moving Target objects and
// static occluders, with converters to the pixel formats the tracker API
// consumes (RGB / Y16 / Y8 / NV12).
class SimTargetImage
{
public:
    SimTargetImage(int w,int h);
    ~SimTargetImage(){};
public:
    // Advance the simulation one frame (move targets, redraw the scene).
    void update();
    // Fill the background with Gaussian noise (mean gray, sigma std).
    void setBackGround(int gray,int std);
    // Fill the background with a gray1/gray2 checkerboard of blockSize-pixel squares.
    void setCheckerboardBackGround(int gray1, int gray2, int blockSize);
    // Add a moving target.
    void addTarget(Target t);
    // Add a static occluder.
    void addOcc(Target t);
public:
    cv::Mat getImageRGB();
    cv::Mat getImageY16();
    cv::Mat getImageY8();
    cv::Mat getImageNV12();
    // Pointer into the internal target list (see the .cpp for validity rules).
    Target* getTarget(int id);
private:
    void drawObj(Target t);
    void drawOcc(Target t);
    cv::Mat m_BaseMat;  // background frame (RGB)
    cv::Mat m_CurMat;   // current composed frame (RGB)
    std::vector<Target> m_targetList;
    std::vector<Target> m_OccList;
    int m_imgW;
    int m_imgH;
    // Cached per-format conversion outputs.
private:
    cv::Mat matY8;
    cv::Mat matY16;
    cv::Mat matNV12;
};
// 16-bit to 8-bit tone mapping (percentile-stretch "auto gain").
void Map16BitTo8Bit(unsigned short* psh16BitData, long lDataLen, unsigned char* pby8BitData);
// Overlay detection/tracker results on `src` for debugging.
void showArithInfo(cv::Mat src,ARIDLL_OUTPUT* output);
// Generate a random lock/unlock command sequence for stress tests.
// Returns one code per frame: 1 = lock, 2 = unlock, 0 = plain frame.
std::vector<int> generateRandomLockUnlockSequence(int totalFrames, int minLockDuration = 10, int maxLockDuration = 30);

@ -235,6 +235,7 @@ SINT32 Detectors::Detect(GD_VIDEO_FRAME_S img)
} }
POINT16S* Detectors::GetDST_MaxPoint() POINT16S* Detectors::GetDST_MaxPoint()
{ {
return pDST_Module->getMaxPoint(); return pDST_Module->getMaxPoint();

@ -660,7 +660,7 @@ void Arith_EOController::Arith_SetRunTimeParam(ARIDLL_PARMA config)
g_GLB_Detectors->SetParam(para);//设置参数 g_GLB_Detectors->SetParam(para);//设置参数
// 全局检测器参数共享给局部检测器参数 // 全局检测器参数共享给局部检测器参数
memcpy(&config.stSkyParam.prmTSkyDet, &config.PrmSkyDetect, sizeof(Param_SkyDetect)); //memcpy(&config.stSkyParam.prmTSkyDet, &config.PrmSkyDetect, sizeof(Param_SkyDetect));
// 设置管道实例参数 // 设置管道实例参数
@ -702,11 +702,6 @@ void Arith_EOController::Arith_SetRunTimeParam(ARIDLL_PARMA config)
{ {
LOG_SETLEVEL(config.nLogLevel); LOG_SETLEVEL(config.nLogLevel);
} }
if (GLB_SCEN_MODE::GLB_SCEN_SKY == m_SceneType)
{
g_pSkyTracker->pSkyTracker->SetSkyParam(&g_GLB_stArithPara.stSkyParam);
}
} }
// 读取运行时跟踪参数 // 读取运行时跟踪参数
@ -1048,24 +1043,7 @@ void Arith_CalcTargetSysInfo(TARGET_OBJECT* pTargetObj, SINT32 nFrmNum, GLB_INP
} }
} }
void DeleteTargetSkyLine(TARGET_OBJECT* pTargetArr, int num, GLB_PARAMETERS& g_para)
{
// 不使用天地线
if (!g_para.bFiteredAlarm)
{
return;
}
for (int i = 0; i < num; i++)
{
TARGET_OBJECT* tTarget = &pTargetArr[i];
if (tTarget->afAngle.fPt < g_para.fFilterPt)
{
tTarget->bObject = false;
}
}
}
void Arith_EOController::GLB_Release_Trackers(SINT32 nPipeNum) void Arith_EOController::GLB_Release_Trackers(SINT32 nPipeNum)
{ {

@ -14,8 +14,6 @@
void Arith_CalcTargetSysInfo(TARGET_OBJECT* pTargetObj, SINT32 nFrmNum, GLB_INPUT g_Input); void Arith_CalcTargetSysInfo(TARGET_OBJECT* pTargetObj, SINT32 nFrmNum, GLB_INPUT g_Input);
void DeleteTargetSkyLine(TARGET_OBJECT* pTargetArr, int num, GLB_PARAMETERS& g_para);
class Arith_EOController class Arith_EOController
{ {
public: public:

@ -1,75 +0,0 @@
#include "Arith_PosAnalyse.h"
ImagePosRecord::ImagePosRecord()
{
reset();
}
ImagePosRecord::~ImagePosRecord()
{
}
ImagePosRecord::STATUS ImagePosRecord::update(POINT32F pos)
{
if (p_Last.x == 0 && p_Last.y == 0)
{
stPositonMove[ubEnd].x = 0;
stPositonMove[ubEnd].y = 0;
}
else
{
stPositonMove[ubEnd].x = pos.x - p_Last.x;
stPositonMove[ubEnd].y = pos.y - p_Last.y;
}
p_Last = pos;
ubEnd = (ubEnd + 1) % TRACK_POS_REC_MAX;
nTotalCnt++;
return Analyse();
}
void ImagePosRecord::reset()
{
ubEnd = 0;
nTotalCnt = 0;
p_Last.x = 0;
p_Last.y = 0;
memset(stPositonMove, 0, sizeof(POINT32F) * TRACK_POS_REC_MAX);
}
ImagePosRecord::STATUS ImagePosRecord::Analyse()
{
//
int VecFluCnt = 0; //速度波动
int aFluCnt = 0; //加速度波动
if (nTotalCnt > 10)
{
for (size_t i = 0; i < MIN(TRACK_POS_REC_MAX, nTotalCnt); i++)
{
int indCurr = (ubEnd - i + TRACK_POS_REC_MAX) % TRACK_POS_REC_MAX;
int indLast = (ubEnd - i - 1 + TRACK_POS_REC_MAX) % TRACK_POS_REC_MAX;
auto movedisCurr = sqrt(stPositonMove[indCurr].x * stPositonMove[indCurr].x + stPositonMove[indCurr].y * stPositonMove[indCurr].y);
auto movedisLast = sqrt(stPositonMove[indLast].x * stPositonMove[indLast].x + stPositonMove[indLast].y * stPositonMove[indLast].y);
if (movedisCurr > 6)
{
VecFluCnt++;
}
if (ABS(movedisCurr) > ABS(movedisLast) * 2 && ABS(movedisCurr) > 5)
{
aFluCnt++;
}
}
}
if (VecFluCnt > 2 || aFluCnt > 0)
{
return STATUS::NOTSTABLE;
}
return STATUS::STATBLE;
}

@ -1,36 +0,0 @@
// 目标像方位置记录,并估计伺服控制状态,不稳定时不使用惯性预测
#pragma once
#include "Arith_Common.hpp"
class ImagePosRecord
{
enum STATUS
{
STATBLE,
NOTSTABLE
};
public:
ImagePosRecord();
~ImagePosRecord();
public:
STATUS update(POINT32F pos); // 更新坐标
void reset();
STATUS Analyse();
private:
UBYTE8 ubEnd;
SINT32 nTotalCnt;
POINT32F stPositonMove[TRACK_POS_REC_MAX]; //帧间运动
POINT32F p_Last; //起始点
//
BBOOL bStable; //稳态
};

@ -363,9 +363,6 @@ TrackUnlockState SkyTracker::Track(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input,
// 自动切换跟踪器 // 自动切换跟踪器
SetTrackModeAuto(img, p_GLB_Input); SetTrackModeAuto(img, p_GLB_Input);
// 调整惯性预测建议
m_posRecorder.update(m_TSky_Output.ObjectStatusDesc.ptPos);
// 小面跟踪目标及背景监控 // 小面跟踪目标及背景监控
if (pSATracker && mTrakingPara.Sky_bEnableTrackSA) if (pSATracker && mTrakingPara.Sky_bEnableTrackSA)
{ {
@ -523,8 +520,6 @@ void SkyTracker::Cancle()
memset(&m_TSky_Output.ObjectStatusKCF, 0, sizeof(OBJECTSTATUS)); memset(&m_TSky_Output.ObjectStatusKCF, 0, sizeof(OBJECTSTATUS));
memset(&m_TSky_Output.ObjectStatusDesc, 0, sizeof(OBJECTSTATUS)); memset(&m_TSky_Output.ObjectStatusDesc, 0, sizeof(OBJECTSTATUS));
m_posRecorder.reset();
} }
void SkyTracker::SetMemTrack(bool bMemFlag) void SkyTracker::SetMemTrack(bool bMemFlag)
@ -542,14 +537,9 @@ void SkyTracker::SetSkyParam(Param_SkyTracker * stSkyParam)
//mTrakingPara.Sky_bEnableTrackSA = stSkyParam->Sky_bEnableTrackSA; //mTrakingPara.Sky_bEnableTrackSA = stSkyParam->Sky_bEnableTrackSA;
//mTrakingPara.Sky_bEnableKCF = stSkyParam->Sky_bEnableKCF; // 内部自动调整关闭因为清空了kcf信息强制打开可能会导致程序崩溃不建议设置kcf算法开关 //mTrakingPara.Sky_bEnableKCF = stSkyParam->Sky_bEnableKCF; // 内部自动调整关闭因为清空了kcf信息强制打开可能会导致程序崩溃不建议设置kcf算法开关
if (pSATracker)
{
pSATracker->m_TSA_Param.Sky_bUseAIDet = stSkyParam->Sky_bUseAIDet; pSATracker->m_TSA_Param.Sky_bUseAIDet = stSkyParam->Sky_bUseAIDet;
pSATracker->m_TSA_Param.nUseAIDetFeq = stSkyParam->Sky_nUseAIDetFeq; pSATracker->m_TSA_Param.nUseAIDetFeq = stSkyParam->Sky_nUseAIDetFeq;
pSATracker->SetParam(stSkyParam->prmTSkyDet);//设置参数
}
skyControlInfo.nTrackMemFrmNum = mTrakingPara.Sky_nTrackMemFrmNum; skyControlInfo.nTrackMemFrmNum = mTrakingPara.Sky_nTrackMemFrmNum;
} }
@ -923,7 +913,7 @@ void SkyTracker::SetTrackModeAuto(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input)
} }
#endif #endif
if (nCount >= GLB_TRACK_SMALL2FACE_THRES || bFindAreaTarget) //以338A对空抗干扰数据--双机小目标交错为例 if (nCount >= GLB_TRACK_SMALL2FACE_THRES && bFindAreaTarget) //以338A对空抗干扰数据--双机小目标交错为例
{ {
crfCandiRect.cx = pObjStatus->ptPos.x; crfCandiRect.cx = pObjStatus->ptPos.x;
crfCandiRect.cy = pObjStatus->ptPos.y; crfCandiRect.cy = pObjStatus->ptPos.y;
@ -977,7 +967,7 @@ void SkyTracker::SetTrackModeAuto(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input)
pKCFTracker->KCF_CleanUpObjectTracker(); pKCFTracker->KCF_CleanUpObjectTracker();
//清空CEND、KCF跟踪算法 //清空CEND、KCF跟踪算法
//memset(pObjStatusSA, 0, sizeof(OBJECTSTATUS)); memset(pObjStatusSA, 0, sizeof(OBJECTSTATUS));
memset(pObjStatusKCF, 0, sizeof(OBJECTSTATUS)); memset(pObjStatusKCF, 0, sizeof(OBJECTSTATUS));
//小目标和面目标相互切换计数器清零 //小目标和面目标相互切换计数器清零
@ -1317,4 +1307,3 @@ void SkyTracker::SkyTrackFalse_Process(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_In
} }
} }
} }

@ -9,7 +9,7 @@
#include "Matcher/Fast_Matcher.h" #include "Matcher/Fast_Matcher.h"
#include "Arith_RadarInfo.h" #include "Arith_RadarInfo.h"
#include "Arith_AIDMonitor.h" #include "Arith_AIDMonitor.h"
#include "Arith_PosAnalyse.h"
// 决策行为 // 决策行为
enum Act enum Act
{ {
@ -107,11 +107,6 @@ typedef struct tagPipeMainStatusInfo
SINT32 nAbnormalCnt; // 异常次数累计 SINT32 nAbnormalCnt; // 异常次数累计
}PipeMainStatusInfo; }PipeMainStatusInfo;
class SkyTracker class SkyTracker
{ {
public: public:
@ -186,7 +181,6 @@ public:
// 产生决策 // 产生决策
DecPolicy Decision(GD_VIDEO_FRAME_S img); DecPolicy Decision(GD_VIDEO_FRAME_S img);
// KCF+CEND决策处理 // KCF+CEND决策处理
DecPolicy TO_TrackDecisionOf_KCF_CEND(GD_VIDEO_FRAME_S img); DecPolicy TO_TrackDecisionOf_KCF_CEND(GD_VIDEO_FRAME_S img);
@ -239,9 +233,6 @@ private:
void SkyTrackFalse_Process(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, OBJECTSTATUS* ObjStatus, PIPE* m_LockingPipe); void SkyTrackFalse_Process(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, OBJECTSTATUS* ObjStatus, PIPE* m_LockingPipe);
private: private:
ImagePosRecord m_posRecorder; //pos统计
bool bsvmInitDone;// bool bsvmInitDone;//
PipeMainClassifyInfo* pipeClassifyInfo; // 主跟踪管道对应的信息缓存,用于在告警操作中进行类别确认 PipeMainClassifyInfo* pipeClassifyInfo; // 主跟踪管道对应的信息缓存,用于在告警操作中进行类别确认

@ -316,9 +316,6 @@ BBOOL Arith_EOController::Arith_Status_MOTRACK(GD_VIDEO_FRAME_S img, GLB_INPUT&
// 目标系统相关的其他信息计算 // 目标系统相关的其他信息计算
Arith_CalcTargetSysInfo(pFrameTargetArray, nFrmTargetNum, g_Input); Arith_CalcTargetSysInfo(pFrameTargetArray, nFrmTargetNum, g_Input);
// 拦截天地线以下的目标
DeleteTargetSkyLine(pFrameTargetArray, nFrmTargetNum, g_para);
// MOT管道处理 // MOT管道处理
g_GLB_PipeProc->Run(pFrameTargetArray, nFrmTargetNum, GLB_STATUS_TRACK, g_Input, m_SceneType); g_GLB_PipeProc->Run(pFrameTargetArray, nFrmTargetNum, GLB_STATUS_TRACK, g_Input, m_SceneType);

@ -174,9 +174,6 @@ SINT32 Arith_EOController::Proc_SearchLock(GD_VIDEO_FRAME_S img, GLB_INPUT & g_I
} }
/************************************* /*************************************
* Method: Arith_Status_SEARCH() * Method: Arith_Status_SEARCH()
* Function Description: * Function Description:
@ -204,11 +201,6 @@ BBOOL Arith_EOController::Arith_Status_SEARCH(GD_VIDEO_FRAME_S img, GLB_INPUT& g
// 目标系统相关的其他信息计算 // 目标系统相关的其他信息计算
Arith_CalcTargetSysInfo(pFrameTargetArray, nFrmTargetNum, g_Input); Arith_CalcTargetSysInfo(pFrameTargetArray, nFrmTargetNum, g_Input);
// 拦截天地线以下的目标
DeleteTargetSkyLine(pFrameTargetArray, nFrmTargetNum, g_para);
// 合并AI检测目标与传统目标 // 合并AI检测目标与传统目标
nFrmTargetNum = MergeAIAndSATarget(pFrameTargetArray, nFrmTargetNum); nFrmTargetNum = MergeAIAndSATarget(pFrameTargetArray, nFrmTargetNum);
LOG_DEBUG("nFrmTargetNum:{},bObject:{}fAz:{},fPt:{}", nFrmTargetNum,pFrameTargetArray[0].bObject, pFrameTargetArray[0].afAngle.fAz, pFrameTargetArray[0].afAngle.fPt); LOG_DEBUG("nFrmTargetNum:{},bObject:{}fAz:{},fPt:{}", nFrmTargetNum,pFrameTargetArray[0].bObject, pFrameTargetArray[0].afAngle.fAz, pFrameTargetArray[0].afAngle.fPt);
@ -224,3 +216,4 @@ BBOOL Arith_EOController::Arith_Status_SEARCH(GD_VIDEO_FRAME_S img, GLB_INPUT& g

@ -70,8 +70,6 @@ SA_Tracker::SA_Tracker(int nWidth, int nHeight)
memset(nSimTargetNum_Counter, -1, GLB_SIMOBJ_VALID_CNT * sizeof(SINT32)); memset(nSimTargetNum_Counter, -1, GLB_SIMOBJ_VALID_CNT * sizeof(SINT32));
SATrkState = LockStateUnknown; // 小面记忆跟踪状态跟踪 SATrkState = LockStateUnknown; // 小面记忆跟踪状态跟踪
} }
@ -184,7 +182,7 @@ bool SA_Tracker::Init(UINT16* pSrc, SINT32 nWidth, SINT32 nHeight, PIPE* pTracki
void SA_Tracker::GetTrackState(GLB_INPUT* p_GLB_Input) void SA_Tracker::GetTrackState(GLB_INPUT* p_GLB_Input)
{ {
UINT32 unTotalCnt = m_LockingPipe->unTotalCnt; UINT32 unTotalCnt = m_LockingPipe->unTotalCnt;
if (unTotalCnt < p_GLB_Input->unFreq * 2) if (unTotalCnt < p_GLB_Input->unFreq * 3)
{ {
m_LockingPipe->ubEventStatus = PIPE_EVENT_JUST_LOCK; // 锁定后3s范围内处于初始锁定状态 m_LockingPipe->ubEventStatus = PIPE_EVENT_JUST_LOCK; // 锁定后3s范围内处于初始锁定状态
} }
@ -206,13 +204,8 @@ int SA_Tracker::Track(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, API_MOT_PIPE
SINT32 nRealPipeNum = g_GLB_PipeProc->PIPE_GetAlarmNum(); SINT32 nRealPipeNum = g_GLB_PipeProc->PIPE_GetAlarmNum();
SINT32 m_nMaxPipeNum = g_GLB_PipeProc->PIPE_GetMaxPipeNum(); SINT32 m_nMaxPipeNum = g_GLB_PipeProc->PIPE_GetMaxPipeNum();
SINT32 nPipeRadiusTrack = g_GLB_PipeProc->GetParam().nPipeRadiusTrack; SINT32 nPipeRadiusTrack = g_GLB_PipeProc->GetParam().nPipeRadiusTrack;
SINT32 nPipeRaduisLost = g_GLB_PipeProc->GetParam().nPipeRadiusLost;
PIPE_PARAMETERS MOT_PARA = g_GLB_PipeProc->GetParam();
nTrackTargetID = -1;
// 同步管道参数 nTrackTargetID = -1;
m_TSA_Param.nPipeRadiusTrack = nPipeRadiusTrack;
m_TSA_Param.nPipeRadiusLost = nPipeRaduisLost;
//调整搜索区域大小,为分块宽高的整数倍 //调整搜索区域大小,为分块宽高的整数倍
SetAutoSearchZone(nWidth, nHeight, p_GLB_Input); SetAutoSearchZone(nWidth, nHeight, p_GLB_Input);
@ -406,7 +399,7 @@ SINT32 SA_Tracker::getInterfereAreaTargteNum(RECT32S bbox)
TARGET_OBJECT* pObj = &pDAT_Module->GetTargetArray()[i]; TARGET_OBJECT* pObj = &pDAT_Module->GetTargetArray()[i];
if (ABS(pObj->pfCenPos.x - nX) < MAX(50, bbox.w * 3) && if (ABS(pObj->pfCenPos.x - nX) < MAX(50, bbox.w * 3) &&
ABS(pObj->pfCenPos.y - nY) < MAX(50, bbox.h * 3)) ABS(pObj->pfCenPos.x - nY) < MAX(50, bbox.h * 3))
{ {
nNum++; nNum++;
} }
@ -442,9 +435,9 @@ void SA_Tracker::SetAutoSearchZone(SINT32 nWidth, SINT32 nHeight, GLB_INPUT* p_G
SINT32 nPipeRadiusTrack = m_TSA_Param.nPipeRadiusTrack; SINT32 nPipeRadiusTrack = m_TSA_Param.nPipeRadiusTrack;
SINT32 nPipeRadiusLost = m_TSA_Param.nPipeRadiusLost; SINT32 nPipeRadiusLost = m_TSA_Param.nPipeRadiusLost;
//根据目标速度预测目标在当前帧的坐标位置 //根据目标速度预测目标在当前帧的坐标位置
// 伺服太晃,直接使用短时预测。
nEnd = m_LockingPipe->ubEnd; nEnd = m_LockingPipe->ubEnd;
pPipe->ptCurrentPnt.x = pPipe->stMotionMod_mean.crnObjPrediRtLong.cx;
pPipe->ptCurrentPnt.y = pPipe->stMotionMod_mean.crnObjPrediRtLong.cy;
// 取长时预测点作为管道当前位置预测 // 取长时预测点作为管道当前位置预测
if (PIPE_EVENT_JUST_LOCK == m_LockingPipe->ubEventStatus) if (PIPE_EVENT_JUST_LOCK == m_LockingPipe->ubEventStatus)
@ -510,10 +503,6 @@ void SA_Tracker::SetAutoSearchZone(SINT32 nWidth, SINT32 nHeight, GLB_INPUT* p_G
snPipeRadius.h = MAX((SINT32)(fObjVy * 2), nPipeRadiusTrack); snPipeRadius.h = MAX((SINT32)(fObjVy * 2), nPipeRadiusTrack);
} }
//+++++++++++++++++++++++++++++++++++++++++++++++++++++++ //+++++++++++++++++++++++++++++++++++++++++++++++++++++++
//根据管道半径,修正分块大小: //根据管道半径,修正分块大小:
//保证nBlkNumW<=20, nBlkNumH<=16防止分块极值点坐标溢出及保证计算量 //保证nBlkNumW<=20, nBlkNumH<=16防止分块极值点坐标溢出及保证计算量
@ -638,22 +627,19 @@ void SA_Tracker::SARegionDet(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, SINT3
DAT_PARAMETERS* pDAT_stPara = pDAT_Module->GetDatParm(); DAT_PARAMETERS* pDAT_stPara = pDAT_Module->GetDatParm();
SINT32 nObjCombineDist = pDAT_stPara->nObjCombineDist; SINT32 nObjCombineDist = pDAT_stPara->nObjCombineDist;
FilterMeanNL stMotionMod_mean = m_LockingPipe->stMotionMod_mean; // 使用运动模型 FilterMeanNL stMotionMod_mean = m_LockingPipe->stMotionMod_mean; // 使用运动模型
TARGET_OBJECT* trackTarget = &m_LockingPipe->objHistoryList[m_LockingPipe->ubEnd];//跟踪最近的目标
////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////
//////////////////////////小面目标检测//////////////////////////////////// //////////////////////////小面目标检测////////////////////////////////////
BBOOL bEnableAreaObjDetect = FALSE; BBOOL bEnableAreaObjDetect = FALSE;
float fObjAglSpeed = ABS(m_LockingPipe->sfAglSpeed.vx) + ABS(m_LockingPipe->sfAglSpeed.vy); float fObjAglSpeed = ABS(m_LockingPipe->sfAglSpeed.vx) + ABS(m_LockingPipe->sfAglSpeed.vy);
//// 像素数超过面目标检出下限或者速度较快,开启面目标检测算法 // 像素数超过面目标检出下限或者速度较快,开启面目标检测算法
//if ((m_ObjStatus.fObjPxlsCnt > DAT_TARGET_PXLS_MIN) || (fObjAglSpeed > 0.01f)) if ((m_ObjStatus.fObjPxlsCnt > DAT_TARGET_PXLS_MIN) || (fObjAglSpeed > 0.01f))
//{ {
// bEnableAreaObjDetect = true;
//}
//else
//{
// bEnableAreaObjDetect = false;
//}
bEnableAreaObjDetect = true; bEnableAreaObjDetect = true;
}
else
{
bEnableAreaObjDetect = false;
}
// 跟踪阶段,合并距离关键参数 // 跟踪阶段,合并距离关键参数
SINT32 nCombinDist = m_TSA_Input.nBlkWidth / 2 + 1; SINT32 nCombinDist = m_TSA_Input.nBlkWidth / 2 + 1;
m_TSA_output.crCenterRect = m_TSA_Input.crCenterRect; m_TSA_output.crCenterRect = m_TSA_Input.crCenterRect;
@ -680,13 +666,7 @@ void SA_Tracker::SARegionDet(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, SINT3
pDST_Module->SetCombinDist(nCombinDist); pDST_Module->SetCombinDist(nCombinDist);
// 首次挑选使用自适应SNR阈值尽量避免低SNR阈值下虚警 pDST_Module->Detect(img, m_TSA_Input.crCenterRect, GLB_STATUS_TRACK);
if (trackTarget->fSNR > 9)
{
pDST_Module->SetDetSnr(MAX(trackTarget->fSNR * 0.5, 4.0f));
}
SINT32 nSmallTargetNum = pDST_Module->Detect(img, m_TSA_Input.crCenterRect, GLB_STATUS_TRACK);
// 小目标可以开面目标检测 // 小目标可以开面目标检测
if (bEnableAreaObjDetect) if (bEnableAreaObjDetect)
{ {
@ -698,14 +678,7 @@ void SA_Tracker::SARegionDet(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, SINT3
// 面目标跟踪模式 // 面目标跟踪模式
if (m_SizeMode == SizeType::AreaTarget) if (m_SizeMode == SizeType::AreaTarget)
{ {
// 针对S3315近处大目标伺服不收敛优化 pDAT_Module->Detect(img, m_TSA_Input.crCenterRect, nObjCombineDist, GLB_STATUS_TRACK);
if (!stMotionMod_mean.bTrackStable && (trackTarget->snSize.w > 8 || m_LockingPipe->ObjectFilter.fPxlsCnt > 10))
{
m_TSA_Input.crCenterRect.w = MAX(m_TSA_Input.crCenterRect.w, 256);
m_TSA_Input.crCenterRect.h = MAX(m_TSA_Input.crCenterRect.w, 256);
}
SINT32 nAreaTargetNum = pDAT_Module->Detect(img, m_TSA_Input.crCenterRect, nObjCombineDist, GLB_STATUS_TRACK);
m_TSA_output.crCenterRect = pDAT_Module->getDAT_stOutput()->crCenterRect; m_TSA_output.crCenterRect = pDAT_Module->getDAT_stOutput()->crCenterRect;
m_TSky_Output->mTrakingPara_Output.nAreaCombineDist = nObjCombineDist; // 面目标合并距离调试输出 m_TSky_Output->mTrakingPara_Output.nAreaCombineDist = nObjCombineDist; // 面目标合并距离调试输出
} }
@ -721,8 +694,8 @@ void SA_Tracker::SARegionDet(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, SINT3
//小、面目标原地合并 //小、面目标原地合并
m_nTargetNum = MergeSmallAndAreaTarget(m_Target_Array, nSmallTargetNum, nCpNum, nCombinDist, GLB_STATUS::GLB_STATUS_TRACK); m_nTargetNum = MergeSmallAndAreaTarget(m_Target_Array, nSmallTargetNum, nCpNum, nCombinDist, GLB_STATUS::GLB_STATUS_TRACK);
// 小面目标检测信息使用后重置,为了提高效率这里只重置个数 // 小面目标检测信息使用后重置,为了提高效率这里只重置个数
//pDST_Module->SetTargetNum(0); pDST_Module->SetTargetNum(0);
//pDAT_Module->SetTargetNum(0); pDAT_Module->SetTargetNum(0);
////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////
@ -1005,15 +978,6 @@ FLOAT32 SA_Tracker::cos_sim(POINT32F v1, POINT32F v2)
return (fcosSim + 1) / 2.0f; return (fcosSim + 1) / 2.0f;
} }
void SA_Tracker::SetParam(Param_SkyDetect para)
{
pDST_Module->setDstDetState(para.bEnableDetcetSmallTarget); // 小目标检测开关设置
pDST_Module->setDstParm(&para); // 小目标检测参数设置
pDAT_Module->setDatDetState(para.bEnableDetcetAreaTarget); // 面目标检测开关设置
pDAT_Module->setDatParm(&para); // 面目标检测参数设置
}
// 运动相似度 // 运动相似度
FLOAT32 SA_Tracker::Similarity_Move(PIPE* pPipe, TARGET_OBJECT* ptTarget, FLOAT32 fAglReso) FLOAT32 SA_Tracker::Similarity_Move(PIPE* pPipe, TARGET_OBJECT* ptTarget, FLOAT32 fAglReso)
{ {
@ -1201,7 +1165,7 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
//局部变量 //局部变量
FLOAT32 fSim = 0.0f; //目标相似度 FLOAT32 fSim = 0.0f; //目标相似度
FLOAT32 fSimMax = -1.0f;//目标相似度最大值 FLOAT32 fSimMax = -1.0f;//目标相似度最大值
FLOAT32 fSimThres = 0.3f; //相似度阈值 FLOAT32 fSimThres = 0.7f; //相似度阈值
SINT32 nFrmsStep = 1; //帧间隔 SINT32 nFrmsStep = 1; //帧间隔
TARGET_OBJECT* ptMainTarget = NULL; //主管道目标 TARGET_OBJECT* ptMainTarget = NULL; //主管道目标
TARGET_OBJECT* ptTarget = NULL; //候选目标 TARGET_OBJECT* ptTarget = NULL; //候选目标
@ -1234,12 +1198,12 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
} }
// 小目标主要使用位置信息 //s3315运动不稳定不优先使用运动模型 // 小目标主要使用位置信息
if (SizeType::SmallTarget == m_SizeMode) if (SizeType::SmallTarget == m_SizeMode)
{ {
fMoveModelWeight = 0.5f; fMoveModelWeight = 0.7f;
fApparentModelWeight = 0.5f; fApparentModelWeight = 0.3f;
fMoveThres = 0.5f; fMoveThres = 0.7f;
} }
// +++++++++++++++++++++++++++++++++++++++++++++++++++++ // +++++++++++++++++++++++++++++++++++++++++++++++++++++
//若长短轨迹预测异常,则直接取距离搜索区域中心最近的目标 //若长短轨迹预测异常,则直接取距离搜索区域中心最近的目标
@ -1261,7 +1225,6 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
// fApparentModelWeight = 0.0; // fApparentModelWeight = 0.0;
//} //}
//统计所有当前帧目标与管道目标的灰度、与中心距离、信噪比差异 //统计所有当前帧目标与管道目标的灰度、与中心距离、信噪比差异
for (int i = 0; i < nFrmObjsCnt; i++) for (int i = 0; i < nFrmObjsCnt; i++)
{ {
@ -1289,7 +1252,7 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
ABS(ptTarget->snSize.h - pPipe->ObjectFilter.sfSize.h) > 3 * nDSmpScale || ABS(ptTarget->snSize.h - pPipe->ObjectFilter.sfSize.h) > 3 * nDSmpScale ||
ABS(ptTarget->snSize.w - pPipe->ObjectFilter.sfSize.w) > 3 * nDSmpScale) && m_LockingPipe->unLostCnt < GLB_FRM_FREQ) ABS(ptTarget->snSize.w - pPipe->ObjectFilter.sfSize.w) > 3 * nDSmpScale) && m_LockingPipe->unLostCnt < GLB_FRM_FREQ)
{ {
//continue; continue;
} }
} }
@ -1338,7 +1301,7 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
// 阈值调整 // 阈值调整
if (pPipe->unLostCnt < 2) if (pPipe->unLostCnt < 20)
{ {
fSScaleChangeLowThres = 0.3; fSScaleChangeLowThres = 0.3;
fSScaleChangeHighThres = 3.f; fSScaleChangeHighThres = 3.f;
@ -1349,25 +1312,14 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
nPredictDiffThresX = MAX(nFThreMin, 12); nPredictDiffThresX = MAX(nFThreMin, 12);
nPredictDiffThresY = MAX(nFThreMin, 12); nPredictDiffThresY = MAX(nFThreMin, 12);
} }
else if (pPipe->unLostCnt < 20)
{
fSScaleChangeLowThres = 0.3;
fSScaleChangeHighThres = 3.f;
fAScaleChangeLowThres = 0.5;
fAScaleChangeHighThres = 2.0;
nLastDiffThresX = MAX(nMoveMin,50);
nLastDiffThresY = MAX(nMoveMin, 50);
nPredictDiffThresX = MAX(nFThreMin, 25);
nPredictDiffThresY = MAX(nFThreMin, 25);
}
else if (pPipe->unLostCnt < 50) else if (pPipe->unLostCnt < 50)
{ {
fSScaleChangeLowThres = 0.2; fSScaleChangeLowThres = 0.2;
fSScaleChangeHighThres = 5.f; fSScaleChangeHighThres = 5.f;
fAScaleChangeLowThres = 0.33; fAScaleChangeLowThres = 0.33;
fAScaleChangeHighThres = 3.0; fAScaleChangeHighThres = 3.0;
nLastDiffThresX = MAX(nMoveMin, 70); nLastDiffThresX = MAX(nMoveMin, 24);
nLastDiffThresY = MAX(nMoveMin, 70); nLastDiffThresY = MAX(nMoveMin, 24);
nPredictDiffThresX = MAX(nFThreMin, 36); nPredictDiffThresX = MAX(nFThreMin, 36);
nPredictDiffThresY = MAX(nFThreMin, 36); nPredictDiffThresY = MAX(nFThreMin, 36);
} }
@ -1377,8 +1329,8 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
fSScaleChangeHighThres = 8.f; fSScaleChangeHighThres = 8.f;
fAScaleChangeLowThres = 0.33; fAScaleChangeLowThres = 0.33;
fAScaleChangeHighThres = 3.0; fAScaleChangeHighThres = 3.0;
nLastDiffThresX = MAX(nMoveMin, 120); nLastDiffThresX = MAX(nMoveMin, 36);
nLastDiffThresY = MAX(nMoveMin, 120); nLastDiffThresY = MAX(nMoveMin, 36);
nPredictDiffThresX = MAX(nFThreMin, 48); nPredictDiffThresX = MAX(nFThreMin, 48);
nPredictDiffThresY = MAX(nFThreMin, 48); nPredictDiffThresY = MAX(nFThreMin, 48);
} }
@ -1394,29 +1346,6 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
nPredictDiffThresY = MAX(nFThreMin, 60); nPredictDiffThresY = MAX(nFThreMin, 60);
} }
//// 小目标特殊处理
if (pPipe->ObjectFilter.fPxlsCnt < 9)
{
fSScaleChangeLowThres = 0.3;
fSScaleChangeHighThres = 3.f;
fAScaleChangeLowThres = 0.5;
fAScaleChangeHighThres = 2.0;
nLastDiffThresX = MAX(nMoveMin, 12);
nLastDiffThresY = MAX(nMoveMin, 12);
nPredictDiffThresX = MAX(nFThreMin, 12);
nPredictDiffThresY = MAX(nFThreMin, 12);
}
//// 针对S3315大目标近距离伺服晃动阈值强制放大
else if(pPipe->ObjectFilter.sfSize.w >= 6 || pPipe->ObjectFilter.sfSize.h >= 6 || pPipe->ObjectFilter.fPxlsCnt > 9)
{
nLastDiffThresX = MAX(nLastDiffThresX, 36);
nLastDiffThresY = MAX(nLastDiffThresY, 36);
nPredictDiffThresX = MAX(nPredictDiffThresX, 100);
nPredictDiffThresY = MAX(nPredictDiffThresY, 100);
}
SINT32 nEnd = pPipe->ubEnd; SINT32 nEnd = pPipe->ubEnd;
// 尺寸变化强逻辑防跳变// by wcw04046 @ 2020/06/22 // 尺寸变化强逻辑防跳变// by wcw04046 @ 2020/06/22
if (nSimTargetId != -1 && m_ObjStatus.unContiLostCnt < 200) if (nSimTargetId != -1 && m_ObjStatus.unContiLostCnt < 200)
@ -1430,12 +1359,11 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
fPixChange = (FLOAT32)(ptTarget->unObjPxlsCnt) / MAX(ptMainTarget->unObjPxlsCnt, 0.001f); fPixChange = (FLOAT32)(ptTarget->unObjPxlsCnt) / MAX(ptMainTarget->unObjPxlsCnt, 0.001f);
// 大于DAT_TARGET_MIN的目标可能是临界目标 // 大于DAT_TARGET_MIN的目标可能是临界目标
if (ptTarget->unObjPxlsCnt < 9 || ptMainTarget->unObjPxlsCnt < 9) if (ptTarget->unObjPxlsCnt < 6 || ptMainTarget->unObjPxlsCnt < 6)
{ {
if ((ptTarget->unObjPxlsCnt == 1) if (fSScaleChangeHighThres < fPixChange || fSScaleChangeLowThres > fPixChange)
&& (fSScaleChangeHighThres < fPixChange || fSScaleChangeLowThres > fPixChange))
{ {
//fSimMax = -1; fSimMax = -1;
} }
} }
else if (fSScaleChangeHighThres < fSizeChange || fSScaleChangeLowThres > fSizeChange)//均为面目标的情形 else if (fSScaleChangeHighThres < fSizeChange || fSScaleChangeLowThres > fSizeChange)//均为面目标的情形
@ -1447,8 +1375,6 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
} }
// 目标运动距离强逻辑:用预测位置和上一帧的位置,防止跟踪跳转到错误的目标 // 目标运动距离强逻辑:用预测位置和上一帧的位置,防止跟踪跳转到错误的目标
if (nSimTargetId != -1) if (nSimTargetId != -1)
{ {
@ -1469,13 +1395,22 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
nLastDiffThresX = MAX(ptTarget->snSize.w / 2, nLastDiffThresX); nLastDiffThresX = MAX(ptTarget->snSize.w / 2, nLastDiffThresX);
nLastDiffThresY = MAX(ptTarget->snSize.h / 2, nLastDiffThresY); nLastDiffThresY = MAX(ptTarget->snSize.h / 2, nLastDiffThresY);
//if (g_GLB_stPara->nWorkScene == GLB_WATCH_SKY)
//{
// nPredictDiffThres = 70;
// nLastDiffThres = 15;
//}
//if (g_GLB_stPara->nWorkScene == GLB_WATCH_GROUND)
//{
// nPredictDiffThres = 5;
// nLastDiffThres = 5;
//}
// 非初始锁定阶段执行运动距离强逻辑 // 非初始锁定阶段执行运动距离强逻辑
//小目标情况下,周围可能检测出干扰的相似目标(真正的目标无法检出),需要强逻辑限定 //小目标情况下,周围可能检测出干扰的相似目标(真正的目标无法检出),需要强逻辑限定
//面目标情况下,单个目标允许大范围关联,关闭距离限定;非单个目标限定关联距离 //面目标情况下,单个目标允许大范围关联,关闭距离限定;非单个目标限定关联距离
if (/*PIPE_EVENT_JUST_LOCK != m_LockingPipe->ubEventStatus && */ if (PIPE_EVENT_JUST_LOCK != m_LockingPipe->ubEventStatus &&
m_LockingPipe->stMotionMod_mean.bTrackStable &&
m_SizeMode <= SizeType::SmallTarget || (m_SizeMode >= SizeType::MiddleTarget && !bSingleTarget)) m_SizeMode <= SizeType::SmallTarget || (m_SizeMode >= SizeType::MiddleTarget && !bSingleTarget))
{ {
if ((ABS(ptTarget->pfCenPos.x - stMotionMod_mean.crnObjPrediRtLong.cx) > MAX(ABS(fAzSpeed) * 2, nPredictDiffThresX) if ((ABS(ptTarget->pfCenPos.x - stMotionMod_mean.crnObjPrediRtLong.cx) > MAX(ABS(fAzSpeed) * 2, nPredictDiffThresX)
@ -1486,19 +1421,6 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
fSimMax = -1; fSimMax = -1;
} }
} }
//// 灰度类型不同,任何情况都不允许跳跃
//if (ptTarget->fSNR * ptMainTarget->fSNR < 0)
//{
// if ((ABS(ptTarget->pfCenPos.x - stMotionMod_mean.crnObjPrediRtLong.cx) > MAX(ABS(fAzSpeed) * 2, 6)
// || ABS(ptTarget->pfCenPos.y - stMotionMod_mean.crnObjPrediRtLong.cy) > MAX(ABS(fPtSpeed) * 2, 6))
// && (ABS(ptTarget->pfCenPos.x - m_ObjStatus.ptPos.x) > MAX(ABS(fAzSpeed) * 2, 6)
// || ABS(ptTarget->pfCenPos.y - m_ObjStatus.ptPos.y) > MAX(ABS(fPtSpeed) * 2, 6)))
// {
// fSimMax = -1;
// }
//}
} }
} }
////+++++++++++++++++++++++++++++++++++++++++++++++++++++++ ////+++++++++++++++++++++++++++++++++++++++++++++++++++++++
@ -1541,19 +1463,11 @@ SINT32 SA_Tracker::FindMatchTarget(PIPE* pPipe, TARGET_OBJECT* ptTargetArray, SI
} }
// 警戒周边干扰时,调整阈值 // 警戒周边干扰时,调整阈值
if (pPipe->blookout && pPipe->stMotionMod_mean.bTrackStable) if (pPipe->blookout)
{
fThres = fSimThres * ContiLostThres;
fMoveThres = fMoveThres * 1.f - ContiLostThres;
}
// 单一目标初始锁定,不做阈值限制
if (bSingleTarget && PIPE_EVENT_JUST_LOCK == m_LockingPipe->ubEventStatus)
{ {
fThres = 0; fThres = 0.8 * ContiLostThres;
fMoveThres = 0.8 * 1.f - ContiLostThres;
} }
//若最大相似度不满足阈值,则认为未查找到管道目标,返回-1 //若最大相似度不满足阈值,则认为未查找到管道目标,返回-1
if (fSimMax < fThres /*||(SmallTarget == m_SizeMode && fMoveSim_Target < fMoveThres)*/) if (fSimMax < fThres /*||(SmallTarget == m_SizeMode && fMoveSim_Target < fMoveThres)*/)
{ {
@ -1837,12 +1751,12 @@ SINT32 SA_Tracker::FindMatchPipe(SINT32 nWidth, SINT32 nHeight, PIPE* pLockingPi
FLOAT32 fThres = fSimThres; FLOAT32 fThres = fSimThres;
if (bSingleTarget) if (bSingleTarget)
{ {
fThres = 0.0f;//没有底线的跟踪 fThres = 0.1f;//没有底线的跟踪
//// 超过10帧丢失降低阈值搜索 // 超过10帧丢失降低阈值搜索
//if (m_ObjStatus.unContiLostCnt >= 10) if (m_ObjStatus.unContiLostCnt >= 10)
//{ {
// fThres = 0.1f; fThres = 0.1f;
//} }
} }
else else
@ -2136,8 +2050,8 @@ void SA_Tracker::UpdateObject2Tracker(TARGET_OBJECT* pTarget, GLB_INPUT* p_GLB_I
//仅使用长时预测更新目标位置 //仅使用长时预测更新目标位置
pObjStatus->ptPosPre = pObjStatus->ptPos; pObjStatus->ptPosPre = pObjStatus->ptPos;
pObjStatus->ptPos.x = m_LockingPipe->ptCurrentPnt.x; //使用管道预测位置 pObjStatus->ptPos.x = m_LockingPipe->stMotionMod_mean.crnObjPrediRtLong.cx;
pObjStatus->ptPos.y = m_LockingPipe->ptCurrentPnt.y; pObjStatus->ptPos.y = m_LockingPipe->stMotionMod_mean.crnObjPrediRtLong.cy;
pObjStatus->ptPosFilter = pObjStatus->ptPos; pObjStatus->ptPosFilter = pObjStatus->ptPos;
SATrkState = Locked_Losting; SATrkState = Locked_Losting;
@ -2164,7 +2078,6 @@ void SA_Tracker::UpdateObject2Tracker(TARGET_OBJECT* pTarget, GLB_INPUT* p_GLB_I
pObjStatus->fObjStd = pTarget->fObjStd; // 目标方差 pObjStatus->fObjStd = pTarget->fObjStd; // 目标方差
pObjStatus->fBGStd = pTarget->fBGStd; // 目标背景方差 pObjStatus->fBGStd = pTarget->fBGStd; // 目标背景方差
pObjStatus->fSNR = pTarget->fSNR; // 信噪比 pObjStatus->fSNR = pTarget->fSNR; // 信噪比
pObjStatus->fBGMean = pTarget->fBGMean;
// 滤波值 // 滤波值
pObjStatus->ptPosFilter.x = pObjStatus->ptPosFilter.x * 0.4 + pObjStatus->ptPos.x * 0.6;//位置在图像系滤波 pObjStatus->ptPosFilter.x = pObjStatus->ptPosFilter.x * 0.4 + pObjStatus->ptPos.x * 0.6;//位置在图像系滤波
pObjStatus->ptPosFilter.y = pObjStatus->ptPosFilter.y * 0.4 + pObjStatus->ptPos.y * 0.6;//位置在图像系滤波 pObjStatus->ptPosFilter.y = pObjStatus->ptPosFilter.y * 0.4 + pObjStatus->ptPos.y * 0.6;//位置在图像系滤波

@ -135,8 +135,6 @@ struct TSA_Parameters
UINT16 nUseAIDetFeq; //使用AI管道更新频率 UINT16 nUseAIDetFeq; //使用AI管道更新频率
BBOOL bEnableSecDetect;//小目标二次检测开关 BBOOL bEnableSecDetect;//小目标二次检测开关
BBOOL bEnableGeoPredict;//允许惯性预测开关。
}; };
@ -187,8 +185,6 @@ public:
// 获取小目标检测模块 // 获取小目标检测模块
API_DetectSmallObj* getDSTmodule(); API_DetectSmallObj* getDSTmodule();
// 检测器参数设置
void SetParam(Param_SkyDetect para);
// 对空所有跟踪器统一输出,便于融合处理 // 对空所有跟踪器统一输出,便于融合处理
OBJECTSTATUS m_ObjStatus; OBJECTSTATUS m_ObjStatus;

@ -150,21 +150,9 @@ bool Tracker::Track(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, API_MOT_PIPE*
// 基于窗口平均计算管道长短时预测点 // 基于窗口平均计算管道长短时预测点
Predict_ObjAglTrackPredict(&m_LockingPipe->stMotionMod_mean, img.u32Width, img.u32Height, p_GLB_Input); Predict_ObjAglTrackPredict(&m_LockingPipe->stMotionMod_mean, img.u32Width, img.u32Height, p_GLB_Input);
if (m_LockingPipe->stMotionMod_mean.bTrackStable)
{
m_LockingPipe->afCurrentAgl = m_LockingPipe->stMotionMod_mean.ObjAglListsLong.arfFilter.afAngle; m_LockingPipe->afCurrentAgl = m_LockingPipe->stMotionMod_mean.ObjAglListsLong.arfFilter.afAngle;
m_LockingPipe->ptCurrentPnt.x = m_LockingPipe->stMotionMod_mean.crnObjPrediRtLong.cx; m_LockingPipe->ptCurrentPnt.x = m_LockingPipe->stMotionMod_mean.crnObjPrediRtLong.cx;
m_LockingPipe->ptCurrentPnt.y = m_LockingPipe->stMotionMod_mean.crnObjPrediRtLong.cy; m_LockingPipe->ptCurrentPnt.y = m_LockingPipe->stMotionMod_mean.crnObjPrediRtLong.cy;
}
else
{
m_LockingPipe->afCurrentAgl = m_LockingPipe->stMotionMod_mean.ObjAglListsNear.arfFilter.afAngle;
m_LockingPipe->ptCurrentPnt.x = m_LockingPipe->stMotionMod_mean.crnObjPrediRtNear.cx;
m_LockingPipe->ptCurrentPnt.y = m_LockingPipe->stMotionMod_mean.crnObjPrediRtNear.cy;
}
// 跟踪找到目标 // 跟踪找到目标
if (m_type == GLB_SCEN_MODE::GLB_SCEN_SKY) if (m_type == GLB_SCEN_MODE::GLB_SCEN_SKY)
@ -204,9 +192,9 @@ SINT32 Tracker::MemTrack(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, API_MOT_P
// 基于窗口平均计算管道长短时预测点 // 基于窗口平均计算管道长短时预测点
Predict_ObjAglTrackPredict(&m_LockingPipe->stMotionMod_mean, img.u32Width, img.u32Height, p_GLB_Input); Predict_ObjAglTrackPredict(&m_LockingPipe->stMotionMod_mean, img.u32Width, img.u32Height, p_GLB_Input);
m_LockingPipe->afCurrentAgl = m_LockingPipe->stMotionMod_mean.ObjAglListsNear.arfFilter.afAngle; m_LockingPipe->afCurrentAgl = m_LockingPipe->stMotionMod_mean.ObjAglListsLong.arfFilter.afAngle;
m_LockingPipe->ptCurrentPnt.x = m_LockingPipe->stMotionMod_mean.crnObjPrediRtNear.cx; m_LockingPipe->ptCurrentPnt.x = m_LockingPipe->stMotionMod_mean.crnObjPrediRtLong.cx;
m_LockingPipe->ptCurrentPnt.y = m_LockingPipe->stMotionMod_mean.crnObjPrediRtNear.cy; m_LockingPipe->ptCurrentPnt.y = m_LockingPipe->stMotionMod_mean.crnObjPrediRtLong.cy;
// 跟踪找到目标 // 跟踪找到目标
if (m_type == GLB_SCEN_MODE::GLB_SCEN_SKY) if (m_type == GLB_SCEN_MODE::GLB_SCEN_SKY)
@ -391,8 +379,6 @@ bool Tracker::UpdateTracker2Pipe(GLB_INPUT* p_GLB_Input,GLB_SCEN_MODE type, PIPE
Target.pxObjGray = ObjStatus.pxObjGray; Target.pxObjGray = ObjStatus.pxObjGray;
Target.fSNR = ObjStatus.fSNR; Target.fSNR = ObjStatus.fSNR;
Target.emClsSrc = ObjStatus.emClsSrc; Target.emClsSrc = ObjStatus.emClsSrc;
Target.fBGMean = ObjStatus.fBGMean;
Target.fBGStd = ObjStatus.fBGStd;
//// 如果对空小面目标跟踪器已经检出目标,且是融合决策目标,直接加入管道 //// 如果对空小面目标跟踪器已经检出目标,且是融合决策目标,直接加入管道
//if (type == GLB_SCEN_MODE::GLB_SCEN_SKY && pSkyTracker->mTargetFusion.bObject) //if (type == GLB_SCEN_MODE::GLB_SCEN_SKY && pSkyTracker->mTargetFusion.bObject)

@ -187,7 +187,7 @@ void Predict_ObjAglTrackPredict(FilterMeanNL* pFilter, SINT32 nWidth, SINT32 nHe
//SINT32 nNLDist2Thres = g_DST_stPara.nPipeRadiusTrack * g_DST_stPara.nPipeRadiusTrack; //SINT32 nNLDist2Thres = g_DST_stPara.nPipeRadiusTrack * g_DST_stPara.nPipeRadiusTrack;
// SINT32 nNLDist2Thres = 20*20; // SINT32 nNLDist2Thres = 20*20;
//SINT32 nNLDist2Thres = (g_GLB_stOutput.ObjectStatus.sfSize.h * g_GLB_stOutput.ObjectStatus.sfSize.w) * 25; //SINT32 nNLDist2Thres = (g_GLB_stOutput.ObjectStatus.sfSize.h * g_GLB_stOutput.ObjectStatus.sfSize.w) * 25;
SINT32 nNLDist2Thres = 10*10; SINT32 nNLDist2Thres = 10 * 25;
nNLDist2Thres = MAX(nNLDist2Thres,100); nNLDist2Thres = MAX(nNLDist2Thres,100);
if (fNLDist2 - fNLDist2Pre > 2 if (fNLDist2 - fNLDist2Pre > 2
@ -200,23 +200,13 @@ void Predict_ObjAglTrackPredict(FilterMeanNL* pFilter, SINT32 nWidth, SINT32 nHe
pFilter->nObjPredictFarCnt = 0; pFilter->nObjPredictFarCnt = 0;
pFilter->bObjPredictAbnormal = FALSE; pFilter->bObjPredictAbnormal = FALSE;
} }
if (pFilter->nObjPredictFarCnt > 3 if (pFilter->nObjPredictFarCnt > 10
&& fNLDist2 > nNLDist2Thres) && fNLDist2 > nNLDist2Thres)
{ {
pFilter->bObjPredictAbnormal = TRUE; pFilter->bObjPredictAbnormal = TRUE;
} }
pFilter->dnObjPredictDist = dnNLDist; pFilter->dnObjPredictDist = dnNLDist;
// 预测偏离,或者滤波器被重置时,均不能认为跟踪稳定
if (dnNLDist.dx > 8 || dnNLDist.dy > 8 || (pFilter->ObjAglListsLong.nCnt == 0))
{
pFilter->bTrackStable = false;
}
else
{
pFilter->bTrackStable = true;
}
////利用长短时轨迹判断是否开启惯性预测,采用多帧判断 by wcw04046 @ 2022/01/17 ////利用长短时轨迹判断是否开启惯性预测,采用多帧判断 by wcw04046 @ 2022/01/17
//if (g_GLB_stOutput.nObjPredictFarCnt == 0) //if (g_GLB_stOutput.nObjPredictFarCnt == 0)
//{ //{

@ -223,33 +223,6 @@ FLOAT32 SIoUB(RECT16S A, RECT16S B)
return IOUvalue; return IOUvalue;
} }
FLOAT32 SIoUB_Expand(RECT16S A, RECT16S B, SINT32 expand)
{
FLOAT32 IOUvalue = 0.0;
FLOAT32 IOUvalueA = 0.0;
SINT16 x1 = A.x - expand;
SINT16 y1 = A.y - expand;
SINT16 x2 = A.x + A.w + expand - 1;
SINT16 y2 = A.y + A.h + expand - 1;
SINT16 u1 = B.x - expand;
SINT16 v1 = B.y - expand;
SINT16 u2 = B.x + B.w + expand - 1;
SINT16 v2 = B.y + B.h + expand - 1;
FLOAT32 intersA = (FLOAT32)A.w * A.h;//目标A面积
FLOAT32 inters = MAX((MIN(x2, u2) - MAX(x1, u1)) + 1, 0.f) * MAX((MIN(y2, v2) - MAX(y1, v1)) + 1, 0.f);//交集
//float unions = (x2 - x1) * (y2 - y1) + (u2 - u1) * (v2 - v1) - inters;//并集
if (intersA > EPSILON)
{
IOUvalueA = 1.0f * inters / intersA;
}
IOUvalue = IOUvalueA;//交集占比最大值
return IOUvalue;
}
// 公共宽度占据B框宽度和高度的比例 // 公共宽度占据B框宽度和高度的比例
void Overlap_WH_Bratio(RECT32F A, RECT32F B, FLOAT32 *Overlap_Wb, FLOAT32 *Overlap_Hb) void Overlap_WH_Bratio(RECT32F A, RECT32F B, FLOAT32 *Overlap_Wb, FLOAT32 *Overlap_Hb)
{ {

@ -31,8 +31,6 @@ FLOAT32 FIoUB(RECT32F A, RECT32F B);
FLOAT32 SIoUB(RECT16S A, RECT16S B); FLOAT32 SIoUB(RECT16S A, RECT16S B);
FLOAT32 SIoUB_Expand(RECT16S A, RECT16S B, SINT32 expand);
FLOAT32 IoUC(RECT32F A, RECT32F B); FLOAT32 IoUC(RECT32F A, RECT32F B);
FLOAT32 IoUA32S(RECT32S *A, RECT32S *B); FLOAT32 IoUA32S(RECT32S *A, RECT32S *B);

@ -78,7 +78,7 @@
// 面目标检测最大个数--控制耗时 // 面目标检测最大个数--控制耗时
#define DAT_TARGET_NUM_MAX 10 // #define DAT_TARGET_NUM_MAX 10 //
// 定义dst最大检测个数 // 定义dst最大检测个数
#define DST_MAX_NUM 150 //注意:检测器部分使用了小目标队列长度,但是二者引用了不同的文件,宏定义重复 #define DST_MAX_NUM 50
#define DT_TARGET_MAX_NUM DST_MAX_NUM + DAT_TARGET_NUM_MAX // 定义检测器最大检测个数 #define DT_TARGET_MAX_NUM DST_MAX_NUM + DAT_TARGET_NUM_MAX // 定义检测器最大检测个数
#define DT_MATCHER_MAX_NUM 6 // 定义模板匹配最大检测个数 #define DT_MATCHER_MAX_NUM 6 // 定义模板匹配最大检测个数
@ -92,14 +92,12 @@
#define TRACK_STATUS_DEPTH_MAX 50 //目标状态监控最大深度 #define TRACK_STATUS_DEPTH_MAX 50 //目标状态监控最大深度
#define TRACK_POS_REC_MAX 20 // 目标像方坐标记录深度
#define GLB_GROUP_NUM_MAX 5 //组的最大个数 #define GLB_GROUP_NUM_MAX 5 //组的最大个数
#define GLB_GROUP_PIPENUM_MAX 3 //组包含的管道个数只支持3个合并 #define GLB_GROUP_PIPENUM_MAX 3 //组包含的管道个数只支持3个合并
//管道确认帧数 //管道确认帧数
#define GLB_PIPE_AWS_FRAME_STARE 3 //管道目标确认(报警)的最少帧数:凝视 #define GLB_PIPE_AWS_FRAME_STARE 5 //管道目标确认(报警)的最少帧数:凝视
#define GLB_PIPE_AWS_FRAME_SCAN 3 //管道目标确认(报警)的最少帧数:扫描 #define GLB_PIPE_AWS_FRAME_SCAN 3 //管道目标确认(报警)的最少帧数:扫描
#define GLB_PIPE_NUM_SECTOR 9 //目标运动方向分割 qw 20220112 #define GLB_PIPE_NUM_SECTOR 9 //目标运动方向分割 qw 20220112
@ -366,7 +364,6 @@ typedef struct tagObjectStatus
FLOAT32 fSNR; // 目标信噪比 FLOAT32 fSNR; // 目标信噪比
FLOAT32 fObjStd; // 目标方差 4 FLOAT32 fObjStd; // 目标方差 4
FLOAT32 fBGStd; // 目标背景方差 FLOAT32 fBGStd; // 目标背景方差
FLOAT32 fBGMean; // 目标背景均值
UINT16 fGrayFilter; // 灰度滤波值 UINT16 fGrayFilter; // 灰度滤波值
POINT32F ptCentroid; // 质心定位置 POINT32F ptCentroid; // 质心定位置
@ -683,7 +680,6 @@ typedef struct tagFilterMeanNL
SINT32 nObjTrackLostCntNear; //目标短时轨迹预测失败帧数 SINT32 nObjTrackLostCntNear; //目标短时轨迹预测失败帧数
SINT32 nObjTrackLostCntLong; //目标长时轨迹预测失败帧数 SINT32 nObjTrackLostCntLong; //目标长时轨迹预测失败帧数
SINT32 nAbnormalCnt; //跟踪异常计数器(连续---坏点 SINT32 nAbnormalCnt; //跟踪异常计数器(连续---坏点
BBOOL bTrackStable; //跟踪稳态判断
}FilterMeanNL; }FilterMeanNL;

@ -7,31 +7,83 @@
#include <stdio.h> #include <stdio.h>
#include <iostream> #ifdef __linux__
#include <chrono> #include <sys/time.h> // for gettimeofday()
#elif _WIN32
#include <time.h>
#include <winsock.h>
#endif
#ifdef WIN32
#define gettimeofday(tp, tzp) \
do {\
time_t clock; struct tm tm; SYSTEMTIME wtm; GetLocalTime(&wtm);\
tm.tm_year = wtm.wYear - 1900;\
tm.tm_mon = wtm.wMonth - 1;\
tm.tm_mday = wtm.wDay;\
tm.tm_hour = wtm.wHour;\
tm.tm_min = wtm.wMinute;\
tm.tm_sec = wtm.wSecond;\
tm.tm_isdst = -1;\
clock = mktime(&tm); (tp)->tv_sec = clock; (tp)->tv_usec = wtm.wMilliseconds * 1000;\
} while (0)
#endif
class stopWatch { struct time_checker
public: {
stopWatch() : start_time(std::chrono::steady_clock::now()) {} struct timeval start_time;
struct timeval stop_time;
// 获取经过的时间ms void TimeStart()
float elapsed_ms() const
{ {
auto duration = std::chrono::steady_clock::now() - start_time; gettimeofday(&start_time, nullptr);
return (std::chrono::duration_cast<std::chrono::microseconds>(duration).count()) / 1000.0f;
} }
// 重置计时器 void TimeStop()
void reset() { {
start_time = std::chrono::steady_clock::now(); gettimeofday(&stop_time, nullptr);
} }
private: int timeDistance()
std::chrono::time_point<std::chrono::steady_clock> start_time; {
}; long time_1_token = start_time.tv_sec * 1000 + start_time.tv_usec / 1000;
long time_2_token = stop_time.tv_sec * 1000 + stop_time.tv_usec / 1000;
return time_2_token - time_1_token;
}
void show_distance(const char*title = "current time")
{
long time_1_token = start_time.tv_sec * 1000 + start_time.tv_usec / 1000;
long time_2_token = stop_time.tv_sec * 1000 + stop_time.tv_usec / 1000;
printf("%s : %ld ms\n", title, time_2_token - time_1_token);
}
#endif void show_ns_distance(const char*title = "current time")
{
long time_1_token = start_time.tv_sec * 1000000 + start_time.tv_usec;
long time_2_token = stop_time.tv_sec * 1000000 + stop_time.tv_usec;
printf("%s : %ld ns\n", title, time_2_token - time_1_token);
}
bool timeout(int second)
{
struct timeval current;
gettimeofday(&current, nullptr);
long time_1_token = start_time.tv_sec * 1000 + start_time.tv_usec / 1000;
long time_2_token = current.tv_sec * 1000 + current.tv_usec / 1000;
int value = time_2_token - time_1_token;
return value > second;
}
static long get_current_timetoken()
{
struct timeval current;
gettimeofday(&current, nullptr);
return (current.tv_sec * 1000 + current.tv_usec / 1000);
}
};
#endif //BABY_FACE_NCNN_DEMO_TIMER_H

@ -179,9 +179,6 @@ public:
// 设置小目标合并距离 // 设置小目标合并距离
virtual void SetCombinDist(SINT32 ndistance) = 0; virtual void SetCombinDist(SINT32 ndistance) = 0;
// 设置检测基础阈值
virtual void SetDetSnr(FLOAT32 fSNR) = 0;
// 获取小目标检测队列 // 获取小目标检测队列
virtual TARGET_OBJECT* GetTargetArray() = 0; virtual TARGET_OBJECT* GetTargetArray() = 0;

@ -9,6 +9,7 @@
*******************************************************************/ *******************************************************************/
#include "Arith_DetectAreaObj.h" #include "Arith_DetectAreaObj.h"
#include "Arith_ImgOperate.h" #include "Arith_ImgOperate.h"
#include "Arith_DetectAreaObj.h"
#include "API_DetectSAObj.h" #include "API_DetectSAObj.h"
//#include "../Version.h" //#include "../Version.h"
#include "opencv2/opencv.hpp" #include "opencv2/opencv.hpp"
@ -237,7 +238,7 @@ void DetectAreaObj::DAT_MallocMemory(SINT32 nWidth, SINT32 nHeight, CENTERRECT m
DAT_pBinary_Mag = new UBYTE8[DAT_CENTER_REGION_SIZE]; DAT_pBinary_Mag = new UBYTE8[DAT_CENTER_REGION_SIZE];
DAT_pBinary_Gray = new UBYTE8[DAT_CENTER_REGION_SIZE]; DAT_pBinary_Gray = new UBYTE8[DAT_CENTER_REGION_SIZE];
DAT_pBinary_GrayNew = new UBYTE8[DAT_CENTER_REGION_SIZE]; DAT_pBinary_GrayNew = new UBYTE8[DAT_CENTER_REGION_SIZE];
DAT_DIFF = new SINT32[DAT_CENTER_REGION_SIZE]; DAT_DIFF = new SINT16[DAT_CENTER_REGION_SIZE];
DAT_pxDetectAreaGradIntegral = new SINT32[DAT_CENTER_REGION_SIZE]; DAT_pxDetectAreaGradIntegral = new SINT32[DAT_CENTER_REGION_SIZE];
} }
@ -554,7 +555,7 @@ SINT32 DetectAreaObj::DAT_DetectAreaTarget(GD_VIDEO_FRAME_S img, GLB_STATUS nSta
if (GLB_OBJ_GRAY_BRIGHT == m_DAT_stPara.nDetectGrayType) if (GLB_OBJ_GRAY_BRIGHT == m_DAT_stPara.nDetectGrayType)
{ {
SINT32* pnTgBgDiff = (SINT32*)DAT_DIFF; SINT16* pnTgBgDiff = (SINT16*)DAT_DIFF;
//20161115计算背景目标灰度残差图 //20161115计算背景目标灰度残差图
DAT_CalcTgBgDiff(m_DAT_stOutput.pnSrDSmpImg, snSrDSmp, m_DAT_stOutput.pnGrayInt, DAT_CalcTgBgDiff(m_DAT_stOutput.pnSrDSmpImg, snSrDSmp, m_DAT_stOutput.pnGrayInt,
@ -621,7 +622,7 @@ SINT32 DetectAreaObj::DAT_DetectAreaTarget(GD_VIDEO_FRAME_S img, GLB_STATUS nSta
} }
else if (GLB_OBJ_GRAY_DARK == m_DAT_stPara.nDetectGrayType) else if (GLB_OBJ_GRAY_DARK == m_DAT_stPara.nDetectGrayType)
{ {
SINT32* pnTgBgDiff = (SINT32*)DAT_DIFF; SINT16* pnTgBgDiff = (SINT16*)DAT_DIFF;
//20161115计算背景目标灰度残差图 //20161115计算背景目标灰度残差图
DAT_CalcTgBgDiff(m_DAT_stOutput.pnSrDSmpImg, snSrDSmp, m_DAT_stOutput.pnGrayInt, DAT_CalcTgBgDiff(m_DAT_stOutput.pnSrDSmpImg, snSrDSmp, m_DAT_stOutput.pnGrayInt,
@ -689,7 +690,7 @@ SINT32 DetectAreaObj::DAT_DetectAreaTarget(GD_VIDEO_FRAME_S img, GLB_STATUS nSta
{ {
SINT32 nFrmBrightObjCnt = 0; SINT32 nFrmBrightObjCnt = 0;
SINT32 nFrmDarkObjCnt = 0; SINT32 nFrmDarkObjCnt = 0;
SINT32* pnTgBgDiff = (SINT32*)DAT_DIFF; SINT16* pnTgBgDiff = (SINT16*)DAT_DIFF;
//20161112Whao初始化亮暗目标数组 //20161112Whao初始化亮暗目标数组
TARGET_OBJECT* ptDATResultBright = (TARGET_OBJECT*)DAT_Target_Bright; TARGET_OBJECT* ptDATResultBright = (TARGET_OBJECT*)DAT_Target_Bright;
@ -972,7 +973,7 @@ SINT32 DetectAreaObj::DAT_ObjectAreaSeg(GD_VIDEO_FRAME_S img, CENTERRECT crnSrRe
if (GLB_OBJ_GRAY_BRIGHT == m_DAT_stPara.nDetectGrayType) if (GLB_OBJ_GRAY_BRIGHT == m_DAT_stPara.nDetectGrayType)
{ {
SINT32* pnTgBgDiff = (SINT32*)DAT_DIFF; SINT16* pnTgBgDiff = (SINT16*)DAT_DIFF;
//20161115计算背景目标灰度残差图 //20161115计算背景目标灰度残差图
DAT_CalcTgBgDiff(m_DAT_stOutput.pnSrDSmpImg, snSrDSmp, m_DAT_stOutput.pnGrayInt, DAT_CalcTgBgDiff(m_DAT_stOutput.pnSrDSmpImg, snSrDSmp, m_DAT_stOutput.pnGrayInt,
@ -996,7 +997,7 @@ SINT32 DetectAreaObj::DAT_ObjectAreaSeg(GD_VIDEO_FRAME_S img, CENTERRECT crnSrRe
} }
else if (GLB_OBJ_GRAY_DARK == m_DAT_stPara.nDetectGrayType) else if (GLB_OBJ_GRAY_DARK == m_DAT_stPara.nDetectGrayType)
{ {
SINT32* pnTgBgDiff = (SINT32*)DAT_DIFF; SINT16* pnTgBgDiff = (SINT16*)DAT_DIFF;
//20161115计算背景目标灰度残差图 //20161115计算背景目标灰度残差图
DAT_CalcTgBgDiff(m_DAT_stOutput.pnSrDSmpImg, snSrDSmp, m_DAT_stOutput.pnGrayInt, DAT_CalcTgBgDiff(m_DAT_stOutput.pnSrDSmpImg, snSrDSmp, m_DAT_stOutput.pnGrayInt,
@ -1071,7 +1072,7 @@ void DetectAreaObj::DAT_Initialization(SINT32 nWidth, SINT32 nHeight, CENTERRECT
m_DAT_stPara.nObjSizeMax = DAT_TARGET_PXLS_MAX; m_DAT_stPara.nObjSizeMax = DAT_TARGET_PXLS_MAX;
m_DAT_stPara.nObjWidthMax = DAT_TARGET_WIDTH_MAX; m_DAT_stPara.nObjWidthMax = DAT_TARGET_WIDTH_MAX;
m_DAT_stPara.nObjHeightMax = DAT_TARGET_HEIGHT_MAX; m_DAT_stPara.nObjHeightMax = DAT_TARGET_HEIGHT_MAX;
m_DAT_stPara.fObjWHRatioMin = 0.25f; m_DAT_stPara.fObjWHRatioMin = 0.4f;
m_DAT_stPara.fObjWHRatioMax = 4.0f; m_DAT_stPara.fObjWHRatioMax = 4.0f;
m_DAT_stPara.fObjRectRatioMin = 0.3f; m_DAT_stPara.fObjRectRatioMin = 0.3f;
m_DAT_stPara.nObjBkgGrayDiffMin = 10; m_DAT_stPara.nObjBkgGrayDiffMin = 10;
@ -1153,8 +1154,8 @@ void DetectAreaObj::DAT_CleanUpFrameDetectOutput(void)
memset(ptTargetArrayCombine, 0, sizeof(TARGET_OBJECT) * DAT_TARGET_NUM_MAX); memset(ptTargetArrayCombine, 0, sizeof(TARGET_OBJECT) * DAT_TARGET_NUM_MAX);
//20161116初始化灰度残差图 //20161116初始化灰度残差图
SINT32* pnTgBgDiff = (SINT32*)DAT_DIFF; SINT16* pnTgBgDiff = (SINT16*)DAT_DIFF;
memset(pnTgBgDiff, 0, sizeof(SINT32) * DAT_CENTER_REGION_SIZE); memset(pnTgBgDiff, 0, sizeof(SINT16) * DAT_CENTER_REGION_SIZE);
} }
@ -1386,7 +1387,7 @@ void DetectAreaObj::DAT_GradSegmentation(UINT16* pnImg, SIZE32S snImgSize,
void DetectAreaObj::DAT_CalcTgBgDiff(UINT16* pFrame, SIZE32S snImgSize, void DetectAreaObj::DAT_CalcTgBgDiff(UINT16* pFrame, SIZE32S snImgSize,
SINT32* pnIntImg, SIZE32S snIntImgSize, SINT32* pnIntImg, SIZE32S snIntImgSize,
SINT32 nRadius, SINT32 nRadiusTG, SINT32 nRadius, SINT32 nRadiusTG,
SINT32* pnTgBgDiff) SINT16* pnTgBgDiff)
{ {
SINT32 i, j; SINT32 i, j;
SINT32 nBorder = 1; //不处理的边界大小 SINT32 nBorder = 1; //不处理的边界大小
@ -1584,7 +1585,7 @@ void DetectAreaObj::DAT_CalcTgBgDiff(UINT16* pFrame, SIZE32S snImgSize,
* 4(i,j)(i+1,j+1) * 4(i,j)(i+1,j+1)
**********************************************************/ **********************************************************/
void DetectAreaObj::DAT_GraySegmentation(SIZE32S snImgSize, SINT32 nRadius, SINT32 nThresMin, void DetectAreaObj::DAT_GraySegmentation(SIZE32S snImgSize, SINT32 nRadius, SINT32 nThresMin,
UBYTE8* pnBinary, SINT32* pnTgBgDiff, BBOOL changeDetParam) UBYTE8* pnBinary, SINT16* pnTgBgDiff, BBOOL changeDetParam)
{ {
SINT32 i, j; SINT32 i, j;
SINT32 nBorder = 1; //不处理的边界大小 SINT32 nBorder = 1; //不处理的边界大小
@ -1823,8 +1824,8 @@ SINT32 DetectAreaObj::DAT_ObjsSegmentation(UINT16* pFrame, UBYTE8* pnGrayBinary,
//MSSu, 20150514: 弱目标检测时,灰度、梯度二值点取并集,否则取交集 //MSSu, 20150514: 弱目标检测时,灰度、梯度二值点取并集,否则取交集
BBOOL bSeedPt = FALSE; BBOOL bSeedPt = FALSE;
if ((DAT_FLAG_BINARY == pnGrayBinary[nIndex] if (DAT_FLAG_BINARY == pnGrayBinary[nIndex]
&& DAT_FLAG_BINARY == pnGradBinary[nIndex]) && DAT_FLAG_BINARY == pnGradBinary[nIndex]
&& DAT_FLAG_CONNECTED != pnFlagHasSr[nIndex]) && DAT_FLAG_CONNECTED != pnFlagHasSr[nIndex])
{ {
bSeedPt = TRUE; bSeedPt = TRUE;

@ -187,10 +187,10 @@ private:
void DAT_CalcTgBgDiff(UINT16* pFrame, SIZE32S snImgSize, void DAT_CalcTgBgDiff(UINT16* pFrame, SIZE32S snImgSize,
SINT32* pnIntImg, SIZE32S snIntImgSize, SINT32* pnIntImg, SIZE32S snIntImgSize,
SINT32 nRadius, SINT32 nRadiusTG, SINT32 nRadius, SINT32 nRadiusTG,
SINT32* pnTgBgDiff); SINT16* pnTgBgDiff);
// 灰度分割 // 灰度分割
void DAT_GraySegmentation(SIZE32S snImgSize, SINT32 nRadius, SINT32 nThresMin, UBYTE8* pnBinary, SINT32* pnTgBgDiff, BBOOL changeDetParam); void DAT_GraySegmentation(SIZE32S snImgSize, SINT32 nRadius, SINT32 nThresMin, UBYTE8* pnBinary, SINT16* pnTgBgDiff, BBOOL changeDetParam);
// 目标分割在2倍降采样图像上检测目标再还原回原始图像位置及大小 // 目标分割在2倍降采样图像上检测目标再还原回原始图像位置及大小
SINT32 DAT_ObjsSegmentation(UINT16* pFrame, UBYTE8* pnGrayBinary, SINT32 DAT_ObjsSegmentation(UINT16* pFrame, UBYTE8* pnGrayBinary,
@ -263,7 +263,7 @@ private:
UBYTE8* DAT_pBinary_Gray; //DAT: 灰度二值化图像2倍降采样 //BYTES: (320/2)*(256/2)*1 = 20480 - 0x5000 UBYTE8* DAT_pBinary_Gray; //DAT: 灰度二值化图像2倍降采样 //BYTES: (320/2)*(256/2)*1 = 20480 - 0x5000
UBYTE8* DAT_pBinary_GrayNew; //DAT: 灰度二值化图像1倍降采样 //BYTES: (320/2)*(256/2)*1 = 20480 - 0x5000 UBYTE8* DAT_pBinary_GrayNew; //DAT: 灰度二值化图像1倍降采样 //BYTES: (320/2)*(256/2)*1 = 20480 - 0x5000
MINMAXRECT32S DAT_pmrnBlksRect[DAT_SR_RGN_BLKS_NUM]; //DAT: 分块区域起始坐标数组2倍降采样 MINMAXRECT32S DAT_pmrnBlksRect[DAT_SR_RGN_BLKS_NUM]; //DAT: 分块区域起始坐标数组2倍降采样
SINT32* DAT_DIFF; //DAT: 20161116面目标灰度残差图 SINT16* DAT_DIFF; //DAT: 20161116面目标灰度残差图
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SINT16 GLB_Dat16SampleCnt = 0; //目标16倍下采样的帧连续计数器 SINT16 GLB_Dat16SampleCnt = 0; //目标16倍下采样的帧连续计数器
SINT16 GLB_Dat8SampleCnt = 0; //目标8倍下采样的帧连续计数器 SINT16 GLB_Dat8SampleCnt = 0; //目标8倍下采样的帧连续计数器

@ -52,28 +52,6 @@ SINT32 g_DST_nFilter_5_9[DST_KERNAL_SIZE_5_9] =
}; };
SINT32 g_DST_nFilter_13_17[DST_KERNAL_SIZE_13_17] =
{
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
};
DetectSmallObj::DetectSmallObj(SINT32 nWidth, SINT32 nHeight) DetectSmallObj::DetectSmallObj(SINT32 nWidth, SINT32 nHeight)
{ {
@ -152,11 +130,6 @@ void DetectSmallObj::SetTargetNum(SINT32 num)
m_FrmObjsCnt = num; m_FrmObjsCnt = num;
} }
void DetectSmallObj::SetDetSnr(FLOAT32 fSNR)
{
m_DST_stPara.fgdk = fSNR;
}
DST_PARAMETERS* DetectSmallObj::GetDstParm() DST_PARAMETERS* DetectSmallObj::GetDstParm()
{ {
return &m_DST_stPara; return &m_DST_stPara;
@ -169,9 +142,7 @@ void DetectSmallObj::setDstParm(Param_SkyDetect* param)
m_DST_stPara.nDetectGrayType = param->nDetectGrayType; m_DST_stPara.nDetectGrayType = param->nDetectGrayType;
m_DST_stPara.nObjSizeMin = param->nObjSizeMin; m_DST_stPara.nObjSizeMin = param->nObjSizeMin;
m_DST_stPara.nObjSizeMax = param->nObjSizeMax;
m_DST_stPara.bEnableDimDetect = param->bEnableDetcetDimTarget;
} }
@ -374,13 +345,10 @@ void DetectSmallObj::DST_Initialization(SINT32 nWidth, SINT32 nHeight, CENTERREC
m_DST_stPara.nDetectGrayType = GLB_OBJ_GRAY_BRIGHT; m_DST_stPara.nDetectGrayType = GLB_OBJ_GRAY_BRIGHT;
// 模板参数 // 模板参数
//m_DST_stPara.pnFilter = g_DST_nFilter_9_13; m_DST_stPara.pnFilter = g_DST_nFilter_9_13;
//m_DST_stPara.nFilterBGW = DST_KERNAL_SIZE_Bm_13; m_DST_stPara.nFilterBGW = DST_KERNAL_SIZE_Bm_13;
//m_DST_stPara.nFilterTGW = DST_KERNAL_SIZE_Tm_9; m_DST_stPara.nFilterTGW = DST_KERNAL_SIZE_Tm_9;
m_DST_stPara.pnFilter = g_DST_nFilter_5_9;
m_DST_stPara.nFilterBGW = 9;
m_DST_stPara.nFilterTGW = 5;
// 申请内存 // 申请内存
DST_MallocMemory(nWidth, nHeight, mmCenterRect); DST_MallocMemory(nWidth, nHeight, mmCenterRect);
@ -730,6 +698,7 @@ void DetectSmallObj::DST_MallocMemory(SINT32 nWidth, SINT32 nHeight, CENTERRECT
DST_pBlkPtBeTargetFlag = new SINT32[DST_BLK_NUM * 2]; DST_pBlkPtBeTargetFlag = new SINT32[DST_BLK_NUM * 2];
DST_pBlkMaxPntFlag = new BYTE8[DST_BLK_NUM * 2]; DST_pBlkMaxPntFlag = new BYTE8[DST_BLK_NUM * 2];
} }
void DetectSmallObj::DST_ReleaseMemory() void DetectSmallObj::DST_ReleaseMemory()
@ -1215,6 +1184,7 @@ SINT32 DetectSmallObj::DST_DetectFrameTarget(GD_VIDEO_FRAME_S img, SINT32 nWidth
continue; continue;
} }
//查看此极值点前面上面和左边4个极值点是否已被判断为目标 //查看此极值点前面上面和左边4个极值点是否已被判断为目标
//若当前点落在该目标的上下左右边界之内,或者与该目标距离小于阈值,则将当前点删除不处理 //若当前点落在该目标的上下左右边界之内,或者与该目标距离小于阈值,则将当前点删除不处理
//UINT16 pxPntGray = pFrame[pPnt.y * nWidth + pPnt.x];//from S332 //UINT16 pxPntGray = pFrame[pPnt.y * nWidth + pPnt.x];//from S332
@ -2131,8 +2101,8 @@ BBOOL DetectSmallObj::DST_PipeTargetReDetect(GD_VIDEO_FRAME_S img, SINT32 nWidth
nIndex = pBlkStart + j; nIndex = pBlkStart + j;
int nnIndex_left = nIndex - 1; int nnIndex_left = nIndex - 1;
int nnIndex_right = nIndex + 1; int nnIndex_right = nIndex + 1;
int nnIndex_top = MAX(0, nIndex - nWidth); int nnIndex_top = nIndex - nWidth;
int nnIndex_bottom = MIN(nIndex + nWidth, nHeight - 1); int nnIndex_bottom = nIndex + nWidth;
if (GD_PIXEL_FORMAT_GRAY_Y8 == img.enPixelFormat) if (GD_PIXEL_FORMAT_GRAY_Y8 == img.enPixelFormat)
{ {
@ -3390,7 +3360,7 @@ BBOOL DetectSmallObj::DST_BlkMaxTargetDetect(GD_VIDEO_FRAME_S img, SINT32 nWidth
&& !m_DST_stPara.bDimDetecting && !m_DST_stPara.bDimDetecting
&& !m_DST_stPara.bSecDetecting && !m_DST_stPara.bSecDetecting
&& (ptTarget->unObjPxlsCnt < (UINT32)m_DST_stPara.nObjSizeMin && (ptTarget->unObjPxlsCnt < (UINT32)m_DST_stPara.nObjSizeMin
/*|| ptTarget->unObjPxlsCnt >(UINT32)m_DST_stPara.nObjSizeMax*/)) || ptTarget->unObjPxlsCnt >(UINT32)m_DST_stPara.nObjSizeMax))
{ {
bIsTarget = false; bIsTarget = false;
} }

@ -21,7 +21,7 @@
// 定义dst最大检测个数 // 定义dst最大检测个数
#define DST_MAX_NUM 150 #define DST_MAX_NUM 50
//20170331,雷达导引中心区域恒虚警个数 //20170331,雷达导引中心区域恒虚警个数
#define DST_FA_GUIDE_TARGET_NUM 10 #define DST_FA_GUIDE_TARGET_NUM 10
@ -102,13 +102,13 @@
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
//小目标检测参数 //小目标检测参数
//gdk阈值 //gdk阈值
#define DST_OBJ_SNR_MIN 4.0f//目标SNR下限 #define DST_OBJ_SNR_MIN 5.0f//目标SNR下限
//目标大小 //目标大小
#define DST_OBJ_SIZE_MIN 1 //目标大小下限 #define DST_OBJ_SIZE_MIN 1 //目标大小下限
//背景标准差限制 //背景标准差限制
#define DST_Bm_STD_LIMIT_MIN 2 //极大值滤波窗口-背景标准差下限 #define DST_Bm_STD_LIMIT_MIN 3 //极大值滤波窗口-背景标准差下限
#define DST_Bm_STD_LIMIT_MAX 50 //极大值滤波窗口-背景标准差上限 #define DST_Bm_STD_LIMIT_MAX 50 //极大值滤波窗口-背景标准差上限
//目标宽高比计算方式 //目标宽高比计算方式
@ -246,11 +246,6 @@ public:
// 设置当前帧小目标检测个数 // 设置当前帧小目标检测个数
void SetTargetNum(SINT32 num); void SetTargetNum(SINT32 num);
// 设置检测阈值
void SetDetSnr(FLOAT32 fSNR);
// 获取小目标检测参数 // 获取小目标检测参数
DST_PARAMETERS* GetDstParm(); DST_PARAMETERS* GetDstParm();

@ -180,13 +180,13 @@ void XLogger::setLevelByFileState()
void XLogger::createAllLogger() void XLogger::createAllLogger()
{ {
const std::string logger_name_prefix = "NeoLog"; const std::string logger_name_prefix = "Neo";
// logger name with timestamp // logger name with timestamp
int date = NowDateToInt(); int date = NowDateToInt();
int time = NowTimeToInt(); int time = NowTimeToInt();
const std::string logger_name = logger_name_prefix /*+ std::to_string(date)*/ /*+ "_" + std::to_string(time)*/; const std::string logger_name = logger_name_prefix + std::to_string(date) /*+ "_" + std::to_string(time)*/;
const std::string logger_name_Input = "Arith_Input"; const std::string logger_name_Input = "Arith_Input";
const std::string logger_name_Output = "Arith_Output"; const std::string logger_name_Output = "Arith_Output";
@ -194,23 +194,23 @@ void XLogger::createAllLogger()
m_logger = spdlog::create_async<spdlog::sinks::rotating_file_sink_mt>(logger_name, log_dir + "/" + logger_name + ".log", m_logger = spdlog::create_async<spdlog::sinks::rotating_file_sink_mt>(logger_name, log_dir + "/" + logger_name + ".log",
10 * 1024 * 1024, 2); // multi part log files, with every part 500M, max 1000 files 10 * 1024 * 1024, 3); // multi part log files, with every part 500M, max 1000 files
m_logger_in = spdlog::create_async<spdlog::sinks::rotating_file_sink_mt>(logger_name_Input, log_dir + "/" + logger_name_Input + ".log", m_logger_in = spdlog::create_async<spdlog::sinks::rotating_file_sink_mt>(logger_name_Input, log_dir + "/" + logger_name_Input + ".log",
50 * 1024 * 1024, 2);//100M循环写入 50 * 1024 * 1024, 3);//100M循环写入
m_logger_out = spdlog::create_async<spdlog::sinks::rotating_file_sink_mt>(logger_name_Output, log_dir + "/" + logger_name_Output + ".log", m_logger_out = spdlog::create_async<spdlog::sinks::rotating_file_sink_mt>(logger_name_Output, log_dir + "/" + logger_name_Output + ".log",
50* 1024 * 1024, 2);//100M循环写入 50* 1024 * 1024, 3);//100M循环写入
m_logger_Timer = spdlog::create_async<spdlog::sinks::rotating_file_sink_mt>(logger_name_Timer, log_dir + "/" + logger_name_Timer + ".log", m_logger_Timer = spdlog::create_async<spdlog::sinks::rotating_file_sink_mt>(logger_name_Timer, log_dir + "/" + logger_name_Timer + ".log",
10 * 1024 * 1024, 1); // multi part 0.1 * 1024 * 1024, 1); // multi part
// 设置输出格式 // 设置输出格式
m_logger->set_pattern(LOG_OUTPUT_FORMAT_SIMPLE); m_logger->set_pattern(LOG_OUTPUT_FORMAT_SIMPLE);
m_logger_in->set_pattern(LOG_OUTPUT_FORMAT_NONE_10KG); m_logger_in->set_pattern(LOG_OUTPUT_FORMAT_NONE_10KG);
m_logger_out->set_pattern(LOG_OUTPUT_FORMAT_NONE_10KG); m_logger_out->set_pattern(LOG_OUTPUT_FORMAT_NONE_10KG);
m_logger_Timer->set_pattern(LOG_OUTPUT_FORMAT_NONE_10KG); m_logger_Timer->set_pattern(LOG_OUTPUT_FORMAT_SIMPLE);
// 输入日志默认debug其他全关 // 输入日志默认debug其他全关

@ -3,7 +3,6 @@
#include "Arith_Bbox.h" #include "Arith_Bbox.h"
#include "Version.h" #include "Version.h"
#include "debugExport.h" #include "debugExport.h"
#include "Arith_timer.h"
// 获取外部参数 // 获取外部参数
void ARIDLL_GetInputPara(Arith_EOController* pArith,int nWidth, int nHeight, ARIDLL_INPUTPARA stInputPara); void ARIDLL_GetInputPara(Arith_EOController* pArith,int nWidth, int nHeight, ARIDLL_INPUTPARA stInputPara);
@ -49,9 +48,6 @@ int ARIDLL_RunController(ArithHandle hArithSrc, GD_VIDEO_FRAME_S img, ARIDLL_INP
stInputPara.unFrmId, img.u32Width, img.u32Height, stInputPara.unFreq, stInputPara.stServoInfo.fServoAz, stInputPara.stServoInfo.fServoPt, stInputPara.stAirCraftInfo.stAtt.fRoll, stInputPara.unFrmId, img.u32Width, img.u32Height, stInputPara.unFreq, stInputPara.stServoInfo.fServoAz, stInputPara.stServoInfo.fServoPt, stInputPara.stAirCraftInfo.stAtt.fRoll,
stInputPara.stAirCraftInfo.stAtt.fPitch,stInputPara.stAirCraftInfo.stAtt.fYaw,stInputPara.stAirCraftInfo.stPos.B,stInputPara.stAirCraftInfo.stPos.H,stInputPara.stAirCraftInfo.stPos.L,stInputPara.stCameraInfo.fAglReso, stInputPara.stAirCraftInfo.stAtt.fPitch,stInputPara.stAirCraftInfo.stAtt.fYaw,stInputPara.stAirCraftInfo.stPos.B,stInputPara.stAirCraftInfo.stPos.H,stInputPara.stAirCraftInfo.stPos.L,stInputPara.stCameraInfo.fAglReso,
stInputPara.stCameraInfo.nFocus, stInputPara.stCameraInfo.fPixelSize, stInputPara.stCameraInfo.unVideoType); stInputPara.stCameraInfo.nFocus, stInputPara.stCameraInfo.fPixelSize, stInputPara.stCameraInfo.unVideoType);
stopWatch sw;
// 获取算法指针 // 获取算法指针
Arith_EOController* pArith = (Arith_EOController*)hArithSrc; Arith_EOController* pArith = (Arith_EOController*)hArithSrc;
// 算法暂不支持stride特性先进行stride检查 // 算法暂不支持stride特性先进行stride检查
@ -85,12 +81,6 @@ int ARIDLL_RunController(ArithHandle hArithSrc, GD_VIDEO_FRAME_S img, ARIDLL_INP
// 输出算法结果 // 输出算法结果
ARIDLL_Output(pArith, pstOutput); ARIDLL_Output(pArith, pstOutput);
if (pstOutput->nStatus != GLB_STATUS_SEARCH)
{
LOG_TIME("[Controller Time]:STATUS:{}, {}ms", pstOutput->nStatus, sw.elapsed_ms());
}
return 0; return 0;
} }
@ -116,19 +106,7 @@ STD_TRACKER_API int ARIDLL_SearchFrameTargets(ArithHandle hArithSrc, GD_VIDEO_FR
pArith->g_GLB_Detectors->m_FrmObjsCnt = nFrmTargetNum; pArith->g_GLB_Detectors->m_FrmObjsCnt = nFrmTargetNum;
return nFrmTargetNum; return nFrmTargetNum;
} }
nFrmTargetNum = pArith->g_GLB_Detectors->Detect(img); nFrmTargetNum = pArith->g_GLB_Detectors->Detect(img);
//if (pArith->g_GLB_stPara.nStatus != GLB_STATUS_MOTRACK)
//{
// nFrmTargetNum = pArith->g_GLB_Detectors->Detect(img);
//}
//else
//{
// pArith->g_GLB_Detectors->m_FrmObjsCnt = 0;
//}
//// 检测队列 //// 检测队列
//TARGET_OBJECT* pFrameTargetArray = pArith->g_GLB_Detectors->GetTargetArray(); //TARGET_OBJECT* pFrameTargetArray = pArith->g_GLB_Detectors->GetTargetArray();
@ -476,22 +454,10 @@ void ARIDLL_OutputPipeTarget(Arith_EOController* pArith, ARIDLL_OUTPUT* pstOutpu
pipeBox.w = pTarget->snSize.w; pipeBox.w = pTarget->snSize.w;
pipeBox.h = pTarget->snSize.h; pipeBox.h = pTarget->snSize.h;
//if(SIoUB_Expand(pipeBox,trackBox, 10) > 0.1 && !pPipe->bTrackingPipe) if(SIoUB(pipeBox,trackBox) > 0.2 && !pPipe->bTrackingPipe)
//{
// continue;
//}
//
if (!pPipe->bTrackingPipe &&
(pstOutput->nStatus == GLB_STATUS_TRACK || pstOutput->nStatus == GLB_STATUS_MOTRACK))
{
if (ABS(pipeBox.x - trackBox.x) < 200 && ABS(pipeBox.y - trackBox.y) < 200)
{ {
continue; continue;
} }
}
// 告警目标输出 // 告警目标输出
ARIDLL_OBJINFO* pt_detObj = &pstOutput->stAlarmObjs[pstOutput->nAlarmObjCnts]; ARIDLL_OBJINFO* pt_detObj = &pstOutput->stAlarmObjs[pstOutput->nAlarmObjCnts];
@ -515,12 +481,6 @@ void ARIDLL_OutputPipeTarget(Arith_EOController* pArith, ARIDLL_OUTPUT* pstOutpu
obj.nOutputID = pPipe->nAlarmBatchID_1; obj.nOutputID = pPipe->nAlarmBatchID_1;
obj.unClsType = pPipe->unClsType; obj.unClsType = pPipe->unClsType;
obj.fSNR = pTarget->fSNR;
obj.nObjGray = pTarget->pxObjGray;
obj.fBkgGray = pTarget->fBGMean;
obj.fBkgStd = pTarget->fBGStd;
// 若有外部引导批号,则上报该批号 // 若有外部引导批号,则上报该批号
if (pPipe->nGuideBatchID > 0) if (pPipe->nGuideBatchID > 0)
@ -639,10 +599,10 @@ void ARIDLL_OutputPipeTarget(Arith_EOController* pArith, ARIDLL_OUTPUT* pstOutpu
{ {
if (pstOutput->nAlarmObjCnts < ST_OBJ_NUM - 1) if (pstOutput->nAlarmObjCnts < ST_OBJ_NUM - 1)
{ {
//if (pArith->g_GLB_stPara.bFiteredAlarm && obj.fPt < pArith->g_GLB_stPara.fFilterPt) if (pArith->g_GLB_stPara.bFiteredAlarm && obj.fPt < pArith->g_GLB_stPara.fFilterPt)
//{ {
// continue; continue;
//} }
memcpy(pt_detObj, &obj, sizeof(ARIDLL_OBJINFO)); memcpy(pt_detObj, &obj, sizeof(ARIDLL_OBJINFO));
pstOutput->nAlarmObjCnts++; pstOutput->nAlarmObjCnts++;
} }
@ -663,7 +623,6 @@ void ARIDLL_OutputPipeTarget(Arith_EOController* pArith, ARIDLL_OUTPUT* pstOutpu
} }
} }
} }
//仅用于对外送显状态显示 //仅用于对外送显状态显示
if(2 == pstOutput->nStatus)//搜索状态 if(2 == pstOutput->nStatus)//搜索状态
{ {
@ -695,7 +654,6 @@ void ARIDLL_OutputPipeTarget(Arith_EOController* pArith, ARIDLL_OUTPUT* pstOutpu
pstOutput->stAlarmObjs[0] = pstOutput->stAlarmObjs[Index]; pstOutput->stAlarmObjs[0] = pstOutput->stAlarmObjs[Index];
pstOutput->stAlarmObjs[Index] = TempObj; pstOutput->stAlarmObjs[Index] = TempObj;
} }
LOG_DEBUG_OUTPUT("stAlarmObjs[0]nX:{},nY:{}; stTrackers[0]nX:{},nY:{}", pstOutput->stAlarmObjs[0].nX, pstOutput->stAlarmObjs[0].nY, pstOutput->stTrackers[0].nX, pstOutput->stTrackers[0].nY); LOG_DEBUG_OUTPUT("stAlarmObjs[0]nX:{},nY:{}; stTrackers[0]nX:{},nY:{}", pstOutput->stAlarmObjs[0].nX, pstOutput->stAlarmObjs[0].nY, pstOutput->stTrackers[0].nX, pstOutput->stTrackers[0].nY);
} }

@ -89,9 +89,7 @@ typedef struct tagARIDLL_OBJINFO
FLOAT32 fConf;//跟踪置信度 FLOAT32 fConf;//跟踪置信度
ObjSrc ArithSrc;//跟踪算法来源,决策后 ObjSrc ArithSrc;//跟踪算法来源,决策后
float fBkgGray; //目标背景灰度值 unsigned char byte[20];//预留
float fBkgStd; //目标背景灰度标准差
unsigned char byte[12];//预留
}ARIDLL_OBJINFO; }ARIDLL_OBJINFO;

@ -652,14 +652,6 @@ void MOT_Pipe::PIPE_UpdatePipes(TARGET_OBJECT* ptTargetArray, SINT32 pnFrmObjsCn
//自适应调节“管道目标没有被找到的次数阈值” //自适应调节“管道目标没有被找到的次数阈值”
pPipe->nDelCntThres = MAX(3, pPipe->nDelCntThres); pPipe->nDelCntThres = MAX(3, pPipe->nDelCntThres);
// 弱小目标,不容易删除
if (pPipe->ObjectFilter.fPxlsCnt < 5 && pPipe->stMotionMod_mean.bTrackStable)
{
pPipe->nDelCntThres = 10;
}
if (pPipe->unLostCnt > (UINT32)pPipe->nDelCntThres) if (pPipe->unLostCnt > (UINT32)pPipe->nDelCntThres)
{ {
DelPipe(pPipe); DelPipe(pPipe);

@ -3,4 +3,4 @@
#pragma once #pragma once
#include <string> #include <string>
std::string BUILD_TIME = "BUILD_TIME @build_time@"; std::string BUILD_TIME = "BUILD_TIME @build_time@";
std::string VERSION = "BUILD_VERSION 1.4.1"; std::string VERSION = "BUILD_VERSION 1.3.1";

@ -293,26 +293,6 @@ DAT_OUTPUT* ARIDLL_GetTrkDAT_stOutput(ArithHandle hArithSrc, int pipeID)
return pArith->g_GLB_Trackers[pipeID]->pSkyTracker->pSATracker->pDAT_Module->getDAT_stOutput(); return pArith->g_GLB_Trackers[pipeID]->pSkyTracker->pSATracker->pDAT_Module->getDAT_stOutput();
} }
DBG_TRACKER_API int ARIDLL_GetSkyLineY(ArithHandle hArithSrc)
{
Arith_EOController* pArith = (Arith_EOController*)hArithSrc;
auto input = &pArith->g_GLB_stInput;
if (pArith->g_GLB_stPara.bFiteredAlarm)
{
Pole skyPole = { 0 };
skyPole.alpha = pArith->g_GLB_stPara.fFilterPt;
skyPole.beta = input->servoInfo.fAz; //这个车基本是平的,不然需要找到当前视场方位角
POINT32F Skypos = getImagePosFromStablePole(skyPole, input->stCamera, input->servoInfo, input->afPlatformRPY, input->setupErr);
return Skypos.y;
}
return -1;
}
FilterMeanNL* ARIDLL_GetTrackerHistInfo(ArithHandle hArithSrc, int pipeID) FilterMeanNL* ARIDLL_GetTrackerHistInfo(ArithHandle hArithSrc, int pipeID)
{ {

@ -146,9 +146,6 @@ DBG_TRACKER_API DAT_PARAMETERS* ARIDLL_GetTrkDAT_stPara(ArithHandle hArithSrc, i
// 跟踪器局部检测面结果输出 // 跟踪器局部检测面结果输出
DBG_TRACKER_API DAT_OUTPUT* ARIDLL_GetTrkDAT_stOutput(ArithHandle hArithSrc, int pipeID); DBG_TRACKER_API DAT_OUTPUT* ARIDLL_GetTrkDAT_stOutput(ArithHandle hArithSrc, int pipeID);
// 输出天地线
DBG_TRACKER_API int ARIDLL_GetSkyLineY(ArithHandle hArithSrc);
////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////
////////////////////////////TLD检测结果导出/////////////////////////////// ////////////////////////////TLD检测结果导出///////////////////////////////

@ -108,8 +108,8 @@ int main()
char str[100]; char str[100];
sprintf(str,"%d-%d,%d,%d",i,int(stOutput.stTrackers[0].nX - moveX),int(stOutput.stTrackers[0].nY - moveY),stOutput.stTrackers->nPipeLostCnt); sprintf(str,"%d-%d,%d,%d",i,int(stOutput.stTrackers[0].nX - moveX),int(stOutput.stTrackers[0].nY - moveY),stOutput.stTrackers->nPipeLostCnt);
cv::putText(src,cv::String(str),cv::Point(outRect.x - 10,outRect.y),1,2,cv::Scalar(255,255,0)); cv::putText(src,cv::String(str),cv::Point(outRect.x - 10,outRect.y),1,2,cv::Scalar(255,255,0));
imshow("res",src); //imshow("res",src);
cv::waitKey(2); //cv::waitKey(2);
if ((stOutput.nStatus == GLB_STATUS_TRACK || stOutput.nStatus == GLB_STATUS_MOTRACK) && stOutput.nTrackObjCnts == 1) if ((stOutput.nStatus == GLB_STATUS_TRACK || stOutput.nStatus == GLB_STATUS_MOTRACK) && stOutput.nTrackObjCnts == 1)
{ {

@ -33,13 +33,13 @@ void S3315GetImagePara(unsigned char* pImageDataBuffer,unsigned char* pParamData
ImagePara->stAirCraftInfo.stAtt.fYaw = calc_fov_by_mil(pParamNO65DataBuffer->B1B0, 6000) * 6000.0f / 65536.0f; ImagePara->stAirCraftInfo.stAtt.fYaw = calc_fov_by_mil(pParamNO65DataBuffer->B1B0, 6000) * 6000.0f / 65536.0f;
ImagePara->stAirCraftInfo.stAtt.fPitch = calc_fov_by_mil(pParamNO65DataBuffer->B3B2, 6000) * 6000.0f / 65536.0f; ImagePara->stAirCraftInfo.stAtt.fPitch = calc_fov_by_mil(pParamNO65DataBuffer->B3B2, 6000) * 6000.0f / 65536.0f;
ImagePara->stAirCraftInfo.stAtt.fRoll = calc_fov_by_mil(pParamNO65DataBuffer->B5B4, 6000) * 6000.0f / 65536.0f; ImagePara->stAirCraftInfo.stAtt.fRoll = calc_fov_by_mil(pParamNO65DataBuffer->B5B4, 6000) * 6000.0f / 65536.0f;
ImagePara->stCameraInfo.fPixelSize = 25;
ImagePara->unFreq = 50; ImagePara->unFreq = 50;
// pImageDataBuffer 是热像仪640*513 最后一行是热像仪参数 // pImageDataBuffer 是热像仪640*513 最后一行是热像仪参数
unsigned short* pIR = ((unsigned short*)pImageDataBuffer); unsigned short* pIR = ((unsigned short*)pImageDataBuffer);
ImagePara->stCameraInfo.nFocus = pIR[640 * 512 + 64] * 0.1; ImagePara->stCameraInfo.nFocus = pIR[640 * 512 + 64] * 0.1;
ImagePara->stCameraInfo.fPixelSize = pIR[640 * 512 + 29];
/*std::cout << std::hex << std::setw(8) << "pParamDataBuffer" << std::setfill('0') << reinterpret_cast<std::uintptr_t>(pParamDataBuffer) << ": "; /*std::cout << std::hex << std::setw(8) << "pParamDataBuffer" << std::setfill('0') << reinterpret_cast<std::uintptr_t>(pParamDataBuffer) << ": ";

@ -59,7 +59,7 @@ void QFileList::DisplayFileList(QString path)
foreach(QString str, m_vecFileList) foreach(QString str, m_vecFileList)
{ {
//str.toLower(); //str.toLower();
QString es = "raw$|xraw$|rawx$|GD$|avi$|mp4$|yuv|MP4|AVI|data";//by wcw04046 QString es = "raw$|xraw$|rawx$|GD$|avi$|mp4$|yuv|MP4|AVI";//by wcw04046
nPos = str.indexOf(QRegExp(es), 0); nPos = str.indexOf(QRegExp(es), 0);
if (-1 == nPos) if (-1 == nPos)
{ {

@ -482,7 +482,7 @@ VideoStream* QVideoPlayer::OpenOneStream(QString lpszFileName)
{ {
pVid = new GDFileStream(); pVid = new GDFileStream();
} }
else if(extName == "raw" || extName == "xraw" || extName == "rawx" || extName == "yuv" || extName == "data") else if(extName == "raw" || extName == "xraw" || extName == "rawx" || extName == "yuv")
{ {
// 在当前路径下寻找raw文件默认配置 // 在当前路径下寻找raw文件默认配置
int first = lpszFileName.lastIndexOf("/"); int first = lpszFileName.lastIndexOf("/");
@ -638,7 +638,6 @@ void QVideoPlayer::DrawArithResult()
{ {
return; return;
} }
// 更新指南针 // 更新指南针
UpdateCompass(m_stInputPara.stServoInfo.fServoAz, m_stInputPara.stServoInfo.fServoPt); UpdateCompass(m_stInputPara.stServoInfo.fServoAz, m_stInputPara.stServoInfo.fServoPt);
@ -651,8 +650,6 @@ void QVideoPlayer::DrawArithResult()
// 导引信息 // 导引信息
DrawGuideRect(); DrawGuideRect();
DrawSkyLine(m_ImageViewer);
// 绘制单帧检测(外部传入) // 绘制单帧检测(外部传入)
if (bEnableDrawFrameDetect) if (bEnableDrawFrameDetect)
{ {
@ -769,6 +766,7 @@ void QVideoPlayer::DrawTrackersInfo()
QGraphicsScene* scene = m_ImageViewer->imgScene; QGraphicsScene* scene = m_ImageViewer->imgScene;
int num = m_stOutput.nTrackObjCnts; int num = m_stOutput.nTrackObjCnts;
for (size_t i = 0; i < num; i++) for (size_t i = 0; i < num; i++)
{ {
auto obj = &m_stOutput.stTrackers[i]; auto obj = &m_stOutput.stTrackers[i];
@ -850,7 +848,6 @@ void QVideoPlayer::DrawTrackersInfo()
{ {
str = QString::number(obj->nOutputID); str = QString::number(obj->nOutputID);
} }
DrawArtRect(m_ImageViewer, QPen(QColor(255, 0, 0)), bbox, str, false,15, QColor(255,0,0)); DrawArtRect(m_ImageViewer, QPen(QColor(255, 0, 0)), bbox, str, false,15, QColor(255,0,0));
@ -895,27 +892,22 @@ void QVideoPlayer::DrawTrackersInfo()
str = QString(_S("ID:%1 conf%2,Src:%3,mem:%4")).arg(obj->nOutputID).arg(QString::number(fconf, 'f', 3)).arg(src).arg(lostCnt); str = QString(_S("ID:%1 conf%2,Src:%3,mem:%4")).arg(obj->nOutputID).arg(QString::number(fconf, 'f', 3)).arg(src).arg(lostCnt);
str1 = QString(_S("type:%1")).arg(obj->unClsType); str1 = QString(_S("type:%1")).arg(obj->unClsType);
QString str2 = QString(_S("SNR:%1, dGray:%2")).arg(obj->fSNR).arg(obj->nObjGray - obj->fBkgGray);
if (obj->nOutputID >= 2) if (obj->nOutputID >= 2)
{ {
continue; continue;
} }
auto text = scene->addSimpleText(str); auto text = scene->addSimpleText(str);
auto text1 = scene->addSimpleText(str1); auto text1 = scene->addSimpleText(str1);
auto text2 = scene->addSimpleText(str2);
if (sabox.w == 0 || sabox.h == 0) if (sabox.w == 0 || sabox.h == 0)
{ {
text->setPos(QPoint(kcfBox.x - 10, kcfBox.y - 10)); text->setPos(QPoint(kcfBox.x - 10, kcfBox.y - 10));
text1->setPos(QPoint(kcfBox.x - 10, kcfBox.y + kcfBox.h + 10)); text1->setPos(QPoint(kcfBox.x - 10, kcfBox.y + kcfBox.h + 10));
} }
else else
{ {
text->setPos(QPoint(sabox.x - 10, sabox.y - 10)); text->setPos(QPoint(sabox.x - 10, sabox.y - 10));
text1->setPos(QPoint(sabox.x - 10, sabox.y + sabox.h + 10)); text1->setPos(QPoint(sabox.x - 10, sabox.y + sabox.h + 10));
text2->setPos(QPoint(sabox.x - 10, sabox.y + sabox.h + 20));
} }
QFont ft; ft.setPointSize(8); QFont ft; ft.setPointSize(8);
text->setBrush(QBrush(QColor(Qt::red))); text->setBrush(QBrush(QColor(Qt::red)));
@ -927,7 +919,7 @@ void QVideoPlayer::DrawTrackersInfo()
// 查询跟踪阶段目标列表 // 查询跟踪阶段目标列表
DrawFrameRegionDetectObjs(); DrawFrameRegionDetectObjs();
//// 绘制小目标极值点 // 绘制小目标极值点
//POINT16S* pList = ARIDLL_GetSATracker_DSTPoint(m_ArithRunner->pEOTracker, nInPipesID); //POINT16S* pList = ARIDLL_GetSATracker_DSTPoint(m_ArithRunner->pEOTracker, nInPipesID);
//for (size_t i = 0; i < (640 / 16 * 512 / 16) * 2; i++) //for (size_t i = 0; i < (640 / 16 * 512 / 16) * 2; i++)
@ -972,10 +964,10 @@ void QVideoPlayer::DrawFrameDetectObjs()
int cx = obj->pfCenPos.x; int cx = obj->pfCenPos.x;
int cy = obj->pfCenPos.y; int cy = obj->pfCenPos.y;
int w = MAX(10, obj->snSize.w); //int w = MAX(15, obj->snSize.w);
int h = MAX(10, obj->snSize.h); //int h = MAX(15, obj->snSize.h);
//int w = obj->snSize.w; int w = obj->snSize.w;
//int h = obj->snSize.h; int h = obj->snSize.h;
QRectF bbox(cx - w / 2, cy - h / 2, w, h); QRectF bbox(cx - w / 2, cy - h / 2, w, h);
scene->addRect(bbox, QPen(QColor(255, 20, 147), 0.3)); scene->addRect(bbox, QPen(QColor(255, 20, 147), 0.3));
@ -1015,10 +1007,10 @@ void QVideoPlayer::DrawFrameRegionDetectObjs()
int cx = obj->pfCenPos.x; int cx = obj->pfCenPos.x;
int cy = obj->pfCenPos.y; int cy = obj->pfCenPos.y;
int w = MAX(5, obj->snSize.w); //int w = MAX(15, obj->snSize.w);
int h = MAX(5, obj->snSize.h); //int h = MAX(15, obj->snSize.h);
//int w = obj->snSize.w; int w = obj->snSize.w;
//int h = obj->snSize.h; int h = obj->snSize.h;
QRectF bbox(cx - w / 2, cy - h / 2, w, h); QRectF bbox(cx - w / 2, cy - h / 2, w, h);
scene->addRect(bbox, QPen(QColor(20, 255, 147), 0.3)); scene->addRect(bbox, QPen(QColor(20, 255, 147), 0.3));
str = QString::number(obj->fMatchConf, 'f', 2); str = QString::number(obj->fMatchConf, 'f', 2);
@ -1280,7 +1272,7 @@ void QVideoPlayer::PrintSkyInfo(QString str, QString strShow, ArithHandle hArith
// 划分为0 - 上、1 - 右上、2 - 右、3 - 右下、4 - 下、5 - 左下、6 - 左、7 - 左上8个背景区域 // 划分为0 - 上、1 - 右上、2 - 右、3 - 右下、4 - 下、5 - 左下、6 - 左、7 - 左上8个背景区域
auto obj = &mrnBkgBlks[i]; auto obj = &mrnBkgBlks[i];
QRectF bbox(obj->minX, obj->minY, obj->maxX - obj->minX, obj->maxY - obj->minY); QRectF bbox(obj->minX, obj->minY, obj->maxX - obj->minX, obj->maxY - obj->minY);
//scene->addRect(bbox, QPen(QColor(147, 20, 255), 0.3)); scene->addRect(bbox, QPen(QColor(147, 20, 255), 0.3));
} }
TSky_Output* pSky_Output = GetSkyTrackerObjectStatus(pEOTracker, obj->nInPipesID); TSky_Output* pSky_Output = GetSkyTrackerObjectStatus(pEOTracker, obj->nInPipesID);
@ -1607,16 +1599,10 @@ void QVideoPlayer::DrawCrossInImage(QImageViewer* view, QRect rect, QPen pen)
} }
void QVideoPlayer::DrawSkyLine(QImageViewer* view) void QVideoPlayer::DrawSkyLine(QImageViewer* view,int y)
{ {
int skyY = ARIDLL_GetSkyLineY(pEOTracker);
if (skyY > 0)
{
QGraphicsScene* scene = view->imgScene;
scene->addLine(QLine(0, skyY, 640, skyY), QPen(QBrush(QColor(Qt::red)),3,Qt::DashDotLine));
}
} }
void QVideoPlayer::DrawGuideRect() void QVideoPlayer::DrawGuideRect()
@ -1924,21 +1910,10 @@ void QVideoPlayer::DrawPipeAglInfo()
auto TrackListInfo = ARIDLL_GetTrackerHistInfo(pEOTracker, P); auto TrackListInfo = ARIDLL_GetTrackerHistInfo(pEOTracker, P);
DrawAglList(m_ImageViewer, &TrackListInfo->ObjAglListsNear, QPen(QColor(255, 255, 255), 0.5, Qt::DotLine)); DrawAglList(m_ImageViewer, &TrackListInfo->ObjAglListsNear, QPen(QColor(255, 255, 255), 0.5, Qt::DotLine));
DrawAglList(m_ImageViewer, &TrackListInfo->ObjAglListsLong, QPen(QColor(255, 100, 100), 0.5, Qt::DotLine)); DrawAglList(m_ImageViewer, &TrackListInfo->ObjAglListsLong, QPen(QColor(255, 100, 100), 0.5, Qt::DotLine));
if (TrackListInfo->bTrackStable)
{
QPen pen1(QPen(QColor(255, 255, 255), 0.5, Qt::SolidLine)); QPen pen1(QPen(QColor(255, 255, 255), 0.5, Qt::SolidLine));
DrawCrossInImage(m_ImageViewer, QRect(pPipe->stMotionMod_mean.crnObjPrediRtLong.cx - 80, pPipe->stMotionMod_mean.crnObjPrediRtLong.cy - 80, 160, 160), pen1); DrawCrossInImage(m_ImageViewer, QRect(pPipe->stMotionMod_mean.crnObjPrediRtLong.cx - 40, pPipe->stMotionMod_mean.crnObjPrediRtLong.cy - 40, 80, 80), pen1);
}
else
{
QPen pen2(QPen(QColor(255, 255, 255), 0.5, Qt::DotLine));
DrawCrossInImage(m_ImageViewer, QRect(pPipe->stMotionMod_mean.crnObjPrediRtLong.cx - 40, pPipe->stMotionMod_mean.crnObjPrediRtLong.cy - 40, 80, 80), pen2);
}
QPen pen2(QPen(QColor(255, 20, 20), 0.5, Qt::SolidLine)); QPen pen2(QPen(QColor(255, 20, 20), 0.5, Qt::SolidLine));
DrawCrossInImage(m_ImageViewer, QRect(pPipe->stMotionMod_mean.crnObjPrediRtNear.cx - 25, pPipe->stMotionMod_mean.crnObjPrediRtNear.cy - 25, 50, 50), pen2); DrawCrossInImage(m_ImageViewer, QRect(pPipe->stMotionMod_mean.crnObjPrediRtNear.cx - 25, pPipe->stMotionMod_mean.crnObjPrediRtNear.cy - 25, 50, 50), pen2);
} }
else if(bEnableDrawFrameDetect) else if(bEnableDrawFrameDetect)
{ {

@ -243,7 +243,7 @@ private:
void PrintTableInfo(); void PrintTableInfo();
// 绘制天地线 // 绘制天地线
void DrawSkyLine(QImageViewer* view); void DrawSkyLine(QImageViewer* view, int y);
// 绘制导引区域 // 绘制导引区域
void DrawGuideRect(); void DrawGuideRect();

@ -1,26 +0,0 @@
import cv2
import numpy as np
import matplotlib.pyplot as plt
file = open('dump_y16_640x513_2025-08-23-21_47_05.data','rb')
dy500 = np.zeros(1500)
dy300 = np.zeros(1500)
for i in range(1500):
frame = file.read(640*513*2+640*513+640*4)
y8 = np.frombuffer(frame[640*513*2:640*513*2+640*513],dtype=np.uint8).reshape([513,640])
y16 = np.frombuffer(frame[0:640*513*2],dtype=np.uint16).reshape([513,640])
dy500[i] = abs(y16[500,:].astype(np.int16) - (y16[501,:]).astype(np.int16)).mean()
dy300[i] = abs(y16[300,:].astype(np.int16) - (y16[301,:]).astype(np.int16)).mean()
cv2.imshow("",y8)
cv2.waitKey(1)
plt.plot(dy500)
plt.plot(dy300)
plt.show()
Loading…
Cancel
Save