// NOTE: repository-page scrape artifacts below, commented out so the file compiles.
// You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
//
// 417 lines
// 13 KiB
#include "Arith_AITTracker.h"
#include "Arith_ImgOperate.h"
#include "Arith_CoordModule.h"
#include "Arith_AI_Tracker_API.h"
using namespace std;
// Constructs the AI tracker interface from a JSON configuration file.
// jsonConf - path to the JSON file consumed by parse_json_to_struct().
// Resets all bookkeeping state, parses the config, and (when the AI track
// feature is compiled in) creates the underlying tracker handle.
AIT_Interface::AIT_Interface(const char *jsonConf)
{
    bAssignflag = false;
    m_LockingPipe = nullptr;
    nTracker = nullptr;
    m_OccCnt = 0;
    m_ArrestCnt = 0;
    memset(&m_ObjStatus, 0, sizeof(OBJECTSTATUS));
    memset(&m_Ai_TkOut, 0, sizeof(AIT_OUTPUT));
    memset(&m_Ai_TkCmd, 0, sizeof(AIT_Command));
    // Zero-initialize so the config is deterministic even when
    // parse_json_to_struct() returns early (e.g. file could not be opened);
    // previously an indeterminate struct could reach the Create call.
    gud_nano_config_t nanoConf = {};
    parse_json_to_struct(jsonConf, nanoConf);
#ifdef ENABLE_AI_TRACK
    GUD_ALG_AI_TRACK_Create(&nTracker, &nanoConf);
#endif
}
// Releases the underlying AI tracker handle, if one was ever created.
AIT_Interface::~AIT_Interface()
{
#ifdef ENABLE_AI_TRACK
    if (nTracker != NULL)
    {
        GUD_ALG_AI_TRACK_Release(nTracker);
    }
#endif
}
// Constructs the AI tracker interface from an already-populated parameter
// struct. Mirrors the JSON-config constructor: all members are reset first,
// so no field is left uninitialized when this overload is used (the original
// version skipped the resets, leaving bAssignflag/m_LockingPipe/counters and
// the cached output structs indeterminate).
AIT_Interface::AIT_Interface(Param_AITracker para)
{
    bAssignflag = false;
    m_LockingPipe = nullptr;
    nTracker = nullptr;
    m_OccCnt = 0;
    m_ArrestCnt = 0;
    memset(&m_ObjStatus, 0, sizeof(OBJECTSTATUS));
    memset(&m_Ai_TkOut, 0, sizeof(AIT_OUTPUT));
    memset(&m_Ai_TkCmd, 0, sizeof(AIT_Command));
#ifdef ENABLE_AI_TRACK
    GUD_ALG_AI_TRACK_Create(&nTracker, &para.nano_config);
    printf("### GUD_ALG_AI_TRACK_Create Success!\n");
#endif
}
// Loads the nano-tracker configuration from a JSON file into `config`.
// json_file_path - path of the JSON configuration file.
// config         - configuration struct to fill; left untouched if the file
//                  cannot be opened. Missing keys cause nlohmann::json to throw.
// NOTE(review): the Model_* / reid_json path fields are NOT assigned here
// (the strdup block below is commented out) — presumably they are set by the
// caller; confirm. They are therefore only printed when non-null, because
// inserting a null const char* into std::cout is undefined behavior.
void AIT_Interface::parse_json_to_struct(const std::string& json_file_path, gud_nano_config_t& config)
{
    // Read the JSON file.
    std::ifstream json_file(json_file_path);
    if (!json_file.is_open()) {
        std::cerr << "Error: Could not open JSON file!" << std::endl;
        return;
    }
    // Parse the JSON document.
    nlohmann::json json_data;
    json_file >> json_data;
    // Fill the scalar fields of the struct.
    config.lr_rate = json_data["lr_rate"];
    config.ac_score = json_data["ac_score"];
    config.device_type = json_data["device_type"];
    config.use_reid = json_data["use_reid"];
    config.reid_frq = json_data["reid_frq"];
    config.use_trace = json_data["use_trace"];
    config.trace_len = json_data["trace_len"];
    config.use_update = json_data["use_update"];
    config.update_frq = json_data["update_frq"];
    // Allocate and copy the model path strings (currently disabled).
    //config.Model_temp = strdup((std::string(SOURCE_PATH) + "/AITracker/x86_onnx/" +json_data["Model_temp"].get<std::string>()).c_str());
    //config.Model_search = strdup((std::string(SOURCE_PATH) + "/AITracker/x86_onnx/" + json_data["Model_search"].get<std::string>()).c_str());
    //config.Model_head = strdup((std::string(SOURCE_PATH) + "/AITracker/x86_onnx/" + json_data["Model_head"].get<std::string>()).c_str());
    //config.reid_json = strdup((std::string(SOURCE_PATH) + "/AITracker/x86_onnx/" + json_data["reid_json"].get<std::string>()).c_str());
#if (1)
    // Dump the parsed configuration for debugging.
    std::cout << "lr_rate: " << config.lr_rate << std::endl;
    std::cout << "ac_score: " << config.ac_score << std::endl;
    std::cout << "device_type: " << config.device_type << std::endl;
    std::cout << "use_reid: " << config.use_reid << std::endl;
    std::cout << "reid_frq: " << config.reid_frq << std::endl;
    std::cout << "use_trace: " << config.use_trace << std::endl;
    std::cout << "trace_len: " << config.trace_len << std::endl;
    std::cout << "use_update: " << config.use_update << std::endl;
    std::cout << "update_frq: " << config.update_frq << std::endl;
    // Guard the pointer fields: they may be null while the strdup block is disabled.
    if (config.Model_temp)   std::cout << "Model_temp: " << config.Model_temp << std::endl;
    if (config.Model_search) std::cout << "Model_search: " << config.Model_search << std::endl;
    if (config.Model_head)   std::cout << "Model_head: " << config.Model_head << std::endl;
    if (config.reid_json)    std::cout << "reid_json: " << config.reid_json << std::endl;
#endif
}
// Initializes AI tracking from the current detection pipe: seeds the cached
// object status (position / size / angle / speed / confidence) from the
// pipe's latest entry, builds the initial bounding box, and calls the AI
// tracker's init entry point.
// img          - current video frame handed to the AI tracker.
// pLockingPipe - detection pipe the tracker is being locked onto; also
//                stored in m_LockingPipe for later runs.
// p_GLB_Input  - global input (camera, servo, platform attitude, setup
//                error) used for the image-to-stable-angle conversion.
// Returns true unconditionally; failure of the underlying init call is not
// reported to the caller.
bool AIT_Interface::AIT_Init(GD_VIDEO_FRAME_S img, PIPE * pLockingPipe, GLB_INPUT * p_GLB_Input)
{
// Remember the tracking pipe.
m_LockingPipe = pLockingPipe;
// Initialize the object status reported by the tracker.
OBJECTSTATUS* pObjStatus = &m_ObjStatus;
SINT32 nEnd = pLockingPipe->ubEnd;
// NOTE(review): nObjSize is computed but never used below.
SINT32 nObjSize = (SINT32)(pLockingPipe->ObjectFilter.fPxlsCnt);
TARGET_OBJECT* pTrackingTarget = &pLockingPipe->objHistoryList[nEnd];
pObjStatus->bObjAssigned = true;
pObjStatus->unTotalCnt = 1;
pObjStatus->unTrackedCnt = 1;
pObjStatus->unContiTrackedCnt = 1;
pObjStatus->unContiLostCnt = 0;
pObjStatus->bObjMiss = false;
// Position: current pipe point; previous/filtered/born positions all start
// from the same point on the first frame.
pObjStatus->ptPosPre.x = pLockingPipe->ptCurrentPnt.x;
pObjStatus->ptPosPre.y = pLockingPipe->ptCurrentPnt.y;
pObjStatus->ptPos.x = pObjStatus->ptPosPre.x;
pObjStatus->ptPos.y = pObjStatus->ptPosPre.y;
pObjStatus->ptPosFilter.x = pObjStatus->ptPosPre.x;
pObjStatus->ptPosFilter.y = pObjStatus->ptPosPre.y;
pObjStatus->ptPosBorn = pObjStatus->ptPos;
// Size: taken from the newest target object in the pipe's history list.
pObjStatus->sfSize.w = pTrackingTarget->snSize.w;
pObjStatus->sfSize.h = pTrackingTarget->snSize.h;
pObjStatus->sfSize.s = pObjStatus->sfSize.w * pObjStatus->sfSize.h;
pObjStatus->sfSizeBorn = pObjStatus->sfSize;
pObjStatus->fObjPxlsCnt = (FLOAT32)pTrackingTarget->unObjPxlsCnt;
// Update target speed, angular speed and confidence.
pObjStatus->sfSpeed.vx = 0.0f;
pObjStatus->sfSpeed.vy = 0.0f;
pObjStatus->sfAglSpeed.vx = pLockingPipe->sfAglSpeed.vx;
pObjStatus->sfAglSpeed.vy = pLockingPipe->sfAglSpeed.vy;
pObjStatus->fConfidence = 1.0f;
// Update target angle: convert the target's image position to stable
// platform angles (azimuth/pitch).
Pole pole = getStablePoleFromImagePos(pTrackingTarget->pfCenPos,
p_GLB_Input->stCamera, p_GLB_Input->servoInfo, p_GLB_Input->afPlatformRPY, p_GLB_Input->setupErr);
// Extrapolate one frame before sending to the AI tracker (disabled).
//pole.beta = DEGLIM360(pObjStatus->afAngle.fAz + pObjStatus->sfAglSpeed.vx);
//pole.alpha = DEGLIM(pObjStatus->afAngle.fPt + pObjStatus->sfAglSpeed.vy);
pObjStatus->afAngle.fAz = (FLOAT32)pole.beta;
pObjStatus->afAngle.fPt = (FLOAT32)pole.alpha;
pObjStatus->afAngleBorn = pObjStatus->afAngle;
//POINT32F imgPos = getImagePosFromStablePole(pole, p_GLB_Input->stCamera, p_GLB_Input->servoInfo, p_GLB_Input->afPlatformRPY, p_GLB_Input->setupErr);
// Control command generation: the AI-track init command is issued externally.
memset(&m_Ai_TkCmd,0,sizeof(AIT_Command));
// Initial box: centered on the current position, clamped to at least 10x10.
gud_rect_f initRect;
initRect.w = MAX(10.0, (float)pObjStatus->sfSize.w);
initRect.h = MAX(10.0, (float)pObjStatus->sfSize.h);
initRect.cx = (float)pObjStatus->ptPos.x;
initRect.cy = (float)pObjStatus->ptPos.y;
initRect.used = 1;
initRect.label = 10001;
//printf ("initBox:[%.2f %.2f %.2f %.2f]\n",initRect.w,initRect.h,initRect.cx,initRect.cy);
std::vector<AI_Target> odRes;
#ifdef ENABLE_AI_TRACK
GUD_ALG_AI_TRACK_Init(nTracker, img, initRect, odRes, &m_Ai_TkOut);
#endif
// Reset the occlusion/recovery debounce counters.
m_OccCnt = 0;
m_ArrestCnt = 0;
return 1;
}
// Runs one NanoTrack step without an external search region and stores the
// result (center position + size) into m_ObjStatus.
// img         - current video frame.
// srBbox      - unused here; kept for interface compatibility.
// g_GLB_Input - unused here; kept for interface compatibility.
// Returns 0 unconditionally.
int AIT_Interface::AIT_Run_Nano(GD_VIDEO_FRAME_S img, CENTERRECT32F srBbox, GLB_INPUT * g_GLB_Input)
{
    // Zero-initialize so cx/cy/w/h are not indeterminate when passed to the
    // process call (matches AIT_Run, which also passes a zeroed gud_rect_f);
    // previously only `used` was set and the rest were uninitialized.
    gud_rect_f searchBox = {0};
    searchBox.used = 0; // do not constrain the search region
    std::vector<AI_Target> odRes;
#ifdef ENABLE_AI_TRACK
    GUD_ALG_AI_TRACK_Process(nTracker, img, searchBox, odRes, &m_Ai_TkOut);
#endif
    // Convert the tracker's top-left rect into a center point + size.
    Track_Res_Nano temRs = m_Ai_TkOut.stNanoTrack_out;
    m_ObjStatus.ptPos.x = (temRs.rect.x + temRs.rect.w/2);
    m_ObjStatus.ptPos.y = (temRs.rect.y + temRs.rect.h/2);
    m_ObjStatus.sfSize.w = temRs.rect.w;
    m_ObjStatus.sfSize.h = temRs.rect.h;
    return 0;
}
// Folds the latest AI tracker output (m_Ai_TkOut) into the object status:
// counters, position (raw + 2-tap filtered), size, pixel speed, stable
// angles, and angular speed taken from the locking pipe.
// nWidth/nHeight - image dimensions, used for the out-of-view check.
// pObjStatus     - object status to update in place.
// g_GLB_Input    - global input (frame id, camera/servo/attitude for the
//                  image-to-angle conversion).
void AIT_Interface::AIT_UpdateTracker(SINT16 nWidth, SINT16 nHeight, OBJECTSTATUS * pObjStatus, GLB_INPUT * g_GLB_Input)
{
// Update the frame id.
pObjStatus->unFrmId = g_GLB_Input->unFrmId;
// Update the target counters (tracked this frame, so lost streak resets).
pObjStatus->unTotalCnt++;
pObjStatus->unTrackedCnt++;
pObjStatus->unContiTrackedCnt++;
pObjStatus->unContiLostCnt = 0;
pObjStatus->bObjMiss = FALSE;
// pObjStatus->ptPosBorn;
// Update the target position: keep the pre-previous point for the 2-frame
// speed estimate below, then shift current -> previous.
POINT32F pfPosPrePre = pObjStatus->ptPosPre;
pObjStatus->ptPosPre = pObjStatus->ptPos;
pObjStatus->ptPos.x = m_Ai_TkOut.fX;
pObjStatus->ptPos.y = m_Ai_TkOut.fY;
// Filtered position: average of the measured point and the speed-predicted
// previous point.
pObjStatus->ptPosFilter.x = (pObjStatus->ptPosPre.x + pObjStatus->sfSpeed.vx + pObjStatus->ptPos.x) / 2;
pObjStatus->ptPosFilter.y = (pObjStatus->ptPosPre.y + pObjStatus->sfSpeed.vy + pObjStatus->ptPos.y) / 2;
// 20170915, wsa: unlock when the target leaves the field of view outside
// of firing periods.
if (IMGO_IsPoint32FOutImg(nWidth, nHeight, pObjStatus->ptPos))
{
pObjStatus->bObjMiss = TRUE;
}
// Update the target size from the AI tracker output.
pObjStatus->sfSize.w = m_Ai_TkOut.fObjW;
pObjStatus->sfSize.h = m_Ai_TkOut.fObjH;
pObjStatus->sfSize.s = pObjStatus->sfSize.w * pObjStatus->sfSize.h;
// pObjStatus->sfSizeBorn;
pObjStatus->fObjPxlsCnt = pObjStatus->sfSize.s;
// Update the target speed: displacement over two frames.
pObjStatus->sfSpeed.vx = (pObjStatus->ptPos.x - pfPosPrePre.x) / 2;
pObjStatus->sfSpeed.vy = (pObjStatus->ptPos.y - pfPosPrePre.y) / 2;
// Mark the source of this target update.
pObjStatus->nObjTypeSrc = ObjSrc::Arith_NanoTrack;
// Update the target angles: convert the image position to stable angles.
Pole pole = getStablePoleFromImagePos(pObjStatus->ptPos,
g_GLB_Input->stCamera, g_GLB_Input->servoInfo, g_GLB_Input->afPlatformRPY, g_GLB_Input->setupErr);
pObjStatus->afAngle.fAz = (FLOAT32)pole.beta;
pObjStatus->afAngle.fPt = (FLOAT32)pole.alpha;
// Update the angular speed, taken uniformly from the tracking pipe.
pObjStatus->sfAglSpeed.vx = m_LockingPipe->sfAglSpeed.vx;
pObjStatus->sfAglSpeed.vy = m_LockingPipe->sfAglSpeed.vy;
}
// Memory (coast) tracking: while the AI tracker output is unreliable, the
// object status is predicted from the locking pipe's motion model instead.
// pObjStatus     - object status to update in place.
// nWidth/nHeight - image dimensions for the field-of-view check.
// g_GLB_Input    - global input (frame id).
// Returns TRUE while coasting is still valid, FALSE once the lost-frame
// budget is exhausted or the predicted point leaves the image.
UBYTE8 AIT_Interface::AIT_MemTracker(OBJECTSTATUS * pObjStatus, SINT32 nWidth, SINT32 nHeight, GLB_INPUT * g_GLB_Input)
{
    pObjStatus->unFrmId = g_GLB_Input->unFrmId;
    // Count this frame as lost; the tracked streak is broken.
    pObjStatus->unContiLostCnt++;
    pObjStatus->unContiTrackedCnt = 0;
    pObjStatus->unTotalCnt++;
    // Position and angles come from the pipe's long-horizon mean-filter prediction.
    const FilterMeanNL& motionModel = m_LockingPipe->stMotionMod_mean;
    pObjStatus->ptPos.x = motionModel.crnObjPrediRtLong.cx;
    pObjStatus->ptPos.y = motionModel.crnObjPrediRtLong.cy;
    pObjStatus->sfAglSpeed.vx = m_LockingPipe->sfAglSpeed.vx;
    pObjStatus->sfAglSpeed.vy = m_LockingPipe->sfAglSpeed.vy;
    pObjStatus->afAngle = motionModel.ObjAglListsLong.arfPredict.afAngle;
    // Size is carried over from the last AI tracker output.
    pObjStatus->sfSize.w = m_Ai_TkOut.fObjW;
    pObjStatus->sfSize.h = m_Ai_TkOut.fObjH;
    pObjStatus->sfSize.s = pObjStatus->sfSize.w * pObjStatus->sfSize.h;
    // Fail when the coast duration exceeds the memory window, or when the
    // predicted point has left the field of view.
    const BBOOL bOutOfView = IMGO_IsPoint32FOutImg(nWidth, nHeight, pObjStatus->ptPos);
    const BBOOL bTimedOut = pObjStatus->unContiLostCnt > (UINT32)(GLB_MEMORY_FRM_NUM);
    UBYTE8 ubTrackerSuccFlg;
    if (bTimedOut || bOutOfView)
    {
        pObjStatus->bObjMiss = TRUE;
        ubTrackerSuccFlg = FALSE;
    }
    else
    {
        pObjStatus->bObjMiss = FALSE;
        ubTrackerSuccFlg = TRUE;
    }
    return ubTrackerSuccFlg;
}
// Copies an externally produced AI tracker output into the cached copy.
// out - source output struct; a null pointer is ignored (previously it
//       would have been dereferenced by memcpy).
void AIT_Interface::AIT_SyncInfo(AIT_OUTPUT* out)
{
    if (out == NULL)
    {
        return; // nothing to sync; keep the previous output intact
    }
    memcpy(&m_Ai_TkOut, out, sizeof(AIT_OUTPUT));
}
// Returns a pointer to the internally held object status (not a copy).
// The pointee is overwritten by subsequent AIT_Run / AIT_Cancle calls.
OBJECTSTATUS* AIT_Interface::GetTrackeStatus()
{
return &m_ObjStatus;
}
// Returns a pointer to the internally cached AI tracker output (not a copy).
// The pointee is overwritten by subsequent AIT_Run / AIT_SyncInfo calls.
AIT_OUTPUT* AIT_Interface::GetAIPara()
{
return &m_Ai_TkOut;
}
// Cancels AI tracking: unlocks the underlying tracker and clears all cached
// tracking state so the interface can be re-assigned to a new target.
void AIT_Interface::AIT_Cancle()
{
#ifdef ENABLE_AI_TRACK
    GUD_ALG_AI_TRACK_Unlock(nTracker);
#endif
    bAssignflag = FALSE;                           // detach the AI tracker
    memset(&m_ObjStatus, 0, sizeof(OBJECTSTATUS)); // drop the tracking result
    memset(&m_Ai_TkCmd, 0, sizeof(AIT_Command));   // clear the control words
}
// Runs one AI tracking step with occlusion handling.
// img           - current video frame.
// p_GLB_Input   - global input (frame id, camera/servo/attitude).
// m_LockingPipe - locking pipe (shadows the member of the same name) that
//                 supplies the search region and angular speeds.
// Returns the tracking status: TRUE on the normal path, or the memory
// tracker's verdict while the target is flagged as occluded.
BBOOL AIT_Interface::AIT_Run(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, PIPE* m_LockingPipe)
{
    BBOOL bTrackStatus = TRUE;
    gud_rect_f searchBox = {0};
    SINT32 occStatus = 0;
    OBJECTSTATUS* pObjStatusAIT = &m_ObjStatus;
    // Seed the search region from the pipe's current point and the latest
    // (original floating-point) size in its history list.
    searchBox.cx = m_LockingPipe->ptCurrentPnt.x;
    searchBox.cy = m_LockingPipe->ptCurrentPnt.y;
    searchBox.w = (FLOAT32)m_LockingPipe->objHistoryList[m_LockingPipe->ubEnd].snSize.w;
    searchBox.h = (FLOAT32)m_LockingPipe->objHistoryList[m_LockingPipe->ubEnd].snSize.h;
    // Only force the external search region while the target is occluded.
    searchBox.used = (0 == pObjStatusAIT->nOcclude_flag) ? 0 : 1;
    std::vector<AI_Target> odRes;
#ifdef ENABLE_AI_TRACK
    GUD_ALG_AI_TRACK_Process(nTracker, img, searchBox, odRes, &m_Ai_TkOut);
    GUD_ALG_AI_TRACK_Get_ResopneMap(nTracker, responeMap);
#endif
    pObjStatusAIT->fConfidence = m_Ai_TkOut.stNanoTrack_out.score;
    // Pick the disturbance status for the active tracker type.
    m_Ai_TkOut.type = AITrackerType::NanoTrack;
    if (m_Ai_TkOut.type == AITrackerType::NanoTrack)
    {
        occStatus = m_Ai_TkOut.stNanoTrack_out.disturb_status;
    }
    // Debounce the occlusion decision: 3 consecutive disturbed frames enter
    // occlusion (flag 3); 3 consecutive clean frames leave it.
    if (occStatus > 0)
    {
        m_OccCnt++;
        m_ArrestCnt = 0;
        if (m_OccCnt >= 3)
        {
            pObjStatusAIT->nOcclude_flag = 3;
        }
    }
    else
    {
        m_OccCnt = 0;
        m_ArrestCnt++;
        if (m_ArrestCnt >= 3)
        {
            pObjStatusAIT->nOcclude_flag = 0;
        }
    }
    // NOTE: pObjStatusAIT aliases m_ObjStatus, so the original
    // memcpy(&m_ObjStatus, pObjStatusAIT, ...) calls were self-copies —
    // memcpy with overlapping src/dst is undefined behavior and a no-op
    // here; both were removed.
    if (3 == pObjStatusAIT->nOcclude_flag)
    {
        // Occluded: coast on the pipe's motion model (memory tracking).
        bTrackStatus = AIT_MemTracker(pObjStatusAIT, (SINT32)img.u32Width, (SINT32)img.u32Height, p_GLB_Input);
        return bTrackStatus;
    }
    // Normal path: fold the AI tracker output into the object status.
    AIT_UpdateTracker((SINT32)img.u32Width, (SINT32)img.u32Height, pObjStatusAIT, p_GLB_Input);
    return bTrackStatus;
}