#include "Arith_CustomTracker.h" #include "Arith_AIDMonitor.h" #include "Arith_timer.h" CustomTracker::CustomTracker(SINT32 nWidth, SINT32 nHeight) { //kcf跟踪器(通用) m_pKCFTracker = API_KCF_Tracker::Create(); // TLD m_pTLDTracker = API_TLD::Create(); // LK光流计算 m_pNDYTTracker = new NDYT(); // ai跟踪器初始化为空,等待外部绑定 m_pAITracker = NULL; // 精细模板匹配 m_pNccCorrect = new NCCAccuracy(); // 遮挡识别模块 m_pOccJudgeKCF = new OccJudge(); // AI Detect 管道监控模块 Ai_DMonitor = new AID_Monitor(); // AI跟踪器遮挡判断 m_pOccJudgeAI = new OccJudgeAI(); // 遮挡重捕模块 m_pTargetArrest = new TargetArrest(m_pOccJudgeKCF, m_pOccJudgeAI, m_pKCFTracker, m_pTLDTracker); ObjStatus = { 0 };//复合跟踪结果 ObjStatusAI = { 0 }; ObjStatusKCF = { 0 }; // 初始化为无遮挡 tkEventKCF = { OccLev::NO_OCC }; tkEventAI = { OccLev::NO_OCC }; m_nKCFResetCnt = 0; m_nAIResetCnt = 0; m_nAISeResetCnt = 0; } CustomTracker::~CustomTracker() { if (m_pKCFTracker) { delete m_pKCFTracker; } if (m_pTLDTracker) { delete m_pTLDTracker; } if (m_pOccJudgeKCF) { delete m_pOccJudgeKCF; } if (m_pTargetArrest) { delete m_pTargetArrest; } if (m_pNccCorrect) { delete m_pNccCorrect; } if (m_pOccJudgeAI) { delete m_pOccJudgeAI; } if (Ai_DMonitor) { delete Ai_DMonitor; } } bool CustomTracker::Init(GD_VIDEO_FRAME_S img, PIPE* pLockPipe, GLB_INPUT* p_GLB_Input) { // 锁定管道传指针 m_LockingPipe = pLockPipe; SINT32 nEnd = m_LockingPipe->ubEnd; SINT32 nObjSize = (SINT32)(m_LockingPipe->ObjectFilter.fPxlsCnt); TARGET_OBJECT* pTrackingTarget = &m_LockingPipe->objHistoryList[nEnd]; RECT32S tkBox = { SINT32(pTrackingTarget->pfCenPos.x - pTrackingTarget->snSize.w * 0.5), SINT32(pTrackingTarget->pfCenPos.y- pTrackingTarget->snSize.h * 0.5), pTrackingTarget->snSize.w, pTrackingTarget->snSize.h }; CENTERRECT32F ctBox = { pTrackingTarget->pfCenPos.x, pTrackingTarget->pfCenPos.y, (FLOAT32)pTrackingTarget->snSize.w, (FLOAT32)pTrackingTarget->snSize.h }; // 初始化KCF if (m_pKCFTracker) { m_pKCFTracker->KCF_InitObjectParameters(img, pLockPipe, p_GLB_Input); } //初始化NCC修正模块 if (m_pNccCorrect) { m_pNccCorrect->NCC_IniteTemplet(img, ctBox); } //// 初始化TLD模块 m_pTLDTracker->TLD_Init_API(img, ctBox); // 初始化AI跟踪 if(m_pAITracker) { m_pAITracker->AIT_Init(img, pLockPipe, p_GLB_Input); } //初始化NDYT if (m_pNDYTTracker) { m_pNDYTTracker->NDYT_Init(); } return true; } bool CustomTracker::Track(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, API_MOT_PIPE* g_GLB_PipeProc) { // 跟踪成功状态 bool bTrackStatus = false; bool bTrackKCFStatus = false; bool bTrackAIStatus = false; //KCF跟踪 if (m_pKCFTracker) { bTrackKCFStatus = GroundTrackKCF(img, p_GLB_Input, m_pKCFTracker, m_pOccJudgeKCF, &tkEventKCF, m_pTLDTracker->TLD_GetPara()); //更新KCF运动模型 m_pKCFTracker->UpdateTrackerMotion(p_GLB_Input); } //NCC跟踪精度修正 if (m_pNccCorrect) { m_pNccCorrect->NCC_CorrectTarget(img, m_pKCFTracker->GetTrackeStatus()); } // AI跟踪 if (m_pAITracker) { CENTERRECT32F srBbox = { ObjStatus.ptPos.x,ObjStatus.ptPos.y, ObjStatus.sfSize.w,ObjStatus.sfSize.h}; //m_pAITracker->AIT_Run(img, srBbox, p_GLB_Input); bTrackAIStatus = GroundTrackAI(p_GLB_Input, m_pAITracker, m_pOccJudgeAI, &tkEventAI, m_pTLDTracker->TLD_GetPara()); } // 跟踪决策及模型更新 bTrackStatus = TrackDecision(img, p_GLB_Input); // 运行TLD : if(p_GLB_Input->parallelFlg == 0) { if (m_pTLDTracker) { #if 0 m_pTLDTracker->TLD_Run_MutiTracker(img, &ObjStatus, &ObjStatusKCF, &ObjStatusAI, tkEventKCF); #endif UBYTE8 nScale = 0; SINT32 nImageW = 0; SINT32 nImageH = 0; RECT32S bRect = { 0 }; MINMAXRECT mmRect = { 0 }; int ret = m_pTLDTracker->tldPreProcess(img,&ObjStatus, &ObjStatusKCF, &ObjStatusAI,&nScale,&nImageW,&nImageH,&bRect,&mmRect); 
            if (ret != -1) {
                m_pTLDTracker->tldRunMultiTracker(&ObjStatus, &ObjStatusKCF, &ObjStatusAI, tkEventKCF, nScale, nImageW, nImageH, bRect, mmRect);
            }
        }
    }

    if (bTrackStatus) {
        // Handle tracking events (ground mode runs re-capture here; the AID re-capture below can stay disabled for testing)
        EventProcess(tkEventKCF, tkEventAI, img, m_LockingPipe, p_GLB_Input);

        //// AI detection pipe monitoring
        //SINT32 alarmID = Ai_DMonitor->Process(img, p_GLB_Input, &ObjStatus, m_LockingPipe, g_GLB_PipeProc);
        //// Look up the pipe for the alarm ID reported by AID and run the re-capture reset flow
        //PIPE* recapPipe = g_GLB_PipeProc->getPipeByAlarmID(alarmID);
        //RecapRestProcess(tkEventKCF, img, recapPipe, p_GLB_Input);
    } else {
        // Tracking failed: unlock
        Cancle();

        // Return the tracking status
        return bTrackStatus;
    }

    // Return the tracking status
    return bTrackStatus;
}

void CustomTracker::Cancle() {
    PIPE* pPipe = m_LockingPipe;
    if (pPipe) { pPipe->bTrackingPipe = false; }

    // Unlock the AI tracker if one is bound
    if (m_pAITracker) { m_pAITracker->ATI_Cancle(); }

    m_pKCFTracker->KCF_CleanUpObjectTracker();
    m_pTLDTracker->TLD_CleanUp();
    m_pOccJudgeKCF->OccJudge_CleanUpObjAglSimInfo();
}

bool CustomTracker::TrackDecision(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input) {
    UBYTE8 ubSuccessFlag = TRUE;

    // If both KCF and AI tracking are running, use the KCF+AI area-target decision
    if (m_pKCFTracker && m_pAITracker) {
        ubSuccessFlag = TO_TrackDecisionOfMiddleObj_KCF_AI(img, p_GLB_Input);
    }
    // If only KCF is running, copy the KCF result straight into the decision result
    else if (m_pKCFTracker) {
        // 20171218: update the KCF template
        if (m_pKCFTracker->GetTrackeStatus()->unContiLostCnt < 1) { m_pKCFTracker->KCF_ModelUpdate(img); }

        // Take the KCF tracking result as the output
        memcpy(&ObjStatus, m_pKCFTracker->GetTrackeStatus(), sizeof(OBJECTSTATUS));
        ObjStatus.nDeciStatus = KCF_Deci;
    }
    else if (m_pAITracker) {
        memcpy(&ObjStatus, m_pAITracker->GetTrackeStatus(), sizeof(OBJECTSTATUS));
        ObjStatus.nDeciStatus = AIT_Deci;
    }
    else {
        ObjStatus.bObjMiss = true;
    }

    // If the decided target is missing, mark tracking as failed
    ubSuccessFlag = TRUE;
    if (ObjStatus.bObjMiss) { ubSuccessFlag = FALSE; }

    // Use the decision result to guide the AI tracker's next position
    if (m_pAITracker) {
        AIT_OUTPUT* pAITOut = m_pAITracker->GetAIPara();

        if (ObjStatus.nOcclude_flag == FULL_OCC) {
            // During the memory phase, feed in the TLD detection result
            if (m_pAITracker->GetTrackeStatus()->unContiLostCnt > p_GLB_Input->unFreq && m_pTLDTracker) {
                RECT32S rsTLDBestRect = m_pTLDTracker->TLD_GetPara()->rsBestNNRect;
                m_pAITracker->m_Ai_TkCmd.TargetBox.cx = rsTLDBestRect.x + rsTLDBestRect.w / 2;
                m_pAITracker->m_Ai_TkCmd.TargetBox.cy = rsTLDBestRect.y + rsTLDBestRect.h / 2;
                m_pAITracker->m_Ai_TkCmd.TargetBox.w = rsTLDBestRect.w;
                m_pAITracker->m_Ai_TkCmd.TargetBox.h = rsTLDBestRect.h;
            } else {
                m_pAITracker->m_Ai_TkCmd.TargetBox.cx = pAITOut->fX;
                m_pAITracker->m_Ai_TkCmd.TargetBox.cy = pAITOut->fY;
                if (pAITOut->fObjW > 1) {
                    m_pAITracker->m_Ai_TkCmd.TargetBox.w = pAITOut->fObjW;
                    m_pAITracker->m_Ai_TkCmd.TargetBox.h = pAITOut->fObjH;
                } else {
                    m_pAITracker->m_Ai_TkCmd.TargetBox.w = ObjStatus.sfSize.w;
                    m_pAITracker->m_Ai_TkCmd.TargetBox.h = ObjStatus.sfSize.h;
                }
            }
        } else {
            m_pAITracker->m_Ai_TkCmd.TargetBox.cx = ObjStatus.ptPos.x;
            m_pAITracker->m_Ai_TkCmd.TargetBox.cy = ObjStatus.ptPos.y;
            if (pAITOut->fObjW > 1) {
                m_pAITracker->m_Ai_TkCmd.TargetBox.w = pAITOut->fObjW;
                m_pAITracker->m_Ai_TkCmd.TargetBox.h = pAITOut->fObjH;
            } else {
                m_pAITracker->m_Ai_TkCmd.TargetBox.w = ObjStatus.sfSize.w;
                m_pAITracker->m_Ai_TkCmd.TargetBox.h = ObjStatus.sfSize.h;
            }
        }
    }

    return ubSuccessFlag;
}
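
// Fuse the KCF and AI tracking results for a medium-sized (area) target.
// The decision is driven by the two trackers' consecutive-lost counters,
// summarized here from the four-way branch below:
//   KCF tracked, AI tracked -> take KCF; cross-check the two boxes by IoU and,
//                              if one tracker drifts persistently, queue its reset
//   KCF lost,    AI tracked -> take AI; KCF may be re-seeded from the AI box
//   KCF tracked, AI lost    -> take KCF; AI may be re-seeded from the KCF box
//   KCF lost,    AI lost    -> fall back to prediction (memory tracking) and flag the target lost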
bool CustomTracker::TO_TrackDecisionOfMiddleObj_KCF_AI(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input) {
    UBYTE8 ubSuccessFlag = TRUE;
    MINMAXRECT mmRectKCF = { 0 };
    MINMAXRECT mmRectAI = { 0 };
    FLOAT32 fIou = 0.f;
    BBOOL bResetKCF = FALSE;
    BBOOL bResetAI = FALSE;
    BBOOL bKcfResetAI = FALSE;
    BBOOL bIouConfirm = TRUE;

    OBJECTSTATUS* pObjStatus = &ObjStatus;
    OBJECTSTATUS* pObjStatusKCF = m_pKCFTracker->GetTrackeStatus();
    OBJECTSTATUS* pObjStatusAI = m_pAITracker->GetTrackeStatus();
    OBJECT_OCCLUSION* stOcclude = m_pOccJudgeAI->getOccInfo();
    AIT_OUTPUT* pAITOut = m_pAITracker->GetAIPara();
    TLD_Para* pTLDPara = m_pTLDTracker->TLD_GetPara();

    SINT32 nAITNumThre = MAX(20, stOcclude->nObjNumArrestTH);
    SINT32 nKCFNumThre = MAX(10, stOcclude->nObjNumArrestTH);

    pObjStatus->bObjLost = FALSE;

    // Estimate whether the AI result is still a trackable target -----------------
    // Compare the AI and KCF tracking boxes via their overlap (IoU)
    mmRectAI.minX = pObjStatusAI->ptPos.x - pObjStatusAI->sfSize.w / 2;
    mmRectAI.maxX = pObjStatusAI->ptPos.x + pObjStatusAI->sfSize.w / 2;
    mmRectAI.minY = pObjStatusAI->ptPos.y - pObjStatusAI->sfSize.h / 2;
    mmRectAI.maxY = pObjStatusAI->ptPos.y + pObjStatusAI->sfSize.h / 2;

    mmRectKCF.minX = pObjStatusKCF->ptPos.x - pObjStatusKCF->sfSize.w / 2;
    mmRectKCF.maxX = pObjStatusKCF->ptPos.x + pObjStatusKCF->sfSize.w / 2;
    mmRectKCF.minY = pObjStatusKCF->ptPos.y - pObjStatusKCF->sfSize.h / 2;
    mmRectKCF.maxY = pObjStatusKCF->ptPos.y + pObjStatusKCF->sfSize.h / 2;

    fIou = IMGO_CalcObjIou(mmRectAI, mmRectKCF);
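
    // For reference only: the IoU used above is the standard intersection-over-union
    // of the two axis-aligned boxes. The sketch below (kept disabled) illustrates the
    // expected behaviour assuming MINMAXRECT holds floating-point min/max corners;
    // it is not the actual IMGO_CalcObjIou implementation.
#if 0
    FLOAT32 fInterW = ((mmRectAI.maxX < mmRectKCF.maxX) ? mmRectAI.maxX : mmRectKCF.maxX)
                    - ((mmRectAI.minX > mmRectKCF.minX) ? mmRectAI.minX : mmRectKCF.minX);
    FLOAT32 fInterH = ((mmRectAI.maxY < mmRectKCF.maxY) ? mmRectAI.maxY : mmRectKCF.maxY)
                    - ((mmRectAI.minY > mmRectKCF.minY) ? mmRectAI.minY : mmRectKCF.minY);
    FLOAT32 fInter  = (fInterW > 0 && fInterH > 0) ? fInterW * fInterH : 0.f;
    FLOAT32 fUnion  = (mmRectAI.maxX - mmRectAI.minX) * (mmRectAI.maxY - mmRectAI.minY)
                    + (mmRectKCF.maxX - mmRectKCF.minX) * (mmRectKCF.maxY - mmRectKCF.minY)
                    - fInter;
    FLOAT32 fIouSketch = (fUnion > 0) ? (fInter / fUnion) : 0.f;
#endif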

    // Four cases, based on each tracker's consecutive-lost counter
    if (pObjStatusKCF->unContiLostCnt == 0 && pObjStatusAI->unContiLostCnt == 0) {
        pObjStatus->nDeciStatus = KCF_Deci;

        if (fIou < 0.4 && fIou > 0.1 && (pObjStatusKCF->sfSize.w > 15 || pObjStatusKCF->sfSize.h > 15)
            && pAITOut->nDetectNum > 35 && stOcclude->nJamExistCnt < 0 && stOcclude->nDetectChangeCnt < 0) {
            bResetKCF = TRUE;
            m_pTargetArrest->nArrestKCFStatus = 12;
        }

        if (pAITOut->nDetectNum > 10 && pAITOut->nDetectNum < 25) {
            m_nAIResetCnt++;
            if (m_nAIResetCnt > 10) {
                m_nAIResetCnt = 0;
                bResetAI = TRUE;
                m_pTargetArrest->nArrestAIStatus = 15;
            }
        } else {
            m_nAIResetCnt = 0;
        }
    }
    else if (pObjStatusKCF->unContiLostCnt > 0 && pObjStatusAI->unContiLostCnt == 0) {
        pObjStatus->nDeciStatus = AIT_Deci;

        // Conditions for resetting KCF (KCF lost) ------------------------------------
        if ((pObjStatusKCF->sfSize.w > 15 || pObjStatusKCF->sfSize.h > 15)
            && pAITOut->nDetectNum > stOcclude->nObjNumArrestTH
            && stOcclude->nJamExistCnt < 0 && stOcclude->nDetectChangeCnt < 0
            && pObjStatusKCF->unContiLostCnt > 30 && pObjStatusAI->unContiTrackedCnt > 30) {
            bResetKCF = TRUE;
            m_pTargetArrest->nArrestKCFStatus = 13;
        }

        if (pObjStatusKCF->unContiLostCnt > p_GLB_Input->unFreq * 5) {
            bResetKCF = TRUE;
            m_pTargetArrest->nArrestKCFStatus = 14;
        }
    }
    else if (pObjStatusKCF->unContiLostCnt == 0 && pObjStatusAI->unContiLostCnt > 0) {
        pObjStatus->nDeciStatus = KCF_Deci;

        // Conditions for resetting AI (AI lost) --------------------------------------
        if ((pObjStatusKCF->sfSize.w > 15 || pObjStatusKCF->sfSize.h > 15)
            && pObjStatusKCF->nOcclude_flag == NO_OCC
            && pObjStatusAI->unContiLostCnt > 30 && pObjStatusAI->unTrackedCnt > 30
            && m_nAISeResetCnt < 0) {
            bResetAI = TRUE;
            m_nAISeResetCnt = 10;
            m_pTargetArrest->nArrestAIStatus = 17;
        }

        // KCF resets AI
        if (pObjStatusAI->unContiLostCnt > p_GLB_Input->unFreq * 5 && m_nAISeResetCnt < 0) {
            bResetAI = TRUE;
            m_nAISeResetCnt = 10;
            m_pTargetArrest->nArrestAIStatus = 18;
        }

        m_nAISeResetCnt--;
    }
    else {
        // Both trackers lost: fall back to prediction
        pObjStatus->nDeciStatus = Predict_Deci;
        pObjStatus->bObjLost = TRUE;
    }

    // Post-decision processing -----------------------------------------------------
    // Update the KCF template
    if (pObjStatusKCF->unContiLostCnt < 1) { m_pKCFTracker->KCF_ModelUpdate(img); }

    // Assign the decision result
    if (pObjStatus->nDeciStatus == KCF_Deci) {
        pObjStatus->ptPos = pObjStatusKCF->ptPos;
        pObjStatus->sfSize = pObjStatusKCF->sfSize;
        pObjStatus->nOcclude_flag = pObjStatusKCF->nOcclude_flag;
        pObjStatus->fConfidence = pObjStatusKCF->fConfidence;
    } else if (pObjStatus->nDeciStatus == AIT_Deci) {
        pObjStatus->ptPos = pObjStatusAI->ptPos;
        pObjStatus->sfSize = pObjStatusAI->sfSize;
        pObjStatus->nOcclude_flag = pObjStatusAI->nOcclude_flag;
        pObjStatus->fConfidence = pObjStatusAI->fConfidence;
    } else {
        pObjStatus->sfSize = pObjStatusKCF->sfSize;
        pObjStatus->nOcclude_flag = FULL_OCC;
        pObjStatus->fConfidence = 0;
        pObjStatus->nDeciStatus = Predict_Deci;
    }

    if (pObjStatus->bObjLost == FALSE) {
        // Update the target counters
        // Update the frame ID
        pObjStatus->unFrmId = p_GLB_Input->unFrmId;
        pObjStatus->unTotalCnt++;
        pObjStatus->unTrackedCnt++;
        pObjStatus->unContiTrackedCnt++;
        pObjStatus->unContiLostCnt = 0;
        pObjStatus->bObjMiss = FALSE;

        // Update the lost/failed flag
        pObjStatus->bObjLost = FALSE;

        // Update the target position
        POINT32F pfPosPrePre = pObjStatus->ptPosPre;
        pObjStatus->ptPosPre = pObjStatus->ptPos;
        pObjStatus->ptPosFilter.x = (pObjStatus->ptPosPre.x + pObjStatus->sfSpeed.vx + pObjStatus->ptPos.x) / 2;
        pObjStatus->ptPosFilter.y = (pObjStatus->ptPosPre.y + pObjStatus->sfSpeed.vy + pObjStatus->ptPos.y) / 2;

        // Unlock if the target leaves the field of view (outside the firing phase)
        if (IMGO_IsPoint32FOutImg(p_GLB_Input->nImageWidth, p_GLB_Input->nImageHeight, pObjStatus->ptPos)) {
            pObjStatus->bObjMiss = TRUE;
        }

        // Update the target size
        pObjStatus->sfSize.s = pObjStatus->sfSize.w * pObjStatus->sfSize.h;
        pObjStatus->fObjPxlsCnt = pObjStatus->sfSize.s;

        // Update the target velocity
        pObjStatus->sfSpeed.vx = (pObjStatus->ptPos.x - pfPosPrePre.x) / 2;
        pObjStatus->sfSpeed.vy = (pObjStatus->ptPos.y - pfPosPrePre.y) / 2;

        // Update the target angles
        Pole pole = getStablePoleFromImagePos(pObjStatus->ptPos, p_GLB_Input->stCamera, p_GLB_Input->servoInfo, p_GLB_Input->afPlatformRPY, p_GLB_Input->setupErr);
        pObjStatus->afAngle.fAz = (FLOAT32)pole.beta;
        pObjStatus->afAngle.fPt = (FLOAT32)pole.alpha;

        //// Update the target angular rate, taken uniformly from the tracking pipe
        pObjStatus->sfAglSpeed.vx = m_LockingPipe->sfAglSpeed.vx;
        pObjStatus->sfAglSpeed.vy = m_LockingPipe->sfAglSpeed.vy;

        // Mark tracking as successful
        ubSuccessFlag = TRUE;
    } else {
        // Update the target counters
        // Update the frame ID
        pObjStatus->unFrmId = p_GLB_Input->unFrmId;
        pObjStatus->unTotalCnt++;
        pObjStatus->unTrackedCnt++;
        pObjStatus->unContiTrackedCnt = 0;
        pObjStatus->unContiLostCnt++;
        pObjStatus->bObjMiss = FALSE;

        // Update the lost/failed flag
        pObjStatus->bObjLost = TRUE;

        // Memory tracking: extrapolate the angles by the angular rate, then project
        // the stabilized-frame pole back into image coordinates
        ANGLE32F afMemoryAngle = { 0 };
        POINT32F ptMemoryPos = { 0 };
        afMemoryAngle.fAz = DEGLIM360(pObjStatus->afAngle.fAz + pObjStatus->sfAglSpeed.vx);
        afMemoryAngle.fPt = DEGLIM(pObjStatus->afAngle.fPt + pObjStatus->sfAglSpeed.vy);

        // Stabilized frame to image coordinates
        Pole targetCarNUEPole;
        targetCarNUEPole.alpha = afMemoryAngle.fPt;
        targetCarNUEPole.beta = afMemoryAngle.fAz;
        targetCarNUEPole.distance = 0;
        ptMemoryPos = getImagePosFromStablePole(targetCarNUEPole, p_GLB_Input->stCamera, p_GLB_Input->servoInfo, p_GLB_Input->afPlatformRPY, p_GLB_Input->setupErr);
        pObjStatus->ptPos = ptMemoryPos;

        // Update the target angles every frame
        pObjStatus->afAngle = afMemoryAngle;

        // Update the target position
        POINT32F pfPosPrePre = pObjStatus->ptPosPre;
        pObjStatus->ptPosPre = pObjStatus->ptPos;
        pObjStatus->ptPosFilter.x = (pObjStatus->ptPosPre.x + pObjStatus->sfSpeed.vx + pObjStatus->ptPos.x) / 2;
        pObjStatus->ptPosFilter.y = (pObjStatus->ptPosPre.y + pObjStatus->sfSpeed.vy + pObjStatus->ptPos.y) / 2;

        if (pObjStatus->nDeciStatus == AIT_Deci) {
            if (IMGO_IsPoint32FOutImg(p_GLB_Input->nImageWidth, p_GLB_Input->nImageHeight, pObjStatusAI->ptPos)) {
                pObjStatus->bObjMiss = TRUE;
            }
        } else {
            // Unlock if the target leaves the field of view (outside the firing phase)
            if (IMGO_IsPoint32FOutImg(p_GLB_Input->nImageWidth, p_GLB_Input->nImageHeight, pObjStatus->ptPos)) {
                pObjStatus->bObjMiss = TRUE;
            }
        }

        // Update the target size
        pObjStatus->sfSize.s = pObjStatus->sfSize.w * pObjStatus->sfSize.h;
        pObjStatus->fObjPxlsCnt = pObjStatus->sfSize.s;

        // Update the target angular rate
        pObjStatus->sfAglSpeed.vx = m_LockingPipe->sfAglSpeed.vx;
        pObjStatus->sfAglSpeed.vy = m_LockingPipe->sfAglSpeed.vy;

        SINT32 nTrackMemFrmNum = 1000;
        if (pObjStatus->unContiLostCnt >= (UINT32)(nTrackMemFrmNum)) {
            /* Memory tracking timed out: mark the target as lost and notify the caller to unlock */
            // Update the lost/failed flag
            pObjStatus->bObjMiss = TRUE;
            // Mark tracking as failed
            ubSuccessFlag = FALSE;
        } else {
            // Mark tracking as successful
            ubSuccessFlag = TRUE;
        }
    }

    if (bResetKCF) {
        // Re-seed KCF from the AI box
        CENTERRECT32F crfCandiRect;
        crfCandiRect.cx = pObjStatusAI->ptPos.x;
        crfCandiRect.cy = pObjStatusAI->ptPos.y;
        crfCandiRect.w = pObjStatusAI->sfSize.w;
        crfCandiRect.h = pObjStatusAI->sfSize.h;

        pObjStatus->ptPos = pObjStatusAI->ptPos;
        pObjStatus->sfSize = pObjStatusAI->sfSize;

        // Initialize the target response and similarity information
        m_pKCFTracker->KCF_InitObjectParametersDeci(img, p_GLB_Input, crfCandiRect, pObjStatus);
        m_pOccJudgeKCF->OccJudge_CleanUpObjAglSimInfo();
        m_pTargetArrest->ObjArrest_CleanUpObjArrestInfo();

        // Re-initialize the NCC correction template
        m_pNccCorrect->NCC_IniteTemplet(img, crfCandiRect);
    }

    if (bResetAI) {
        // Re-seed AI from the KCF box
        m_pAITracker->m_Ai_TkCmd.InitBox.cx = pObjStatusKCF->ptPos.x;
        m_pAITracker->m_Ai_TkCmd.InitBox.cy = pObjStatusKCF->ptPos.y;
        m_pAITracker->m_Ai_TkCmd.InitBox.w = pObjStatusKCF->sfSize.w;
        m_pAITracker->m_Ai_TkCmd.InitBox.h = pObjStatusKCF->sfSize.h;
        pObjStatusAI->nOcclude_flag = 0;
        //g_GLB_stOutput.bIniteDaSiamRPN = 1;
        //m_nAIResetCnt = 10;
    }

    return ubSuccessFlag;
}

void CustomTracker::EventProcess(TrackEvent pEventkcf, TrackEvent pEventAI, GD_VIDEO_FRAME_S img, PIPE* pLockingPipe, GLB_INPUT* p_GLB_Input) {
    // On full occlusion, run target re-capture
    if (pEventkcf.occ == OccLev::FULL_OCC) {
        m_pTargetArrest->TLD_RecaptureKCF_Run(img, pLockingPipe, p_GLB_Input);
    }
    if (pEventAI.occ == OccLev::FULL_OCC && m_pAITracker) {
        m_pTargetArrest->TLD_RecaptureDasiamRPN_Run(img, pLockingPipe, p_GLB_Input, m_pAITracker);
    }
}

void CustomTracker::RecapRestProcess(TrackEvent tkEvent, GD_VIDEO_FRAME_S img, PIPE* pLockingPipe, GLB_INPUT* p_GLB_Input) {
    m_pTargetArrest->AID_RecaptureKCF_Run(img, pLockingPipe, p_GLB_Input);
}

bool CustomTracker::GroundTrackKCF(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, API_KCF_Tracker* pKCFTracker, OccJudge* pOccJudge, TrackEvent* tkEventStatus, TLD_Para* pTLDPara) {
    BBOOL bSuccess = TRUE;
    RECT32F rfDetectBox = { 0 };
    RECT32F rfInputBox = { 0 };
    GLB_STATUS nStatus = GLB_STATUS_UNKOWN;
    OBJECTSTATUS* pObjStatusKCF = pKCFTracker->GetTrackeStatus();

    if (pObjStatusKCF->unContiLostCnt <= 0) { nStatus = GLB_STATUS_TRACK; }

    // Refine the input box with the LK/NDYT optical-flow module before running KCF
    if (m_pNDYTTracker) {
        rfInputBox.x = pObjStatusKCF->ptPos.x - pObjStatusKCF->sfSize.w / 2;
        rfInputBox.y = pObjStatusKCF->ptPos.y - pObjStatusKCF->sfSize.h / 2;
        rfInputBox.w = pObjStatusKCF->sfSize.w;
        rfInputBox.h = pObjStatusKCF->sfSize.h;
        rfDetectBox = m_pNDYTTracker->NDYT_Run(img, rfInputBox, nStatus);
    }

    if (nStatus == GLB_STATUS_TRACK) {
        pKCFTracker->KCF_DetectReset(rfDetectBox);
    } else if (m_pNDYTTracker) {
        m_pNDYTTracker->NDYT_Init();
    }

    // Run KCF
    pKCFTracker->KCF_Run_Detect(img, p_GLB_Input);

    // Target occlusion judgment
    tkEventStatus->occ = pOccJudge->OccEventRecognition(img, pObjStatusKCF, pKCFTracker->GetFHogFeaArray31C(), pKCFTracker->GetFHogFeaSize(), pObjStatusKCF->fConfidence, pTLDPara);

    // If an occlusion event exists or the response is below threshold, switch to memory tracking
    if (tkEventStatus->occ == FULL_OCC) {
        bSuccess = pKCFTracker->KCF_MemTracker(pObjStatusKCF, (SINT32)img.u32Width, (SINT32)img.u32Height, p_GLB_Input);
        ObjStatusKCF = *pKCFTracker->GetTrackeStatus();
        return bSuccess;
    }

    // Update the tracker
    pKCFTracker->KCF_UpdateTracker((SINT32)img.u32Width, (SINT32)img.u32Height, pObjStatusKCF, p_GLB_Input);
    ObjStatusKCF = *pKCFTracker->GetTrackeStatus();
    return bSuccess;
}
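
// Run one frame of the AI tracker: judge the occlusion state first, then either
// coast in memory-tracking mode (full occlusion) or perform the normal state
// update. The latest result is mirrored into ObjStatusAI in both paths.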
bool CustomTracker::GroundTrackAI(GD_VIDEO_FRAME_S img, GLB_INPUT* p_GLB_Input, AIT_Interface* pAITracker, OccJudgeAI* pOccJudge, TrackEvent* tkEventAI, TLD_Para* pTLDPara) {
    BBOOL bSuccess = TRUE;

    memset(&pAITracker->m_Ai_TkCmd, 0, sizeof(AIT_Command));
    pAITracker->m_Ai_TkCmd.bTrack = true;

    OBJECTSTATUS* pObjStatusAI = pAITracker->GetTrackeStatus();

    // Occlusion state judgment
    tkEventAI->occ = pOccJudge->OCC_FrameProcess(p_GLB_Input, pObjStatusAI, pAITracker->GetAIPara(), pTLDPara);

    // Memory-tracking state update
    if (tkEventAI->occ == FULL_OCC) {
        bSuccess = pAITracker->AIT_MemTracker(pObjStatusAI, p_GLB_Input->nImageWidth, p_GLB_Input->nImageHeight, p_GLB_Input);
        ObjStatusAI = *pAITracker->GetTrackeStatus();
        return bSuccess;
    }

    // Normal tracking state update
    pAITracker->AIT_UpdateTracker(p_GLB_Input->nImageWidth, p_GLB_Input->nImageHeight, pObjStatusAI, p_GLB_Input);
    ObjStatusAI = *pAITracker->GetTrackeStatus();
    return bSuccess;
}

CENTERRECT CustomTracker::getKCFTracker_SrBox() {
    return m_pKCFTracker->KCF_GetSrBox();
}

CENTERRECT CustomTracker::getTLD_SrBox() {
    CENTERRECT rc = { 0 };
    RECT32S srBox = m_pTLDTracker->TLD_GetSearchRect();
    rc.cx = srBox.x + srBox.w / 2;
    rc.cy = srBox.y + srBox.h / 2;
    rc.w = srBox.w;
    rc.h = srBox.h;
    return rc;
}
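
// A minimal usage sketch of the driving sequence this class expects, kept
// disabled. The function name and the frame/pipe/input objects below are
// placeholders owned and filled by the real caller; binding of m_pAITracker is
// done externally and is not shown in this file.
#if 0
void Example_DriveCustomTracker(GD_VIDEO_FRAME_S frame, PIPE* pLockedPipe,
                                GLB_INPUT* pGlbInput, API_MOT_PIPE* pPipeProc)
{
    CustomTracker tracker(pGlbInput->nImageWidth, pGlbInput->nImageHeight);

    // On lock: initialize all sub-trackers from the locked pipe's newest target.
    tracker.Init(frame, pLockedPipe, pGlbInput);

    // Per frame: run KCF/AI/TLD, fuse the results and handle re-capture.
    // Track() calls Cancle() internally and returns false once the target is lost.
    bool bTracking = tracker.Track(frame, pGlbInput, pPipeProc);
    (void)bTracking;
}
#endif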