main
wangchongwu 4 months ago
parent 156692eef3
commit 39149cf94c

@ -197,10 +197,8 @@ void ProcessIR(string filePath, string outname)
<< std::endl;
// 基于外参的快拼
stitcher->GeoStitch(frame, info);
stitcher->Run(frame, info);
// 接收帧
auto a = stitcher->ReceiveFrame(frame, info);
cv::Mat res;
cv::resize(mat_pan, res, cv::Size(pan.u32Width / 4, pan.u32Height / 4));
@ -215,11 +213,6 @@ void ProcessIR(string filePath, string outname)
i = i + 1;
}
// 处理帧
stitcher->ProcessFrame();
// 输出谷歌png
stitcher->ExportGeoPng();
imshow("pan_opt", mat_pan);
waitKey(0);
@ -319,7 +312,7 @@ void ProcessVL(string filePath,string outname)
cv::TickMeter tm;
tm.start();
// 基于外参的快拼
stitcher->GeoStitch(frame, info);
stitcher->Run(frame, info);
tm.stop();
@ -344,7 +337,8 @@ void ProcessVL(string filePath,string outname)
cv::TickMeter tm;
tm.start();
// 处理帧
//stitcher->ProcessFrame();
stitcher->OptAndOutCurrPan();
tm.stop();
cout << "time opt:" << tm.getTimeMilli() << endl;
@ -357,18 +351,13 @@ void ProcessVL(string filePath,string outname)
waitKey(1);
// 输出谷歌png
//stitcher->ExportGeoPng();
// 输出谷歌tile
//stitcher->ExportGoogleTile();
}
// 处理前视可见光扫描
void ProcessFrontVL(string filePath)
{
auto stitcher = API_FrontStitch::Create(IMAGE_WIDTH_VL, IMAGE_HEIGHT_VL);
auto stitcher = API_FrontStitch::Create();
GD_VIDEO_FRAME_S frame = { 0 };//输入帧
GD_VIDEO_FRAME_S pan = { 0 };//输出全景
@ -420,12 +409,12 @@ void ProcessFrontVL(string filePath)
info.craft.stAtt.fRoll += gr.generate();
cv::Mat mat_src(IMAGE_HEIGHT_VL * 1.5, IMAGE_WIDTH_VL, CV_8UC1, pFrameVL);
cv::Mat IMG;
cv::cvtColor(mat_src, IMG, cv::COLOR_YUV2BGR_NV12);
//cv::Mat mat_src(IMAGE_HEIGHT_VL * 1.5, IMAGE_WIDTH_VL, CV_8UC1, pFrameVL);
//cv::Mat IMG;
//cv::cvtColor(mat_src, IMG, cv::COLOR_YUV2BGR_NV12);
imshow("src", IMG);
waitKey(1);
//imshow("src", IMG);
//waitKey(1);
frame.enPixelFormat = GD_PIXEL_FORMAT_NV12;
frame.u32Width = IMAGE_WIDTH_VL;
@ -442,7 +431,7 @@ void ProcessFrontVL(string filePath)
}
else
{
if (i % 60 != 0)
if (i % 20 != 0)
{
i = i + 1;
continue;
@ -456,14 +445,13 @@ void ProcessFrontVL(string filePath)
cv::TickMeter tm;
tm.start();
// 基于外参的快拼
stitcher->PoleStitch(frame, info);
stitcher->Run(frame, info);
tm.stop();
cout << "time:" << tm.getTimeMilli() << endl;
}
cv::Mat res;
@ -474,6 +462,7 @@ void ProcessFrontVL(string filePath)
i = i + 1;
}
stitcher->OptAndOutCurrPan();
cv::Mat res;
cv::resize(mat_pan, res, cv::Size(pan.u32Width / 4, pan.u32Height / 4));
@ -495,5 +484,5 @@ int main(int, char**)
//
//ProcessVL("H:/vl_1920_1080_para40_y8/1.video", "1");
ProcessFrontVL("H:/vl_1920_1080_para40_y8/1.video");
ProcessFrontVL("H:/vl_1920_1080_para40_y8/20241219152917_4.video");
}

@ -1,11 +1,11 @@
/*********版权所有C2024,武汉高德红外股份有限公司***************
/*********版权所有C2025,武汉高德红外股份有限公司***************
* API_FrontStitch.h
*
*
*
*
*
*
*
* 04046wcw
* 2025/03/01
*******************************************************************/
#pragma once
@ -28,14 +28,17 @@ public:
// 初始化拼接,以info给定的伺服指向为扫描中心结合方位范围和俯仰范围初始化拼接图
virtual FPanInfo Init(FrameInfo info, ScanRange stAzRange, ScanRange stPtRange) = 0;
// 极坐标快拼
virtual BYTE8 PoleStitch(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
// 运行拼接流程
virtual BYTE8 Run(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
// 立即优化并输出当前全景图
virtual SINT32 OptAndOutCurrPan() = 0;
// 获取全景图
virtual GD_VIDEO_FRAME_S ExportPanAddr() = 0;
public:
static API_FrontStitch* Create(SINT32 nWidth, SINT32 nHeight);
static API_FrontStitch* Create(std::string cachedir = "./cache");
static void Destroy(API_FrontStitch* obj);
};

@ -1,11 +1,11 @@
/*********版权所有C2024武汉高德红外股份有限公司***************
* API_UnderStitch.h
*
*
*
*
*
*
*
* 04046wcw
* 2025/02/01
*******************************************************************/
#pragma once
@ -29,26 +29,17 @@ public:
// 设置输出标识和路径
virtual void SetOutput(std::string name, std::string outdir) = 0;
// 几何校正快拼
virtual BYTE8 GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
// 运行拼接流程
virtual SINT32 Run(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
// 实时裁切瓦片
virtual void CutTileRealTime() = 0;
// 运行参数配置
virtual void SetConfig(UPanConfig config) = 0;
// 缓存接收帧
virtual SINT32 ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
// 立即优化并输出当前全景图
virtual SINT32 OptAndOutCurrPan() = 0;
// 处理帧
virtual SINT32 ProcessFrame() = 0;
// 获取全景图
virtual GD_VIDEO_FRAME_S ExportPanAddr() = 0;
// 输出地理产品:kml png全景
virtual bool ExportGeoPng() = 0;
// 输出地理产品:谷歌标准瓦片
virtual bool ExportGoogleTile() = 0;
// 获取全景图
virtual GD_VIDEO_FRAME_S ExportPanAddr() = 0;
public:
static API_UnderStitch* Create(std::string cachedir = "./cache");

@ -123,8 +123,6 @@ void BA_Task::OptFrame(vector<KeyType> frameInd,cv::Mat H_map)
// 开始BA
// 将 cv::Mat 转换为 Ceres 需要的数组形式
std::vector<double*> h_list(_currMatrix.size());
@ -162,15 +160,11 @@ void BA_Task::OptFrame(vector<KeyType> frameInd,cv::Mat H_map)
{
continue;
}
#ifdef SHOW_MATCH
// 绘制匹配结果
drawMatch(i, j, matches, H_map);
// 绘制匹配结果
drawMatch(i, j, matches, H_map);
#endif
// 添加匹配点对的残差块
for (int m = 0; m < matches.size(); m++)
{
@ -338,33 +332,33 @@ SINT32 BA_Task::CalMatchMat()
void BA_Task::drawMatch(int i, int j, std::vector<cv::DMatch> matches, cv::Mat H_map)
{
cv::Mat image(1000, 1000, CV_8UC3, cv::Scalar(0, 0, 0));
cv::Mat imagetmp(1000, 1000, CV_8UC3, cv::Scalar(0, 0, 0));
vector<vector<cv::Point2f>> tmpPoly;
tmpPoly.push_back(_polygon[i]);
tmpPoly.push_back(_polygon[j]);
//cv::Mat image(1000, 1000, CV_8UC3, cv::Scalar(0, 0, 0));
//cv::Mat imagetmp(1000, 1000, CV_8UC3, cv::Scalar(0, 0, 0));
//vector<vector<cv::Point2f>> tmpPoly;
//tmpPoly.push_back(_polygon[i]);
//tmpPoly.push_back(_polygon[j]);
cv::warpPerspective(_imgVec[i], imagetmp, H_map * _origMatrix[i], imagetmp.size());
//cv::warpPerspective(_imgVec[i], imagetmp, H_map * _origMatrix[i], imagetmp.size());
// 生成遮罩(全白图像,表示有效区域)
cv::Mat mask1 = cv::Mat::ones(_imgVec[i].size(), CV_8UC1) * 255;
cv::Mat warped_mask1;
cv::warpPerspective(mask1, warped_mask1, H_map * _origMatrix[i], image.size());
//// 生成遮罩(全白图像,表示有效区域)
//cv::Mat mask1 = cv::Mat::ones(_imgVec[i].size(), CV_8UC1) * 255;
//cv::Mat warped_mask1;
//cv::warpPerspective(mask1, warped_mask1, H_map * _origMatrix[i], image.size());
imagetmp.copyTo(image, warped_mask1);
//imagetmp.copyTo(image, warped_mask1);
cv::warpPerspective(_imgVec[j], imagetmp, H_map * _origMatrix[j], imagetmp.size());
cv::Mat mask2 = cv::Mat::ones(_imgVec[i].size(), CV_8UC1) * 255;
cv::Mat warped_mask2;
cv::warpPerspective(mask2, warped_mask2, H_map * _origMatrix[j], image.size());
//cv::warpPerspective(_imgVec[j], imagetmp, H_map * _origMatrix[j], imagetmp.size());
//cv::Mat mask2 = cv::Mat::ones(_imgVec[i].size(), CV_8UC1) * 255;
//cv::Mat warped_mask2;
//cv::warpPerspective(mask2, warped_mask2, H_map * _origMatrix[j], image.size());
imagetmp.copyTo(image, warped_mask2);
//imagetmp.copyTo(image, warped_mask2);
drawPolygons(image, tmpPoly);
//drawPolygons(image, tmpPoly);
// 显示绘制结果
cv::imshow("Polygons", image);
cv::waitKey(1);
//// 显示绘制结果
//cv::imshow("Polygons", image);
//cv::waitKey(1);
// 可视化匹配结果

@ -1,5 +1,10 @@
#include "Arith_BlendMap.h"
#include "Arith_Utils.h"
#include "Arith_FrontStitch.h"
using std::max;
using std::min;
MapBlend::MapBlend(FileCache<FrameCache>* cache)
{
_cache = cache;
@ -58,3 +63,100 @@ void MapBlend::DirectMap(vector<KeyType> frameInd, cv::Mat mapH, cv::Mat pan, cv
// Blended re-projection of the listed frames into the panorama.
// NOTE(review): intentionally empty - blending is not implemented yet and
// DirectMap is the only active drawing path. Confirm before relying on it.
void MapBlend::BlendMap(vector<KeyType> frameInd, cv::Mat mapH, cv::Mat pan, cv::Mat pan_mask)
{
}
// Construct with a non-owning pointer to the shared frame file-cache.
FrontMapBlend::FrontMapBlend(FileCache<FrameCache>* cache)
    : _cache(cache)
{
}
// Nothing to release: _cache is a non-owning pointer
// (presumably allocated and freed by FrontStitch - confirm against its ctor).
FrontMapBlend::~FrontMapBlend()
{
}
// Re-project every cached frame listed in frameInd into the polar panorama
// in one pass. _panImage is cleared first, so the result reflects only the
// listed frames. For each frame: fetch it from the file cache, map its four
// corners image -> ground -> polar -> panorama to bound the update region,
// then resample via the inverse homography on the GPU.
// NOTE(review): _panPara/_panImage look like member names but are parameters here.
void FrontMapBlend::DirectMap(vector<KeyType> frameInd, FPanInfo _panPara, cv::Mat _panImage, cv::Mat pan_mask, cuda_Mem cuda_resor)
{
    _panImage.setTo(0);

    // Single reusable frame buffer, filled from the file cache per iteration.
    auto _t_frame_cache = std::make_shared<FrameCache>();

    for (size_t i = 0; i < frameInd.size(); i++)
    {
        KeyType key = frameInd[i];

        bool flag = _cache->get(key, _t_frame_cache);
        if (!flag)
        {
            // Frame missing from the cache - skip it.
            continue;
        }

        // Current (possibly BA-optimized) homography of this frame.
        cv::Mat H = cv::Mat(3, 3, CV_64FC1, _t_frame_cache->H);

        auto info = _t_frame_cache->_para;
        float dep = info.nEvHeight;

        GD_VIDEO_FRAME_S img = _t_frame_cache->_frame_info;

        // Decode the cached raw frame to RGB.
        cv::Mat rgb = getRGBMatFromGDFrame(_t_frame_cache->_frame_info, _t_frame_cache->_data);

        //Pole CT_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width/2, img.u32Height/2), info.nEvHeight);

        // Map the four frame corners to polar coordinates. The mapping is not
        // linear, so this corner-derived bounding box cannot fully guarantee
        // coverage of the warped frame; the resampled image may be incomplete.
        Pole leftTop_map = getPoleFromImgWithH(H, cv::Point2f(0, 0), info.nEvHeight);
        Pole rightTop_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width, 0), info.nEvHeight);
        Pole rightBottom_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width, img.u32Height), info.nEvHeight);
        Pole leftBottom_map = getPoleFromImgWithH(H, cv::Point2f(0, img.u32Height), info.nEvHeight);

        //leftTop_map.beta = DEGLIM360(leftTop_map.beta);
        //rightTop_map.beta = DEGLIM360(rightTop_map.beta);
        //rightBottom_map.beta = DEGLIM360(rightBottom_map.beta);
        //leftBottom_map.beta = DEGLIM360(leftBottom_map.beta);

        // Polar corners -> panorama pixel coordinates.
        cv::Point2f p_leftTop_map = getFPanFromPole(leftTop_map, _panPara);
        cv::Point2f p_rightTop_map = getFPanFromPole(rightTop_map, _panPara);
        cv::Point2f p_rightBottom_map = getFPanFromPole(rightBottom_map, _panPara);
        cv::Point2f p_leftBottom_map = getFPanFromPole(leftBottom_map, _panPara);

        // Axis-aligned bounds of the warped frame on the panorama.
        int right = max(max(max(p_leftTop_map.x, p_rightTop_map.x), p_rightBottom_map.x), p_leftBottom_map.x);
        int left = min(min(min(p_leftTop_map.x, p_rightTop_map.x), p_rightBottom_map.x), p_leftBottom_map.x);
        int top = min(min(min(p_leftTop_map.y, p_rightTop_map.y), p_rightBottom_map.y), p_leftBottom_map.y);
        int bottom = max(max(max(p_leftTop_map.y, p_rightTop_map.y), p_rightBottom_map.y), p_leftBottom_map.y);

        int xRange = right - left;
        int yRnage = bottom - top;

        // Clamp to the panorama extent and build the update ROI.
        MINMAXRECT32S RANGES = { 0 };
        RANGES.minY = std::max(0, top);
        RANGES.maxY = std::min(_panImage.rows - 1, bottom);
        RANGES.minX = std::max(0, left);
        RANGES.maxX = std::min(_panImage.cols - 1, right);

        cv::Rect2d roi(RANGES.minX, RANGES.minY, RANGES.maxX - RANGES.minX, RANGES.maxY - RANGES.minY);

        if (roi.width < 0)
        {
            // Degenerate ROI (frame entirely off-panorama). Kept as a debugger
            // anchor; UpdatePan_CUDA rejects empty ROIs itself.
            int a = 0;
        }

        cv::Mat H_inv = H.inv();

        //UpdatePan_CPU(rgb, _panImage, roi, H_inv, _panPara, dep);
        UpdatePan_CUDA(rgb, _panImage, roi, H_inv, _panPara, dep, cuda_resor);
    }
}

@ -16,6 +16,7 @@
#include "FileCache.h"
// 下视批量绘制
class MapBlend
{
public:
@ -32,6 +33,19 @@ private:
FileCache<FrameCache>* _cache;
};
// Front-view batch re-projection: redraws cached frames into the polar panorama.
class FrontMapBlend
{
public:
    // cache: non-owning pointer to the shared frame file-cache.
    FrontMapBlend(FileCache<FrameCache>* cache);
    ~FrontMapBlend();

public:
    void DirectMap(vector<KeyType> frameInd, FPanInfo _panPara, cv::Mat pan, cv::Mat pan_mask, cuda_Mem cuda_resor); // direct projection (no blending)
private:
    FileCache<FrameCache>* _cache; // non-owning
};
#endif

@ -67,7 +67,7 @@ void FeatureMatcher::matchFeatures_WithH(vector<KeyPoint> keypoints1, cv::Mat& d
auto warp_pt1 = warpPointWithH(H1, keypoints1[mc.queryIdx].pt);
auto warp_pt2 = warpPointWithH(H2, keypoints2[mc.trainIdx].pt);
if (fabs(warp_pt1.x - warp_pt2.x) + fabs(warp_pt1.y - warp_pt2.y) < 15)
if (fabs(warp_pt1.x - warp_pt2.x) + fabs(warp_pt1.y - warp_pt2.y) < 800)
{
matches.push_back(mc);
}

@ -0,0 +1,60 @@
#include "Arith_FrontSolver.h"
#include "Arith_GeoSolver.h"
#include "Arith_CoordModule.h"
// Project image point pt to the ground plane (depth dep) through homography H
// and return its polar coordinates.
Pole getPoleFromImgWithH(cv::Mat H, cv::Point2f pt, float dep)
{
    // Image -> ground via the homography (this step can reuse the result of
    // the earlier optimization stage).
    cv::Point2f groundPt = warpPointWithH(H, pt);

    // Attach the depth and switch to the customary NUE axis layout.
    PointXYZ groundXYZ = { 0 };
    groundXYZ.X = groundPt.y;
    groundXYZ.Y = -dep;
    groundXYZ.Z = groundPt.x;

    // Ground point -> polar coordinates.
    return getPoleFromXYZ(groundXYZ);
}
// Inverse of getPoleFromImgWithH: cast a ray along the polar direction _pole,
// intersect it with the ground plane at depth dep, and map the hit point back
// into the image through the inverse homography H_inv.
cv::Point2f getImgPosFromPole(cv::Mat H_inv, Pole _pole, float dep)
{
    // Direction of the virtual ray in NUE coordinates.
    PointXYZ rayDir = getXYZFromPole(_pole);

    // Scale the ray so it reaches the ground plane (Y == -dep).
    float scale = -dep / rayDir.Y;
    PointXYZ hitPt = { 0 };
    hitPt.X = rayDir.X * scale;
    hitPt.Y = rayDir.Y * scale;
    hitPt.Z = rayDir.Z * scale;

    // Re-order the axes into the geographic layout expected by the homography.
    PointXYZ hitGeo = { 0 };
    hitGeo.X = hitPt.Z;
    hitGeo.Y = hitPt.X;
    hitGeo.Z = -hitPt.Y;

    // Ground -> image via the inverse homography.
    return warpPointWithH(H_inv, cv::Point2f(hitGeo.X, hitGeo.Y));
}
// Convert a panorama pixel to a polar direction, using the panorama centre
// orientation and per-pixel angular resolution from _panPara.
Pole getPoleFromFPan(cv::Point2f pt, FPanInfo _panPara)
{
    // Pixel offsets from the panorama centre (y grows downward).
    float dx = pt.x - _panPara.m_pan_width / 2;
    float dy = _panPara.m_pan_height / 2 - pt.y;

    Pole _pole = { 0 };
    _pole.beta = DEGLIM(_panPara.centerGeo.fAz + dx * _panPara.fAglRes);
    _pole.alpha = DEGLIM(_panPara.centerGeo.fPt + dy * _panPara.fAglRes);
    return _pole;
}
// Convert a polar direction to a panorama pixel: angular offset from the
// panorama centre, divided by the angular resolution, shifted to the centre.
cv::Point2f getFPanFromPole(Pole _pole, FPanInfo _panPara)
{
    // Angular offsets from the panorama centre (wrapped by DEGLIM).
    auto azOff = DEGLIM(_pole.beta - _panPara.centerGeo.fAz);
    auto ptOff = DEGLIM(_panPara.centerGeo.fPt - _pole.alpha);

    cv::Point2f pt = { 0 };
    pt.x = azOff / _panPara.fAglRes + _panPara.m_pan_width / 2;
    pt.y = ptOff / _panPara.fAglRes + _panPara.m_pan_height / 2;
    return pt;
}

@ -0,0 +1,7 @@
// Projection model for the front-view scan stitcher.
#pragma once  // was missing; every other project header uses it

#include "StitchStruct.h"

// Image point -> polar direction, projecting through homography H onto the
// ground plane at depth dep.
Pole getPoleFromImgWithH(cv::Mat H, cv::Point2f pt, float dep);
// Polar direction -> image point, via the ground plane and inverse homography.
cv::Point2f getImgPosFromPole(cv::Mat H_inv, Pole _pole, float dep);
// Panorama pixel -> polar direction.
Pole getPoleFromFPan(cv::Point2f pt, FPanInfo _panPara);
// Polar direction -> panorama pixel.
cv::Point2f getFPanFromPole(Pole _pole, FPanInfo _panPara);

@ -10,9 +10,9 @@
using namespace std;
using namespace cv;
API_FrontStitch* API_FrontStitch::Create(SINT32 nWidth, SINT32 nHeight)
API_FrontStitch* API_FrontStitch::Create(std::string cachedir)
{
return new FrontStitch(nWidth, nHeight);
return new FrontStitch(cachedir);
}
@ -21,9 +21,18 @@ void API_FrontStitch::Destroy(API_FrontStitch* obj)
delete obj;
}
FrontStitch::FrontStitch(SINT32 nWidth, SINT32 nHeight)
FrontStitch::FrontStitch(std::string cachedir)
{
// 创建缓存对象
_cache = new FileCache<FrameCache>(50, cachedir);
_GeoSolver = new GeoSolver();
_FeaMatcher = new FeatureMatcher(DetectorType::SIFT, MatcherType::FLANN);
_BATask = new BA_Task(_cache);
_FBlendTask = new FrontMapBlend(_cache);
}
FrontStitch::~FrontStitch()
@ -88,7 +97,7 @@ FPanInfo FrontStitch::Init(FrameInfo info, ScanRange stAzRange, ScanRange stPtRa
_panPara.m_pan_height = DEGLIM360(Pole_up.alpha - Pole_bottom.alpha) / _panPara.fAglRes;
_panImage = cv::Mat::zeros(_panPara.m_pan_height, _panPara.m_pan_width, CV_8UC3);
_panMask = cv::Mat::zeros(_panPara.m_pan_height, _panPara.m_pan_width, CV_8UC1);
cudaMalloc((void**)&_cuda_mem.global_cuda_Frame, info.nWidth * info.nHeight * 3);//rgb
cudaMalloc((void**)&_cuda_mem.global_cuda_Pan, _panPara.m_pan_width * _panPara.m_pan_height * 3);//rgb
@ -99,6 +108,27 @@ FPanInfo FrontStitch::Init(FrameInfo info, ScanRange stAzRange, ScanRange stPtRa
return _panPara;
}
// Per-frame entry point: fast pole-coordinate stitch of this frame.
BYTE8 FrontStitch::Run(GD_VIDEO_FRAME_S img, FrameInfo info)
{
    // Pole-coordinate fast stitch.
    PoleStitch(img, info);

    // Frame caching for later optimization is currently disabled.
    //ReceiveFrame(img, info);

    return 0;
}
// Optimize and re-render the current panorama. All steps are currently
// disabled for the front-view path, so this is a no-op returning 0.
SINT32 FrontStitch::OptAndOutCurrPan()
{
    // Whole-sequence optimization gave poor results and was abandoned:
    //ProcessFrame(_recvFrameKey);

    // Redraw of all cached frames, likewise disabled:
    //_FBlendTask->DirectMap(_recvFrameKey, _panPara, _panImage, _panMask, _cuda_mem);

    return 0;
}
BYTE8 FrontStitch::PoleStitch(GD_VIDEO_FRAME_S img, FrameInfo info)
{
cv::Mat H = _GeoSolver->findHomography(info);
@ -107,13 +137,13 @@ BYTE8 FrontStitch::PoleStitch(GD_VIDEO_FRAME_S img, FrameInfo info)
// 获取rgb源图像
cv::Mat rgb = getRGBMatFromGDFrame(img, img.u64VirAddr[0]);
Pole CT_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width/2, img.u32Height/2), info.nEvHeight);
//Pole CT_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width/2, img.u32Height/2), info.nEvHeight);
// 计算帧的map四至注意这里不是线性变化不能完全保证四至约束下的原图能完全覆盖即反采样后图像可能不完整
Pole leftTop_map = getPoleFromImgWithH(H,cv::Point2f(0,0), info.nEvHeight);
Pole rightTop_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width,0), info.nEvHeight);
Pole rightBottom_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width,img.u32Height), info.nEvHeight);
Pole leftBottom_map = getPoleFromImgWithH(H, cv::Point2f(0,img.u32Height), info.nEvHeight);
Pole leftTop_map = getPoleFromImgWithH(H, cv::Point2f(0, 0), info.nEvHeight);
Pole rightTop_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width, 0), info.nEvHeight);
Pole rightBottom_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width, img.u32Height), info.nEvHeight);
Pole leftBottom_map = getPoleFromImgWithH(H, cv::Point2f(0, img.u32Height), info.nEvHeight);
//leftTop_map.beta = DEGLIM360(leftTop_map.beta);
//rightTop_map.beta = DEGLIM360(rightTop_map.beta);
@ -121,21 +151,21 @@ BYTE8 FrontStitch::PoleStitch(GD_VIDEO_FRAME_S img, FrameInfo info)
//leftBottom_map.beta = DEGLIM360(leftBottom_map.beta);
// 转全景图的像方
cv::Point2f p_leftTop_map = getFPanFromPole(leftTop_map,_panPara);
cv::Point2f p_leftTop_map = getFPanFromPole(leftTop_map, _panPara);
cv::Point2f p_rightTop_map = getFPanFromPole(rightTop_map, _panPara);
cv::Point2f p_rightBottom_map = getFPanFromPole(rightBottom_map, _panPara);
cv::Point2f p_leftBottom_map = getFPanFromPole(leftBottom_map, _panPara);
// 计算全景图上范围
int right = max(max(max(p_leftTop_map.x, p_rightTop_map.x), p_rightBottom_map.x), p_leftBottom_map.x);
int left = min(min(min(p_leftTop_map.x, p_rightTop_map.x), p_rightBottom_map.x), p_leftBottom_map.x);
int top = min(min(min(p_leftTop_map.y, p_rightTop_map.y), p_rightBottom_map.y), p_leftBottom_map.y);
int bottom = max(max(max(p_leftTop_map.y, p_rightTop_map.y), p_rightBottom_map.y), p_leftBottom_map.y);
// 计算全景图上范围
int right = max(max(max(p_leftTop_map.x, p_rightTop_map.x), p_rightBottom_map.x), p_leftBottom_map.x);
int left = min(min(min(p_leftTop_map.x, p_rightTop_map.x), p_rightBottom_map.x), p_leftBottom_map.x);
int top = min(min(min(p_leftTop_map.y, p_rightTop_map.y), p_rightBottom_map.y), p_leftBottom_map.y);
int bottom = max(max(max(p_leftTop_map.y, p_rightTop_map.y), p_rightBottom_map.y), p_leftBottom_map.y);
int xRange = right - left;
int yRnage = bottom - top;
int yRnage = bottom - top;
//反映射到像素坐标
//反映射到像素坐标
MINMAXRECT32S RANGES = { 0 };
RANGES.minY = std::max(0, top);
RANGES.maxY = std::min(_panImage.rows - 1, bottom);
@ -143,8 +173,8 @@ BYTE8 FrontStitch::PoleStitch(GD_VIDEO_FRAME_S img, FrameInfo info)
RANGES.minX = std::max(0, left);
RANGES.maxX = std::min(_panImage.cols - 1, right);
cv::Rect2d roi(RANGES.minX , RANGES.minY, RANGES.maxX - RANGES.minX, RANGES.maxY- RANGES.minY);
cv::Rect2d roi(RANGES.minX, RANGES.minY, RANGES.maxX - RANGES.minX, RANGES.maxY - RANGES.minY);
if (roi.width < 0)
{
int a = 0;
@ -152,7 +182,7 @@ BYTE8 FrontStitch::PoleStitch(GD_VIDEO_FRAME_S img, FrameInfo info)
cv::Mat H_inv = H.inv();
//UpdatePan_CPU(rgb, _panImage, roi, H_inv, _panPara, dep);
UpdatePan_CUDA(rgb, _panImage, roi, H_inv, _panPara, dep, _cuda_mem);
@ -164,12 +194,74 @@ BYTE8 FrontStitch::PoleStitch(GD_VIDEO_FRAME_S img, FrameInfo info)
SINT32 FrontStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
{
return SINT32();
// 构造单帧缓存
auto _t_frame_cache = std::make_shared<FrameCache>();
_t_frame_cache->_para = para;
_t_frame_cache->_frame_info = img;
int imgSize = 0;
if (img.enPixelFormat == GD_PIXEL_FORMAT_GRAY_Y8)
{
imgSize = img.u32Height * img.u32Width;
}
if (img.enPixelFormat == GD_PIXEL_FORMAT_RGB_PACKED)
{
imgSize = img.u32Height * img.u32Width * 3;
}
if (img.enPixelFormat == GD_PIXEL_FORMAT_NV12)
{
imgSize = img.u32Height * img.u32Width * 1.5;
}
// 深拷贝图像数据
memcpy(_t_frame_cache->_data, img.u64VirAddr[0], imgSize);
// 提取特征点
std::vector<cv::KeyPoint> keypoints;
cv::Mat descriptors;
cv::Mat src = getRGBAMatFromGDFrame(img, img.u64VirAddr[0]);
_FeaMatcher->extractFeatures(src, keypoints, descriptors);
size_t keyNum = MIN(keypoints.size(), FEA_NUM_MAX);
_t_frame_cache->ptNum = keyNum;
// 保存特征点
memcpy(_t_frame_cache->_pt, keypoints.data(), sizeof(cv::KeyPoint) * keyNum);
// 保存描述子
memcpy(_t_frame_cache->_desp, descriptors.data, sizeof(float) * keyNum * FEA_DES_SIZE);
cv::Mat_<double> H0 = _GeoSolver->findHomography(para);
// 保存初始H
memcpy(_t_frame_cache->H, H0.data, sizeof(double) * 9);
// 预处理结果加入文件缓存
_cache->set(para.nFrmID, _t_frame_cache);
_recvFrameKey.push_back(para.nFrmID);
_totalFrameCnt++;
return _totalFrameCnt;
}
SINT32 FrontStitch::ProcessFrame()
SINT32 FrontStitch::ProcessFrame(vector<KeyType> keys)
{
return SINT32();
if (keys.size() == 0)
{
return -1;
}
// 优化所有帧
_BATask->OptFrame(keys, cv::Mat::zeros(cv::Size(3,3),CV_8UC1));
return 0;
}
GD_VIDEO_FRAME_S FrontStitch::ExportPanAddr()
@ -208,61 +300,3 @@ void UpdatePan_CPU(cv::Mat rgbFrame, cv::Mat pan, cv::Rect2d roi, cv::Mat H_inv,
}
Pole getPoleFromImgWithH(cv::Mat H, cv::Point2f pt, float dep)
{
// 投影到地面坐标,这一步可以利用前面优化成果
cv::Point2f grdPt = warpPointWithH(H, pt);
// 补深度信息并转为常用的NUE坐标系
PointXYZ grdPtXYZ = { 0 };
grdPtXYZ.X = grdPt.y;
grdPtXYZ.Y = -dep;
grdPtXYZ.Z = grdPt.x;
// 地面点转极坐标
Pole pole = getPoleFromXYZ(grdPtXYZ);
return pole;
}
cv::Point2f getImgPosFromPole(cv::Mat H_inv, Pole _pole, float dep)
{
PointXYZ virPt = getXYZFromPole(_pole);
//虚拟点投影到地面
float ratio = -dep / virPt.Y;
PointXYZ realPt = { 0 };
realPt.X = virPt.X * ratio;
realPt.Y = virPt.Y * ratio;
realPt.Z = virPt.Z * ratio;
// 转东北地
PointXYZ realPtGeo = { 0 };
realPtGeo.X = realPt.Z;
realPtGeo.Y = realPt.X;
realPtGeo.Z = -realPt.Y;
// 投影回像方
cv::Point2f px = warpPointWithH(H_inv, cv::Point2f(realPtGeo.X, realPtGeo.Y));
return px;
}
Pole getPoleFromFPan(cv::Point2f pt, FPanInfo _panPara)
{
Pole _pole = { 0 };
_pole.beta = DEGLIM(_panPara.centerGeo.fAz + (pt.x - _panPara.m_pan_width / 2) * _panPara.fAglRes);
_pole.alpha = DEGLIM(_panPara.centerGeo.fPt + (_panPara.m_pan_height / 2 - pt.y) * _panPara.fAglRes);
return _pole;
}
cv::Point2f getFPanFromPole(Pole _pole, FPanInfo _panPara)
{
cv::Point2f pt = { 0 };
pt.x = DEGLIM(_pole.beta - _panPara.centerGeo.fAz) / _panPara.fAglRes + _panPara.m_pan_width / 2;
pt.y = DEGLIM(_panPara.centerGeo.fPt - _pole.alpha) / _panPara.fAglRes + _panPara.m_pan_height / 2;
return pt;
}

@ -12,50 +12,67 @@
#ifndef _FRONTSTITCH_H
#define _FRONTSTITCH_H
#include "API_FrontStitch.h"
#include "Arith_GeoSolver.h"
#include <cuda_runtime.h>
#include "opencv2/opencv.hpp"
// 前视扫描投影变换模型
Pole getPoleFromImgWithH(cv::Mat H, cv::Point2f pt, float dep);
cv::Point2f getImgPosFromPole(cv::Mat H_inv, Pole _pole, float dep);
Pole getPoleFromFPan(cv::Point2f pt, FPanInfo _panPara);
cv::Point2f getFPanFromPole(Pole _pole, FPanInfo _panPara);
#include "Arith_BATask.h"
#include <map>
#include "Arith_FrontSolver.h"
#include "Arith_BlendMap.h"
class FrontStitch:public API_FrontStitch
{
public:
FrontStitch(SINT32 nWidth, SINT32 nHeight);
FrontStitch(std::string cachedir = "./cache");
~FrontStitch();
// 初始化拼接
FPanInfo Init(FrameInfo info, ScanRange stAzRange, ScanRange stPtRange);
// 运行前视拼接
BYTE8 Run(GD_VIDEO_FRAME_S img, FrameInfo para);
// 立即优化并输出当前全景图
SINT32 OptAndOutCurrPan();
// 获取全景图
GD_VIDEO_FRAME_S ExportPanAddr();
private:
// 极坐标快拼
BYTE8 PoleStitch(GD_VIDEO_FRAME_S img, FrameInfo para);
BYTE8 PoleStitch(GD_VIDEO_FRAME_S img, FrameInfo para);
// 缓存接收帧
SINT32 ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para);
// 处理帧
SINT32 ProcessFrame();
SINT32 ProcessFrame(vector<KeyType> keys);
// 获取全景图
GD_VIDEO_FRAME_S ExportPanAddr();
private:
GeoSolver* _GeoSolver;
GeoSolver* _GeoSolver;//外参计算
FeatureMatcher* _FeaMatcher;//特征匹配
BA_Task* _BATask;//BA
FrontMapBlend* _FBlendTask;// 前视融合模块
vector<KeyType> _recvFrameKey;// 接收帧总表
std::map<KeyType, int> _recvFrameOptFlag;//接收帧的优化标记
FileCache<FrameCache>* _cache;//文件缓存,存储外部传入的原始帧信息以及预处理结果
int _totalFrameCnt;//处理帧计数
private:
FPanInfo _panPara;//全景配置
cv::Mat _panImage;
cv::Mat _panMask; //覆盖区域遮罩
// device mem
private:
cuda_Mem _cuda_mem;

@ -110,17 +110,17 @@ UPanInfo UnderStitch::InitMap(FrameInfo info)
}
UPanInfo UnderStitch::Init(FrameInfo info)
{
_panPara = InitMap(info);
_panImage = cv::Mat::zeros(_panPara.m_pan_height, _panPara.m_pan_width, CV_8UC4);
_panMask = cv::Mat::zeros(_panPara.m_pan_height, _panPara.m_pan_width, CV_8UC1);
// 默认配置参数
_config.bOutGoogleTile = true;
_config.bUseBA = true;
return _panPara;
}
@ -132,6 +132,53 @@ void UnderStitch::SetOutput(std::string filename, std::string outdir)
_kmlPath = _outDir + "/" + _filename + ".kml";
}
void UnderStitch::SetConfig(UPanConfig config)
{
_config = config;
}
// Per-frame entry point for the down-view stitcher: fast geo-stitch, then
// optional real-time tile cutting and optional frame caching for later BA.
SINT32 UnderStitch::Run(GD_VIDEO_FRAME_S img, FrameInfo para)
{
    // Fast stitch from exterior orientation.
    GeoStitch(img, para);

    // Real-time tile output, when dynamic Google tiles are requested.
    if (_config.bOutGoogleTile)
    {
        CutTileRealTime();
    }

    // Pre-process and cache the frame so OptAndOutCurrPan can run BA later.
    if (_config.bUseBA)
    {
        ReceiveFrame(img, para);
    }

    // todo: re-initialize the panorama when the sweep leaves the field of view.
    return 0;
}
// Run bundle adjustment over every cached frame (if enabled), then flush all
// Google tiles produced so far. Returns the number of cached frames.
SINT32 UnderStitch::OptAndOutCurrPan()
{
    if (_config.bUseBA)
    {
        // Optimize all received frames and redraw the panorama.
        ProcessFrame(_recvFrameKey);
    }

    // Output all current tiles.
    ExportGoogleTile();

    // Explicit cast: size() is size_t, the API contract is a signed 32-bit
    // count (fixes the implicit-narrowing warning).
    return static_cast<SINT32>(_recvFrameKey.size());
}
//BYTE8 UnderStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
@ -244,7 +291,7 @@ void UnderStitch::CutTileRealTime()
if (ptile->nStatus > 0)
{
//cnt++;
//continue;
continue;
}
SINT32 x1 = box.x/ downSampleR;
@ -329,21 +376,26 @@ SINT32 UnderStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
_recvFrameKey.push_back(para.nFrmID);
_totalFrameCnt++;
// 优化表初始化
_recvFrameOptFlag[para.nFrmID] = 0;
_totalFrameCnt++;
return _totalFrameCnt;
}
SINT32 UnderStitch::ProcessFrame()
SINT32 UnderStitch::ProcessFrame(vector<KeyType> keys)
{
if (keys.size() == 0)
{
return -1;
}
// 优化所有帧
_BATask->OptFrame(_recvFrameKey, _H_pan);
_BATask->OptFrame(keys, _H_pan);
// 重投影所有帧到全景
_BlendTask->DirectMap(_recvFrameKey, _H_pan, _panImage,_panMask);
_BlendTask->DirectMap(keys, _H_pan, _panImage,_panMask);
return 0;
}

@ -7,6 +7,7 @@
#include "FileCache.h"
#include "GoogleTile.h"
#include "Arith_BlendMap.h"
#include <map>
class UnderStitch :public API_UnderStitch
{
@ -18,6 +19,13 @@ public:
void SetOutput(std::string filename, std::string outdir);
void SetConfig(UPanConfig config);
SINT32 Run(GD_VIDEO_FRAME_S img, FrameInfo para);
SINT32 OptAndOutCurrPan();
private:
BYTE8 GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para);
@ -28,7 +36,7 @@ public:
SINT32 ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para);
// 处理帧
SINT32 ProcessFrame();
SINT32 ProcessFrame(vector<KeyType> keys);
// 输出地理产品:kml png全景
bool ExportGeoPng();
@ -50,13 +58,11 @@ private:
private:
vector<KeyType> _recvFrameKey;// 接收帧总表
std::map<KeyType, int> _recvFrameOptFlag;//接收帧的优化标记
FileCache<FrameCache>* _cache;//文件缓存,存储外部传入的原始帧信息以及预处理结果
private:
googleTile _googleProduct;
@ -83,6 +89,8 @@ private:
int _totalFrameCnt;//处理帧计数
UPanConfig _config;
private:
string _kmlPath;

@ -238,7 +238,7 @@ int googleTile::ZoomLevel(float mp)
int earthLen = 40075017;
nLev = std::floor(std::log2(earthLen / meters_cover)) + 1;
nLev = std::floor(std::log2(earthLen / meters_cover));
return nLev;
}

@ -9,6 +9,14 @@ using cv::Point2f;
using cv::Mat;
using cv::Mat_;
// 下视拼接参数控制
struct UPanConfig
{
bool bUseBA;//开启BA
bool bOutGoogleTile;//输出动态瓦片
};
// 帧内外方位元素
struct FrameInfo
{
@ -31,26 +39,6 @@ struct UPanInfo
float map_shiftY;// 平移Y
};
// 扫描范围(-180-180
struct ScanRange
{
float Agl0;
float Agl1;
};
// 前视全景图配置
struct FPanInfo
{
int m_pan_width;
int m_pan_height;
float fAglRes;
// 大地系范围
ANGLE32F centerGeo;
ScanRange stAzRangeGeo;
ScanRange stPtRangeGeo;
};
// 匹配关系网BA的边
struct Match_Net
@ -80,9 +68,6 @@ struct FrameCache
// 前视拼接相关
// 前视极坐标范围
struct PoleArea
{
@ -92,6 +77,26 @@ struct PoleArea
float bottom;
};
// 扫描范围(-180-180
struct ScanRange
{
float Agl0;
float Agl1;
};
// 前视全景图配置
struct FPanInfo
{
int m_pan_width;
int m_pan_height;
float fAglRes;
// 大地系范围
ANGLE32F centerGeo;
ScanRange stAzRangeGeo;
ScanRange stPtRangeGeo;
};
//
// cuda显存资源
struct cuda_Mem
@ -100,4 +105,6 @@ struct cuda_Mem
unsigned char* global_cuda_Pan;
unsigned char* global_cuda_pan_mask;
double* global_cuda_H_inv_data;
};
};

@ -217,11 +217,11 @@ __global__ void projKernel(unsigned char* framePtr, SIZE32S size, unsigned char*
}
#define DYNAMIC_MEM
//#define DYNAMIC_MEM
void UpdatePan_CUDA(cv::Mat rgbFrame, cv::Mat pan, cv::Rect2d roi, cv::Mat h_inv, FPanInfo _panPara, float dep, cuda_Mem cuda_res)
{
// check roi
//// check roi
if (roi.width <= 0 || roi.height <= 0)
{
return;
@ -249,7 +249,7 @@ void UpdatePan_CUDA(cv::Mat rgbFrame, cv::Mat pan, cv::Rect2d roi, cv::Mat h_inv
cudaMemcpy(cuda_H_inv_data, (double*)h_inv.data,9 * sizeof(double), cudaMemcpyHostToDevice);
// 定义线程块和网格大小
dim3 blockSize(16, 16);
dim3 blockSize(8, 8);
dim3 gridSize((pan_sub_w + blockSize.x - 1) / blockSize.x, (pan_sub_h + blockSize.y - 1) / blockSize.y);//向上取整,确保线程覆盖全区域
SIZE32S FrameSize = { rgbFrame.cols, rgbFrame.rows };
@ -267,7 +267,7 @@ void UpdatePan_CUDA(cv::Mat rgbFrame, cv::Mat pan, cv::Rect2d roi, cv::Mat h_inv
if (err != cudaSuccess) {
printf("CUDA error: %s\n", cudaGetErrorString(err));
}
cudaDeviceSynchronize();
//cudaDeviceSynchronize();
// 拷贝结果回主机
cv::Mat pan_sub = pan(roi);

Loading…
Cancel
Save