Extend the stitching library into two capabilities: downward-view (under) stitching and forward-view (front) stitching

main
wangchongwu 5 months ago
parent f3f05bebed
commit 832d207ccd

@ -1,5 +1,6 @@
#include <iostream>
#include "API_VideoStitch.h"
#include "API_UnderStitch.h"
#include "API_FrontStitch.h"
#include "S729.h"
#include "PlatformDefine.h"
#include <string.h>
@ -112,7 +113,7 @@ unsigned char pImageVL[IMAGE_WIDTH_VL * (IMAGE_HEIGHT_VL) * 2] = { 0 };
void ProcessIR()
{
auto stitcher = API_VideoStitch::Create(IMAGE_WIDTH_IR, IMAGE_HEIGHT_IR);
auto stitcher = API_UnderStitch::Create(IMAGE_WIDTH_IR, IMAGE_HEIGHT_IR);
//stitcher->Test();
GD_VIDEO_FRAME_S frame = { 0 };// input frame
@ -218,7 +219,7 @@ void ProcessIR()
void ProcessVL(string filePath,string outname)
{
auto stitcher = API_VideoStitch::Create(IMAGE_WIDTH_VL, IMAGE_HEIGHT_VL);
auto stitcher = API_UnderStitch::Create(IMAGE_WIDTH_VL, IMAGE_HEIGHT_VL);
GD_VIDEO_FRAME_S frame = { 0 };// input frame
GD_VIDEO_FRAME_S pan = { 0 };// output panorama
@ -340,20 +341,139 @@ void ProcessVL(string filePath,string outname)
// Export Google PNG
//stitcher->ExportGeoPng("D:/google_tiles", outname);
stitcher->ExportGeoPng("D:/google_tiles", outname);
// Export Google tiles
stitcher->ExportGoogleTile("D:/google_tiles", outname);
//stitcher->ExportGoogleTile("D:/google_tiles", outname);
}
// Process a forward-view visible-light scan
void ProcessFrontVL(string filePath)
{
auto stitcher = API_FrontStitch::Create(IMAGE_WIDTH_VL, IMAGE_HEIGHT_VL);
GD_VIDEO_FRAME_S frame = { 0 };// input frame
GD_VIDEO_FRAME_S pan = { 0 };// output panorama
cv::Mat mat_pan;// panorama display
FILE* file = fopen(filePath.c_str(), "rb");
GaussianRandom gr(0.0, 1, 0.0);
int i = 0;
while (!feof(file))
{
SINT32 nVLFrameSize = 1.5 * IMAGE_WIDTH_VL * IMAGE_HEIGHT_VL + IMAGE_WIDTH_VL * PARA_IR_LINE;
fread(pFrameVL, 1, nVLFrameSize, file);
S729paras_VL Paras_VL = { 0 };
memcpy(&Paras_VL, (unsigned char*)(pFrameVL + int(1.5 * IMAGE_WIDTH_VL * IMAGE_HEIGHT_VL)), sizeof(S729paras_VL));
FrameInfo info = { 0 };
info.nFrmID = i;
info.camInfo.nFocus = Paras_VL.Paras_VL.caminfo.nFocal;
info.camInfo.fPixelSize = Paras_VL.Paras_VL.caminfo.nPixleSize;
info.craft.stAtt.fYaw = Paras_VL.Paras_VL.airCraftInfo.fYaw;
info.craft.stAtt.fPitch = Paras_VL.Paras_VL.airCraftInfo.fPitch;
info.craft.stAtt.fRoll = Paras_VL.Paras_VL.airCraftInfo.fRoll;
info.craft.stPos.B = Paras_VL.Paras_VL.airCraftInfo.B;
info.craft.stPos.L = Paras_VL.Paras_VL.airCraftInfo.L;
info.craft.stPos.H = Paras_VL.Paras_VL.airCraftInfo.H;
info.nEvHeight = Paras_VL.Paras_VL.airCraftInfo.H - 25;
info.servoInfo.fServoAz = Paras_VL.Paras_VL.servoInfo.fAz;
info.servoInfo.fServoPt = Paras_VL.Paras_VL.servoInfo.fPz + 90;
info.nWidth = IMAGE_WIDTH_VL;
info.nHeight = IMAGE_HEIGHT_VL;
info.craft.stAtt.fYaw += gr.generate();
info.craft.stAtt.fPitch += gr.generate();
info.craft.stAtt.fRoll += gr.generate();
cv::Mat mat_src(IMAGE_HEIGHT_VL * 1.5, IMAGE_WIDTH_VL, CV_8UC1, pFrameVL);
cv::Mat IMG;
cv::cvtColor(mat_src, IMG, cv::COLOR_YUV2BGR_NV12);
imshow("src", IMG);
waitKey(1);
frame.enPixelFormat = GD_PIXEL_FORMAT_NV12;
frame.u32Width = IMAGE_WIDTH_VL;
frame.u32Height = IMAGE_HEIGHT_VL;
frame.u64VirAddr[0] = pFrameVL;
if (i == 0)
{
stitcher->Init(info,5,5);
pan = stitcher->ExportPanAddr();
mat_pan = cv::Mat(pan.u32Height, pan.u32Width, CV_8UC4, pan.u64VirAddr[0]);
}
else
{
if (i % 50 != 0)
{
i = i + 1;
continue;
}
std::cout << info.craft.stPos.B << " " << info.craft.stPos.L << " " << info.craft.stPos.H << " "
<< info.craft.stAtt.fYaw << " " << info.craft.stAtt.fPitch << " " << info.craft.stAtt.fRoll << " "
<< info.servoInfo.fServoAz << " " << info.servoInfo.fServoPt
<< std::endl;
cv::TickMeter tm;
tm.start();
// Fast stitch based on exterior orientation
//stitcher->FrontStitch(frame, info);
tm.stop();
cout << "time:" << tm.getTimeMilli() << endl;
}
cv::Mat res;
cv::resize(mat_pan, res, cv::Size(pan.u32Width / 4, pan.u32Height / 4));
imshow("pan_opt", res);
waitKey(1);
i = i + 1;
}
cv::Mat res;
cv::resize(mat_pan, res, cv::Size(pan.u32Width / 4, pan.u32Height / 4));
imshow("pan_opt", res);
waitKey(0);
}
int main(int, char**)
{
//ProcessIR();
ProcessVL("H:/vl_1920_1080_para40_y8/22.video","22");
ProcessVL("H:/vl_1920_1080_para40_y8/20241219152643_1.video", "20241219152643_1");
ProcessVL("H:/vl_1920_1080_para40_y8/20241219152917_4.video", "20241219152917_4");
ProcessVL("H:/vl_1920_1080_para40_y8/20241219153515_10.video", "20241219153515_10");
//ProcessVL("H:/vl_1920_1080_para40_y8/22.video","22");
//ProcessVL("H:/vl_1920_1080_para40_y8/20241219152643_1.video", "20241219152643_1");
//ProcessVL("H:/vl_1920_1080_para40_y8/20241219152917_4.video", "20241219152917_4");
//ProcessVL("H:/vl_1920_1080_para40_y8/20241219153515_10.video", "20241219153515_10");
//
//ProcessVL("H:/vl_1920_1080_para40_y8/55.video", "5");
ProcessFrontVL("H:/vl_1920_1080_para40_y8/22.video");
}

@ -0,0 +1,43 @@
/********* Copyright (C) 2024 武汉高德红外股份有限公司 ***************
* API_FrontStitch.h
*
*
*
*
*
*
*******************************************************************/
#pragma once
#ifdef _WIN32
#define STD_STITCH_API __declspec(dllexport)
#else
#define STD_STITCH_API __attribute__ ((visibility("default")))
#endif
#include "StitchStruct.h"
// Forward-view stitching of video frames, based on polar coordinates
class STD_STITCH_API API_FrontStitch
{
public:
virtual ~API_FrontStitch() = default;
// Initialize stitching
virtual FPanInfo Init(FrameInfo info, float AzRange, float PtRange) = 0;
// Fast stitch via geometric rectification
virtual BYTE8 GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
// Get the panorama
virtual GD_VIDEO_FRAME_S ExportPanAddr() = 0;
public:
static API_FrontStitch* Create(SINT32 nWidth, SINT32 nHeight);
static void Destroy(API_FrontStitch* obj);
};
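A minimal usage sketch for the new forward-view interface, mirroring how ProcessFrontVL drives it above (frame acquisition and the NV12 fill are omitted; the 5-degree azimuth/pitch ranges are simply the values used in that test, not required defaults):

// Sketch only: assumes a populated FrameInfo 'info' and an NV12 frame 'frame', as prepared in ProcessFrontVL.
auto stitcher = API_FrontStitch::Create(IMAGE_WIDTH_VL, IMAGE_HEIGHT_VL);
FPanInfo panPara = stitcher->Init(info, 5.0f, 5.0f);   // angular extent of the polar panorama
GD_VIDEO_FRAME_S pan = stitcher->ExportPanAddr();      // panorama buffer owned by the stitcher
stitcher->GeoStitch(frame, info);                      // per-frame fast stitch (still a stub in this commit)
API_FrontStitch::Destroy(stitcher);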

@ -1,5 +1,5 @@
/********* Copyright (C) 2024 武汉高德红外股份有限公司 ***************
* API_VideoStitch.h
* API_UnderStitch.h
*
*
*
@ -17,13 +17,14 @@
#include "StitchStruct.h"
class STD_STITCH_API API_VideoStitch
// Downward-view georeferenced stitching of video frames
class STD_STITCH_API API_UnderStitch
{
public:
virtual ~API_VideoStitch() = default;
virtual ~API_UnderStitch() = default;
// Initialize stitching
virtual PanInfo Init(FrameInfo info) = 0;
virtual UPanInfo Init(FrameInfo info) = 0;
// Fast stitch via geometric rectification
virtual BYTE8 GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
@ -44,6 +45,6 @@ public:
virtual bool ExportGoogleTile(std::string dir, std::string name) = 0;
public:
static API_VideoStitch* Create(SINT32 nWidth, SINT32 nHeight);
static void Destroy(API_VideoStitch* obj);
static API_UnderStitch* Create(SINT32 nWidth, SINT32 nHeight);
static void Destroy(API_UnderStitch* obj);
};

@ -0,0 +1,75 @@
#include "API_FrontStitch.h"
#include "Arith_FrontStitch.h"
#include "Arith_Utils.h"
#include "Arith_CoordModule.h"
#include "Arith_FeaMatch.h"
#include <opencv2/opencv.hpp>
#include <omp.h>
using namespace std;
using namespace cv;
API_FrontStitch* API_FrontStitch::Create(SINT32 nWidth, SINT32 nHeight)
{
return new FrontStitch(nWidth, nHeight);
}
void API_FrontStitch::Destroy(API_FrontStitch* obj)
{
delete obj;
}
FrontStitch::FrontStitch(SINT32 nWidth, SINT32 nHeight)
{
_GeoSolver = new GeoSolver();
}
FrontStitch::~FrontStitch()
{
}
FPanInfo FrontStitch::Init(FrameInfo info, float AzRange, float PtRange)
{
_fAglRes = 0.001;
// Set the current stitching origin
_panPara.center = _GeoSolver->SetOriginPoint(info);
_panPara.range.fAz = AzRange;
_panPara.range.fPt = PtRange;
_panPara.m_pan_width = AzRange / _fAglRes;
_panPara.m_pan_height = PtRange / _fAglRes;
_panImage = cv::Mat::zeros(_panPara.m_pan_height, _panPara.m_pan_width, CV_8UC4);
return _panPara;
}
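For sizing reference: with the angular resolution hard-coded to 0.001 (per pixel, in whatever unit AzRange/PtRange are supplied in), the panorama dimensions follow directly from the requested ranges. The test call Init(info, 5, 5) in ProcessFrontVL therefore allocates a 5 / 0.001 = 5000 × 5000 CV_8UC4 canvas, roughly 95 MB.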
BYTE8 FrontStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
{
return BYTE8();
}
SINT32 FrontStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
{
return SINT32();
}
SINT32 FrontStitch::ProcessFrame()
{
return SINT32();
}
GD_VIDEO_FRAME_S FrontStitch::ExportPanAddr()
{
GD_VIDEO_FRAME_S pan_out;
pan_out.enPixelFormat = GD_PIXEL_FORMAT_RGB_PACKED;
pan_out.u32Width = _panPara.m_pan_width;
pan_out.u32Height = _panPara.m_pan_height;
pan_out.u64VirAddr[0] = _panImage.data;
return pan_out;
}

@ -0,0 +1,52 @@
/********* Copyright (C) 2025 武汉高德红外股份有限公司 ***************************************
* Arith_FrontStitch.h
*
*
*
* V0.5
* 04046wcw
* 2025/02/17
*
*****************************************************************************************/
#pragma once
#ifndef _FRONTSTITCH_H
#define _FRONTSTITCH_H
#include "Arith_GeoSolver.h"
class FrontStitch:public API_FrontStitch
{
public:
FrontStitch(SINT32 nWidth, SINT32 nHeight);
~FrontStitch();
// Initialize stitching
FPanInfo Init(FrameInfo info, float AzRange, float PtRange);
// Fast stitch via geometric rectification
BYTE8 GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para);
// Buffer a received frame
SINT32 ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para);
// Process buffered frames
SINT32 ProcessFrame();
// Get the panorama
GD_VIDEO_FRAME_S ExportPanAddr();
private:
GeoSolver* _GeoSolver;
private:
FPanInfo _panPara;// panorama configuration
float _fAglRes;// panorama angular resolution
cv::Mat _panImage;
};
#endif

@ -1,5 +1,5 @@
#include "Arith_GeoSolver.h"
#include "Arith_VideoStitch.h"
#include "Arith_UnderStitch.h"
#include "Arith_Utils.h"
#include "Arith_CoordModule.h"
#include "Arith_SysStruct.h"
@ -31,19 +31,65 @@ GeoSolver::~GeoSolver()
// return pos_pan;
//}
void GeoSolver::SetOriginPoint(FrameInfo info)
ANGLE32F GeoSolver::SetOriginPoint(FrameInfo info)
{
originPoint = getXYZFromBLH(info.craft.stPos);
origininfo = info;
// Forward/inverse test of the polar conversion, working in earth-frame Cartesian coordinates. Note that the
// polar conversion is considerably more complex than the planar H-matrix mapping and involves a nonlinear step.
ANGLE32F result = { 0 };// declared outside the test block so the return below compiles in both configurations
#if 0
// test 1: image point -> polar coordinates about the stitching origin
cv::Mat H = findHomography(info);
cv::Point2f grdPt = warpPointWithH(H, cv::Point2f(info.nWidth / 2, info.nHeight / 2));
PointXYZ pt = { 0 };
pt.X = grdPt.y;
pt.Y = -info.nEvHeight;
pt.Z = grdPt.x;
// Ground point -> polar coordinates
Pole pole = getPoleFromXYZ(pt);
// test 2: polar coordinates about the stitching origin -> image point
pole.distance = 1;
PointXYZ virPt = getXYZFromPole(pole);
// Project the virtual (unit-distance) point onto the ground
float ratio = -info.nEvHeight / virPt.Y;
PointXYZ realPt = { 0 };
realPt.X = virPt.X * ratio;
realPt.Y = virPt.Y * ratio;
realPt.Z = virPt.Z * ratio;
// Convert to East-North-Down
PointXYZ realPtGeo = { 0 };
realPtGeo.X = realPt.Z;
realPtGeo.Y = realPt.X;
realPtGeo.Z = -realPt.Y;
// Project back to image space
cv::Point2f px = warpPointWithH(H.inv(), cv::Point2f(realPtGeo.X, realPtGeo.Y));
#endif
return result;
}
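The nonlinear step mentioned above is the re-intersection of a viewing ray with the ground plane: the polar direction is turned into a unit-distance point and then scaled until it reaches Y = -H. A self-contained sketch of just that step, assuming the NUE convention used here (X north, Y up, Z east) and spelling out the trigonometry instead of calling getXYZFromPole, whose field layout is not shown in this diff:

#include <cmath>

struct Vec3 { double X, Y, Z; };   // stand-in for PointXYZ

// Hypothetical helper: az measured from north towards east, el above the horizon, both in radians;
// H is the height above ground, so the ground plane sits at Y = -H.
static Vec3 intersectGround(double az, double el, double H)
{
    Vec3 vir{ std::cos(el) * std::cos(az), std::sin(el), std::cos(el) * std::sin(az) };  // unit-distance ray point
    double ratio = -H / vir.Y;   // same nonlinear scaling as 'ratio' in the test block above
    return { vir.X * ratio, vir.Y * ratio, vir.Z * ratio };
}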
PointBLH GeoSolver::getBLHFromFrame(cv::Mat H, cv::Point2f pt)
{
cv::Point2f pt_Geo = warpPointWithH(H, pt);
return getBLHFromGeo(pt_Geo);
}
PointBLH GeoSolver::getBLHFromGeo(cv::Point2f ptInGeo)
{
PointXYZ ptNUE = { 0 };
@ -186,6 +232,8 @@ Mat GeoSolver::Mat_TransENG2uv(FrameInfo info)
Proj GeoSolver::AnlayseTform(FrameInfo info)
{
Proj projection;
@ -310,4 +358,17 @@ vector<cv::Point2f> warpRectWithH(cv::Mat H,cv::Size size)
_res.push_back(warpPointWithH(H, cv::Point2f(size.width,size.height)));
_res.push_back(warpPointWithH(H, cv::Point2f(0,size.height)));
return _res;
}
}
PointXYZ getNUEFromENG(PointXYZ p)
{
// Assumed ENG = East-North-Down; inverse of the NUE -> ENG axis swap used in SetOriginPoint above
PointXYZ ptrans = { 0 };
ptrans.X = p.Y; ptrans.Y = -p.Z; ptrans.Z = p.X;
return ptrans;
}
PointXYZ getENGFromNUE(PointXYZ p)
{
// Same axis mapping as the East-North-Down conversion in SetOriginPoint
PointXYZ ptrans = { 0 };
ptrans.X = p.Z; ptrans.Y = p.X; ptrans.Z = -p.Y;
return ptrans;
}

@ -45,8 +45,8 @@ public:
// Test
cv::Mat findHomography2(FrameInfo info);
// Set the exterior orientation of the stitching origin
void SetOriginPoint(FrameInfo info);
// Set the exterior orientation of the stitching origin; returns the geodetic pointing of the current optical axis
ANGLE32F SetOriginPoint(FrameInfo info);
// Compute the latitude/longitude of an original image point from H
PointBLH getBLHFromFrame(cv::Mat H, cv::Point2f ptInFrame);
@ -59,6 +59,8 @@ public:
private:
// Compute the image-to-geographic R/t back-projection for the current frame
Proj AnlayseTform(FrameInfo info);
@ -93,4 +95,8 @@ double computeQuadrilateralIOU(const vector<cv::Point2f>& quad1, const vector<cv
cv::Point2f warpPointWithH(cv::Mat H,cv::Point2f srcPt);
// Warp a rectangle with H; vertices returned clockwise
vector<cv::Point2f> warpRectWithH(cv::Mat H,cv::Size size);
vector<cv::Point2f> warpRectWithH(cv::Mat H,cv::Size size);
// Coordinate system conversions
PointXYZ getNUEFromENG(PointXYZ p);
PointXYZ getENGFromNUE(PointXYZ p);

@ -1,5 +1,5 @@
#include "API_VideoStitch.h"
#include "Arith_VideoStitch.h"
#include "API_UnderStitch.h"
#include "Arith_UnderStitch.h"
#include "Arith_Utils.h"
#include "Arith_CoordModule.h"
#include "Arith_FeaMatch.h"
@ -9,19 +9,19 @@
using namespace std;
using namespace cv;
API_VideoStitch * API_VideoStitch::Create(SINT32 nWidth, SINT32 nHeight)
API_UnderStitch* API_UnderStitch::Create(SINT32 nWidth, SINT32 nHeight)
{
return new VideoStitch(nWidth,nHeight);
return new UnderStitch(nWidth,nHeight);
}
void API_VideoStitch::Destroy(API_VideoStitch * obj)
void API_UnderStitch::Destroy(API_UnderStitch* obj)
{
delete obj;
}
VideoStitch::VideoStitch(SINT32 nWidth, SINT32 nHeight)
UnderStitch::UnderStitch(SINT32 nWidth, SINT32 nHeight)
{
_GeoSolver = new GeoSolver();
@ -38,7 +38,7 @@ VideoStitch::VideoStitch(SINT32 nWidth, SINT32 nHeight)
_totalFrameCnt = 0;
}
VideoStitch::~VideoStitch()
UnderStitch::~UnderStitch()
{
delete _GeoSolver;
delete _cache;
@ -46,7 +46,8 @@ VideoStitch::~VideoStitch()
PanInfo VideoStitch::InitMap(FrameInfo info)
UPanInfo UnderStitch::InitMap(FrameInfo info)
{
// Set the stitching origin
_GeoSolver->SetOriginPoint(info);
@ -59,7 +60,7 @@ PanInfo VideoStitch::InitMap(FrameInfo info)
double gsd = (info.nEvHeight * info.camInfo.fPixelSize) / (info.camInfo.nFocus * 1000);
// Panorama initialization
PanInfo panPara = { 0 };
UPanInfo panPara = { 0 };
panPara.m_pan_width = MIN(info.nWidth * 5,5000);// panorama width
panPara.m_pan_height = MIN(info.nWidth * 5,5000);// panorama height
panPara.scale = gsd / 2 ;// map scale: 1 m = ? pix
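A quick unit check on the GSD line above: if fPixelSize is given in micrometres and nFocus in millimetres, gsd comes out in metres per pixel. For example, at nEvHeight = 1000 m with a 15 µm detector behind a 300 mm lens, gsd = (1000 × 15) / (300 × 1000) = 0.05 m per pixel; the panorama side length is then the smaller of five frame widths and 5000 pixels.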
@ -87,7 +88,7 @@ PanInfo VideoStitch::InitMap(FrameInfo info)
PanInfo VideoStitch::Init(FrameInfo info)
UPanInfo UnderStitch::Init(FrameInfo info)
{
_panPara = InitMap(info);
@ -99,7 +100,7 @@ PanInfo VideoStitch::Init(FrameInfo info)
return _panPara;
}
//BYTE8 VideoStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
//BYTE8 UnderStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
//{
// Proj t_Proj = _GeoStitcher->AnlayseTform(para);
//
@ -153,7 +154,7 @@ PanInfo VideoStitch::Init(FrameInfo info)
// return 0;
//}
BYTE8 VideoStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
BYTE8 UnderStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
{
cv::Mat src = getRGBAMatFromGDFrame(img,img.u64VirAddr[0]);
@ -178,7 +179,8 @@ BYTE8 VideoStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
SINT32 VideoStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
SINT32 UnderStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
{
// Build a single-frame cache entry
auto _t_frame_cache = std::make_shared<FrameCache>();
@ -242,7 +244,7 @@ SINT32 VideoStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
return _totalFrameCnt;
}
SINT32 VideoStitch::ProcessFrame()
SINT32 UnderStitch::ProcessFrame()
{
// Optimize all frames
_BATask->OptFrame(_recvFrameKey, _H_pan);
@ -258,7 +260,7 @@ SINT32 VideoStitch::ProcessFrame()
bool VideoStitch::ExportGeoPng(std::string dir, std::string name)
bool UnderStitch::ExportGeoPng(std::string dir, std::string name)
{
// Compute the geographic extent of the panorama
auto P1 = getBLHFromPan(cv::Point2f(0, 0), _H_pan);
@ -290,7 +292,7 @@ bool VideoStitch::ExportGeoPng(std::string dir, std::string name)
bool VideoStitch::ExportGoogleTile(std::string dir, std::string name)
bool UnderStitch::ExportGoogleTile(std::string dir, std::string name)
{
// Compute the geographic extent of the panorama
auto P1 = getBLHFromPan(cv::Point2f(0, 0), _H_pan);
@ -319,7 +321,7 @@ bool VideoStitch::ExportGoogleTile(std::string dir, std::string name)
GD_VIDEO_FRAME_S VideoStitch::ExportPanAddr()
GD_VIDEO_FRAME_S UnderStitch::ExportPanAddr()
{
GD_VIDEO_FRAME_S pan_out;
@ -331,7 +333,7 @@ GD_VIDEO_FRAME_S VideoStitch::ExportPanAddr()
return pan_out;
}
PointBLH VideoStitch::getBLHFromPan(cv::Point2f ptInPan, cv::Mat _H_panPara)
PointBLH UnderStitch::getBLHFromPan(cv::Point2f ptInPan, cv::Mat _H_panPara)
{
cv::Mat H_inv = _H_panPara.inv();
@ -341,7 +343,7 @@ PointBLH VideoStitch::getBLHFromPan(cv::Point2f ptInPan, cv::Mat _H_panPara)
return _GeoSolver->getBLHFromGeo(ptInGeo);
}
cv::Point2f VideoStitch::getPanXYFromBLH(PointBLH ptInBLH, cv::Mat _H_panPara)
cv::Point2f UnderStitch::getPanXYFromBLH(PointBLH ptInBLH, cv::Mat _H_panPara)
{
// Latitude/longitude to the local geographic frame
cv::Point2f ptGeo = _GeoSolver->getGeoFromBLH(ptInBLH);
@ -350,7 +352,7 @@ cv::Point2f VideoStitch::getPanXYFromBLH(PointBLH ptInBLH, cv::Mat _H_panPara)
return warpPointWithH(_H_panPara, ptGeo);;
}
cv::Mat VideoStitch::getAffineFromGeo2Pan(PanInfo _pan)
cv::Mat UnderStitch::getAffineFromGeo2Pan(UPanInfo _pan)
{
Mat H = (Mat_<double>(3, 3) << _pan.scale, 0, _pan.map_shiftX,
0, -_pan.scale, _pan.m_pan_height + _pan.map_shiftY,
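Reading the affine row by row: panorama column = scale · x_geo + map_shiftX, panorama row = −scale · y_geo + m_pan_height + map_shiftY, where (x_geo, y_geo) are local geographic coordinates in metres (x east, y north). The sign flip on the second row makes north point up in the output image; the third row, cut off by the diff context here, would presumably be the homogeneous 0 0 1 so the matrix stays invertible (getBLHFromPan relies on _H_panPara.inv()).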

@ -1,4 +1,4 @@
#include "API_VideoStitch.h"
#include "API_UnderStitch.h"
#include "opencv2/opencv.hpp"
#include "Arith_GeoSolver.h"
#include "Arith_FeaMatch.h"
@ -8,14 +8,13 @@
#include "GoogleTile.h"
#include "Arith_BlendMap.h"
class VideoStitch:public API_VideoStitch
class UnderStitch :public API_UnderStitch
{
public:
VideoStitch(SINT32 nWidth, SINT32 nHeight);
~VideoStitch();
PanInfo Init(FrameInfo info);
UnderStitch(SINT32 nWidth, SINT32 nHeight);
~UnderStitch();
UPanInfo Init(FrameInfo info);
BYTE8 GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para);
@ -25,7 +24,6 @@ public:
// Process frames
SINT32 ProcessFrame();
// Export geographic products: KML + PNG panorama
bool ExportGeoPng(std::string dir, std::string name);
@ -41,8 +39,8 @@ public:
cv::Point2f getPanXYFromBLH(PointBLH ptInBLH, cv::Mat _H_panPara);
private:
cv::Mat getAffineFromGeo2Pan(PanInfo _pan);// compute the panorama projection from the geographic frame to the panorama map, in one place
PanInfo InitMap(FrameInfo info);
cv::Mat getAffineFromGeo2Pan(UPanInfo _pan);// compute the panorama projection from the geographic frame to the panorama map, in one place
UPanInfo InitMap(FrameInfo info);
private:
@ -65,7 +63,7 @@ private:
MapBlend* _BlendTask;// blending module
PanInfo _panPara;// panorama configuration
UPanInfo _panPara;// panorama configuration
cv::Mat _H_pan;// panorama projection matrix: geographic frame -> panorama map

@ -97,10 +97,10 @@ void googleTile::ExportTile(cv::Mat _pan, TileInfo panInfo, std::string dir,std:
cv::Mat tile = _pan(tileRect);
// Standard web tile size
if (tile.cols != 256 || tile.rows != 256)
{
cv::resize(tile, tile, cv::Size(256, 256));
}
// if (tile.cols != 256 || tile.rows != 256)
// {
// cv::resize(tile, tile, cv::Size(256, 256));
// }
// Build the file name
std::string tileDir = dir + "/" + std::to_string(zoom);

@ -21,8 +21,8 @@ struct FrameInfo
int nHeight;
};
// Panorama configuration
struct PanInfo
// Downward-view panorama configuration
struct UPanInfo
{
int m_pan_width;
int m_pan_height;
@ -32,6 +32,17 @@ struct PanInfo
};
// Forward-view panorama configuration
struct FPanInfo
{
int m_pan_width;
int m_pan_height;
float fAglRes;
ANGLE32F range;
ANGLE32F center;
};
// Match graph: edges used by bundle adjustment (BA)
struct Match_Net
{
@ -42,7 +53,7 @@ struct Match_Net
#define IMG_CACHE_SIZE (1920 * 1080 * 2) // image cache size
#define FEA_NUM_MAX 500 // feature points per frame
#define FEA_NUM_MAX 500 // feature points per frame
#define FEA_DES_SIZE 128 // feature descriptor length
// Frame cache: fixed size by design, for easy management
@ -56,4 +67,19 @@ struct FrameCache
cv::KeyPoint _pt[FEA_NUM_MAX];
FLOAT32 _desp[FEA_NUM_MAX * FEA_DES_SIZE];
DOUBLE64 H[9];// H matrix
};
};
// Forward stitching
// Forward-view polar-coordinate range
struct PoleArea
{
float left;
float right;
float up;
float bottom;
};
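A sketch (not part of the commit) of how these angular structures would plug together: an (azimuth, pitch) direction relative to the panorama centre maps to pixel coordinates through the angular resolution, mirroring the width/height computation in FrontStitch::Init. Field names follow FPanInfo; filling fAglRes and the orientation of the row axis are assumptions here.

// Hypothetical helper: column 0 corresponds to center.fAz - range.fAz / 2, one pixel spans fAglRes.
static void angleToPanPixel(const FPanInfo& pan, float az, float pt, int& col, int& row)
{
    col = (int)((az - (pan.center.fAz - pan.range.fAz / 2)) / pan.fAglRes);
    row = (int)((pt - (pan.center.fPt - pan.range.fPt / 2)) / pan.fAglRes);
}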
