实现谷歌kml产品输出

main
wangchongwu 5 months ago
parent 3e784fb999
commit cda013b9d0

@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.5.0)
cmake_minimum_required(VERSION 3.15.0)
project(stitch VERSION 0.1.0 LANGUAGES C CXX)
set(CMAKE_CXX_STANDARD 17)

@ -149,7 +149,7 @@ void ProcessIR()
info.craft.stPos.H = Paras_IR.Paras_IR.airCraftInfo.H;
info.nEvHeight = Paras_IR.Paras_IR.airCraftInfo.H;
info.nEvHeight = Paras_IR.Paras_IR.airCraftInfo.H - 25;
info.servoInfo.fServoAz = Paras_IR.Paras_IR.servoInfo.fAz;
info.servoInfo.fServoPt = Paras_IR.Paras_IR.servoInfo.fPz + 90;
@ -183,7 +183,7 @@ void ProcessIR()
pan = stitcher->ExportPanAddr();
mat_pan = cv::Mat(pan.u32Height, pan.u32Width, CV_8UC3, pan.u64VirAddr[0]);
mat_pan = cv::Mat(pan.u32Height, pan.u32Width, CV_8UC4, pan.u64VirAddr[0]);
}
else
{
@ -215,6 +215,9 @@ void ProcessIR()
imshow("pan_opt", mat_pan);
waitKey(0);
// 输出谷歌png
stitcher->ExportGeoPng("D:/", "IR_pan");
}
void ProcessVL()
@ -228,7 +231,8 @@ void ProcessVL()
FILE* file = fopen("D:/wangchongwu_gitea_2023/20241219152643_1.video", "rb");
//FILE* file = fopen("H:/21.video", "rb");
GaussianRandom gr(0.0, 1, 0.0);
for (size_t i = 0; i < 600; i++)
@ -253,7 +257,7 @@ void ProcessVL()
info.craft.stPos.H = Paras_VL.Paras_VL.airCraftInfo.H;
info.nEvHeight = Paras_VL.Paras_VL.airCraftInfo.H;
info.nEvHeight = Paras_VL.Paras_VL.airCraftInfo.H - 25;
info.servoInfo.fServoAz = Paras_VL.Paras_VL.servoInfo.fAz;
info.servoInfo.fServoPt = Paras_VL.Paras_VL.servoInfo.fPz + 90;
@ -286,11 +290,11 @@ void ProcessVL()
pan = stitcher->ExportPanAddr();
mat_pan = cv::Mat(pan.u32Height, pan.u32Width, CV_8UC1, pan.u64VirAddr[0]);
mat_pan = cv::Mat(pan.u32Height, pan.u32Width, CV_8UC4, pan.u64VirAddr[0]);
}
else
{
if (i % 10 != 0)
if (i % 20 != 0)
{
continue;
}
@ -299,33 +303,46 @@ void ProcessVL()
<< info.servoInfo.fServoAz << " " << info.servoInfo.fServoPt
<< std::endl;
cv::TickMeter tm;
tm.start();
// 基于外参的快拼
stitcher->GeoStitch(frame, info);
tm.stop();
cout << "time:" << tm.getTimeMilli() << endl;
// 接收帧
auto a = stitcher->ReceiveFrame(frame, info);
}
imshow("VL", IMG);
imshow("pan_opt", mat_pan);
waitKey(1);
}
cv::TickMeter tm;
tm.start();
// 处理帧
stitcher->ProcessFrame();
tm.stop();
cout << "time opt:" << tm.getTimeMilli() << endl;
imshow("pan_opt", mat_pan);
waitKey(0);
// 输出谷歌png
stitcher->ExportGeoPng("D:/", "VL_pan");
// 输出谷歌tile
stitcher->ExportGoogleTile("D:/", "VL_pan");
}
// Program entry point.
// NOTE(review): this block looks like merged diff residue — both the old pair
// (ProcessIR(); //ProcessVL();) and the new pair (//ProcessIR(); ProcessVL();)
// are present, so as written BOTH ProcessIR() and ProcessVL() execute.
// Confirm which pipeline is actually intended before building.
int main(int, char**)
{
ProcessIR();
//ProcessVL();
//ProcessIR();
ProcessVL();
}

@ -12,14 +12,14 @@ endif()
IF(WIN32)
set(OpenCV_DIR "D:/opencv410_vc17")
set(Ceres_DIR "C:/Lib/ceres")
# set(CMAKE_TOOLCHAIN_FILE "D:/wangchongwu_gitea_2023/vcpkg-2025.01.13/scripts/buildsystems/vcpkg.cmake")
# set(Ceres_DIR "D:/wangchongwu_gitea_2023/vcpkg-2025.01.13/packages/ceres_x64-windows/share/ceres")
# set(Eigen3_DIR "D:/wangchongwu_gitea_2023/vcpkg-2025.01.13/packages/eigen3_x64-windows/share/eigen3")
# set(glog_DIR "D:/wangchongwu_gitea_2023/vcpkg-2025.01.13/packages/glog_x64-windows/share/glog")
ELSE(WIN32)
ENDIF(WIN32)
find_package(Ceres REQUIRED)
include_directories(${CERES_INCLUDE_DIRS})
@ -58,11 +58,10 @@ target_link_libraries(${LIB_STITCH}
# # # gcc0
# if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
# #
# add_definitions(-fvisibility=hidden)
# #
# add_definitions(-fvisibility=hidden)
# #
# target_compile_options(${LIB_STITCH} PRIVATE -Werror -Wreturn-type)
# #
# target_compile_options(${LIB_STITCH} PRIVATE -Werror -Wreturn-type)
# endif()
set(LIBRARY_OUTPUT_PATH ${CMAKE_SOURCE_DIR}/Bin) #

@ -34,18 +34,14 @@ public:
// 处理帧
virtual SINT32 ProcessFrame() = 0;
// 获取全景图
virtual GD_VIDEO_FRAME_S ExportPanAddr() = 0;
// 输出地理产品:kml png全景
virtual bool ExportGeoPng(std::string dir, std::string name) = 0;
//// 基于特征点拼接,全局光束平差方法,分为更新和优化调整两个接口
//virtual SINT32 BAStitch(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
//virtual SINT32 BAOpt() = 0;
// 输出地理产品:谷歌标准瓦片
virtual bool ExportGoogleTile(std::string dir, std::string name) = 0;
public:
static API_VideoStitch* Create(SINT32 nWidth, SINT32 nHeight);

@ -1,8 +1,11 @@
#include "Arith_BATask.h"
#define GLOG_USE_GLOG_EXPORT
#include "ceres/ceres.h"
#include "Arith_GeoSolver.h"
#include "math.h"
using namespace ceres;
// 定义残差结构体
struct HomographyResidual
{
@ -85,7 +88,7 @@ BA_Task::BA_Task(FileCache<FrameCache>* cache)
_cache = cache;
_FeaMatcher = new FeatureMatcher(DetectorType::SIFT, MatcherType::FLANN);
google::InitGoogleLogging("ceres");
//google::InitGoogleLogging("ceres");
}
BA_Task::~BA_Task()

@ -38,18 +38,35 @@ void GeoSolver::SetOriginPoint(FrameInfo info)
origininfo = info;
}
PointBLH GeoSolver::getBLH(cv::Mat H, cv::Point2f pt)
// Convert a pixel of the current frame to latitude/longitude/height:
// first warp it into the local geographic frame with H, then geo -> BLH.
PointBLH GeoSolver::getBLHFromFrame(cv::Mat H, cv::Point2f pt)
{
    const cv::Point2f geoPt = warpPointWithH(H, pt);
    return getBLHFromGeo(geoPt);
}
PointXYZ pt_XYZ = { 0 };
pt_XYZ.X = pt_Geo.x;
pt_XYZ.Y = pt_Geo.y;
pt_XYZ.Z = -origininfo.nEvHeight;
PointBLH res = getBLHFromXYZ(getCGCSXYZFromNUEXYZ(pt_XYZ, originPoint));
// Convert a point in the module's local geographic frame to BLH.
// The local frame used by this module is east(x)-north(y)-down(z); repack it
// into the NUE layout expected by the CGCS conversion helpers.
PointBLH GeoSolver::getBLHFromGeo(cv::Point2f ptInGeo)
{
    PointXYZ nue = { 0 };
    nue.X = ptInGeo.y;                 // north component
    nue.Y = -origininfo.nEvHeight;     // ground plane assumed at -H below the origin — confirm
    nue.Z = ptInGeo.x;                 // east component
    return getBLHFromXYZ(getCGCSXYZFromNUEXYZ(nue, originPoint));
}
// Convert latitude/longitude/height to the module's local geographic frame.
// BLH -> earth-fixed XYZ -> local NUE, then repack NUE into the module's
// east(x)-north(y) convention.
cv::Point2f GeoSolver::getGeoFromBLH(PointBLH ptPos)
{
    const PointXYZ nue = getNUEXYZFromCGCSXYZ(getXYZFromBLH(ptPos), originPoint);
    cv::Point2f geoPt = { 0 };
    geoPt.x = nue.Z; // east
    geoPt.y = nue.X; // north
    return geoPt;
}
cv::Point2f GeoSolver::Trans_uv2Geo(cv::Point2f pos_frame, TForm form)
{
Mat point = (Mat_<double>(3, 1) << pos_frame.x, pos_frame.y, 1);

@ -3,7 +3,7 @@
* Arith_GeoStitcher.h
*
* ,,
*
* 便--使
* V0.5
* 04046wcw
* 2025/01/15
@ -46,11 +46,13 @@ public:
void SetOriginPoint(FrameInfo info);
// 根据H计算原始像方的经纬度
PointBLH getBLH(cv::Mat H, cv::Point2f pt);
PointBLH getBLHFromFrame(cv::Mat H, cv::Point2f ptInFrame);
// 计算局部地理系下目标的经纬度
PointBLH getBLHFromGeo(cv::Point2f ptInGeo);
// 根据NUE地理坐标计算
// 经纬度转换为局部地理系坐
cv::Point2f getGeoFromBLH(PointBLH ptPos);
private:
// 计算当前帧像方-地理坐标系R t反投影关系
@ -72,7 +74,6 @@ private:
private:
PointXYZ originPoint;//成图初始点的地心地固坐标,作为拼接参考
FrameInfo origininfo;

@ -27,7 +27,7 @@ VideoStitch::VideoStitch(SINT32 nWidth, SINT32 nHeight)
_FeaMatcher = new FeatureMatcher(DetectorType::SIFT, MatcherType::FLANN);
_cache = new FileCache<FrameCache>(20,"./cache");
_cache = new FileCache<FrameCache>(100,"./cache");
_BATask = new BA_Task(_cache);
@ -53,17 +53,17 @@ PanInfo VideoStitch::InitMap(FrameInfo info)
// 全景图初始化
PanInfo panPara = { 0 };
panPara.m_pan_width = 1000;//全景宽
panPara.m_pan_height = 1000;//全景高
panPara.scale = 0.5;//比例尺,1m = ?pix
panPara.m_pan_width = 1500;//全景宽
panPara.m_pan_height = 1500;//全景高
panPara.scale = 0.6;//比例尺,1m = ?pix
// 直接无平移解算
cv::Mat H_0 = getAffineFromGeo2Pan(panPara);
auto cur = warpPointWithH(H_0, ct_geo);
// 计算平移到全景图固定点的平移量,从此处开始拼接
int planX = 200;
int planY = 200;
int planX = 500;
int planY = 500;
panPara.map_shiftX = planX - (cur.x);//平移X
panPara.map_shiftY = planY - (cur.y);//平移Y
@ -82,7 +82,12 @@ PanInfo VideoStitch::Init(FrameInfo info)
{
_panPara = InitMap(info);
_panImage = cv::Mat::zeros(_panPara.m_pan_height, _panPara.m_pan_width, CV_8UC3);
_panImage = cv::Mat::zeros(_panPara.m_pan_height, _panPara.m_pan_width, CV_8UC4);
_panMask = cv::Mat::zeros(_panPara.m_pan_height, _panPara.m_pan_width, CV_8UC1);
//auto p1 = getBLHFromPan(cv::Point2f(7420, 2800), _H_pan);
//auto p2 = getPanXYFromBLH(p1, _H_pan);
return _panPara;
}
@ -143,7 +148,7 @@ PanInfo VideoStitch::Init(FrameInfo info)
BYTE8 VideoStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
{
cv::Mat src = getRGBMatFromGDFrame(img,img.u64VirAddr[0]);
cv::Mat src = getRGBAMatFromGDFrame(img,img.u64VirAddr[0]);
// uv2Geo H
cv::Mat H1 = _GeoSolver->findHomography(para);
@ -153,11 +158,11 @@ BYTE8 VideoStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
// 利用H投影当前帧到全景
cv::Mat imagetmp(_panImage.size(), CV_8UC3, cv::Scalar(0, 0, 0));
cv::warpPerspective(src, imagetmp, H, imagetmp.size());
cv::warpPerspective(src, imagetmp, H, imagetmp.size(), cv::INTER_LINEAR, cv::BORDER_TRANSPARENT);
cv::Mat mask = cv::Mat::ones(src.size(), CV_8UC1) * 255;
cv::Mat warped_mask;
cv::warpPerspective(mask, warped_mask, H, imagetmp.size());
cv::warpPerspective(mask, warped_mask, H, imagetmp.size(), cv::INTER_LINEAR);
imagetmp.copyTo(_panImage, warped_mask);
return 0;
@ -200,7 +205,7 @@ SINT32 VideoStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
cv::Mat descriptors;
cv::Mat src = getRGBMatFromGDFrame(img, img.u64VirAddr[0]);
cv::Mat src = getRGBAMatFromGDFrame(img, img.u64VirAddr[0]);
_FeaMatcher->extractFeatures(src, keypoints, descriptors);
size_t keyNum = MIN(keypoints.size(), FEA_NUM_MAX);
@ -238,35 +243,31 @@ SINT32 VideoStitch::ProcessFrame()
vector<KeyType> vec_opt;
// 优化所有帧
_BATask->OptFrame(_recvFrameKey, _H_pan);
//_BATask->OptFrame(_recvFrameKey, _H_pan);
// 重投影所有帧到全景
mapFrame(_recvFrameKey);
// 正反算验证
// auto _t_frame_cache = std::make_shared<FrameCache>();
//bool flag = _cache->get(10, _t_frame_cache);
auto _t_frame_cache = std::make_shared<FrameCache>();
bool flag = _cache->get(10, _t_frame_cache);
// 读取当前H
cv::Mat H1 = cv::Mat(3, 3, CV_64FC1, _t_frame_cache->H);
// 与全景图的H相乘
cv::Mat H = _H_pan * H1;
cv::Point2f p_GEO = CalGeoInfo(warpPointWithH(H, cv::Point2f(0, 0)), _H_pan);
//// 读取当前H
//cv::Mat H1 = cv::Mat(3, 3, CV_64FC1, _t_frame_cache->H);
//// 与全景图的H相乘
//cv::Mat H = _H_pan * H1;
PointBLH BLHpT = _GeoSolver->getBLH(H1, cv::Point2f(640, 512));
//PointBLH res = getBLHFromPan(warpPointWithH(H, cv::Point2f(519, 483)), _H_pan);
//PointBLH BLHpT = _GeoSolver->getBLHFromFrame(H1, cv::Point2f(519, 483));
return 0;
}
void VideoStitch::mapFrame(vector<KeyType> frameInd)
{
_panImage.setTo(255);
_panImage.setTo(0);
// 从文件缓存获取帧
auto _t_frame_cache = std::make_shared<FrameCache>();
@ -290,20 +291,82 @@ void VideoStitch::mapFrame(vector<KeyType> frameInd)
cv::Mat imagetmp(_panImage.size(), CV_8UC3, cv::Scalar(0, 0, 0));
// 获取图像数据
cv::Mat src = getRGBMatFromGDFrame(_t_frame_cache->_frame_info, _t_frame_cache->_data);
cv::Mat src = getRGBAMatFromGDFrame(_t_frame_cache->_frame_info, _t_frame_cache->_data);
cv::warpPerspective(src, imagetmp, H, imagetmp.size());
cv::warpPerspective(src, imagetmp, H, imagetmp.size(), cv::INTER_LINEAR, cv::BORDER_TRANSPARENT);
cv::Mat mask = cv::Mat::ones(src.size(), CV_8UC1) * 255;
cv::Mat mask = cv::Mat::ones(cv::Size(src.cols - 200,src.rows - 200), CV_8UC1) * 255;
cv::Mat warped_mask;
cv::warpPerspective(mask, warped_mask, H, imagetmp.size());
cv::warpPerspective(mask, warped_mask, H, imagetmp.size(), cv::INTER_LINEAR);
imagetmp.copyTo(_panImage, warped_mask);
// 保存遮罩
warped_mask.copyTo(_panMask, warped_mask);
//imshow("mask", _panMask);
//waitKey(1);
}
}
/// Export the current panorama as a georeferenced PNG + KML pair.
/// @param dir  output directory
/// @param name base file name (produces <name>.png and <name>.kml)
/// @return true on success
bool VideoStitch::ExportGeoPng(std::string dir, std::string name)
{
    // Georeference the panorama: back-project its four corners to lat/lon.
    auto P1 = getBLHFromPan(cv::Point2f(0, 0), _H_pan);
    auto P2 = getBLHFromPan(cv::Point2f(_panImage.cols, 0), _H_pan);
    auto P3 = getBLHFromPan(cv::Point2f(_panImage.cols, _panImage.rows), _H_pan);
    auto P4 = getBLHFromPan(cv::Point2f(0, _panImage.rows), _H_pan);

    // B = latitude, L = longitude; bounding box over the four corners.
    auto minL = min(min(min(P1.L, P2.L), P3.L), P4.L);
    auto maxL = max(max(max(P1.L, P2.L), P3.L), P4.L);
    auto minB = min(min(min(P1.B, P2.B), P3.B), P4.B);
    auto maxB = max(max(max(P1.B, P2.B), P3.B), P4.B);

    TileInfo info = { 0 };
    info.north = maxB;
    info.south = minB;
    // BUGFIX: east/west were swapped (east held minL, west held maxL), which
    // emits an inverted KML LatLonBox. East is the maximum longitude.
    info.east = maxL;
    info.west = minL;

    // Emit PNG + KML GroundOverlay via the Google-product writer.
    _googleProduct.ExportGeoPng(_panImage, info, dir, name);
    return true; // was `return 0;` — reported failure from a bool function
}
/// Export the current panorama as a Google-standard tile set.
/// @param dir  output directory
/// @param name product name passed through to the tile writer
/// @return true on success
bool VideoStitch::ExportGoogleTile(std::string dir, std::string name)
{
    // Georeference the panorama: back-project its four corners to lat/lon.
    auto P1 = getBLHFromPan(cv::Point2f(0, 0), _H_pan);
    auto P2 = getBLHFromPan(cv::Point2f(_panImage.cols, 0), _H_pan);
    auto P3 = getBLHFromPan(cv::Point2f(_panImage.cols, _panImage.rows), _H_pan);
    auto P4 = getBLHFromPan(cv::Point2f(0, _panImage.rows), _H_pan);

    // B = latitude, L = longitude; bounding box over the four corners.
    auto minL = min(min(min(P1.L, P2.L), P3.L), P4.L);
    auto maxL = max(max(max(P1.L, P2.L), P3.L), P4.L);
    auto minB = min(min(min(P1.B, P2.B), P3.B), P4.B);
    auto maxB = max(max(max(P1.B, P2.B), P3.B), P4.B);

    TileInfo info = { 0 };
    info.north = maxB;
    info.south = minB;
    // BUGFIX: east/west were swapped (east held minL, west held maxL), which
    // describes an inverted geographic extent. East is the maximum longitude.
    info.east = maxL;
    info.west = minL;

    _googleProduct.ExportTile(_panImage, info, dir, name);
    return true; // was `return 0;` — reported failure from a bool function
}
GD_VIDEO_FRAME_S VideoStitch::ExportPanAddr()
@ -318,14 +381,23 @@ GD_VIDEO_FRAME_S VideoStitch::ExportPanAddr()
return pan_out;
}
cv::Point2f VideoStitch::CalGeoInfo(cv::Point2f ptInPan, cv::Mat _H_panPara)
// Convert a panorama pixel to latitude/longitude: invert the geo->pan
// homography to drop the pixel into the local geographic frame, then let the
// geo solver convert that frame position to BLH.
PointBLH VideoStitch::getBLHFromPan(cv::Point2f ptInPan, cv::Mat _H_panPara)
{
    const cv::Mat panToGeo = _H_panPara.inv();
    const cv::Point2f geoPt = warpPointWithH(panToGeo, ptInPan);
    return _GeoSolver->getBLHFromGeo(geoPt);
}
// Convert latitude/longitude to panorama pixel coordinates (inverse of
// getBLHFromPan).
// BUGFIX: removed a stray `return ptInGeo;` left over from a merged diff — it
// referenced an undefined name and made the projection below unreachable.
cv::Point2f VideoStitch::getPanXYFromBLH(PointBLH ptInBLH, cv::Mat _H_panPara)
{
    // Lat/lon -> local geographic frame.
    cv::Point2f ptGeo = _GeoSolver->getGeoFromBLH(ptInBLH);
    // Project from the geographic frame into the panorama.
    return warpPointWithH(_H_panPara, ptGeo);
}
cv::Mat VideoStitch::getAffineFromGeo2Pan(PanInfo _pan)

@ -5,6 +5,7 @@
#include "StitchStruct.h"
#include "Arith_BATask.h"
#include "FileCache.h"
#include "GoogleTile.h"
class VideoStitch:public API_VideoStitch
{
@ -25,13 +26,19 @@ public:
// 投影到全景图
void mapFrame(vector<KeyType> frameInd);
// 输出地理产品:kml png全景
bool ExportGeoPng(std::string dir, std::string name);
//
bool ExportGoogleTile(std::string dir, std::string name);
public:
GD_VIDEO_FRAME_S ExportPanAddr();
// 全景图地理信息计算
cv::Point2f CalGeoInfo(cv::Point2f ptInPan, cv::Mat _H_panPara);
PointBLH getBLHFromPan(cv::Point2f ptInPan, cv::Mat _H_panPara);
cv::Point2f getPanXYFromBLH(PointBLH ptInBLH, cv::Mat _H_panPara);
private:
@ -45,6 +52,7 @@ private:
private:
googleTile _googleProduct;
GeoSolver* _GeoSolver;//外参计算
@ -52,15 +60,18 @@ private:
BA_Task* _BATask;//BA
PanInfo _panPara;//全景图配置
cv::Mat _H_pan;//全景图投影矩阵:从地理系到全景地图
cv::Mat getAffineFromGeo2Pan(PanInfo _pan);//计算全景图投影,从地理系到全景地图,统一计算
cv::Mat _panImage;
cv::Mat _panMask; //覆盖区域遮罩
int _totalFrameCnt;//处理帧计数
};

@ -6,14 +6,63 @@
#include <cmath>
#include <filesystem>
googleTile::googleTile(const cv::Mat& img, const std::vector<std::pair<double, double>>& geoCorners, int zoom, const std::string& outputDir)
using std::string;
// Default constructor: members keep their default-initialized state; all
// inputs are supplied per call through the Export* methods.
googleTile::googleTile()
{
}
// Destructor: nothing to release manually.
// BUGFIX: removed stray statements (image_ = img.clone(); geoCorners_ =
// geoCorners; zoom_ = zoom; outputDir_ = outputDir;) that were merged-diff
// residue from a deleted parameterized constructor — `img`, `geoCorners`,
// `zoom` and `outputDir` are not in scope in the destructor, so the old body
// could not compile.
googleTile::~googleTile()
{
}
// Save the panorama as <dir>/<name>.png plus a KML GroundOverlay
// (<dir>/<name>.kml) that georeferences it for Google Earth.
// @param _pan panorama image to write
// @param info geographic bounding box (north/south = latitude bounds,
//             east/west = longitude bounds)
// @param dir  output directory
// @param name base file name for both products
void googleTile::ExportGeoPng(cv::Mat _pan, TileInfo info, std::string dir, std::string name)
{
    // Save the panorama image itself.
    string png_path = dir + "/" + name + ".png";
    cv::imwrite(png_path, _pan);

    // Write the companion KML next to it.
    string kml_path = dir + "/" + name + ".kml";
    std::ofstream kml_file(kml_path);
    if (!kml_file.is_open())
    {
        // BUGFIX: previously wrote into a possibly-failed stream unchecked.
        return;
    }
    // BUGFIX: default ostream precision (6 significant digits) truncates
    // coordinates to ~100 m; widen it so the overlay is placed accurately.
    kml_file.precision(10);

    // KML header.
    kml_file << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" << std::endl;
    kml_file << "<kml xmlns=\"http://www.opengis.net/kml/2.2\">" << std::endl;
    kml_file << "<Document>" << std::endl;

    // GroundOverlay referencing the PNG.
    kml_file << "<GroundOverlay>" << std::endl;
    kml_file << " <name>My Panoramic Image</name>" << std::endl;
    kml_file << " <Icon>" << std::endl;
    kml_file << " <href>" << name + ".png" << "</href>" << std::endl;
    kml_file << " </Icon>" << std::endl;

    // LatLonBox from the precomputed corner bounds.
    // (Comment fix: north/south are LATITUDE bounds, east/west are LONGITUDE
    // bounds — the old comments had them the wrong way round.)
    kml_file << " <LatLonBox>" << std::endl;
    kml_file << " <north>" << info.north << "</north>" << std::endl; // latitude range
    kml_file << " <south>" << info.south << "</south>" << std::endl;
    kml_file << " <east>" << info.east << "</east>" << std::endl; // longitude range
    kml_file << " <west>" << info.west << "</west>" << std::endl;
    kml_file << " </LatLonBox>" << std::endl;

    // KML footer.
    kml_file << "</GroundOverlay>" << std::endl;
    kml_file << "</Document>" << std::endl;
    kml_file << "</kml>" << std::endl;
    kml_file.close();
}
// Export the panorama as a Google-standard tile pyramid.
// TODO: not implemented — the body is empty; the disabled generateTiles()
// logic elsewhere in this file looks like the intended starting point.
void googleTile::ExportTile(cv::Mat _pan, TileInfo info, std::string dir, std::string name)
{
}
cv::Point2f googleTile::latLonToGlobal(double lat, double lon)
{
double tileSize = 256.0;
@ -52,103 +101,68 @@ cv::Mat googleTile::computeHomography()
return cv::getPerspectiveTransform(srcPts, dstPts);
}
// Write outputDir_/tiles.kml: one GroundOverlay per generated tile so the
// whole tile set can be inspected directly in Google Earth.
void googleTile::generateKML(const std::vector<TileInfo>& tiles)
{
    const std::string kmlPath = outputDir_ + "/tiles.kml";
    std::ofstream out(kmlPath);
    if (!out)
    {
        std::cerr << "无法创建 KML 文件: " << kmlPath << std::endl;
        return;
    }

    // Document header.
    out << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
    out << "<kml xmlns=\"http://www.opengis.net/kml/2.2\">\n";
    out << " <Document>\n";

    // One overlay per tile; the zoom/x/y path doubles as the display name.
    for (const auto& t : tiles)
    {
        const std::string tileName = std::to_string(zoom_) + "/"
            + std::to_string(t.x) + "/" + std::to_string(t.y) + ".png";
        out << " <GroundOverlay>\n"
            << " <name>" << tileName << "</name>\n"
            << " <Icon>\n"
            << " <href>" << tileName << "</href>\n"
            << " </Icon>\n"
            << " <LatLonBox>\n"
            << " <north>" << t.north << "</north>\n"
            << " <south>" << t.south << "</south>\n"
            << " <east>" << t.east << "</east>\n"
            << " <west>" << t.west << "</west>\n"
            << " </LatLonBox>\n"
            << " </GroundOverlay>\n";
    }

    // Document footer.
    out << " </Document>\n";
    out << "</kml>\n";
    out.close();
    std::cout << "生成 KML 文件:" << kmlPath << std::endl;
}
// Cut the georeferenced image into 256-px Google-grid tiles under
// outputDir_/zoom/x/y.png and emit a KML index via generateKML().
// NOTE(review): merged diff residue — the active body below is immediately
// followed by a commented-out copy of itself; it appears the implementation
// was being disabled. Confirm which version is intended.
void googleTile::generateTiles()
{
std::filesystem::create_directories(outputDir_);
cv::Mat H = computeHomography();
// Project the four geo corners into global tile-pixel space.
std::vector<cv::Point2f> globalCorners;
for (const auto& corner : geoCorners_)
{
globalCorners.push_back(latLonToGlobal(corner.first, corner.second));
}
// Axis-aligned bounding box of the projected corners.
float minX = globalCorners[0].x, maxX = globalCorners[0].x;
float minY = globalCorners[0].y, maxY = globalCorners[0].y;
for (const auto& p : globalCorners)
{
minX = std::min(minX, p.x);
maxX = std::max(maxX, p.x);
minY = std::min(minY, p.y);
maxY = std::max(maxY, p.y);
}
// Tile index range covering the bounding box at this zoom level.
int tileSize = 256;
int xTileMin = static_cast<int>(std::floor(minX / tileSize));
int xTileMax = static_cast<int>(std::floor(maxX / tileSize));
int yTileMin = static_cast<int>(std::floor(minY / tileSize));
int yTileMax = static_cast<int>(std::floor(maxY / tileSize));
std::vector<TileInfo> tileInfos;
for (int tx = xTileMin; tx <= xTileMax; tx++)
{
for (int ty = yTileMin; ty <= yTileMax; ty++)
{
std::vector<cv::Point2f> tileGlobalPts = {
{cv::Point2f(tx * tileSize, ty * tileSize)},
{cv::Point2f((tx + 1) * tileSize, ty * tileSize)},
{cv::Point2f((tx + 1) * tileSize, (ty + 1) * tileSize)},
{cv::Point2f(tx * tileSize, (ty + 1) * tileSize)} };
std::vector<cv::Point2f> tileImgPts;
cv::perspectiveTransform(tileGlobalPts, tileImgPts, H);
//cv::Mat tileTransform = cv::getPerspectiveTransform(tileImgPts, {
// {0, 0}, {tileSize, 0}, {tileSize, tileSize}, {0, tileSize} });
cv::Mat tileImg;
//cv::warpPerspective(image_, tileImg, tileTransform, cv::Size(tileSize, tileSize));
std::string tileDir = outputDir_ + "/" + std::to_string(zoom_) + "/" + std::to_string(tx);
std::filesystem::create_directories(tileDir);
std::string tilePath = tileDir + "/" + std::to_string(ty) + ".png";
// NOTE(review): tileImg is never filled — the warpPerspective that would
// render it is commented out above, so imwrite receives an empty image.
cv::imwrite(tilePath, tileImg);
tileInfos.push_back({ tx, ty, globalToLatLon({cv::Point2f(tx * tileSize, ty * tileSize)}).first,
globalToLatLon({cv::Point2f((tx + 1) * tileSize, (ty + 1) * tileSize)}).first,
globalToLatLon({cv::Point2f((tx + 1) * tileSize, (ty + 1) * tileSize)}).second,
globalToLatLon({cv::Point2f(tx * tileSize, ty * tileSize)}).second });
}
}
generateKML(tileInfos);
// ---- disabled duplicate of the body above (kept verbatim; see NOTE) ----
//std::filesystem::create_directories(outputDir_);
//cv::Mat H = computeHomography();
//std::vector<cv::Point2f> globalCorners;
//for (const auto& corner : geoCorners_)
//{
// globalCorners.push_back(latLonToGlobal(corner.first, corner.second));
//}
//float minX = globalCorners[0].x, maxX = globalCorners[0].x;
//float minY = globalCorners[0].y, maxY = globalCorners[0].y;
//for (const auto& p : globalCorners)
//{
// minX = std::min(minX, p.x);
// maxX = std::max(maxX, p.x);
// minY = std::min(minY, p.y);
// maxY = std::max(maxY, p.y);
//}
//int tileSize = 256;
//int xTileMin = static_cast<int>(std::floor(minX / tileSize));
//int xTileMax = static_cast<int>(std::floor(maxX / tileSize));
//int yTileMin = static_cast<int>(std::floor(minY / tileSize));
//int yTileMax = static_cast<int>(std::floor(maxY / tileSize));
//std::vector<TileInfo> tileInfos;
//for (int tx = xTileMin; tx <= xTileMax; tx++)
//{
// for (int ty = yTileMin; ty <= yTileMax; ty++)
// {
// std::vector<cv::Point2f> tileGlobalPts = {
// {cv::Point2f(tx * tileSize, ty * tileSize)},
// {cv::Point2f((tx + 1) * tileSize, ty * tileSize)},
// {cv::Point2f((tx + 1) * tileSize, (ty + 1) * tileSize)},
// {cv::Point2f(tx * tileSize, (ty + 1) * tileSize)} };
// std::vector<cv::Point2f> tileImgPts;
// cv::perspectiveTransform(tileGlobalPts, tileImgPts, H);
// //cv::Mat tileTransform = cv::getPerspectiveTransform(tileImgPts, {
// // {0, 0}, {tileSize, 0}, {tileSize, tileSize}, {0, tileSize} });
// cv::Mat tileImg;
// //cv::warpPerspective(image_, tileImg, tileTransform, cv::Size(tileSize, tileSize));
// std::string tileDir = outputDir_ + "/" + std::to_string(zoom_) + "/" + std::to_string(tx);
// std::filesystem::create_directories(tileDir);
// std::string tilePath = tileDir + "/" + std::to_string(ty) + ".png";
// cv::imwrite(tilePath, tileImg);
// tileInfos.push_back({ tx, ty, globalToLatLon({cv::Point2f(tx * tileSize, ty * tileSize)}).first,
// globalToLatLon({cv::Point2f((tx + 1) * tileSize, (ty + 1) * tileSize)}).first,
// globalToLatLon({cv::Point2f((tx + 1) * tileSize, (ty + 1) * tileSize)}).second,
// globalToLatLon({cv::Point2f(tx * tileSize, ty * tileSize)}).second });
// }
//}
//generateKML(tileInfos);
}

@ -5,20 +5,31 @@
#include <string>
#include <vector>
// Geographic extent (and grid index) of one exported map tile; north/south
// and east/west are written into the KML LatLonBox by the exporters.
// NOTE(review): an identical TileInfo is also declared nested inside class
// googleTile below — confirm which declaration is canonical.
struct TileInfo
{
int x; // tile column index in the tile grid
int y; // tile row index in the tile grid
double north; // written to KML <north> (upper bound)
double south; // written to KML <south> (lower bound)
double east; // written to KML <east>
double west; // written to KML <west>
};
class googleTile
{
public:
struct TileInfo
{
int x;
int y;
double north;
double south;
double east;
double west;
};
googleTile(const cv::Mat& img, const std::vector<std::pair<double, double>>& geoCorners, int zoom, const std::string& outputDir);
googleTile();
~googleTile();
public:
// 输出kml png
void ExportGeoPng(cv::Mat _pan, TileInfo info, std::string dir, std::string name);
// 输出tile
void ExportTile(cv::Mat _pan, TileInfo info, std::string dir, std::string name);
public:
void generateTiles();
@ -26,12 +37,12 @@ private:
cv::Mat image_;
std::vector<std::pair<double, double>> geoCorners_; // 左上、右上、右下、左下
int zoom_;
std::string outputDir_;
cv::Point2f latLonToGlobal(double lat, double lon);
std::pair<double, double> globalToLatLon(const cv::Point2f& pt);
cv::Mat computeHomography();
void generateKML(const std::vector<TileInfo>& tiles);
};
#endif // GOOGLE_TILE_H

@ -303,16 +303,16 @@ std::string GetDynamicLibraryPath()
cv::Mat getRGBMatFromGDFrame(GD_VIDEO_FRAME_S frame, void* pArr)
// Convert a GD video frame into a 4-channel BGRA cv::Mat (deep copy).
// Supports Y8, NV12 and packed-RGB inputs; returns an empty Mat otherwise.
// BUGFIX 1: removed merged-diff residue — duplicated `dst` declarations
// (CV_8UC3 + CV_8UC4) and paired old/new cvtColor calls.
// BUGFIX 2: the packed-RGB branch converted into `dst` but then returned
// `mat.clone()` (the raw 3-channel input) — it must return the BGRA result.
cv::Mat getRGBAMatFromGDFrame(GD_VIDEO_FRAME_S frame, void* pArr)
{
    cv::Mat dst(frame.u32Height, frame.u32Width, CV_8UC4);

    // Y8 grayscale.
    if (frame.enPixelFormat == GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y8)
    {
        cv::Mat mat(frame.u32Height, frame.u32Width, CV_8UC1, pArr);
        cv::cvtColor(mat, dst, cv::COLOR_GRAY2BGRA);
        return dst.clone();
    }

    // NV12: buffer holds height*1.5 rows of Y then interleaved UV.
    if (frame.enPixelFormat == GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_NV12)
    {
        cv::Mat mat(frame.u32Height * 1.5, frame.u32Width, CV_8UC1, pArr);
        cv::cvtColor(mat, dst, cv::COLOR_YUV2BGRA_NV12);
        return dst.clone();
    }

    // Packed 3-channel RGB.
    if (frame.enPixelFormat == GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_RGB_PACKED)
    {
        cv::Mat mat(frame.u32Height, frame.u32Width, CV_8UC3, pArr);
        cv::cvtColor(mat, dst, cv::COLOR_BGR2BGRA);
        return dst.clone();
    }

    // Unsupported pixel format.
    return cv::Mat();
}

@ -9,7 +9,7 @@ using cv::Point2d;
using cv::Point2f;
// 将GD帧转为 cv::Mat RGB
cv::Mat getRGBMatFromGDFrame(GD_VIDEO_FRAME_S frame,void* pArr);
cv::Mat getRGBAMatFromGDFrame(GD_VIDEO_FRAME_S frame,void* pArr);
unsigned char FourPointInterpolation(unsigned char* pSrcImage, int iWidth, int iHeight, float x, float y);

Loading…
Cancel
Save