wangchongwu 5 months ago
parent c5abb82899
commit f3f05bebed

@ -214,15 +214,9 @@ void ProcessIR()
imshow("pan_opt", mat_pan);
waitKey(0);
// Export geo-referenced PNG
stitcher->ExportGeoPng("D:/google_tiles", "IR_pan");
// Export Google tiles
stitcher->ExportGoogleTile("D:/google_tiles", "IR_tiles");
}
void ProcessVL()
void ProcessVL(string filePath, string outname)
{
auto stitcher = API_VideoStitch::Create(IMAGE_WIDTH_VL, IMAGE_HEIGHT_VL);
@ -231,14 +225,15 @@ void ProcessVL()
cv::Mat mat_pan;// panorama display
FILE* file = fopen("F:/S729/20241219152643_1.video", "rb");
FILE* file = fopen(filePath.c_str(), "rb");
GaussianRandom gr(0.0, 1, 0.0);
for (size_t i = 0; i < 600; i++)
int i = 0;
while (!feof(file))
{
SINT32 nVLFrameSize = 1.5 * IMAGE_WIDTH_VL * IMAGE_HEIGHT_VL + IMAGE_WIDTH_VL * PARA_IR_LINE;
fread(pFrameVL, 1, nVLFrameSize, file);
@ -296,8 +291,9 @@ void ProcessVL()
}
else
{
if (i % 20 != 0)
if (i % 50 != 0)
{
i = i + 1;
continue;
}
std::cout << info.craft.stPos.B << " " << info.craft.stPos.L << " " << info.craft.stPos.H << " "
@ -319,9 +315,14 @@ void ProcessVL()
}
imshow("pan_opt", mat_pan);
cv::Mat res;
cv::resize(mat_pan, res, cv::Size(pan.u32Width / 4, pan.u32Height / 4));
imshow("pan_opt", res);
waitKey(1);
i = i + 1;
}
cv::TickMeter tm;
tm.start();
// Process frames
@ -331,20 +332,28 @@ void ProcessVL()
cout << "time opt:" << tm.getTimeMilli() << endl;
imshow("pan_opt", mat_pan);
waitKey(0);
cv::Mat res;
cv::resize(mat_pan, res, cv::Size(pan.u32Width / 4, pan.u32Height / 4));
imshow("pan_opt", res);
waitKey(1);
// Export geo-referenced PNG
stitcher->ExportGeoPng("D:/google_tiles", "VL_pan");
//stitcher->ExportGeoPng("D:/google_tiles", outname);
// Export Google tiles
stitcher->ExportGoogleTile("D:/google_tiles", "VL_tiles");
stitcher->ExportGoogleTile("D:/google_tiles", outname);
}
int main(int, char**)
{
//ProcessIR();
ProcessVL();
ProcessVL("H:/vl_1920_1080_para40_y8/22.video","22");
ProcessVL("H:/vl_1920_1080_para40_y8/20241219152643_1.video", "20241219152643_1");
ProcessVL("H:/vl_1920_1080_para40_y8/20241219152917_4.video", "20241219152917_4");
ProcessVL("H:/vl_1920_1080_para40_y8/20241219153515_10.video", "20241219153515_10");
//
//ProcessVL("H:/vl_1920_1080_para40_y8/55.video", "5");
}

@ -13,10 +13,6 @@ endif()
IF(WIN32)
set(OpenCV_DIR "D:/opencv410_vc17")
set(CMAKE_TOOLCHAIN_FILE "D:/wangchongwu_gitea_2023/vcpkg-2025.01.13/scripts/buildsystems/vcpkg.cmake")
# set(CMAKE_TOOLCHAIN_FILE "D:/wangchongwu_gitea_2023/vcpkg-2025.01.13/scripts/buildsystems/vcpkg.cmake")
# set(Ceres_DIR "D:/wangchongwu_gitea_2023/vcpkg-2025.01.13/packages/ceres_x64-windows/share/ceres")
# set(Eigen3_DIR "D:/wangchongwu_gitea_2023/vcpkg-2025.01.13/packages/eigen3_x64-windows/share/eigen3")
# set(glog_DIR "D:/wangchongwu_gitea_2023/vcpkg-2025.01.13/packages/glog_x64-windows/share/glog")
ELSE(WIN32)
ENDIF(WIN32)

@ -1,15 +1,15 @@
#include "Arith_FusionMap.h"
#include "Arith_BlendMap.h"
#include "Arith_Utils.h"
MapFusion::MapFusion(FileCache<FrameCache>* cache)
MapBlend::MapBlend(FileCache<FrameCache>* cache)
{
_cache = cache;
}
MapFusion::~MapFusion()
MapBlend::~MapBlend()
{
}
void MapFusion::DirectMap(vector<KeyType> frameInd, cv::Mat mapH, cv::Mat pan, cv::Mat pan_mask)
void MapBlend::DirectMap(vector<KeyType> frameInd, cv::Mat mapH, cv::Mat pan, cv::Mat pan_mask)
{
pan.setTo(0);
@ -55,6 +55,6 @@ void MapFusion::DirectMap(vector<KeyType> frameInd, cv::Mat mapH, cv::Mat pan, c
}
void MapFusion::FusionMap(vector<KeyType> frameInd, cv::Mat mapH, cv::Mat pan, cv::Mat pan_mask)
void MapBlend::BlendMap(vector<KeyType> frameInd, cv::Mat mapH, cv::Mat pan, cv::Mat pan_mask)
{
}

@ -1,5 +1,5 @@
/********* Copyright (C) 2025 Wuhan Guide Infrared Co., Ltd. ***************************************
* Arith_FusionMap.h
* Arith_BlendMap.h
*
*
*
@ -10,22 +10,22 @@
*****************************************************************************************/
#pragma once
#ifndef FUSION_MAP_H
#define FUSION_MAP_H
#ifndef Blend_MAP_H
#define Blend_MAP_H
#include "FileCache.h"
class MapFusion
class MapBlend
{
public:
MapFusion(FileCache<FrameCache>* cache);
~MapFusion();
MapBlend(FileCache<FrameCache>* cache);
~MapBlend();
public:
void DirectMap(vector<KeyType> frameInd, cv::Mat mapH, cv::Mat pan, cv::Mat pan_mask); // direct projection
void FusionMap(vector<KeyType> frameInd, cv::Mat mapH, cv::Mat pan, cv::Mat pan_mask);// blended projection
void BlendMap(vector<KeyType> frameInd, cv::Mat mapH, cv::Mat pan, cv::Mat pan_mask);// blended projection
private:

@ -27,11 +27,11 @@ VideoStitch::VideoStitch(SINT32 nWidth, SINT32 nHeight)
_FeaMatcher = new FeatureMatcher(DetectorType::SIFT, MatcherType::FLANN);
_cache = new FileCache<FrameCache>(100,"./cache");
_cache = new FileCache<FrameCache>(50,"./cache");
_BATask = new BA_Task(_cache);
_fusionTask = new MapFusion(_cache);
_BlendTask = new MapBlend(_cache);
_panPara = { 0 };
@ -51,21 +51,28 @@ PanInfo VideoStitch::InitMap(FrameInfo info)
// Set the stitching origin
_GeoSolver->SetOriginPoint(info);
cv::Point2f ct_geo = warpPointWithH(_GeoSolver->findHomography(info), cv::Point2f(info.nWidth / 2, info.nHeight / 2));
cv::Mat H0 = _GeoSolver->findHomography(info);
cv::Point2f ct_geo = warpPointWithH(H0, cv::Point2f(info.nWidth / 2, info.nHeight / 2));// geographic coordinate of the field-of-view center
// Native spatial resolution at the image center, P/m
double gsd = (info.nEvHeight * info.camInfo.fPixelSize) / (info.camInfo.nFocus * 1000);
// Initialize the panorama
PanInfo panPara = { 0 };
panPara.m_pan_width = 1500;// panorama width
panPara.m_pan_height = 1500;// panorama height
panPara.scale = 0.6;// scale: 1 m = ? pix
panPara.m_pan_width = MIN(info.nWidth * 5,5000);// panorama width
panPara.m_pan_height = MIN(info.nWidth * 5,5000);// panorama height
panPara.scale = gsd / 2 ;// scale: 1 m = ? pix
int zoom = _googleProduct.getZoomLevel(panPara.scale);
// Solve directly, without any shift applied
cv::Mat H_0 = getAffineFromGeo2Pan(panPara);
auto cur = warpPointWithH(H_0, ct_geo);
// Compute the shift to a fixed point of the panorama; stitching starts from there
int planX = 500;
int planY = 500;
int planX = panPara.m_pan_width/2;
int planY = panPara.m_pan_height/2;
panPara.map_shiftX = planX - (cur.x);// shift X
panPara.map_shiftY = planY - (cur.y);// shift Y
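The gsd value above is the usual pinhole ground-sample-distance relation. A minimal standalone sketch, assuming nEvHeight is given in metres, fPixelSize in micrometres and nFocus in millimetres (the factor 1000 reconciles the µm/mm units; names here are illustrative, not part of the project):

// Illustrative only: the GSD term used by InitMap(), with assumed units.
double groundSampleDistance(double evHeightM, double pixelSizeUm, double focusMm)
{
    // GSD [m/pixel] = flight height * pixel pitch / focal length
    return (evHeightM * pixelSizeUm) / (focusMm * 1000.0);
}
// Example: 1000 m altitude, 15 µm pixels, 50 mm lens -> (1000 * 15) / (50 * 1000) = 0.3 m/pixel.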
@ -88,8 +95,6 @@ PanInfo VideoStitch::Init(FrameInfo info)
_panMask = cv::Mat::zeros(_panPara.m_pan_height, _panPara.m_pan_width, CV_8UC1);
//auto p1 = getBLHFromPan(cv::Point2f(7420, 2800), _H_pan);
//auto p2 = getPanXYFromBLH(p1, _H_pan);
return _panPara;
}
@ -166,7 +171,6 @@ BYTE8 VideoStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
cv::Mat warped_mask;
cv::warpPerspective(mask, warped_mask, H, imagetmp.size(), cv::INTER_LINEAR);
imagetmp.copyTo(_panImage, warped_mask);
return 0;
}
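For reference, GeoStitch pastes each warped frame into the panorama through its warped mask; this is the standard OpenCV warp-and-mask pattern. A minimal self-contained sketch (the helper name is hypothetical):

#include <opencv2/opencv.hpp>

// Warp a frame into the panorama with homography H and copy only the pixels
// covered by the warped mask, leaving the rest of the panorama untouched.
void pasteFrameIntoPan(const cv::Mat& frame, const cv::Mat& H, cv::Mat& pan)
{
    cv::Mat warped, warpedMask;
    cv::Mat mask(frame.size(), CV_8UC1, cv::Scalar(255));

    cv::warpPerspective(frame, warped, H, pan.size(), cv::INTER_LINEAR);
    cv::warpPerspective(mask, warpedMask, H, pan.size(), cv::INTER_NEAREST);

    warped.copyTo(pan, warpedMask);
}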
@ -240,15 +244,13 @@ SINT32 VideoStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
SINT32 VideoStitch::ProcessFrame()
{
vector<KeyType> vec;
vector<KeyType> vec_opt;
// Optimize all frames
//_BATask->OptFrame(_recvFrameKey, _H_pan);
_BATask->OptFrame(_recvFrameKey, _H_pan);
// Reproject all frames onto the panorama
_fusionTask->DirectMap(_recvFrameKey, _H_pan, _panImage,_panMask);
_BlendTask->DirectMap(_recvFrameKey, _H_pan, _panImage,_panMask);
return 0;
@ -278,7 +280,7 @@ bool VideoStitch::ExportGeoPng(std::string dir, std::string name)
info.box.west = minL;
info.box.east = maxL;
info.tileName = name;
info.href = name + ".png";
info.href = name + "_pan.png";
// Export the Google map product
_googleProduct.ExportGeoPng(_panImage, info, dir);
@ -309,9 +311,9 @@ bool VideoStitch::ExportGoogleTile(std::string dir, std::string name)
info.box.south = minB;
info.box.west = minL;
info.box.east = maxL;
info.ind.z = 16;
info.ind.z = _googleProduct.getZoomLevel(_panPara.scale);
_googleProduct.ExportTile(_panImage, info, dir);
_googleProduct.ExportTile(_panImage, info, dir, name);
return 0;
}

@ -6,7 +6,7 @@
#include "Arith_BATask.h"
#include "FileCache.h"
#include "GoogleTile.h"
#include "Arith_FusionMap.h"
#include "Arith_BlendMap.h"
class VideoStitch:public API_VideoStitch
{
@ -16,6 +16,7 @@ public:
PanInfo Init(FrameInfo info);
BYTE8 GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para);
// Receive a frame
@ -39,10 +40,12 @@ public:
PointBLH getBLHFromPan(cv::Point2f ptInPan, cv::Mat _H_panPara);
cv::Point2f getPanXYFromBLH(PointBLH ptInBLH, cv::Mat _H_panPara);
private:
cv::Mat getAffineFromGeo2Pan(PanInfo _pan);// compute the panorama projection from the geographic frame to the pan map, in one place
PanInfo InitMap(FrameInfo info);
private:
PanInfo InitMap(FrameInfo info);
vector<KeyType> _recvFrameKey;// master list of received frames
@ -59,15 +62,13 @@ private:
BA_Task* _BATask;//BA
MapFusion* _fusionTask;// blending module
MapBlend* _BlendTask;// blending module
PanInfo _panPara;// panorama configuration
cv::Mat _H_pan;// panorama projection matrix: from the geographic frame to the pan map
cv::Mat getAffineFromGeo2Pan(PanInfo _pan);// compute the panorama projection from the geographic frame to the pan map, in one place
cv::Mat _panImage;

@ -40,7 +40,7 @@ void googleTile::ExportGeoPng(cv::Mat _pan, TileInfo info, std::string dir)
}
void googleTile::ExportTile(cv::Mat _pan, TileInfo panInfo, std::string dir)
void googleTile::ExportTile(cv::Mat _pan, TileInfo panInfo, std::string dir, std::string fileString)
{
int zoom = panInfo.ind.z;
TileBox panBox = panInfo.box;
@ -96,6 +96,12 @@ void googleTile::ExportTile(cv::Mat _pan, TileInfo panInfo, std::string dir)
cv::Rect tileRect(x1, y1, x2 - x1, y2 - y1);
cv::Mat tile = _pan(tileRect);
// Standard web tile size
if (tile.cols != 256 || tile.rows != 256)
{
cv::resize(tile, tile, cv::Size(256, 256));
}
// Build the file name
std::string tileDir = dir + "/" + std::to_string(zoom);
std::string fileName = tileDir + "/" + std::to_string(j) + "_" + std::to_string(i) + ".png";
@ -122,7 +128,7 @@ void googleTile::ExportTile(cv::Mat _pan, TileInfo panInfo, std::string dir)
// Export KML
WriteKml(taskTiles,dir + "/" + "tile" + std::to_string(zoom) + ".kml");
WriteKml(taskTiles,dir + "/" + fileString + "_z" + std::to_string(zoom) + ".kml");
}
@ -130,11 +136,11 @@ int googleTile::getZoomLevel(float mp)
{
int nLev = 0;
// Ground coverage of a single tile
int meters_cover = TILE_SIZE * mp;
int meters_cover = TILE_SIZE * 1.0/mp;
int earthLen = 40075017;
nLev = std::round(std::log2(earthLen / meters_cover));
nLev = std::floor(std::log2(earthLen / meters_cover));
return nLev;
}
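For reference, the revised getZoomLevel() divides TILE_SIZE by mp, which matches mp being the panorama scale in pixels per metre (the "1 m = ? pix" value InitMap passes in): one 256-pixel tile then covers TILE_SIZE / mp metres of ground, and the zoom follows from the ~40,075,017 m equatorial circumference. A standalone sketch under that assumption (names illustrative):

#include <cmath>

// Illustrative zoom selection; mp assumed to be pixels per metre.
int pickZoomLevel(double mp)
{
    const double TILE_SIZE = 256.0;      // standard web tile edge, pixels
    const double earthLen  = 40075017.0; // equatorial circumference, metres
    double metersCover = TILE_SIZE / mp; // ground metres spanned by one tile
    return (int)std::floor(std::log2(earthLen / metersCover));
}
// Example: mp = 0.5 px/m (2 m per pixel) -> one tile covers 512 m,
// log2(40075017 / 512) ≈ 16.3, so zoom 16 is selected.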

@ -44,7 +44,7 @@ public:
void ExportGeoPng(cv::Mat _pan, TileInfo info, std::string dir);
// Export tiles
void ExportTile(cv::Mat _pan, TileInfo info, std::string dir);
void ExportTile(cv::Mat _pan, TileInfo info, std::string dir, std::string fileString);
// Compute the closest tile zoom level for a given resolution; mp in metres per pixel
int getZoomLevel(float mp);
