commit 3b8d55e36f (parent b94cc137c7), branch main
wangchongwu, 3 months ago

.gitignore
@@ -48,4 +48,5 @@ build
install
*.xraw
.vscode
.vs

@@ -26,5 +26,14 @@ add_executable(stitch main.cpp "3rdParty/my_ceres_vc17/include/ceres/version.h
target_link_libraries(stitch ${OpenCV_LIBS} ${LIB_STITCH})
add_executable(stitch_DJ "ProcDJ.cpp")
target_link_libraries(stitch_DJ ${OpenCV_LIBS} ${LIB_STITCH})
#
set(EXECUTABLE_OUTPUT_PATH ${CMAKE_SOURCE_DIR}/Bin)

ProcDJ.cpp (new file)
@@ -0,0 +1,336 @@
#include <iostream>
#include <fstream>
#include <sstream>
#include <vector>
#include <string>
#include <string.h>
#include <random>
#include "API_UnderStitch.h"
#include "API_FrontStitch.h"
#include "PlatformDefine.h"
#include "opencv2/opencv.hpp"

// One telemetry record parsed from a DJI .srt subtitle block
struct FrameData {
    int frameNumber;
    std::string timeRange;
    int frameCnt;
    std::string timestamp;
    float focal_len = 0;
    float dzoom_ratio = 0;
    double latitude = 0;
    double longitude = 0;
    float rel_alt = 0;
    float abs_alt = 0;
    float gb_yaw = 0;
    float gb_pitch = 0;
    float gb_roll = 0;
    // derived parameters
    float real_focal_mm = 0;
    float pixel_size_um = 0;
};

// Extract the float value that follows `key` (e.g. "focal_len: ") in `line`.
// The token ends at the next space; std::stof stops at the first non-numeric
// character, so a trailing ']' is harmless.
float extractValueAfter(const std::string& line, const std::string& key) {
    size_t pos = line.find(key);
    if (pos == std::string::npos) return 0;
    pos += key.length();
    size_t end = line.find(' ', pos);
    if (end == std::string::npos) end = line.length();
    return std::stof(line.substr(pos, end - pos));
}

// Same as extractValueAfter, but double precision (needed for lat/lon).
double extractDoubleAfter(const std::string& line, const std::string& key) {
    size_t pos = line.find(key);
    if (pos == std::string::npos) return 0;
    pos += key.length();
    size_t end = line.find(' ', pos);
    if (end == std::string::npos) end = line.length();
    return std::stod(line.substr(pos, end - pos));
}

// Derive the real focal length and pixel size (H20 series)
void inferCameraParams_H20(FrameData& frame, const std::string& filename)
{
    const float full_frame_diag = 43.3f; // 35 mm full-frame diagonal
    float sensor_diag = 0.0f;
    if (filename.find("_W") != std::string::npos) {      // wide camera
        sensor_diag = 9.4f;
        frame.pixel_size_um = 1.98f;
    }
    else if (filename.find("_Z") != std::string::npos) { // zoom camera
        sensor_diag = 7.7f;
        frame.pixel_size_um = 2.4f;
    }
    else if (filename.find("_T") != std::string::npos) { // thermal camera
        sensor_diag = 10.9f;
        frame.pixel_size_um = 12.0f;
    }
    else {                                               // default: wide
        sensor_diag = 9.4f;
        frame.pixel_size_um = 1.98f;
    }
    frame.real_focal_mm = frame.focal_len * (sensor_diag / full_frame_diag);
}
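
// Worked example (assuming the SRT focal_len is the 35 mm-equivalent value,
// which is what the 43.3 mm full-frame diagonal above implies): for the wide
// camera, focal_len = 24 gives real_focal_mm = 24 * (9.4 / 43.3) ≈ 5.2 mm.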

// Derive the real focal length and pixel size (H30 series: fixed, known values)
void inferCameraParams_H30(FrameData& frame, const std::string& filename)
{
    const float full_frame_diag = 43.3f; // kept for the commented-out fallback below
    float sensor_diag = 0.0f;
    if (filename.find("_W") != std::string::npos) {      // wide camera
        frame.real_focal_mm = 6.72f;
        frame.pixel_size_um = 2.4f;
    }
    else if (filename.find("_Z") != std::string::npos) {
        // zoom camera: focal length varies with zoom, not handled yet
    }
    else if (filename.find("_T") != std::string::npos) { // thermal camera
        frame.real_focal_mm = 24.0f;
        frame.pixel_size_um = 12.0f;
    }
    else {
        // unknown suffix: leave the zero defaults
    }
    //frame.real_focal_mm = frame.focal_len * (sensor_diag / full_frame_diag);
}

// Parse a DJI telemetry .srt file: one FrameData per 5-line subtitle block
std::vector<FrameData> parseDJISRT(const std::string& filename) {
    std::vector<FrameData> frames;
    std::ifstream file(filename);
    if (!file.is_open()) {
        std::cerr << "Error: Could not open file " << filename << std::endl;
        return frames;
    }
    std::string lines[5];
    while (std::getline(file, lines[0])) { // line 1: frame number
        if (lines[0].empty()) continue;    // tolerate stray blank lines
        // read the remaining four lines of the block
        for (int i = 1; i < 5; ++i) {
            if (!std::getline(file, lines[i])) break;
        }
        FrameData frame;
        // line 1: frame number
        frame.frameNumber = std::stoi(lines[0]);
        // line 2: time range (00:00:00,000 --> 00:00:00,033)
        frame.timeRange = lines[1];
        // line 3: FrameCnt and timestamp
        std::istringstream iss(lines[2]);
        std::string temp;
        iss >> temp >> frame.frameCnt >> frame.timestamp;
        // line 4: metadata (direct field extraction)
        const std::string& meta = lines[3];
        frame.focal_len = extractValueAfter(meta, "focal_len: ");
        frame.dzoom_ratio = extractValueAfter(meta, "dzoom_ratio: ");
        frame.latitude = extractDoubleAfter(meta, "latitude: ");
        frame.longitude = extractDoubleAfter(meta, "longitude: ");
        frame.rel_alt = extractValueAfter(meta, "rel_alt: ");
        frame.abs_alt = extractValueAfter(meta, "abs_alt: ");
        frame.gb_yaw = extractValueAfter(meta, "gb_yaw: ");
        frame.gb_pitch = extractValueAfter(meta, "gb_pitch: ");
        frame.gb_roll = extractValueAfter(meta, "gb_roll: ");
        inferCameraParams_H30(frame, filename);
        frames.push_back(frame);
        // line 5 is the empty separator, already consumed above
    }
    file.close();
    return frames;
}
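
// For reference, one telemetry block in the 5-line shape this parser expects
// (a hypothetical example: the exact layout varies across DJI models and
// firmware, and only the "key: value" tokens matter to the extractors above):
//
//   1
//   00:00:00,000 --> 00:00:00,033
//   FrameCnt: 1 2025-04-18T15:26:49
//   [focal_len: 24.00] [dzoom_ratio: 1.00] [latitude: 30.123456] [longitude: 120.654321] [rel_alt: 120.500] [abs_alt: 180.200] [gb_yaw: 12.3] [gb_pitch: -89.9] [gb_roll: 0.0]
//   (blank separator line)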

void ProcDJVideo(const std::vector<std::string>& videoPathList, const std::vector<std::string>& srtPathList)
{
    auto stitcher = API_UnderStitch::Create();
    stitcher->SetOutput("DJI", "D:/google_tiles");
    // downsample factor
    int nDownSample = 1;
    GD_VIDEO_FRAME_S frame = { 0 }; // input frame
    GD_VIDEO_FRAME_S pan = { 0 };   // output panorama (currently unused)
    cv::Mat mat_pan;                // panorama display
    // probe the first video for geometry and the first SRT block for the initial pose
    cv::VideoCapture cap(videoPathList[0]);
    double fps = cap.get(cv::CAP_PROP_FPS);
    int width = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_WIDTH));
    int height = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_HEIGHT));
    int frame_count = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_COUNT));
    std::vector<FrameData> srt_init = parseDJISRT(srtPathList[0]);
    if (srt_init.empty()) {
        std::cerr << "Error: no telemetry parsed from " << srtPathList[0] << std::endl;
        return;
    }
    FrameInfo info = { 0 };
    info.nFrmID = 0;
    info.camInfo.nFocus = srt_init[0].real_focal_mm;
    info.camInfo.fPixelSize = srt_init[0].pixel_size_um * nDownSample;
    info.craft.stAtt.fYaw = srt_init[0].gb_yaw;
    info.craft.stAtt.fPitch = srt_init[0].gb_pitch + 90; // gimbal pitch is about -90 at nadir
    info.craft.stAtt.fRoll = srt_init[0].gb_roll;
    info.craft.stPos.B = srt_init[0].latitude;
    info.craft.stPos.L = srt_init[0].longitude;
    info.craft.stPos.H = srt_init[0].abs_alt;
    info.nEvHeight = srt_init[0].abs_alt - srt_init[0].rel_alt; // ground elevation
    info.servoInfo.fServoAz = 0;
    info.servoInfo.fServoPt = 0;
    info.nWidth = width / nDownSample;
    info.nHeight = height / nDownSample;
    // initialize the stitcher with the first-frame pose
    stitcher->Init(info);
    cap.release();
    for (size_t vid = 0; vid < videoPathList.size(); vid++)
    {
        printf("Proc %s\n", videoPathList[vid].c_str());
        cv::VideoCapture cap(videoPathList[vid]);
        std::vector<FrameData> srt = parseDJISRT(srtPathList[vid]);
        int frmID = 0;
        while (true)
        {
            cv::Mat mat;
            cap >> mat; // read a new frame
            if (mat.empty()) {
                std::cout << "End of video\n";
                cap.release();
                break;
            }
            frmID++;
            if (frmID > (int)srt.size()) {
                std::cout << "SRT telemetry exhausted\n";
                cap.release();
                break;
            }
            const FrameData& fd = srt[frmID - 1]; // SRT frame numbers are 1-based
            cv::Mat mat_ds2;
            cv::resize(mat, mat_ds2, cv::Size(width / nDownSample, height / nDownSample));
            FrameInfo info = { 0 };
            info.nFrmID = frmID;
            info.camInfo.nFocus = fd.real_focal_mm;
            info.camInfo.fPixelSize = fd.pixel_size_um * nDownSample;
            info.craft.stAtt.fYaw = fd.gb_yaw;
            info.craft.stAtt.fPitch = fd.gb_pitch + 90;
            info.craft.stAtt.fRoll = fd.gb_roll;
            info.craft.stPos.B = fd.latitude;
            info.craft.stPos.L = fd.longitude;
            info.craft.stPos.H = fd.abs_alt;
            info.nEvHeight = fd.abs_alt;
            info.servoInfo.fServoAz = 0;
            info.servoInfo.fServoPt = 0;
            info.nWidth = mat_ds2.cols;
            info.nHeight = mat_ds2.rows;
            frame.enPixelFormat = GD_PIXEL_FORMAT_RGB_PACKED;
            frame.u32Width = mat_ds2.cols;
            frame.u32Height = mat_ds2.rows;
            frame.u64VirAddr[0] = mat_ds2.data;
            // stitch only every 100th frame
            if (frmID % 100 != 0)
            {
                continue;
            }
            std::cout << info.craft.stPos.B << " " << info.craft.stPos.L << " " << info.craft.stPos.H << " "
                << info.craft.stAtt.fYaw << " " << info.craft.stAtt.fPitch << " " << info.craft.stAtt.fRoll << " "
                << info.servoInfo.fServoAz << " " << info.servoInfo.fServoPt
                << std::endl;
            cv::TickMeter tm;
            tm.start();
            // fast stitch driven by the exterior-orientation (pose) data
            stitcher->Run(frame, info);
            tm.stop();
            mat_pan = stitcher->ExportPanMat();
            cv::Mat pan_rgb, pan_rgb_ds;
            cv::cvtColor(mat_pan, pan_rgb, cv::COLOR_BGRA2BGR);
            cv::resize(pan_rgb, pan_rgb_ds, cv::Size(pan_rgb.cols / 8, pan_rgb.rows / 8));
            cv::imshow("pan_rgb", pan_rgb_ds);
            cv::waitKey(1);
        }
        cv::TickMeter tm;
        tm.start();
        // final optimization and panorama/tile export for this video
        stitcher->OptAndOutCurrPan();
        tm.stop();
        std::cout << "time opt:" << tm.getTimeMilli() << std::endl;
    }
}

int main()
{
    std::vector<std::string> videoPathList;
    std::vector<std::string> srtPathList;
    std::string folder = "F:/DJI_202504181507_016/";
    videoPathList.push_back(folder + "DJI_20250418152649_0005_W.MP4");
    srtPathList.push_back(folder + "DJI_20250418152649_0005_W.srt");
    //videoPathList.push_back(folder + "DJI_20250418153043_0006_T.MP4");
    //srtPathList.push_back(folder + "DJI_20250418153043_0006_T.srt");
    //videoPathList.push_back(folder + "DJI_20250418152649_0005_T.MP4");
    //srtPathList.push_back(folder + "DJI_20250418152649_0005_T.srt");
    ProcDJVideo(videoPathList, srtPathList);
    return 0;
}

@@ -0,0 +1,9 @@
# extract the embedded DJI telemetry subtitle stream to .srt via ffmpeg-python
import ffmpeg

input_file = "I:/DJI_20250418144441_0002_W.MP4"
output_file = "I:/DJI_20250418144441_0002_W.srt"
ffmpeg.input(input_file).output(output_file).run()
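
For reference, ffmpeg-python simply builds and runs the equivalent command line, ffmpeg -i DJI_20250418144441_0002_W.MP4 DJI_20250418144441_0002_W.srt, which converts the first subtitle stream of the MP4 (where DJI stores the telemetry) into a plain .srt file; it needs the ffmpeg binary on PATH in addition to the pip-installed ffmpeg-python package.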

@@ -466,7 +466,7 @@ typedef struct tagAirCraftInfo
// camera info
typedef struct tagCamInfo
{
-int nFocus;                 // real-time focal length value
+float nFocus;               // real-time focal length value
float fPixelSize;           // pixel size (um)
GLB_VIDEO_TYPE unVideoType; // video source type
int dCamx;                  // principal point offset x0, relative to image center; set 0 if unknown

@@ -7,7 +7,7 @@ configure_file(${CMAKE_CURRENT_SOURCE_DIR}/src/Version.h.in ${CMAKE_CURRENT_SOUR
add_definitions(-D_SILENCE_EXPERIMENTAL_FILESYSTEM_DEPRECATION_WARNING)
-SET(ENABLE_CUDA TRUE)
+SET(ENABLE_CUDA FALSE)
IF(ENABLE_CUDA)

API_FrontStitch.h
@@ -38,6 +38,8 @@ public:
virtual GD_VIDEO_FRAME_S ExportPanAddr() = 0;
public:
static API_FrontStitch* Create(std::string cachedir = "./cache");
static void Destroy(API_FrontStitch* obj);

API_UnderStitch.h
@@ -41,6 +41,11 @@ public:
// get the panorama frame
virtual GD_VIDEO_FRAME_S ExportPanAddr() = 0;
// get the panorama as a cv::Mat
virtual cv::Mat ExportPanMat() = 0;
public:
static API_UnderStitch* Create(std::string cachedir = "./cache");
static void Destroy(API_UnderStitch* obj);
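
For orientation, the typical call sequence for this interface, as exercised by ProcDJ.cpp above (a minimal sketch; frame and info are filled per frame exactly as in ProcDJVideo, error handling omitted):

    auto stitcher = API_UnderStitch::Create("./cache"); // factory, cache dir
    stitcher->SetOutput("DJI", "D:/google_tiles");      // product name + output dir
    stitcher->Init(info);                               // first-frame pose sets up the panorama
    stitcher->Run(frame, info);                         // per-frame fast geo-stitch
    cv::Mat pan = stitcher->ExportPanMat();             // live panorama for display
    stitcher->OptAndOutCurrPan();                       // final optimization + tile export
    API_UnderStitch::Destroy(stitcher);                 // release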

@@ -3,6 +3,7 @@
#include "ceres/ceres.h"
#include "Arith_GeoSolver.h"
#include "math.h"
#include "Arith_Utils.h"
using namespace ceres;
@@ -173,7 +174,7 @@ void BA_Task::OptFrame(vector<KeyType> frameInd,cv::Mat H_map)
cv::KeyPoint keypoint_i = _FeaPtVec[i][mc.queryIdx];
cv::KeyPoint keypoint_j = _FeaPtVec[j][mc.trainIdx];
-ceres::LossFunction* loss_function = new ceres::HuberLoss(35);
+ceres::LossFunction* loss_function = new ceres::HuberLoss(30);
cv::Mat H1 = _origMatrix[i];
cv::Mat H2 = _origMatrix[j];
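
Context for the HuberLoss change: ceres::HuberLoss(a) leaves squared residuals s <= a^2 untouched and grows only as 2*a*sqrt(s) - a^2 beyond that, so lowering a from 35 to 30 starts down-weighting outlier feature matches at a slightly smaller reprojection error.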
@@ -275,7 +276,7 @@ int BA_Task::readFrameInfo(vector<KeyType> frameInd)
#ifdef SHOW_MATCH
// read the cached image
-cv::Mat img = cv::Mat(_t_frame_cache->_frame_info.u32Height, _t_frame_cache->_frame_info.u32Width, CV_8UC1, _t_frame_cache->_data);
+cv::Mat img = getRGBAMatFromGDFrame(_t_frame_cache->_frame_info, _t_frame_cache->_data);
_imgVec.push_back(img.clone());
#endif

@@ -67,7 +67,7 @@ void FeatureMatcher::matchFeatures_WithH(vector<KeyPoint> keypoints1, cv::Mat& d
auto warp_pt1 = warpPointWithH(H1, keypoints1[mc.queryIdx].pt);
auto warp_pt2 = warpPointWithH(H2, keypoints2[mc.trainIdx].pt);
-if (fabs(warp_pt1.x - warp_pt2.x) + fabs(warp_pt1.y - warp_pt2.y) < 800)
+if (fabs(warp_pt1.x - warp_pt2.x) + fabs(warp_pt1.y - warp_pt2.y) < 20)
{
matches.push_back(mc);
}
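
The tightened gate keeps a putative match only when the two keypoints, warped into a common plane by their per-frame homographies, agree to within 20 px in L1 (Manhattan) distance, down from a very permissive 800 px. As a standalone predicate (a hypothetical helper mirroring the logic above):

    // true if the H-warped keypoints agree to within tolPx (L1 distance)
    static bool isGeomConsistent(const cv::Point2f& a, const cv::Point2f& b,
                                 float tolPx = 20.0f)
    {
        return std::fabs(a.x - b.x) + std::fabs(a.y - b.y) < tolPx;
    }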

@@ -6,6 +6,18 @@
#include <opencv2/opencv.hpp>
#include <omp.h>
#ifdef _WIN32
#include <direct.h>
#include <io.h>
#define MKDIR(dir) _mkdir(dir)
#define ACCESS _access
#else
#include <sys/stat.h>
#include <unistd.h>
#define MKDIR(dir) mkdir(dir, 0777)
#define ACCESS access
#endif
using namespace std;
using namespace cv;
@@ -62,9 +74,9 @@ UPanInfo UnderStitch::InitMap(FrameInfo info)
// panorama initialization
UPanInfo panPara = { 0 };
-panPara.m_pan_width = MIN(info.nWidth * 5, 3000);  // panorama width
-panPara.m_pan_height = MIN(info.nWidth * 5, 3000); // panorama height
-panPara.scale = gsd;                               // scale: 1 m = ? pix
+panPara.m_pan_width = MIN(info.nWidth * 5, 8000);  // panorama width
+panPara.m_pan_height = MIN(info.nWidth * 5, 8000); // panorama height
+panPara.scale = 6;                                 // scale: 1 m = ? pix
// solve directly, with no translation
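
Sizing note: with the scale pinned at 6 px per meter, an 8000 px panorama side covers 8000 / 6 ≈ 1333 m of ground; the previous code derived the scale from the frame's computed GSD and capped the canvas at 3000 px.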
@@ -72,8 +84,8 @@ UPanInfo UnderStitch::InitMap(FrameInfo info)
auto cur = warpPointWithH(H_0, ct_geo);
// compute the shift to a fixed anchor point in the panorama; stitching starts there
-int planX = panPara.m_pan_width/2;
-int planY = panPara.m_pan_height/2;
+int planX = panPara.m_pan_width/2 + 500;
+int planY = panPara.m_pan_height - 500;
panPara.map_shiftX = planX - (cur.x); // shift X
@@ -130,6 +142,12 @@ void UnderStitch::SetOutput(std::string filename, std::string outdir)
_filename = filename;
_outDir = outdir;
_kmlPath = _outDir + "/" + _filename + ".kml";
// create the output directory if it does not exist
if (ACCESS(_outDir.c_str(), 0) != 0)
{
MKDIR(_outDir.c_str());
}
}
void UnderStitch::SetConfig(UPanConfig config)
@@ -140,16 +158,8 @@ void UnderStitch::SetConfig(UPanConfig config)
SINT32 UnderStitch::Run(GD_VIDEO_FRAME_S img, FrameInfo para)
{
// fast stitch
-auto dst = GeoStitch(img, para);
+GeoStitch(img, para);
-//imshow("dst", dst);
-//waitKey(1);
-if (_config.bOutGoogleTile)
-{
-    // real-time tile cropping output
-    CutTileRealTime();
-}
if (_config.bUseBA)
{
@@ -174,7 +184,7 @@ SINT32 UnderStitch::OptAndOutCurrPan()
if (_config.bUseBA)
{
    // optimize all frames
-    ProcessFrame(_recvFrameKey);
+    //ProcessFrame(_recvFrameKey);
}
// export all current tiles
@@ -250,7 +260,13 @@ SINT32 UnderStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
// compose with the panorama projection H
cv::Mat H = _H_pan * H1;
-cv::Mat Orth = getOrthImage(img, para);
+TileInfo info = { 0 };
+cv::Mat Orth_Image = getOrthImage(src, para, info);
+// export the single-frame Google tile
+_googleProduct.ExportGeoPng(Orth_Image, info, _outDir);
// project the current frame into the panorama with H
cv::Mat imagetmp(_panImage.size(), CV_8UC3, cv::Scalar(0, 0, 0));
@@ -258,8 +274,6 @@ SINT32 UnderStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
cv::Mat mask = cv::Mat::ones(src.size(), CV_8UC1);
mask.colRange(0, 200).setTo(0);
cv::Mat warped_mask;
cv::warpPerspective(mask, warped_mask, H, imagetmp.size(), cv::INTER_LINEAR);
@@ -268,12 +282,11 @@ SINT32 UnderStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
// update the covered-area mask
_panMask = _panMask | warped_mask;
// produce fast-stitch tiles
CutTileRealTime();
return 0;
}
// deprecated: replaced by exporting tiles from the per-frame orthoimage
void UnderStitch::CutTileRealTime()
{
// compute tile coverage on a 4x downsampled image
@@ -361,6 +374,7 @@ SINT32 UnderStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
cv::Mat src = getRGBAMatFromGDFrame(img, img.u64VirAddr[0]);
_FeaMatcher->extractFeatures(src, keypoints, descriptors);
size_t keyNum = MIN(keypoints.size(), FEA_NUM_MAX);
@@ -448,15 +462,13 @@ bool UnderStitch::ExportGoogleTile()
return 0;
}
-cv::Mat UnderStitch::getOrthImage(GD_VIDEO_FRAME_S img, FrameInfo para)
+cv::Mat UnderStitch::getOrthImage(cv::Mat src, FrameInfo para, TileInfo& tile)
{
cv::Mat H = _GeoSolver->findHomography(para);
-cv::Mat src = getRGBAMatFromGDFrame(img, img.u64VirAddr[0]);
cv::Rect2f roi = warpRectWithH_2Rect(H, src.size());
-float scale = img.u32Width / MAX(roi.width, roi.height);
+float scale = src.cols / MAX(roi.width, roi.height);
float shiftX = -roi.x * scale;
float shiftY = (roi.y + roi.height)* scale;
@@ -469,14 +481,32 @@ cv::Mat UnderStitch::getOrthImage(GD_VIDEO_FRAME_S img, FrameInfo para)
cv::Mat dst;
warpPerspective(src, dst, H_proj * H, cv::Size(roi2.width, roi2.height));
//imshow("Orth", dst);
//imshow("src", src);
//waitKey(0);
imwrite("Orth.jpg", dst);
imwrite("src.jpg", src);
// compute the frame's lat/lon extent
auto P1 = _GeoSolver->getBLHFromFrame(H, cv::Point2f(0, 0));
auto P2 = _GeoSolver->getBLHFromFrame(H, cv::Point2f(src.cols, 0));
-return cv::Mat();
auto P3 = _GeoSolver->getBLHFromFrame(H, cv::Point2f(src.cols, src.rows));
auto P4 = _GeoSolver->getBLHFromFrame(H, cv::Point2f(0, src.rows));
auto minL = min(min(min(P1.L, P2.L), P3.L), P4.L);
auto maxL = max(max(max(P1.L, P2.L), P3.L), P4.L);
auto minB = min(min(min(P1.B, P2.B), P3.B), P4.B);
auto maxB = max(max(max(P1.B, P2.B), P3.B), P4.B);
tile.boxLatLon.north = maxB;
tile.boxLatLon.south = minB;
tile.boxLatLon.west = minL;
tile.boxLatLon.east = maxL;
tile.tileName = _filename + std::to_string(para.nFrmID);
tile.href = _filename + std::to_string(para.nFrmID) + ".png";
return dst;
}
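
In the reworked getOrthImage, the tile's geographic extent comes directly from the four frame corners: each corner is projected to lat/lon with getBLHFromFrame, the min/max of latitude (B) and longitude (L) become the tile's south/north and west/east bounds, and tileName/href are derived from the output filename plus the frame ID.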
GD_VIDEO_FRAME_S UnderStitch::ExportPanAddr()
@@ -491,6 +521,11 @@ GD_VIDEO_FRAME_S UnderStitch::ExportPanAddr()
return pan_out;
}
cv::Mat UnderStitch::ExportPanMat()
{
return _panImage;
}
PointBLH UnderStitch::getBLHFromPan(cv::Point2f ptInPan, cv::Mat _H_panPara)
{
cv::Mat H_inv = _H_panPara.inv();

@@ -46,12 +46,16 @@ private:
bool ExportGoogleTile();
// orthorectification
-cv::Mat getOrthImage(GD_VIDEO_FRAME_S img, FrameInfo para);
+cv::Mat getOrthImage(cv::Mat src, FrameInfo para, TileInfo& tile);
public:
GD_VIDEO_FRAME_S ExportPanAddr();
cv::Mat ExportPanMat();
// panorama geo-information computation
PointBLH getBLHFromPan(cv::Point2f ptInPan, cv::Mat _H_panPara);
cv::Point2f getPanXYFromBLH(PointBLH ptInBLH, cv::Mat _H_panPara);

@@ -49,7 +49,7 @@ struct Match_Net
};
-#define IMG_CACHE_SIZE (1920 * 1080 * 2) // image cache size (bytes)
+#define IMG_CACHE_SIZE (3840 * 2160 * 3) // image cache size (bytes)
#define FEA_NUM_MAX 500  // max feature points per frame
#define FEA_DES_SIZE 128 // feature descriptor size
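
Sizing note: the cache slot grows from 1920 * 1080 * 2 = 4,147,200 bytes (about 4 MB) to 3840 * 2160 * 3 = 24,883,200 bytes (about 25 MB), enough to hold one 4K three-channel frame.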

Version.h
@@ -2,5 +2,5 @@
#pragma once
#include <string>
-std::string BUILD_TIME = "BUILD_TIME 2025_03_26-10.45.34";
+std::string BUILD_TIME = "BUILD_TIME 2025_04_29-08.56.38";
std::string VERSION = "BUILD_VERSION 1.0.1";

@@ -329,7 +329,7 @@ cv::Mat getRGBAMatFromGDFrame(GD_VIDEO_FRAME_S frame, void* pArr)
{
cv::Mat mat(frame.u32Height, frame.u32Width, CV_8UC3, pArr);
cv::cvtColor(mat, dst, cv::COLOR_BGR2BGRA);
-return mat.clone();
+return dst.clone();
}
return cv::Mat();
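
This one-line fix matters for every caller: the function converts the 3-channel input into a 4-channel BGRA dst, but previously returned the unconverted mat, so consumers expecting four channels (e.g. the BGRA-to-BGR conversion applied to the exported panorama in ProcDJ.cpp) were handed the wrong format.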
