H-matrix analytical formula update

main
wangchongwu 2 months ago
parent 9f3474bb60
commit d9491b4f8f

@@ -1,27 +0,0 @@
{
"configurations": [
{
"name": "x64-Debug",
"generator": "Ninja",
"configurationType": "Debug",
"inheritEnvironments": [ "msvc_x64_x64" ],
"buildRoot": "${projectDir}\\out\\build\\${name}",
"installRoot": "${projectDir}\\out\\install\\${name}",
"cmakeCommandArgs": "",
"buildCommandArgs": "",
"ctestCommandArgs": ""
},
{
"name": "x64-Release",
"generator": "Ninja",
"configurationType": "RelWithDebInfo",
"buildRoot": "${projectDir}\\out\\build\\${name}",
"installRoot": "${projectDir}\\out\\install\\${name}",
"cmakeCommandArgs": "",
"buildCommandArgs": "",
"ctestCommandArgs": "",
"inheritEnvironments": [ "msvc_x64_x64" ],
"variables": []
}
]
}

@@ -68,7 +68,49 @@ private:
};
// H-matrix Frobenius-norm regularization residual: constrains the deviation of the whole H matrix from its initial value
// Uses the Frobenius norm to measure the overall change of H
struct HomographyRegularizationResidual
{
HomographyRegularizationResidual(const double* initial_h, double weight = 1.0)
: initial_h_(initial_h), weight_(weight)
{
}
template <typename T>
bool operator()(const T* const h, T* residual) const
{
// Compute the Frobenius-norm difference between H and the initial H matrix
// Frobenius norm: ||H - H0||_F = sqrt(sum((h_i - h_i_0)^2))
// The last element of H is fixed at 1.0, so only the first 8 parameters are compared
T diff_sum = T(0);
for (int i = 0; i < 8; ++i)
{
T diff = h[i] - T(initial_h_[i]);
diff_sum += diff * diff;
}
// Add a small epsilon to avoid numerical instability (ensure diff_sum >= epsilon)
const T epsilon = T(1e-10);
diff_sum = diff_sum < epsilon ? epsilon : diff_sum;
// Square root of the sum gives the Frobenius norm
T frobenius_norm = sqrt(diff_sum);
// Return the weighted Frobenius norm as the residual
// Guard against NaN and Inf
T weighted_norm = T(weight_) * frobenius_norm;
// Use a conditional expression to stop NaN/Inf propagation
residual[0] = (weighted_norm != weighted_norm || weighted_norm > T(1e10)) ? T(0) : weighted_norm;
return true;
}
private:
const double* initial_h_; // initial H-matrix parameters (8 values)
double weight_; // regularization weight
};
// SE3 group optimization
// SE3 group optimization residual: triangulate from the left image, project into the right image, and minimize the relative reprojection error
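In equation form, a sketch of what the HomographyRegularizationResidual functor above computes: the weighted Frobenius distance over the eight free parameters of H (the ninth entry is pinned to 1), with the epsilon clamp guarding the square root:

$$ r \;=\; w \cdot \sqrt{\max\Bigl(\sum_{i=0}^{7}\bigl(h_i - h_i^{0}\bigr)^{2},\ \varepsilon\Bigr)}, \qquad \varepsilon = 10^{-10} $$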

@@ -158,6 +158,27 @@ void BA_Task::OptFrame(vector<KeyType> frameInd,cv::Mat H_map, std::unordered_ma
}
}
#ifdef OPT_H
// 添加正则化残差块使用Frobenius范数约束整个H矩阵
// 正则化权重:可以根据需要调整,值越大约束越强
// 初始设置较强的约束,后续可以根据效果慢慢放松
double h_reg_weight = 20.0; // H矩阵Frobenius范数正则化权重加强约束
// 为每个帧添加H矩阵正则化
for (int i = 0; i < _currMatrix.size(); i++)
{
// Fetch the 8 parameters of the initial H matrix
double* initial_h = (double*)_origMatrix[i].data;
ceres::CostFunction* h_reg_cost =
new ceres::AutoDiffCostFunction<HomographyRegularizationResidual, 1, 8>(
new HomographyRegularizationResidual(initial_h, h_reg_weight));
// Use HuberLoss to make the regularization term more robust to outliers
// The threshold is tunable: smaller is stricter, larger is looser
ceres::LossFunction* reg_loss = new ceres::HuberLoss(0.2);
problemH.AddResidualBlock(h_reg_cost, reg_loss, h_list[i]);
}
#endif
#ifdef OPT_SE3
// Add regularization residual blocks to keep all parameters from drifting too far from their initial values
// Regularization weight: tune as needed; larger values mean a stronger constraint
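For reference on the HuberLoss(0.2) used in the OPT_H block above: this is standard Ceres behavior, stated here for context. The loss is applied to the squared residual s,

$$ \rho(s) = \begin{cases} s, & s \le a^{2} \\ 2a\sqrt{s} - a^{2}, & s > a^{2} \end{cases} \qquad (a = 0.2) $$

so regularization residuals beyond a = 0.2 grow linearly rather than quadratically, which keeps a few badly conditioned frames from dominating the problem.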

@@ -3,9 +3,8 @@
#include "Arith_Utils.h"
#include "Arith_CoordModule.h"
#include "Arith_SysStruct.h"
-#define GLOG_USE_GLOG_EXPORT
-#include "ceres/ceres.h"
-#include "ceres/rotation.h"
+//#define GLOG_USE_GLOG_EXPORT
using namespace cv;
GeoSolver::GeoSolver()
@@ -82,36 +81,36 @@ cv::Point2f GeoSolver::getGeoFromBLH(PointBLH ptPos)
return ptInGeo;
}
-cv::Point2f GeoSolver::Trans_uv2Geo(cv::Point2f pos_frame, TForm form)
-{
-Mat point = (Mat_<double>(3, 1) << pos_frame.x, pos_frame.y, 1);
-Mat result = form.R * point;
-// to the local geographic frame
-double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
-double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
-// translate to the origin geographic frame
-warpedX += form.T.at<double>(0, 0);
-warpedY += form.T.at<double>(1, 0);
-return cv::Point2f(warpedX, warpedY);
-}
-cv::Point2f GeoSolver::Trans_Geo2uv(cv::Point2f pos_geo, TForm form_inv)
-{
-// first translate to the current camera position
-cv::Point2f pos_cam = pos_geo;
-pos_cam.x = pos_geo.x + form_inv.T.at<double>(0, 0);
-pos_cam.y = pos_geo.y + form_inv.T.at<double>(1, 0);
-Mat point = (Mat_<double>(3, 1) << pos_cam.x, pos_cam.y, 1);
-Mat result = form_inv.R * point;
-// to image space
-double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
-double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
-return cv::Point2f(warpedX, warpedY);
-}
+// cv::Point2f GeoSolver::Trans_uv2Geo(cv::Point2f pos_frame, TForm form)
+// {
+// Mat point = (Mat_<double>(3, 1) << pos_frame.x, pos_frame.y, 1);
+// Mat result = form.R * point;
+// // to the local geographic frame
+// double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
+// double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
+// // translate to the origin geographic frame
+// warpedX += form.T.at<double>(0, 0);
+// warpedY += form.T.at<double>(1, 0);
+// return cv::Point2f(warpedX, warpedY);
+// }
+// cv::Point2f GeoSolver::Trans_Geo2uv(cv::Point2f pos_geo, TForm form_inv)
+// {
+// // first translate to the current camera position
+// cv::Point2f pos_cam = pos_geo;
+// pos_cam.x = pos_geo.x + form_inv.T.at<double>(0, 0);
+// pos_cam.y = pos_geo.y + form_inv.T.at<double>(1, 0);
+// Mat point = (Mat_<double>(3, 1) << pos_cam.x, pos_cam.y, 1);
+// Mat result = form_inv.R * point;
+// // to image space
+// double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
+// double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
+// return cv::Point2f(warpedX, warpedY);
+// }
Mat GeoSolver::Mat_TransENGMove(FrameInfo info)
@@ -180,6 +179,8 @@ cv::Mat GeoSolver::Mat_TransENG2Cam(FrameInfo info)
);
float alaph = info.servoInfo.fServoPt;
Mat M_alaph = (Mat_<double>(3, 3) << 1, 0, 0,
@@ -223,21 +224,43 @@ cv::Mat TransRtKToH(RtK rtk, double Depth)
0, 0, Depth
);
-cv::Mat R_p2g = (rtk.K * rtk.R * M_het).inv();
-//cv::Mat R_p2g = R_g2p.inv();
-cv::Mat t_MAT = rtk.T;
-double R[9] = { 0 };
-double T[3] = { 0 };
-memcpy(R, R_p2g.data, sizeof(double) * 9);
-memcpy(T, t_MAT.data, sizeof(double) * 3);
-Mat H = (Mat_<double>(3, 3) << (R[0] + T[0] * R[6]), (R[1] + T[0] * R[7]), (R[2] + T[0] * R[8]),
-(R[3] + T[1] * R[6]), (R[4] + T[1] * R[7]), (R[5] + T[1] * R[8]),
-R[6], R[7], R[8]);
-// normalize
+// cv::Mat R_p2g = (rtk.K * rtk.R * M_het).inv();
+// //cv::Mat R_p2g = R_g2p.inv();
+// cv::Mat t_MAT = rtk.T;
+// double R[9] = { 0 };
+// double T[3] = { 0 };
+// memcpy(R, R_p2g.data, sizeof(double) * 9);
+// memcpy(T, t_MAT.data, sizeof(double) * 3);
+// Mat H = (Mat_<double>(3, 3) << (R[0] + T[0] * R[6]), (R[1] + T[0] * R[7]), (R[2] + T[0] * R[8]),
+// (R[3] + T[1] * R[6]), (R[4] + T[1] * R[7]), (R[5] + T[1] * R[8]),
+// R[6], R[7], R[8]);
+// // normalize
+// H = H / H.at<double>(2, 2);
+// Ground-plane normal n_w: the up direction (+Z) in the geographic frame
+cv::Mat n_w = (cv::Mat_<double>(3, 1) << 0, 0, 1);
+// Distance from the plane to the camera (effective height)
+double d = Depth;
+cv::Mat I = cv::Mat::eye(3, 3, CV_64F);
+// Compute (T_geo * n_w^T) / d
+cv::Mat TnT = (rtk.R * rtk.T * n_w.t()) / d;
+// Homography from geographic coordinates to pixel coordinates
+cv::Mat H_geo_to_pixel = rtk.K * (rtk.R - TnT) * M_het;
+cv::Mat H = H_geo_to_pixel.inv();
+// Normalize (so H[2,2] = 1)
H = H / H.at<double>(2, 2);
return H.clone();
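For context, the new code above implements the standard plane-induced homography. Writing R = rtk.R (geographic-to-camera rotation), T = rtk.T (geographic translation), n_w for the upward plane normal, d for the effective height, and M_het for the depth-scaling matrix, the mapping and its inverse are:

$$ H_{\text{geo}\to\text{pix}} \;=\; K\Bigl(R - \frac{(R\,T)\,n_w^{\top}}{d}\Bigr)M_{\text{het}}, \qquad H = H_{\text{geo}\to\text{pix}}^{-1}, \quad H \leftarrow H / H_{2,2} $$

The rank-one term uses R·T because the translation must be expressed in the camera frame before it is combined with the rotation.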
@@ -264,83 +287,69 @@ cv::Mat Transe3ToH(double* se3, cv::Mat K, double Depth)
}
-// Sanity-check the H computation
-cv::Mat GeoSolver::findHomography2(FrameInfo info)
-{
-TForm tf_p2g;
-tf_p2g.R = Mat_TransENG2uv(info).inv();
-tf_p2g.T = Mat_TransENGMove(info);
-std::vector<cv::Point2f> srcPoints;
-srcPoints.push_back(cv::Point2f(0, 0));
-srcPoints.push_back(cv::Point2f(1000, 0));
-srcPoints.push_back(cv::Point2f(1000, 1000));
-srcPoints.push_back(cv::Point2f(0, 1000));
-// Corresponding points: from image space to the panorama
-cv::Point2f leftTop_map = Trans_uv2Geo(srcPoints[0], tf_p2g);
-cv::Point2f rightTop_map = Trans_uv2Geo(srcPoints[1], tf_p2g);
-cv::Point2f rightBottom_map = Trans_uv2Geo(srcPoints[2], tf_p2g);
-cv::Point2f leftBottom_map = Trans_uv2Geo(srcPoints[3], tf_p2g);
-// Four corner coordinates in the target (panorama) image
-std::vector<cv::Point2f> dstPoints;
-dstPoints.push_back(leftTop_map); // top-left
-dstPoints.push_back(rightTop_map); // top-right
-dstPoints.push_back(rightBottom_map); // bottom-right
-dstPoints.push_back(leftBottom_map); // bottom-left
-// Compute the homography H
-cv::Mat H = cv::findHomography(srcPoints, dstPoints);
-return H;
-}
// Derive H from R and t
-cv::Mat GeoSolver::findHomography(FrameInfo info)
-{
-TForm tf_p2g;
-tf_p2g.R = Mat_TransENG2uv(info).inv();
-tf_p2g.T = Mat_TransENGMove(info);
-double R[9] = { 0 };
-double T[3] = { 0 };
-memcpy(R, tf_p2g.R.data, sizeof(double) * 9);
-memcpy(T, tf_p2g.T.data, sizeof(double) * 3);
-Mat H = (Mat_<double>(3, 3) << (R[0] + T[0] * R[6]), (R[1] + T[0] * R[7]), (R[2] + T[0] * R[8]),
-(R[3] + T[1] * R[6]), (R[4] + T[1] * R[7]), (R[5] + T[1] * R[8]),
-R[6], R[7], R[8]);
-// normalize
-H = H / H.at<double>(2, 2);
-return H;
-}
+// cv::Mat GeoSolver::findHomography(FrameInfo info)
+// {
+// cv::Mat Rmat = Mat_TransENG2uv(info).inv();
+// cv::Mat Tmat = Mat_TransENGMove(info);
+// double R[9] = { 0 };
+// double T[3] = { 0 };
+// memcpy(R, Rmat.data, sizeof(double) * 9);
+// memcpy(T, Tmat.data, sizeof(double) * 3);
+// Mat H = (Mat_<double>(3, 3) << (R[0] + T[0] * R[6]), (R[1] + T[0] * R[7]), (R[2] + T[0] * R[8]),
+// (R[3] + T[1] * R[6]), (R[4] + T[1] * R[7]), (R[5] + T[1] * R[8]),
+// R[6], R[7], R[8]);
+// // normalize
+// H = H / H.at<double>(2, 2);
+// return H;
+// }
-cv::Mat GeoSolver::findHomography_GPT(FrameInfo info)
-{
-// world: Z=0 plane
-cv::Mat R = Mat_TransENG2uv(info).inv(); // 3x3
-cv::Mat t = Mat_TransENGMove(info); // 3x1
-// ground plane normal in world coord
-cv::Mat n_w = (cv::Mat_<double>(3,1) << 0, 0, 1);
-// normal in camera coord
-cv::Mat n = R * n_w; // 3x1
-// distance to ground plane
-double d = info.nEvHeight;
-cv::Mat H = R - (t * n.t()) / d;
-H /= H.at<double>(2,2);
-return H;
-}
+cv::Mat GeoSolver::findHomography(FrameInfo info)
+{
+// Camera intrinsics K
+cv::Mat Kmat = Mat_GetCamK(info);
+// Rotation from the geographic frame to the camera frame
+cv::Mat R_geo_to_cam = Mat_TransENG2Cam(info);
+// Translation T: from the origin geographic frame to the current camera position (geographic frame)
+cv::Mat T_geo = Mat_TransENGMove(info);
+// Depth matrix
+Mat M_het = (Mat_<double>(3, 3) << 1, 0, 0,
+0, 1, 0,
+0, 0, info.nEvHeight
+);
+// Ground-plane normal n_w: the up direction (+Z) in the geographic frame
+cv::Mat n_w = (cv::Mat_<double>(3, 1) << 0, 0, 1);
+// Distance from the plane to the camera (effective height)
+double d = info.nEvHeight;
+cv::Mat I = cv::Mat::eye(3, 3, CV_64F);
+// Compute (T_geo * n_w^T) / d
+cv::Mat TnT = (R_geo_to_cam * T_geo * n_w.t()) / d;
+// Homography from geographic coordinates to pixel coordinates
+cv::Mat H_geo_to_pixel = Kmat * (R_geo_to_cam - TnT) * M_het;
+cv::Mat H = H_geo_to_pixel.inv();
+// Normalize (so H[2,2] = 1)
+H = H / H.at<double>(2, 2);
+return H.clone();
+}
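A minimal usage sketch of the returned H (the helper name here is hypothetical; it mirrors warpPointWithH in the deleted Python test below). The matrix maps image pixels into the normalized geographic frame via a projective multiply and divide:

#include <opencv2/core.hpp>

// Hypothetical helper: apply a 3x3 homography H (CV_64F) to pixel (u, v).
// With H from GeoSolver::findHomography, the result lies in the
// normalized geographic frame.
static cv::Point2f warpWithH(const cv::Mat& H, const cv::Point2f& uv)
{
    cv::Mat p = (cv::Mat_<double>(3, 1) << uv.x, uv.y, 1.0);
    cv::Mat q = H * p;             // projective transform
    double w = q.at<double>(2, 0); // homogeneous scale
    return cv::Point2f(q.at<double>(0, 0) / w,
                       q.at<double>(1, 0) / w);
}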

@@ -12,12 +12,6 @@
#pragma once
#include "StitchStruct.h"
-// Image-space/object-space transform
-struct TForm
-{
-cv::Mat R;
-cv::Mat T;
-};
// Lie group + K
@@ -35,16 +29,12 @@ public:
GeoSolver();
~GeoSolver();
-// Build the H matrix between image space and the normalized geographic frame; solve for H analytically
+// Build the H matrix between image space and the normalized geographic frame; solve for H analytically (H projects from image space to the geolocation matrix)
cv::Mat findHomography(FrameInfo info);
// Compute the standard Lie group and return the H matrix
cv::Mat findHomography(FrameInfo info, RtK& se3Para);
-// tests
-cv::Mat findHomography2(FrameInfo info);
-cv::Mat findHomography_GPT(FrameInfo info);
// Set the exterior orientation at the stitch origin; returns the current optical-axis geodetic pointing
void SetOriginPoint(FrameInfo info);
@@ -69,24 +59,24 @@ public:
// Compute the current frame's Lie group SE3 and projection K
RtK AnlayseRtK(FrameInfo info);
-// frame image space -> geographic coordinates
-cv::Point2f Trans_uv2Geo(cv::Point2f pos_frame, TForm form);
-// geographic coordinates -> frame image space
-cv::Point2f Trans_Geo2uv(cv::Point2f pos_geo, TForm form);
+// // frame image space -> geographic coordinates
+// cv::Point2f Trans_uv2Geo(cv::Point2f pos_frame, TForm form);
+// // geographic coordinates -> frame image space
+// cv::Point2f Trans_Geo2uv(cv::Point2f pos_geo, TForm form);
// Rotation matrix from body ENG (east-north-down) to image space
cv::Mat Mat_TransENG2uv(FrameInfo info);
public:
-// Translation matrix: using the initialization point as reference, compute the current position's geographic coordinates relative to the origin; all points in the current frame are translated by this amount
+// Translation matrix t: using the initialization point as reference, compute the current position's geographic coordinates relative to the origin; all points in the current frame are translated by this amount
cv::Mat Mat_TransENGMove(FrameInfo info);
-// Rotation matrix from body ENG (east-north-down) to the camera frame
+// Rotation matrix R from body ENG (east-north-down) to the camera frame
cv::Mat Mat_TransENG2Cam(FrameInfo info);
-// intrinsics
+// intrinsics K
cv::Mat Mat_GetCamK(FrameInfo info);
PointXYZ originCGCSPoint; // ECEF coordinates of the mapping origin, used as the stitching reference

@@ -377,8 +377,6 @@ void UnderStitch::run_test(FrameInfo para)
RtK rtk;
cv::Mat H2 = _GeoSolver->findHomography(para, rtk);
cv::Vec3d rvec;
cv::Rodrigues(rtk.R, rvec);
@@ -538,7 +536,7 @@ SINT32 UnderStitch::Run(GD_VIDEO_FRAME_S frame, FrameInfo para)
para.craft.stPos.B, para.craft.stPos.L, para.craft.stPos.H, para.craft.stAtt.fYaw, para.craft.stAtt.fPitch, para.craft.stAtt.fRoll,
para.servoInfo.fServoAz, para.servoInfo.fServoPt,para.camInfo.nFocus,para.camInfo.fPixelSize);
-//run_test(para);
+run_test(para);
// Estimate the scan direction
int dir = StatScanDir(para);
@@ -697,15 +695,15 @@ SINT32 UnderStitch::Run(GD_VIDEO_FRAME_S frame, FrameInfo para)
if (_config.bUseBA && TriggerBA(para))
{
-// Run BA optimization asynchronously
-_pThreadPool->commit(
-[this]() {
-OptAndOutCurrPan(_baWindowKey);
-},
-"BA_Optimization"
-);
+// // Run BA optimization asynchronously
+// _pThreadPool->commit(
+// [this]() {
+// OptAndOutCurrPan(_baWindowKey);
+// },
+// "BA_Optimization"
+// );
+OptAndOutCurrPan(_baWindowKey);
}
@@ -1079,7 +1077,7 @@ SINT32 UnderStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
// Keep the sliding window bounded
-const size_t WINDOW_SIZE = 10; // sliding window size
+const size_t WINDOW_SIZE = 6; // sliding window size
if (_baWindowKey.size() > WINDOW_SIZE)
{
_baWindowKey.erase(_baWindowKey.begin());

@@ -14,7 +14,7 @@
// Define scan mode: use the scan-specific stitching map strategy
-//#define SCAN_MODE
+#define SCAN_MODE
// Adjacency information between keyframes

@@ -2,5 +2,5 @@
#pragma once
#include <string>
-std::string BUILD_TIME = "BUILD_TIME 2025_12_07-12.02.16";
+std::string BUILD_TIME = "BUILD_TIME 2025_12_12-09.36.33";
std::string VERSION = "BUILD_VERSION 1.0.1";

@@ -151,8 +151,8 @@ public:
else
{
UPanConfig config = {0};
-config.bUseBA = 1;
-config.bOutGoogleTile = 1;
+config.bUseBA = 0;
+config.bOutGoogleTile = 0;
config.bOutLocalPanGeoTIFF = 1;
pUnderStitch->SetConfig(config);
@@ -179,14 +179,14 @@ public:
cv::imshow("",src);
cv::waitKey(1);
-if (info.nFrmID % 100 == 0)
-{
-char filename[1024] = {0};
-sprintf(filename, "%d_%02d_%02d_%02d_%04d.tif", aTelem.mSensorID, aTelem.mHour, aTelem.mMinute, aTelem.mSecond, aTelem.mMillSecond);
-exportFrameGeoTIFF(src, info, filename);
-}
+// if (info.nFrmID % 100 == 0)
+// {
+// char filename[1024] = {0};
+// sprintf(filename, "%d_%02d_%02d_%02d_%04d.tif", aTelem.mSensorID, aTelem.mHour, aTelem.mMinute, aTelem.mSecond, aTelem.mMillSecond);
+// exportFrameGeoTIFF(src, info, filename);
+// }
}

@@ -6,7 +6,9 @@
int main(int, char**)
{
-std::string file = std::string("F:/20251105140846_chn_10_25Hz.ts");
+//std::string file = std::string("F:/20251105140846_chn_10_25Hz.ts");
+std::string file = std::string("/media/wang/data/20251105140846_chn_10_25Hz.ts");
TsDecoder* dec = new TsDecoder(file);
dec->decoder();

@@ -1,78 +0,0 @@
import os
import sys
import ctypes
from pathlib import Path
import numpy as np
# Project root (the parent of the directory containing this file)
project_root = Path(__file__).parent.parent.parent
bin_dir = project_root / "Bin"
# Add the Bin directory to the Python module search path
if str(bin_dir) not in sys.path:
sys.path.insert(0, str(bin_dir))
# Preload dependent libraries so the dynamic linker can find them
lib_guide_stitch = bin_dir / "libGuideStitch.so"
if lib_guide_stitch.exists():
try:
ctypes.CDLL(str(lib_guide_stitch), mode=ctypes.RTLD_GLOBAL)
except Exception as e:
print(f"警告: 预加载libGuideStitch.so失败: {e}")
# 导入模块
from UStitcher import API_UnderStitch, FrameInfo, getHomography
import cv2
frame_info = FrameInfo()
frame_info.nFrmID = 1
frame_info.craft.stPos.B = 39
frame_info.craft.stPos.L = 120
frame_info.craft.stPos.H = 1000
frame_info.camInfo.nFocus = 40
frame_info.camInfo.fPixelSize = 12
frame_info.servoInfo.fServoAz = 90
frame_info.servoInfo.fServoPt = -45
frame_info.nEvHeight = 1200
frame_info.nWidth = 1280
frame_info.nHeight = 1024
stitcher = API_UnderStitch.Create()
# Initialize first (sets the origin)
pan_info = stitcher.Init(frame_info)
print(f"初始化成功,全景图尺寸: {pan_info.m_pan_width} x {pan_info.m_pan_height}")
def warpPointWithH(H, pt):
wp = H @ np.array([pt[0],pt[1],1]).T
return wp / wp[2]
for i in range(100):
frame_info.nFrmID = i
frame_info.craft.stPos.B = 39
frame_info.craft.stPos.L = 120
frame_info.craft.stPos.H = 1000
frame_info.camInfo.nFocus = 40
frame_info.camInfo.fPixelSize = 12
frame_info.servoInfo.fServoAz = 90
frame_info.servoInfo.fServoPt = -45
H = getHomography(frame_info)
print(f"单应性矩阵 H:\n{H}")
wp = warpPointWithH(H, np.array([100,111]))
print(f"物方坐标:\n{wp}")
print("done")

@@ -8,10 +8,19 @@ import numpy as np
from dataclasses import dataclass
from typing import List, Tuple
+import os
-sys.path.append("/media/wang/WORK/wangchongwu_gitea_2023/StitchVideo/Bin")
+sys.path.append("../../Bin/Release")
+# CUDA_BIN_DIR = r"C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v12.4\bin"
+# CERES_DEP_DIR = r"D:\wangchongwu_gitea_2023\StitchVideo\3rdParty\ceres_2.2\bin"
+# OPEN_CV_DEP_DIR = r"C:\Lib\opencv455\build\x64\vc14\bin"
+# os.add_dll_directory(CUDA_BIN_DIR)
+# os.add_dll_directory(CERES_DEP_DIR)
+# os.add_dll_directory(OPEN_CV_DEP_DIR)
+# os.add_dll_directory("D:/wangchongwu_gitea_2023/StitchVideo/Bin/Release")
+#os.add_dll_directory("../../Bin/Release")
# Import modules
from UStitcher import API_UnderStitch, FrameInfo, UPanInfo, UPanConfig
@@ -411,7 +420,7 @@ def main():
srt_path_list = []
# Change this to your data path
-folder = "/media/wang/data/K2D_data/"
+folder = "F:/K2D_data/"
video_path_list.append(folder + "DJI_20250418153043_0006_W.MP4")
srt_path_list.append(folder + "DJI_20250418153043_0006_W.srt")

@@ -15,6 +15,10 @@ import time
from dataclasses import dataclass
from pathlib import Path
from typing import Iterable, List, Optional, Tuple
+import pprint
+pprint.pprint(sys.path)
import numpy as np
@@ -23,17 +27,31 @@ import numpy as np
# Environment bootstrap (reuse logic from tests/python/1.py)
# -----------------------------------------------------------------------------
PROJECT_ROOT = Path(__file__).resolve().parents[2]
-BIN_DIR = PROJECT_ROOT / "Bin"
-if str(BIN_DIR) not in sys.path:
-    sys.path.insert(0, str(BIN_DIR))
-LIB_GUIDE = BIN_DIR / "libGuideStitch.so"
-if LIB_GUIDE.exists():
-    try:
-        ctypes.CDLL(str(LIB_GUIDE), mode=ctypes.RTLD_GLOBAL)
-    except OSError as exc:
-        print(f"[WARN] Failed to preload {LIB_GUIDE.name}: {exc}")
+# BIN_DIR = PROJECT_ROOT / "Bin/Release"
+# if str(BIN_DIR) not in sys.path:
+#     sys.path.insert(0, str(BIN_DIR))
+# LIB_GUIDE = BIN_DIR / "GuideStitch.dll"
+# if LIB_GUIDE.exists():
+#     try:
+#         ctypes.CDLL(str(LIB_GUIDE), mode=ctypes.RTLD_GLOBAL)
+#     except OSError as exc:
+#         print(f"[WARN] Failed to preload {LIB_GUIDE.name}: {exc}")
+import os
+sys.path.append("D:/wangchongwu_gitea_2023/StitchVideo/Bin/Release")
+CUDA_BIN_DIR = r"C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v12.4\bin"
+CERES_DEP_DIR = r"D:\wangchongwu_gitea_2023\StitchVideo\3rdParty\ceres_2.2\bin"
+OPEN_CV_DEP_DIR = r"C:\Lib\opencv455\build\x64\vc14\bin"
+os.add_dll_directory(CUDA_BIN_DIR)
+os.add_dll_directory(CERES_DEP_DIR)
+os.add_dll_directory(OPEN_CV_DEP_DIR)
+os.add_dll_directory("D:/wangchongwu_gitea_2023/StitchVideo/Bin/Release")
from UStitcher import API_UnderStitch, FrameInfo, UPanConfig
import cv2
