H矩阵解析式更新

main
wangchongwu 2 months ago
parent 9f3474bb60
commit d9491b4f8f

@ -1,27 +0,0 @@
{
"configurations": [
{
"name": "x64-Debug",
"generator": "Ninja",
"configurationType": "Debug",
"inheritEnvironments": [ "msvc_x64_x64" ],
"buildRoot": "${projectDir}\\out\\build\\${name}",
"installRoot": "${projectDir}\\out\\install\\${name}",
"cmakeCommandArgs": "",
"buildCommandArgs": "",
"ctestCommandArgs": ""
},
{
"name": "x64-Release",
"generator": "Ninja",
"configurationType": "RelWithDebInfo",
"buildRoot": "${projectDir}\\out\\build\\${name}",
"installRoot": "${projectDir}\\out\\install\\${name}",
"cmakeCommandArgs": "",
"buildCommandArgs": "",
"ctestCommandArgs": "",
"inheritEnvironments": [ "msvc_x64_x64" ],
"variables": []
}
]
}

@ -68,7 +68,49 @@ private:
};
// Frobenius-norm regularization residual for the homography H: penalizes the
// deviation of the whole H matrix from its initial value.
// NOTE(review): initial_h_ is a non-owning pointer; the caller must keep the
// initial parameter array alive for the lifetime of this functor.
struct HomographyRegularizationResidual
{
    // initial_h: pointer to the 8 initial H parameters (H[2][2] is fixed to 1.0).
    // weight: regularization strength (larger = stronger pull toward initial_h).
    HomographyRegularizationResidual(const double* initial_h, double weight = 1.0)
    : initial_h_(initial_h), weight_(weight)
    {
    }
    // Ceres auto-diff functor: residual[0] = weight * ||h - h0||_F over the
    // first 8 parameters (the last element of H is pinned to 1.0, so it is
    // excluded from the difference).
    template <typename T>
    bool operator()(const T* const h, T* residual) const
    {
        // Sum of squared differences between h and the stored initial value:
        // ||H - H0||_F = sqrt(sum((h_i - h_i_0)^2)).
        T diff_sum = T(0);
        for (int i = 0; i < 8; ++i)
        {
            T diff = h[i] - T(initial_h_[i]);
            diff_sum += diff * diff;
        }
        // Clamp away from zero so the sqrt below stays numerically stable
        // (avoids the unbounded derivative of sqrt at 0 under auto-diff).
        const T epsilon = T(1e-10);
        diff_sum = diff_sum < epsilon ? epsilon : diff_sum;
        // Frobenius norm.
        T frobenius_norm = sqrt(diff_sum);
        // Weighted norm is the residual; guard against NaN (x != x) and
        // absurdly large values so a bad iterate cannot poison the solver.
        T weighted_norm = T(weight_) * frobenius_norm;
        residual[0] = (weighted_norm != weighted_norm || weighted_norm > T(1e10)) ? T(0) : weighted_norm;
        return true;
    }
private:
    const double* initial_h_; // initial H parameters (8 values), not owned
    double weight_;           // regularization weight
};
// SE3群优化
// SE3群优化残差将左图三角化后投影到右图最小化相对投影误差

@ -158,6 +158,27 @@ void BA_Task::OptFrame(vector<KeyType> frameInd,cv::Mat H_map, std::unordered_ma
}
}
#ifdef OPT_H
// 添加正则化残差块使用Frobenius范数约束整个H矩阵
// 正则化权重:可以根据需要调整,值越大约束越强
// 初始设置较强的约束,后续可以根据效果慢慢放松
double h_reg_weight = 20.0; // H矩阵Frobenius范数正则化权重加强约束
// 为每个帧添加H矩阵正则化
for (int i = 0; i < _currMatrix.size(); i++)
{
// 获取初始H矩阵的8个参数
double* initial_h = (double*)_origMatrix[i].data;
ceres::CostFunction* h_reg_cost =
new ceres::AutoDiffCostFunction<HomographyRegularizationResidual, 1, 8>(
new HomographyRegularizationResidual(initial_h, h_reg_weight));
// 使用HuberLoss使正则化项对异常值更鲁棒
// 可以调整阈值:值越小约束越严格,值越大约束越宽松
ceres::LossFunction* reg_loss = new ceres::HuberLoss(0.2);
problemH.AddResidualBlock(h_reg_cost, reg_loss, h_list[i]);
}
#endif
#ifdef OPT_SE3
// 添加正则化残差块,约束所有参数不要偏离初始值太多
// 正则化权重:可以根据需要调整,值越大约束越强
@ -231,11 +252,11 @@ void BA_Task::OptFrame(vector<KeyType> frameInd,cv::Mat H_map, std::unordered_ma
auto Key = _frameInd[i];
// 将优化se3写入H
#ifdef OPT_SE3
#ifdef OPT_SE3
// 使用优化后的高度值
double optimized_height = height_params[i];
_currMatrix[i] = Transe3ToH((double*)_currse3[i].data, constK, optimized_height);
#endif
#endif
// 更新缓存
updateCacheH(Key, _currMatrix[i], global_H);

@ -1,469 +1,478 @@
#include "Arith_GeoSolver.h"
#include "Arith_UnderStitch.h"
#include "Arith_Utils.h"
#include "Arith_CoordModule.h"
#include "Arith_SysStruct.h"
#define GLOG_USE_GLOG_EXPORT
#include "ceres/ceres.h"
#include "ceres/rotation.h"
using namespace cv;
GeoSolver::GeoSolver()
{
}
GeoSolver::~GeoSolver()
{
}
//cv::Point2f GeoSolver::project(cv::Point2f pos_pan, Proj m_Proj, PanInfo pan)
//{
// cv::Point2f pos_geo = Trans_pan2Geo(pos_pan, pan);
// cv::Point2f pos_frame = Trans_Geo2uv(pos_geo, m_Proj.tf_g2p);
// return pos_frame;
//}
//
//cv::Point2f GeoSolver::back_project(cv::Point2f pos_frame, Proj m_Proj, PanInfo pan)
//{
// cv::Point2f pos_geo = Trans_uv2Geo(pos_frame, m_Proj.tf_p2g);
//
// cv::Point2f pos_pan = Trans_Geo2pan(pos_geo, pan);
//
// return pos_pan;
//}
void GeoSolver::SetOriginPoint(FrameInfo info)
{
originCGCSPoint = getXYZFromBLH(info.craft.stPos);
origininfo = info;
return;
}
PointXYZ GeoSolver::GetOriginPoint()
{
return originCGCSPoint;
}
int GeoSolver::GetOriginDHeight()
{
return origininfo.nEvHeight;
}
PointBLH GeoSolver::getBLHFromFrame(cv::Mat H, cv::Point2f pt)
{
cv::Point2f pt_Geo = warpPointWithH(H, pt);
return getBLHFromGeo(pt_Geo);
}
PointBLH GeoSolver::getBLHFromGeo(cv::Point2f ptInGeo)
{
PointXYZ ptNUE = { 0 };
// 本模块使用的地理系是东(x)-北(y)-地(z),需要转换一下
ptNUE.X = ptInGeo.y;
ptNUE.Y = -origininfo.nEvHeight;
ptNUE.Z = ptInGeo.x;
PointBLH res = getBLHFromXYZ(getCGCSXYZFromNUEXYZ(ptNUE, originCGCSPoint));
return res;
}
cv::Point2f GeoSolver::getGeoFromBLH(PointBLH ptPos)
{
cv::Point2f ptInGeo = { 0 };
PointXYZ ptNUE = { 0 };
ptNUE = getNUEXYZFromCGCSXYZ(getXYZFromBLH(ptPos), originCGCSPoint);
// 本模块使用的地理系是东(x)-北(y)-地(z),需要转换一下
ptInGeo.x = ptNUE.Z;
ptInGeo.y = ptNUE.X;
return ptInGeo;
}
// Frame pixel -> local geographic coordinates.
// form.R maps homogeneous pixel coordinates into the local geographic plane
// (projective division applied), form.T then translates into the frame
// anchored at the stitching origin.
cv::Point2f GeoSolver::Trans_uv2Geo(cv::Point2f pos_frame, TForm form)
{
    Mat point = (Mat_<double>(3, 1) << pos_frame.x, pos_frame.y, 1);
    Mat result = form.R * point;
    // Projective division into the local geographic plane.
    double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
    double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
    // Translate to the geographic frame of the stitching origin.
    warpedX += form.T.at<double>(0, 0);
    warpedY += form.T.at<double>(1, 0);
    return cv::Point2f(warpedX, warpedY);
}
// Local geographic coordinates -> frame pixel (inverse of Trans_uv2Geo).
// form_inv.T translates into the camera-anchored frame, form_inv.R then
// projects into pixel coordinates.
cv::Point2f GeoSolver::Trans_Geo2uv(cv::Point2f pos_geo, TForm form_inv)
{
    // Translate to the current camera position first.
    cv::Point2f pos_cam = pos_geo;
    pos_cam.x = pos_geo.x + form_inv.T.at<double>(0, 0);
    pos_cam.y = pos_geo.y + form_inv.T.at<double>(1, 0);
    Mat point = (Mat_<double>(3, 1) << pos_cam.x, pos_cam.y, 1);
    Mat result = form_inv.R * point;
    // Projective division into the image plane.
    double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
    double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
    return cv::Point2f(warpedX, warpedY);
}
Mat GeoSolver::Mat_TransENGMove(FrameInfo info)
{
PointXYZ ptCurr = getXYZFromBLH(info.craft.stPos);
PointXYZ diff = getNUEXYZFromCGCSXYZ(ptCurr, originCGCSPoint);
Mat move = Mat::zeros(3, 1, CV_64F);
move.at<double>(0, 0) = diff.Z;
move.at<double>(1, 0) = diff.X;
move.at<double>(2, 0) = -diff.Y;
return move;
}
Mat GeoSolver::Mat_TransENG2uv(FrameInfo info)
{
//从地理坐标系转像素坐标
//[u,v,1]'=Z*M*[X,Y,DH]' = Z*M*[1,0,0;0,1,0;0,0,DH]'*[X,Y,1]'
//[u,v,1]'=Z*M(内参)*M(alaph)*M(beta)*M(roll)*M(pitch)*M(yaw)*[X,Y,DH]
// 深度矩阵
Mat M_het = (Mat_<double>(3, 3) << 1, 0, 0,
0, 1, 0,
0, 0, info.nEvHeight
);
Mat M = Mat_GetCamK(info) * Mat_TransENG2Cam(info) * M_het;
return M;
}
using namespace std;
cv::Mat GeoSolver::Mat_TransENG2Cam(FrameInfo info)
{
float yaw = info.craft.stAtt.fYaw;
Mat M_yaw = (Mat_<double>(3, 3) << cosd(yaw), -sind(yaw), 0,
sind(yaw), cosd(yaw), 0,
0, 0, 1
);
float pit = info.craft.stAtt.fPitch;
Mat M_pitch = (Mat_<double>(3, 3) << 1, 0, 0,
0, cosd(pit), -sind(pit),
0, sind(pit), cosd(pit)
);
/* 1 0 0
0 cos sin
0 -sin cos
*/
float roll = info.craft.stAtt.fRoll;
Mat M_roll = (Mat_<double>(3, 3) << cosd(roll), 0, sind(roll),
0, 1, 0,
-sind(roll), 0, cosd(roll)
);
float beta = info.servoInfo.fServoAz;
Mat M_beta = (Mat_<double>(3, 3) << cosd(beta), -sind(beta), 0,
sind(beta), cosd(beta), 0,
0, 0, 1
);
float alaph = info.servoInfo.fServoPt;
Mat M_alaph = (Mat_<double>(3, 3) << 1, 0, 0,
0, cosd(alaph), -sind(alaph),
0, sind(alaph), cosd(alaph)
);
Mat M =M_alaph * M_beta * M_roll * M_pitch * M_yaw;
return M;
}
cv::Mat GeoSolver::Mat_GetCamK(FrameInfo info)
{
// 内参
FLOAT32 fd = info.camInfo.nFocus / info.camInfo.fPixelSize * 1000;
Mat M_cam = (Mat_<double>(3, 3) << fd, 0, info.nWidth / 2,
0, -fd, info.nHeight / 2,
0, 0, 1
);
return M_cam;
}
RtK GeoSolver::AnlayseRtK(FrameInfo info)
{
RtK rtk;
rtk.R = Mat_TransENG2Cam(info);
rtk.T = Mat_TransENGMove(info);
rtk.K = Mat_GetCamK(info);
return rtk;
}
// Convert (R, T, K) into the pixel -> geographic homography (legacy closed
// form): invert K*R*M_het, then fold the planar translation into the first
// two rows through the third row of the inverse.
cv::Mat TransRtKToH(RtK rtk, double Depth)
{
    // Depth matrix: scales the homogeneous third coordinate by the plane depth.
    Mat M_het = (Mat_<double>(3, 3) << 1, 0, 0,
    0, 1, 0,
    0, 0, Depth
    );
    // Pixel -> geographic projection part.
    cv::Mat R_p2g = (rtk.K * rtk.R * M_het).inv();
    //cv::Mat R_p2g = R_g2p.inv();
    cv::Mat t_MAT = rtk.T;
    double R[9] = { 0 };
    double T[3] = { 0 };
    memcpy(R, R_p2g.data, sizeof(double) * 9);
    memcpy(T, t_MAT.data, sizeof(double) * 3);
    // Fold the planar translation (T[0], T[1]) into the homography rows.
    Mat H = (Mat_<double>(3, 3) << (R[0] + T[0] * R[6]), (R[1] + T[0] * R[7]), (R[2] + T[0] * R[8]),
    (R[3] + T[1] * R[6]), (R[4] + T[1] * R[7]), (R[5] + T[1] * R[8]),
    R[6], R[7], R[8]);
    // Normalize so H(2,2) == 1.
    H = H / H.at<double>(2, 2);
    return H.clone();
}
cv::Mat Transe3ToH(double* se3, cv::Mat K, double Depth)
{
RtK rtk;
cv::Rodrigues(cv::Mat(3, 1, CV_64FC1, se3), rtk.R);
//cv::Mat w, u, vt;
//cv::SVD::compute(rtk.R, w, u, vt);
//// 核心:确保 u * vt 是精确的正交矩阵
//// 而不是直接对R进行尺度归一化
//rtk.R = u * vt;
rtk.T = cv::Mat(3, 1, CV_64FC1, se3 + 3);
rtk.K = K;
return TransRtKToH(rtk, Depth);
}
// Cross-check for the analytic H: warp four reference pixels into the
// geographic plane with Trans_uv2Geo, then re-fit a homography from the
// resulting point pairs with cv::findHomography.
cv::Mat GeoSolver::findHomography2(FrameInfo info)
{
    TForm tf_p2g;
    tf_p2g.R = Mat_TransENG2uv(info).inv();
    tf_p2g.T = Mat_TransENGMove(info);
    std::vector<cv::Point2f> srcPoints;
    srcPoints.push_back(cv::Point2f(0, 0));
    srcPoints.push_back(cv::Point2f(1000, 0));
    srcPoints.push_back(cv::Point2f(1000, 1000));
    srcPoints.push_back(cv::Point2f(0, 1000));
    // Corresponding points: image plane -> panorama (geographic) plane.
    cv::Point2f leftTop_map = Trans_uv2Geo(srcPoints[0], tf_p2g);
    cv::Point2f rightTop_map = Trans_uv2Geo(srcPoints[1], tf_p2g);
    cv::Point2f rightBottom_map = Trans_uv2Geo(srcPoints[2], tf_p2g);
    cv::Point2f leftBottom_map = Trans_uv2Geo(srcPoints[3], tf_p2g);
    // The four corresponding corners in the target (panorama) plane.
    std::vector<cv::Point2f> dstPoints;
    dstPoints.push_back(leftTop_map);     // top-left
    dstPoints.push_back(rightTop_map);    // top-right
    dstPoints.push_back(rightBottom_map); // bottom-right
    dstPoints.push_back(leftBottom_map);  // bottom-left
    // Fit the homography H from the four correspondences.
    cv::Mat H = cv::findHomography(srcPoints, dstPoints);
    return H;
}
// Analytic H from R and t (legacy closed form): invert the ENG->pixel
// matrix and fold the planar translation into the first two rows.
cv::Mat GeoSolver::findHomography(FrameInfo info)
{
    TForm tf_p2g;
    tf_p2g.R = Mat_TransENG2uv(info).inv();
    tf_p2g.T = Mat_TransENGMove(info);
    double R[9] = { 0 };
    double T[3] = { 0 };
    memcpy(R, tf_p2g.R.data, sizeof(double) * 9);
    memcpy(T, tf_p2g.T.data, sizeof(double) * 3);
    // H rows: projection part plus translation folded through the third row.
    Mat H = (Mat_<double>(3, 3) << (R[0] + T[0] * R[6]), (R[1] + T[0] * R[7]), (R[2] + T[0] * R[8]),
    (R[3] + T[1] * R[6]), (R[4] + T[1] * R[7]), (R[5] + T[1] * R[8]),
    R[6], R[7], R[8]);
    // Normalize so H(2,2) == 1.
    H = H / H.at<double>(2, 2);
    return H;
}
// Alternative plane-induced homography: H = R - (t * n^T) / d for the
// ground plane Z = 0.
// NOTE(review): n is rotated into the camera frame (R * n_w) while t is
// used un-rotated -- confirm both are expressed in the same frame before
// relying on this path; it appears to be kept as a reference/test variant.
cv::Mat GeoSolver::findHomography_GPT(FrameInfo info)
{
    // world: Z=0 plane
    cv::Mat R = Mat_TransENG2uv(info).inv(); // 3x3
    cv::Mat t = Mat_TransENGMove(info); // 3x1
    // Ground-plane normal in world coordinates.
    cv::Mat n_w = (cv::Mat_<double>(3,1) << 0, 0, 1);
    // Normal in camera coordinates.
    cv::Mat n = R * n_w; // 3x1
    // Distance to the ground plane (effective height).
    double d = info.nEvHeight;
    cv::Mat H = R - (t * n.t()) / d;
    H /= H.at<double>(2,2);
    return H;
}
cv::Mat GeoSolver::findHomography(FrameInfo info, RtK& rtk)
{
rtk = AnlayseRtK(info);
cv::Mat H = TransRtKToH(rtk,info.nEvHeight);
return H;
}
// 计算多边形的面积
double polygonArea(const vector<cv::Point2f>& points)
{
double area = 0.0;
int n = points.size();
for (int i = 0; i < n; i++) {
int j = (i + 1) % n;
area += points[i].x * points[j].y - points[j].x * points[i].y;
}
return abs(area) / 2.0;
}
// 计算两个四边形的 IOU
double computeQuadrilateralIOU(const vector<cv::Point2f>& quad1, const vector<cv::Point2f>& quad2)
{
// 将四边形转换为多边形
vector<Point2f> poly1 = quad1;
vector<Point2f> poly2 = quad2;
// 计算交集多边形
vector<Point2f> intersectionPolygon;
intersectConvexConvex(poly1, poly2, intersectionPolygon);
// 计算面积
double area1 = polygonArea(poly1);
double area2 = polygonArea(poly2);
double intersectionArea = polygonArea(intersectionPolygon);
double unionArea = area1 + area2 - intersectionArea;
// 计算 IOU
if (unionArea == 0) return 0.0; // 避免除零错误
return intersectionArea / unionArea;
}
cv::Point2f warpPointWithH(cv::Mat H, cv::Point2f srcPt)
{
Mat point = (Mat_<double>(3, 1) << srcPt.x, srcPt.y, 1);
Mat result = H * point;
// 转局部地理系
double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
return cv::Point2f(warpedX,warpedY);
}
vector<cv::Point2f> warpRectWithH(cv::Mat H,cv::Size size)
{
vector<cv::Point2f> _res;
_res.push_back(warpPointWithH(H, cv::Point2f(0,0)));
_res.push_back(warpPointWithH(H, cv::Point2f(size.width,0)));
_res.push_back(warpPointWithH(H, cv::Point2f(size.width,size.height)));
_res.push_back(warpPointWithH(H, cv::Point2f(0,size.height)));
return _res;
}
cv::Rect2f warpRectWithH_2Rect(cv::Mat H, cv::Size size)
{
vector<cv::Point2f> dstCorners = warpRectWithH(H, size);
// 找到变换后图像的边界
float minX = MIN(MIN(dstCorners[0].x, dstCorners[1].x), MIN(dstCorners[2].x, dstCorners[3].x));
float minY = MIN(MIN(dstCorners[0].y, dstCorners[1].y), MIN(dstCorners[2].y, dstCorners[3].y));
float maxX = MAX(MAX(dstCorners[0].x, dstCorners[1].x), MAX(dstCorners[2].x, dstCorners[3].x));
float maxY = MAX(MAX(dstCorners[0].y, dstCorners[1].y), MAX(dstCorners[2].y, dstCorners[3].y));
// 变换后结果的rect
cv::Rect2f roi(minX, minY, maxX - minX, maxY - minY);
return roi;
}
cv::Point2f warpPointWithse3(const double* se3, double depth, cv::Mat K, cv::Point2f srcPt)
{
Mat point = (Mat_<double>(3, 1) << srcPt.x, srcPt.y, 1.0);
// 轴角转R
cv::Mat rvec = (Mat_<double>(3, 1) << -se3[0], -se3[1], -se3[2]);
cv::Mat tvec = (Mat_<double>(3, 1) << se3[3], se3[4], se3[5]);
cv::Mat R;
cv::Rodrigues(rvec, R);
// 深度矩阵
Mat M_het_inv = (Mat_<double>(3, 3) << 1, 0, 0,
0, 1, 0,
0, 0, 1.0/depth
);
// 转地理系
Mat result = M_het_inv * R * K.inv() * point;
// 说明ceres::AngleAxisRotatePoint有效结果等效于cv::Rodrigues
//double resP[3] = { 0 };
//cv::Mat pt = (K.inv() * point);
//ceres::AngleAxisRotatePoint((double*)rvec.data, (double*)pt.data, (double*)resP);
//result = M_het_inv * cv::Mat(3, 1, CV_64FC1, resP);
// 转局部地理系
double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
warpedX += tvec.at<double>(0, 0);
warpedY += tvec.at<double>(1, 0);
return cv::Point2f(warpedX,warpedY);
}
#include "Arith_GeoSolver.h"
#include <cmath>
#include "Arith_UnderStitch.h"
#include "Arith_Utils.h"
#include "Arith_CoordModule.h"
#include "Arith_SysStruct.h"
//#define GLOG_USE_GLOG_EXPORT
using namespace cv;
GeoSolver::GeoSolver()
{
}
GeoSolver::~GeoSolver()
{
}
//cv::Point2f GeoSolver::project(cv::Point2f pos_pan, Proj m_Proj, PanInfo pan)
//{
// cv::Point2f pos_geo = Trans_pan2Geo(pos_pan, pan);
// cv::Point2f pos_frame = Trans_Geo2uv(pos_geo, m_Proj.tf_g2p);
// return pos_frame;
//}
//
//cv::Point2f GeoSolver::back_project(cv::Point2f pos_frame, Proj m_Proj, PanInfo pan)
//{
// cv::Point2f pos_geo = Trans_uv2Geo(pos_frame, m_Proj.tf_p2g);
//
// cv::Point2f pos_pan = Trans_Geo2pan(pos_geo, pan);
//
// return pos_pan;
//}
// Record the stitching origin: keep a copy of the full frame info for later
// height/origin queries and cache the ECEF position of the first frame.
void GeoSolver::SetOriginPoint(FrameInfo info)
{
    origininfo = info;
    originCGCSPoint = getXYZFromBLH(info.craft.stPos);
}
// Return the geocentric (ECEF) coordinates of the stitching origin.
PointXYZ GeoSolver::GetOriginPoint()
{
    return originCGCSPoint;
}
// Return the relative height (nEvHeight) recorded with the origin frame.
int GeoSolver::GetOriginDHeight()
{
    return origininfo.nEvHeight;
}
// Geolocate a pixel of the original frame: warp it into the local geographic
// plane with H, then convert that planar position to latitude/longitude.
PointBLH GeoSolver::getBLHFromFrame(cv::Mat H, cv::Point2f pt)
{
    return getBLHFromGeo(warpPointWithH(H, pt));
}
// Convert a point in the local geographic plane to geodetic lat/lon/height.
PointBLH GeoSolver::getBLHFromGeo(cv::Point2f ptInGeo)
{
    PointXYZ ptNUE = { 0 };
    // This module's geographic frame is East(x)-North(y)-Down(z); the NUE
    // helper frame orders its axes differently, so remap them here.
    ptNUE.X = ptInGeo.y;
    ptNUE.Y = -origininfo.nEvHeight;
    ptNUE.Z = ptInGeo.x;
    PointBLH res = getBLHFromXYZ(getCGCSXYZFromNUEXYZ(ptNUE, originCGCSPoint));
    return res;
}
// Convert geodetic lat/lon to local geographic-plane coordinates
// (East(x)-North(y)) relative to the stitching origin.
cv::Point2f GeoSolver::getGeoFromBLH(PointBLH ptPos)
{
    // Query point's ECEF position, expressed in the origin's NUE frame.
    const PointXYZ ptNUE = getNUEXYZFromCGCSXYZ(getXYZFromBLH(ptPos), originCGCSPoint);
    // Remap NUE axes into this module's East-North plane.
    cv::Point2f ptInGeo = { 0 };
    ptInGeo.x = ptNUE.Z;
    ptInGeo.y = ptNUE.X;
    return ptInGeo;
}
// cv::Point2f GeoSolver::Trans_uv2Geo(cv::Point2f pos_frame, TForm form)
// {
// Mat point = (Mat_<double>(3, 1) << pos_frame.x, pos_frame.y, 1);
// Mat result = form.R * point;
// // 转局部地理系
// double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
// double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
// // 平移到原点地理系
// warpedX += form.T.at<double>(0, 0);
// warpedY += form.T.at<double>(1, 0);
// return cv::Point2f(warpedX, warpedY);
// }
// cv::Point2f GeoSolver::Trans_Geo2uv(cv::Point2f pos_geo, TForm form_inv)
// {
// // 先平移到当前相机位置
// cv::Point2f pos_cam = pos_geo;
// pos_cam.x = pos_geo.x + form_inv.T.at<double>(0, 0);
// pos_cam.y = pos_geo.y + form_inv.T.at<double>(1, 0);
// Mat point = (Mat_<double>(3, 1) << pos_cam.x, pos_cam.y, 1);
// Mat result = form_inv.R * point;
// // 转像方
// double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
// double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
// return cv::Point2f(warpedX, warpedY);
// }
// Translation of the current frame relative to the stitching origin,
// expressed in the module's East(x)-North(y)-Down(z) geographic frame.
Mat GeoSolver::Mat_TransENGMove(FrameInfo info)
{
    const PointXYZ ptCurr = getXYZFromBLH(info.craft.stPos);
    const PointXYZ diff = getNUEXYZFromCGCSXYZ(ptCurr, originCGCSPoint);
    // Remap the NUE components into ENG order (East, North, -Up).
    return (Mat_<double>(3, 1) << diff.Z, diff.X, -diff.Y);
}
// Geographic (ENG) coordinates -> pixel coordinates.
// [u,v,1]' = Z*M*[X,Y,DH]' = Z*M*[1,0,0;0,1,0;0,0,DH]'*[X,Y,1]'
// [u,v,1]' = Z*K*M(alaph)*M(beta)*M(roll)*M(pitch)*M(yaw)*[X,Y,DH]
Mat GeoSolver::Mat_TransENG2uv(FrameInfo info)
{
    // Depth matrix: scales the homogeneous third coordinate by the
    // effective ground height.
    Mat M_het = (Mat_<double>(3, 3) << 1, 0, 0,
    0, 1, 0,
    0, 0, info.nEvHeight
    );
    Mat M = Mat_GetCamK(info) * Mat_TransENG2Cam(info) * M_het;
    return M;
}
using namespace std;
// Rotation from the body ENG (East-North-Down) frame to the camera frame.
// Composition: M = M(alaph) * M(beta) * M(roll) * M(pitch) * M(yaw), i.e.
// aircraft attitude first (yaw, pitch, roll), then the servo/gimbal azimuth
// (beta) and pitch (alaph). Angles are in degrees (cosd/sind helpers).
cv::Mat GeoSolver::Mat_TransENG2Cam(FrameInfo info)
{
    // Aircraft yaw: rotation about the vertical axis.
    float yaw = info.craft.stAtt.fYaw;
    Mat M_yaw = (Mat_<double>(3, 3) << cosd(yaw), -sind(yaw), 0,
    sind(yaw), cosd(yaw), 0,
    0, 0, 1
    );
    // Aircraft pitch: rotation about the lateral axis.
    float pit = info.craft.stAtt.fPitch;
    Mat M_pitch = (Mat_<double>(3, 3) << 1, 0, 0,
    0, cosd(pit), -sind(pit),
    0, sind(pit), cosd(pit)
    );
    /* 1 0 0
    0 cos sin
    0 -sin cos
    */
    // Aircraft roll: rotation about the longitudinal axis.
    float roll = info.craft.stAtt.fRoll;
    Mat M_roll = (Mat_<double>(3, 3) << cosd(roll), 0, sind(roll),
    0, 1, 0,
    -sind(roll), 0, cosd(roll)
    );
    // Servo/gimbal azimuth.
    float beta = info.servoInfo.fServoAz;
    Mat M_beta = (Mat_<double>(3, 3) << cosd(beta), -sind(beta), 0,
    sind(beta), cosd(beta), 0,
    0, 0, 1
    );
    // Servo/gimbal pitch.
    float alaph = info.servoInfo.fServoPt;
    Mat M_alaph = (Mat_<double>(3, 3) << 1, 0, 0,
    0, cosd(alaph), -sind(alaph),
    0, sind(alaph), cosd(alaph)
    );
    Mat M =M_alaph * M_beta * M_roll * M_pitch * M_yaw;
    return M;
}
// Build the 3x3 camera intrinsic matrix K.
// Focal length in pixels: fd = nFocus / fPixelSize * 1000 (focus presumably
// in mm and pixel pitch in um -- TODO confirm units against camInfo).
// The Y focal is negated because the pixel v-axis points opposite to the
// camera frame's vertical axis (matches the original implementation).
cv::Mat GeoSolver::Mat_GetCamK(FrameInfo info)
{
    // Compute in double precision: the original used FLOAT32, losing
    // precision before being widened into the CV_64F matrix below.
    const double fd = static_cast<double>(info.camInfo.nFocus) / info.camInfo.fPixelSize * 1000.0;
    // Principal point at the true image center; divide by 2.0 so odd widths
    // and heights are not truncated by integer division.
    Mat M_cam = (Mat_<double>(3, 3) << fd, 0, info.nWidth / 2.0,
    0, -fd, info.nHeight / 2.0,
    0, 0, 1
    );
    return M_cam;
}
// Decompose the current frame's pose into rotation R (world -> camera),
// translation T (world frame) and intrinsics K.
RtK GeoSolver::AnlayseRtK(FrameInfo info)
{
    RtK pose;
    pose.K = Mat_GetCamK(info);
    pose.R = Mat_TransENG2Cam(info);
    pose.T = Mat_TransENGMove(info);
    return pose;
}
// Convert a (R, T, K) pose into the pixel -> local-geographic homography
// under a flat-ground assumption at depth `Depth` (effective height).
// H_geo_to_pixel = K * (R - (R*T*n^T)/d) * M_het; the returned H is its
// inverse, normalized so that H(2,2) == 1.
cv::Mat TransRtKToH(RtK rtk, double Depth)
{
    // Depth matrix: scales the homogeneous third coordinate by the plane depth.
    Mat M_het = (Mat_<double>(3, 3) << 1, 0, 0,
    0, 1, 0,
    0, 0, Depth
    );
    // Ground-plane normal n_w (Z axis of the geographic frame).
    cv::Mat n_w = (cv::Mat_<double>(3, 1) << 0, 0, 1);
    // Plane-to-camera distance (effective height).
    double d = Depth;
    // (R * T * n_w^T) / d : planar-translation term of the homography.
    cv::Mat TnT = (rtk.R * rtk.T * n_w.t()) / d;
    // Geographic -> pixel homography, then invert for pixel -> geographic.
    cv::Mat H_geo_to_pixel = rtk.K * (rtk.R - TnT) * M_het;
    cv::Mat H = H_geo_to_pixel.inv();
    // Normalize so H(2,2) == 1.
    H = H / H.at<double>(2, 2);
    return H.clone();
}
// Convert a 6-DOF se3 parameter block (axis-angle rotation in se3[0..2],
// translation in se3[3..5]) plus intrinsics K into a homography.
// NOTE(review): cv::Mat(3, 1, CV_64FC1, se3 + 3) wraps the caller's buffer
// without copying -- the se3 array must stay alive while rtk.T is used.
cv::Mat Transe3ToH(double* se3, cv::Mat K, double Depth)
{
    RtK rtk;
    // Axis-angle -> rotation matrix.
    cv::Rodrigues(cv::Mat(3, 1, CV_64FC1, se3), rtk.R);
    //cv::Mat w, u, vt;
    //cv::SVD::compute(rtk.R, w, u, vt);
    //// Core idea: force u * vt to be an exact orthogonal matrix instead of
    //// scale-normalizing R directly.
    //rtk.R = u * vt;
    rtk.T = cv::Mat(3, 1, CV_64FC1, se3 + 3);
    rtk.K = K;
    return TransRtKToH(rtk, Depth);
}
// 根据R和t解析H
// cv::Mat GeoSolver::findHomography(FrameInfo info)
// {
// cv::Mat Rmat = Mat_TransENG2uv(info).inv();
// cv::Mat Tmat = Mat_TransENGMove(info);
// double R[9] = { 0 };
// double T[3] = { 0 };
// memcpy(R, Rmat.data, sizeof(double) * 9);
// memcpy(T, Tmat.data, sizeof(double) * 3);
// Mat H = (Mat_<double>(3, 3) << (R[0] + T[0] * R[6]), (R[1] + T[0] * R[7]), (R[2] + T[0] * R[8]),
// (R[3] + T[1] * R[6]), (R[4] + T[1] * R[7]), (R[5] + T[1] * R[8]),
// R[6], R[7], R[8]);
// // 归一化
// H = H / H.at<double>(2, 2);
// return H;
// }
// Analytic homography from frame telemetry: pixel -> local geographic plane.
// Equivalent to TransRtKToH(AnlayseRtK(info), info.nEvHeight), written out
// explicitly from K, R and T.
cv::Mat GeoSolver::findHomography(FrameInfo info)
{
    // Camera intrinsic matrix K.
    cv::Mat Kmat = Mat_GetCamK(info);
    // Rotation from the geographic frame to the camera frame.
    cv::Mat R_geo_to_cam = Mat_TransENG2Cam(info);
    // Translation T: from the origin geographic frame to the current camera position.
    cv::Mat T_geo = Mat_TransENGMove(info);
    // Depth matrix (effective ground height).
    Mat M_het = (Mat_<double>(3, 3) << 1, 0, 0,
    0, 1, 0,
    0, 0, info.nEvHeight
    );
    // Ground-plane normal n_w (Z axis of the geographic frame).
    cv::Mat n_w = (cv::Mat_<double>(3, 1) << 0, 0, 1);
    // Plane-to-camera distance (effective height).
    double d = info.nEvHeight;
    // (R * T * n_w^T) / d : planar-translation term.
    cv::Mat TnT = (R_geo_to_cam * T_geo * n_w.t()) / d;
    // Geographic -> pixel homography, then invert for pixel -> geographic.
    cv::Mat H_geo_to_pixel = Kmat * (R_geo_to_cam - TnT) * M_het;
    cv::Mat H = H_geo_to_pixel.inv();
    // Normalize so H(2,2) == 1.
    H = H / H.at<double>(2, 2);
    return H.clone();
}
// Compute the frame's standard pose (returned through rtk) and the
// corresponding pixel -> geographic homography.
cv::Mat GeoSolver::findHomography(FrameInfo info, RtK& rtk)
{
    rtk = AnlayseRtK(info);
    return TransRtKToH(rtk, info.nEvHeight);
}
// Shoelace formula: area of a simple polygon given its vertices in order.
// Degenerate inputs (fewer than 3 points) yield 0.
double polygonArea(const vector<cv::Point2f>& points)
{
    const size_t n = points.size();
    double area = 0.0;
    for (size_t i = 0; i < n; i++) {
        const size_t j = (i + 1) % n;
        area += points[i].x * points[j].y - points[j].x * points[i].y;
    }
    // std::fabs guarantees the floating-point overload; a bare abs() can
    // resolve to the int version and truncate when <cmath> is not in scope.
    return std::fabs(area) / 2.0;
}
// IOU of two convex quadrilaterals.
// Uses cv::intersectConvexConvex to obtain the intersection polygon and the
// shoelace-based polygonArea for all areas. Assumes both inputs are convex
// with ordered vertices -- TODO confirm callers guarantee this.
double computeQuadrilateralIOU(const vector<cv::Point2f>& quad1, const vector<cv::Point2f>& quad2)
{
    // Treat the quadrilaterals as general polygons.
    vector<Point2f> poly1 = quad1;
    vector<Point2f> poly2 = quad2;
    // Intersection polygon of the two convex shapes.
    vector<Point2f> intersectionPolygon;
    intersectConvexConvex(poly1, poly2, intersectionPolygon);
    // Areas of both polygons and of their intersection.
    double area1 = polygonArea(poly1);
    double area2 = polygonArea(poly2);
    double intersectionArea = polygonArea(intersectionPolygon);
    double unionArea = area1 + area2 - intersectionArea;
    // IOU = |A ∩ B| / |A ∪ B|; guard the degenerate zero-union case.
    if (unionArea == 0) return 0.0;
    return intersectionArea / unionArea;
}
// Apply homography H to a single point: homogeneous multiply followed by
// the projective division.
cv::Point2f warpPointWithH(cv::Mat H, cv::Point2f srcPt)
{
    const double x = srcPt.x;
    const double y = srcPt.y;
    // Row-wise expansion of H * [x, y, 1]^T.
    const double w = H.at<double>(2, 0) * x + H.at<double>(2, 1) * y + H.at<double>(2, 2);
    const double u = (H.at<double>(0, 0) * x + H.at<double>(0, 1) * y + H.at<double>(0, 2)) / w;
    const double v = (H.at<double>(1, 0) * x + H.at<double>(1, 1) * y + H.at<double>(1, 2)) / w;
    return cv::Point2f(u, v);
}
// Warp the four corners of the (0,0)-(w,h) rectangle with H; corners are
// returned clockwise starting from the top-left.
vector<cv::Point2f> warpRectWithH(cv::Mat H, cv::Size size)
{
    const cv::Point2f corners[4] = {
        cv::Point2f(0, 0),
        cv::Point2f(size.width, 0),
        cv::Point2f(size.width, size.height),
        cv::Point2f(0, size.height)
    };
    vector<cv::Point2f> warped;
    for (const cv::Point2f& corner : corners)
    {
        warped.push_back(warpPointWithH(H, corner));
    }
    return warped;
}
// Axis-aligned bounding rectangle of the warped image corners.
cv::Rect2f warpRectWithH_2Rect(cv::Mat H, cv::Size size)
{
    vector<cv::Point2f> dstCorners = warpRectWithH(H, size);
    // Scan all corners for the extreme coordinates.
    float minX = dstCorners[0].x, maxX = dstCorners[0].x;
    float minY = dstCorners[0].y, maxY = dstCorners[0].y;
    for (size_t i = 1; i < dstCorners.size(); ++i)
    {
        const cv::Point2f& p = dstCorners[i];
        if (p.x < minX) minX = p.x;
        if (p.x > maxX) maxX = p.x;
        if (p.y < minY) minY = p.y;
        if (p.y > maxY) maxY = p.y;
    }
    // Bounding rect of the warped result.
    return cv::Rect2f(minX, minY, maxX - minX, maxY - minY);
}
// Map a pixel into the local geographic plane using a 6-DOF se3 pose
// (axis-angle + translation), a plane depth and intrinsics K.
// NOTE(review): the axis-angle is negated (-se3[0..2]) -- presumably the
// inverse rotation (camera -> world) of the stored pose; confirm against
// the se3 convention used by the BA code.
cv::Point2f warpPointWithse3(const double* se3, double depth, cv::Mat K, cv::Point2f srcPt)
{
    Mat point = (Mat_<double>(3, 1) << srcPt.x, srcPt.y, 1.0);
    // Axis-angle -> R (negated: inverse rotation, see note above).
    cv::Mat rvec = (Mat_<double>(3, 1) << -se3[0], -se3[1], -se3[2]);
    cv::Mat tvec = (Mat_<double>(3, 1) << se3[3], se3[4], se3[5]);
    cv::Mat R;
    cv::Rodrigues(rvec, R);
    // Inverse depth matrix.
    Mat M_het_inv = (Mat_<double>(3, 3) << 1, 0, 0,
    0, 1, 0,
    0, 0, 1.0/depth
    );
    // Back-project the pixel and rotate into the geographic frame.
    Mat result = M_het_inv * R * K.inv() * point;
    // Note: ceres::AngleAxisRotatePoint was verified to be equivalent to
    // cv::Rodrigues here:
    //double resP[3] = { 0 };
    //cv::Mat pt = (K.inv() * point);
    //ceres::AngleAxisRotatePoint((double*)rvec.data, (double*)pt.data, (double*)resP);
    //result = M_het_inv * cv::Mat(3, 1, CV_64FC1, resP);
    // Projective division into the local geographic plane, then translate
    // by the camera position.
    double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
    double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
    warpedX += tvec.at<double>(0, 0);
    warpedY += tvec.at<double>(1, 0);
    return cv::Point2f(warpedX,warpedY);
}

@ -1,121 +1,111 @@
/*********版权所有C2025武汉高德红外股份有限公司***************************************
* Arith_GeoStitcher.h
*
* ,,
* 便--使
* V0.5
* 04046wcw
* 2025/01/15
*
*****************************************************************************************/
#pragma once
#include "StitchStruct.h"
// 像方-物方转换关系
struct TForm
{
cv::Mat R;
cv::Mat T;
};
// 李群+K
struct RtK
{
cv::Mat R; // world->pix
cv::Mat T; // world
cv::Mat K; // cam
};
class GeoSolver
{
public:
// 构造函数
GeoSolver();
~GeoSolver();
// 建立像方与归一化地理系的H矩阵解析出H
cv::Mat findHomography(FrameInfo info);
// 计算标准李群,并返回H矩阵
cv::Mat findHomography(FrameInfo info, RtK& se3Para);
// 测试
cv::Mat findHomography2(FrameInfo info);
cv::Mat findHomography_GPT(FrameInfo info);
// 设置起始拼接点外参,返回当前光轴大地指向
void SetOriginPoint(FrameInfo info);
// 返回当前拼接起始大地原点
PointXYZ GetOriginPoint();
// 返回起始点高差
int GetOriginDHeight();
// 根据H计算原始像方的经纬度
PointBLH getBLHFromFrame(cv::Mat H, cv::Point2f ptInFrame);
// 计算局部地理系下目标的经纬度
PointBLH getBLHFromGeo(cv::Point2f ptInGeo);
// 经纬度转换为局部地理系坐标
cv::Point2f getGeoFromBLH(PointBLH ptPos);
public:
// 计算当前帧李群SE3和投影K
RtK AnlayseRtK(FrameInfo info);
// 帧像方-地理坐标
cv::Point2f Trans_uv2Geo(cv::Point2f pos_frame, TForm form);
// 地理坐标-帧像方
cv::Point2f Trans_Geo2uv(cv::Point2f pos_geo, TForm form);
// 机体ENG(东北地)到像方的 旋转矩阵
cv::Mat Mat_TransENG2uv(FrameInfo info);
public:
// 平移矩阵,以初始化点为基准,计算当前位置在初始点的地理坐标,那么当前帧所有点的坐标做此平移
cv::Mat Mat_TransENGMove(FrameInfo info);
// 机体ENG(东北地)到相机系 旋转矩阵
cv::Mat Mat_TransENG2Cam(FrameInfo info);
// 内参
cv::Mat Mat_GetCamK(FrameInfo info);
PointXYZ originCGCSPoint;//成图初始点的地心地固坐标,作为拼接参考
FrameInfo origininfo;
};
// 李群转H矩阵地平假设,利用深度
cv::Mat TransRtKToH(RtK se3, double Depth);
// 李代数转H矩阵参见上面 李群转H矩阵
cv::Mat Transe3ToH(double* se3, cv::Mat K, double Depth);
// 多边形面积
double polygonArea(const vector<cv::Point2f>& points);
// 计算两个四边形的 IOU
double computeQuadrilateralIOU(const vector<cv::Point2f>& quad1, const vector<cv::Point2f>& quad2);
// H映射点
cv::Point2f warpPointWithH(cv::Mat H,cv::Point2f srcPt);
// H映射多边形按照顺时针
vector<cv::Point2f> warpRectWithH(cv::Mat H,cv::Size size);
// H映射多边形转rect
cv::Rect2f warpRectWithH_2Rect(cv::Mat H, cv::Size size);
// 用SE3映射
cv::Point2f warpPointWithse3(const double* se3, double depth, cv::Mat K, cv::Point2f srcPt);
/*********版权所有C2025武汉高德红外股份有限公司***************************************
* Arith_GeoStitcher.h
*
* ,,
* 便--使
* V0.5
* 04046wcw
* 2025/01/15
*
*****************************************************************************************/
#pragma once
#include "StitchStruct.h"
// 李群+K
struct RtK
{
cv::Mat R; // world->pix
cv::Mat T; // world
cv::Mat K; // cam
};
// Geometric solver: maps between frame pixels, the local geographic plane
// anchored at the stitching origin, and geodetic (B, L, H) coordinates.
class GeoSolver
{
public:
    // Constructor / destructor.
    GeoSolver();
    ~GeoSolver();
    // Build the analytic H between the image plane and the normalized
    // geographic frame (H projects pixels onto the geolocation plane).
    cv::Mat findHomography(FrameInfo info);
    // Compute the standard pose (returned via se3Para) and the H matrix.
    cv::Mat findHomography(FrameInfo info, RtK& se3Para);
    // Set the stitching origin from the first frame's exterior parameters.
    void SetOriginPoint(FrameInfo info);
    // Return the geocentric origin of the stitching session.
    PointXYZ GetOriginPoint();
    // Return the height offset recorded at the origin.
    int GetOriginDHeight();
    // Latitude/longitude of an original-frame pixel via H.
    PointBLH getBLHFromFrame(cv::Mat H, cv::Point2f ptInFrame);
    // Latitude/longitude of a point in the local geographic frame.
    PointBLH getBLHFromGeo(cv::Point2f ptInGeo);
    // Latitude/longitude -> local geographic-frame coordinates.
    cv::Point2f getGeoFromBLH(PointBLH ptPos);
public:
    // Compute the current frame's SE3 pose and projection K.
    RtK AnlayseRtK(FrameInfo info);
    // // Frame pixel -> geographic coordinates.
    // cv::Point2f Trans_uv2Geo(cv::Point2f pos_frame, TForm form);
    // // Geographic coordinates -> frame pixel.
    // cv::Point2f Trans_Geo2uv(cv::Point2f pos_geo, TForm form);
    // Body ENG (East-North-Down) -> pixel rotation matrix.
    cv::Mat Mat_TransENG2uv(FrameInfo info);
public:
    // Translation t: current position expressed in the origin's geographic
    // frame; all points of the current frame are shifted by it.
    cv::Mat Mat_TransENGMove(FrameInfo info);
    // Body ENG (East-North-Down) -> camera rotation matrix R.
    cv::Mat Mat_TransENG2Cam(FrameInfo info);
    // Intrinsic matrix K.
    cv::Mat Mat_GetCamK(FrameInfo info);
    PointXYZ originCGCSPoint; // geocentric ECEF origin used as the stitching reference
    FrameInfo origininfo;     // full frame info captured at the origin
};
// 李群转H矩阵地平假设,利用深度
cv::Mat TransRtKToH(RtK se3, double Depth);
// 李代数转H矩阵参见上面 李群转H矩阵
cv::Mat Transe3ToH(double* se3, cv::Mat K, double Depth);
// 多边形面积
double polygonArea(const vector<cv::Point2f>& points);
// 计算两个四边形的 IOU
double computeQuadrilateralIOU(const vector<cv::Point2f>& quad1, const vector<cv::Point2f>& quad2);
// H映射点
cv::Point2f warpPointWithH(cv::Mat H,cv::Point2f srcPt);
// H映射多边形按照顺时针
vector<cv::Point2f> warpRectWithH(cv::Mat H,cv::Size size);
// H映射多边形转rect
cv::Rect2f warpRectWithH_2Rect(cv::Mat H, cv::Size size);
// 用SE3映射
cv::Point2f warpPointWithse3(const double* se3, double depth, cv::Mat K, cv::Point2f srcPt);

@ -377,8 +377,6 @@ void UnderStitch::run_test(FrameInfo para)
RtK rtk;
cv::Mat H2 = _GeoSolver->findHomography(para, rtk);
cv::Vec3d rvec;
cv::Rodrigues(rtk.R, rvec);
@ -538,7 +536,7 @@ SINT32 UnderStitch::Run(GD_VIDEO_FRAME_S frame, FrameInfo para)
para.craft.stPos.B, para.craft.stPos.L, para.craft.stPos.H, para.craft.stAtt.fYaw, para.craft.stAtt.fPitch, para.craft.stAtt.fRoll,
para.servoInfo.fServoAz, para.servoInfo.fServoPt,para.camInfo.nFocus,para.camInfo.fPixelSize);
//run_test(para);
run_test(para);
// 统计扫描方向
int dir = StatScanDir(para);
@ -697,15 +695,15 @@ SINT32 UnderStitch::Run(GD_VIDEO_FRAME_S frame, FrameInfo para)
if (_config.bUseBA && TriggerBA(para))
{
// 异步执行BA优化
_pThreadPool->commit(
[this]() {
OptAndOutCurrPan(_baWindowKey);
},
"BA_Optimization"
);
// // 异步执行BA优化
// _pThreadPool->commit(
// [this]() {
// OptAndOutCurrPan(_baWindowKey);
// },
// "BA_Optimization"
// );
OptAndOutCurrPan(_baWindowKey);
}
@ -1079,7 +1077,7 @@ SINT32 UnderStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
// 保持滑动窗口大小
const size_t WINDOW_SIZE = 10; // 滑动窗口大小
const size_t WINDOW_SIZE = 6; // 滑动窗口大小
if (_baWindowKey.size() > WINDOW_SIZE)
{
_baWindowKey.erase(_baWindowKey.begin());

@ -14,7 +14,7 @@
// 定义扫描模式,使用扫描专用的拼接地图策略
//#define SCAN_MODE
#define SCAN_MODE
// 关键帧间邻接信息

@ -2,5 +2,5 @@
#pragma once
#include <string>
std::string BUILD_TIME = "BUILD_TIME 2025_12_07-12.02.16";
std::string BUILD_TIME = "BUILD_TIME 2025_12_12-09.36.33";
std::string VERSION = "BUILD_VERSION 1.0.1";

@ -151,8 +151,8 @@ public:
else
{
UPanConfig config = {0};
config.bUseBA = 1;
config.bOutGoogleTile = 1;
config.bUseBA = 0;
config.bOutGoogleTile = 0;
config.bOutLocalPanGeoTIFF = 1;
pUnderStitch->SetConfig(config);
@ -179,14 +179,14 @@ public:
cv::imshow("",src);
cv::waitKey(1);
if (info.nFrmID % 100 == 0)
{
// if (info.nFrmID % 100 == 0)
// {
char filename[1024] = {0};
sprintf(filename, "%d_%02d_%02d_%02d_%04d.tif", aTelem.mSensorID, aTelem.mHour, aTelem.mMinute, aTelem.mSecond, aTelem.mMillSecond);
exportFrameGeoTIFF(src, info, filename);
// char filename[1024] = {0};
// sprintf(filename, "%d_%02d_%02d_%02d_%04d.tif", aTelem.mSensorID, aTelem.mHour, aTelem.mMinute, aTelem.mSecond, aTelem.mMillSecond);
// exportFrameGeoTIFF(src, info, filename);
}
// }
}

@ -6,7 +6,9 @@
int main(int, char**)
{
std::string file = std::string("F:/20251105140846_chn_10_25Hz.ts");
//std::string file = std::string("F:/20251105140846_chn_10_25Hz.ts");
std::string file = std::string("/media/wang/data/20251105140846_chn_10_25Hz.ts");
TsDecoder* dec = new TsDecoder(file);
dec->decoder();

@ -1,78 +0,0 @@
# Smoke test for the UStitcher Python bindings: preload the native library,
# initialize the stitcher with synthetic telemetry, then repeatedly query the
# analytic homography and warp a sample pixel into object space.
import os
import sys
import ctypes
from pathlib import Path
import numpy as np
# Project root: two directory levels above this script.
project_root = Path(__file__).parent.parent.parent
bin_dir = project_root / "Bin"
# Make the Bin directory importable.
if str(bin_dir) not in sys.path:
    sys.path.insert(0, str(bin_dir))
# Preload the shared library with RTLD_GLOBAL so the dynamic linker can
# resolve its symbols for the extension module.
lib_guide_stitch = bin_dir / "libGuideStitch.so"
if lib_guide_stitch.exists():
    try:
        ctypes.CDLL(str(lib_guide_stitch), mode=ctypes.RTLD_GLOBAL)
    except Exception as e:
        print(f"警告: 预加载libGuideStitch.so失败: {e}")
# Import the bound API.
from UStitcher import API_UnderStitch, FrameInfo, getHomography
import cv2
# Synthetic frame telemetry: position (B/L/H), camera focus and pixel pitch,
# servo azimuth/pitch, effective height and image size.
frame_info = FrameInfo()
frame_info.nFrmID = 1
frame_info.craft.stPos.B = 39
frame_info.craft.stPos.L = 120
frame_info.craft.stPos.H = 1000
frame_info.camInfo.nFocus = 40
frame_info.camInfo.fPixelSize = 12
frame_info.servoInfo.fServoAz = 90
frame_info.servoInfo.fServoPt = -45
frame_info.nEvHeight = 1200
frame_info.nWidth = 1280
frame_info.nHeight = 1024
stitcher = API_UnderStitch.Create()
# Initialize first (sets the stitching origin).
pan_info = stitcher.Init(frame_info)
print(f"初始化成功,全景图尺寸: {pan_info.m_pan_width} x {pan_info.m_pan_height}")
def warpPointWithH(H, pt):
    # Homogeneous multiply followed by the projective division.
    wp = H @ np.array([pt[0],pt[1],1]).T
    return wp / wp[2]
for i in range(100):
    # NOTE(review): the telemetry is constant across iterations, so H should
    # be identical every pass -- presumably a determinism check.
    frame_info.nFrmID = i
    frame_info.craft.stPos.B = 39
    frame_info.craft.stPos.L = 120
    frame_info.craft.stPos.H = 1000
    frame_info.camInfo.nFocus = 40
    frame_info.camInfo.fPixelSize = 12
    frame_info.servoInfo.fServoAz = 90
    frame_info.servoInfo.fServoPt = -45
    H = getHomography(frame_info)
    print(f"单应性矩阵 H:\n{H}")
    wp = warpPointWithH(H, np.array([100,111]))
    print(f"物方坐标:\n{wp}")
print("done")

@ -8,10 +8,19 @@ import numpy as np
from dataclasses import dataclass
from typing import List, Tuple
import os
sys.path.append("/media/wang/WORK/wangchongwu_gitea_2023/StitchVideo/Bin")
sys.path.append("../../Bin/Release")
# CUDA_BIN_DIR = r"C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v12.4\bin"
# CERES_DEP_DIR = r"D:\wangchongwu_gitea_2023\StitchVideo\3rdParty\ceres_2.2\bin"
# OPEN_CV_DEP_DIR = r"C:\Lib\opencv455\build\x64\vc14\bin"
# os.add_dll_directory(CUDA_BIN_DIR)
# os.add_dll_directory(CERES_DEP_DIR)
# os.add_dll_directory(OPEN_CV_DEP_DIR)
# os.add_dll_directory("D:/wangchongwu_gitea_2023/StitchVideo/Bin/Release")
#os.add_dll_directory("../../Bin/Release")
# 导入模块
from UStitcher import API_UnderStitch, FrameInfo, UPanInfo, UPanConfig
@ -411,7 +420,7 @@ def main():
srt_path_list = []
# 修改为你的数据路径
folder = "/media/wang/data/K2D_data/"
folder = "F:/K2D_data/"
video_path_list.append(folder + "DJI_20250418153043_0006_W.MP4")
srt_path_list.append(folder + "DJI_20250418153043_0006_W.srt")

@ -15,6 +15,10 @@ import time
from dataclasses import dataclass
from pathlib import Path
from typing import Iterable, List, Optional, Tuple
import pprint
pprint.pprint(sys.path)
import numpy as np
@ -23,17 +27,31 @@ import numpy as np
# Environment bootstrap (reuse logic from tests/python/1.py)
# -----------------------------------------------------------------------------
PROJECT_ROOT = Path(__file__).resolve().parents[2]
BIN_DIR = PROJECT_ROOT / "Bin"
# BIN_DIR = PROJECT_ROOT / "Bin/Release"
# if str(BIN_DIR) not in sys.path:
# sys.path.insert(0, str(BIN_DIR))
# LIB_GUIDE = BIN_DIR / "GuideStitch.dll"
# if LIB_GUIDE.exists():
# try:
# ctypes.CDLL(str(LIB_GUIDE), mode=ctypes.RTLD_GLOBAL)
# except OSError as exc:
# print(f"[WARN] Failed to preload {LIB_GUIDE.name}: {exc}")
import os
sys.path.append("D:/wangchongwu_gitea_2023/StitchVideo/Bin/Release")
CUDA_BIN_DIR = r"C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v12.4\bin"
CERES_DEP_DIR = r"D:\wangchongwu_gitea_2023\StitchVideo\3rdParty\ceres_2.2\bin"
OPEN_CV_DEP_DIR = r"C:\Lib\opencv455\build\x64\vc14\bin"
os.add_dll_directory(CUDA_BIN_DIR)
os.add_dll_directory(CERES_DEP_DIR)
os.add_dll_directory(OPEN_CV_DEP_DIR)
os.add_dll_directory("D:/wangchongwu_gitea_2023/StitchVideo/Bin/Release")
if str(BIN_DIR) not in sys.path:
sys.path.insert(0, str(BIN_DIR))
LIB_GUIDE = BIN_DIR / "libGuideStitch.so"
if LIB_GUIDE.exists():
try:
ctypes.CDLL(str(LIB_GUIDE), mode=ctypes.RTLD_GLOBAL)
except OSError as exc:
print(f"[WARN] Failed to preload {LIB_GUIDE.name}: {exc}")
from UStitcher import API_UnderStitch, FrameInfo, UPanConfig
import cv2

Loading…
Cancel
Save