#include "Arith_BATask.h"
#include "ceres/ceres.h"
#include <ceres/rotation.h>
#include "Arith_GeoSolver.h"
#include "math.h"
#include "Arith_Utils.h"
using namespace ceres;
#include "Logger.h"
#define STABLE_X1 320
#define STABLE_Y1 256
#define STABLE_X2 0
#define STABLE_Y2 0
#define STABLE_X3 1920
#define STABLE_Y3 1080
// Homography (H) projection residual: project both the left and right images into the panorama and minimize the reprojection error of corresponding points on the panorama.
// Problem: no scale constraint, and H loses orthogonality after optimization. Advantage: simple formulation.
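// Sketch of the residual computed in operator() below: with p a homogeneous pixel and
// proj(H, p) = (H * p).xy / (H * p).z,
//   residual[0..1] = proj(H_i, p_i) - proj(H_j, p_j)               (reprojection term)
//   residual[2..7] = proj(H_i, p_k) - proj(Hi0_, p_k), k = 1..3    (anchor term)
// where Hi0_ is the initial homography of frame i and p_k are the three STABLE_* anchors.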
struct HomographyResidual
{
HomographyResidual(const cv::KeyPoint& keypoint_i, const cv::KeyPoint& keypoint_j, const cv::Mat H1, const cv::Mat H2)
: keypoint_i_(keypoint_i), keypoint_j_(keypoint_j), Hi0_(H1), Hj0_(H2)
{
}
template <typename T>
bool operator()(const T* const h_i, const T* const h_j, T* residual) const
{
// Residual computation
T H_i[9] = { h_i[0], h_i[1], h_i[2],
h_i[3], h_i[4], h_i[5],
h_i[6], h_i[7], T(1.0) };
T H_j[9] = { h_j[0], h_j[1], h_j[2],
h_j[3], h_j[4], h_j[5],
h_j[6], h_j[7], T(1.0) };
T p_i[3] = { T(keypoint_i_.pt.x), T(keypoint_i_.pt.y), T(1.0) };
T p_j[3] = { T(keypoint_j_.pt.x), T(keypoint_j_.pt.y), T(1.0) };
T P_i[3] = { T(0), T(0), T(0) };
T P_j[3] = { T(0), T(0), T(0) };
for (int row = 0; row < 3; row++)
{
for (int col = 0; col < 3; col++)
{
P_i[row] += H_i[row * 3 + col] * p_i[col];
P_j[row] += H_j[row * 3 + col] * p_j[col];
}
}
P_i[0] /= P_i[2];
P_i[1] /= P_i[2];
P_j[0] /= P_j[2];
P_j[1] /= P_j[2];
// 1. Reprojection error of the matched point pair
residual[0] = P_i[0] - P_j[0];
residual[1] = P_i[1] - P_j[1];
// 2. Anchor-point (fixed-point) position constraint
cv::Point2f pS1 = warpPointWithH(Hi0_, cv::Point2f(STABLE_X1, STABLE_Y1));
cv::Point2f pS2 = warpPointWithH(Hi0_, cv::Point2f(STABLE_X2, STABLE_Y2));
cv::Point2f pS3 = warpPointWithH(Hi0_, cv::Point2f(STABLE_X3, STABLE_Y3));
T p_1[3] = { T(STABLE_X1), T(STABLE_Y1), T(1.0) };
T p_2[3] = { T(STABLE_X2), T(STABLE_Y2), T(1.0) };
T p_3[3] = { T(STABLE_X3), T(STABLE_Y3), T(1.0) };
T P_r1[3] = { T(0), T(0), T(0) };
T P_r2[3] = { T(0), T(0), T(0) };
T P_r3[3] = { T(0), T(0), T(0) };
for (int row = 0; row < 3; row++)
{
for (int col = 0; col < 3; col++)
{
P_r1[row] += H_i[row * 3 + col] * p_1[col];
P_r2[row] += H_i[row * 3 + col] * p_2[col];
P_r3[row] += H_i[row * 3 + col] * p_3[col];
}
}
P_r1[0] /= P_r1[2];
P_r1[1] /= P_r1[2];
P_r2[0] /= P_r2[2];
P_r2[1] /= P_r2[2];
P_r3[0] /= P_r3[2];
P_r3[1] /= P_r3[2];
// Constrain the projected anchor points to stay at their initial (pre-optimization) positions
residual[2] = (P_r1[0] - (T)pS1.x);
residual[3] = P_r1[1] - (T)pS1.y;
residual[4] = (P_r2[0] - (T)pS2.x);
residual[5] = P_r2[1] - (T)pS2.y;
residual[6] = (P_r3[0] - (T)pS3.x);
residual[7] = P_r3[1] - (T)pS3.y;
return true;
}
private:
const cv::KeyPoint keypoint_i_; // feature point in frame i
const cv::KeyPoint keypoint_j_; // feature point in frame j
const cv::Mat Hi0_;
const cv::Mat Hj0_;
};
// SE3 optimization
// SE3 residual: back-project the left-image point to the ground plane ("triangulate"), then project it into the right image and minimize the relative projection error.
// Problem: not a true triangulation, no per-point depth is optimized. Advantages: carries a scale constraint, standard approach, can optimize all 21 parameters.
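// Sketch of the projection chain in operator() below (left pixel -> ground plane -> right pixel),
// with se3_* laid out as [rx, ry, rz, tx, ty, tz] (angle-axis rotation + translation; tz unused here):
//   X            = diag(1, 1, 1/EvHeight) * R_i^{-1} * K^{-1} * p_i
//   (GeoX, GeoY) = (X.x / X.z, X.y / X.z) + (t_i.x, t_i.y) - (t_j.x, t_j.y)
//   p_j_pred     ~ K * R_j * (GeoX, GeoY, EvHeight)^T
//   residual     = p_j_pred - p_j_observed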
struct SE3Residual
{
SE3Residual(cv::KeyPoint keypoint_i, cv::KeyPoint keypoint_j,cv::Mat _K, double _fEvHeight)
: keypoint_i_(keypoint_i), keypoint_j_(keypoint_j),K(_K),EvHeight(_fEvHeight)
{
}
template <typename T>
bool operator()(const T* const se3_i, const T* const se3_j, T* residual) const
{
// Back-project the left-image point into object space
// T ptL[3] = {T(keypoint_i_.pt.x), T(keypoint_i_.pt.y)}; // original pixel
// Depth (ground-plane height) matrix
Eigen::Matrix<T, 3, 3> M_het_inv;
M_het_inv << T(1), T(0), T(0),
T(0), T(1), T(0),
T(0), T(0), T(1.0 / EvHeight);
// Intrinsics K (K_1) and its inverse (K_inv)
Eigen::Matrix<T, 3, 3> K_1;
Eigen::Matrix<T, 3, 3> K_inv;
double fx = K.at<double>(0, 0);
double fy = K.at<double>(1, 1);
double cx = K.at<double>(0, 2);
double cy = K.at<double>(1, 2);
K_1 << T(fx), T(0), T(cx),
T(0), T(fy), T(cy),
T(0), T(0), T(1);
K_inv << T(1) / T(fx), T(0), -T(cx) / T(fx),
T(0), T(1) / T(fy), -T(cy) / T(fy),
T(0), T(0), T(1);
Eigen::Matrix<T, 3, 1> ptL(T(keypoint_i_.pt.x), T(keypoint_i_.pt.y), T(1.0));
// The projection could also be done with this helper:
//cv::Point2f warpPL = warpPointWithse3((const double*)se3_i, (const double)EvHeight, K, ptL);
Eigen::Matrix<T, 3, 1> pt = K_inv * ptL;
Eigen::Matrix<T, 3, 1> LeftRota;
T rvec_i[3] = { -se3_i[0], -se3_i[1], -se3_i[2] };
ceres::AngleAxisRotatePoint(rvec_i, pt.data(), LeftRota.data());
Eigen::Matrix<T, 3, 1> result = M_het_inv * LeftRota;
// Recover scale
T GeoX = result[0] / result[2];
T GeoY = result[1] / result[2];
// Translate to obtain geographic coordinates
GeoX += se3_i[3];
GeoY += se3_i[4];
// Forward projection into the right view
GeoX += -se3_j[3];
GeoY += -se3_j[4];
T rvec_j[3] = { se3_j[0], se3_j[1], se3_j[2] };
Eigen::Matrix<T, 3, 1> pGeoR(GeoX, GeoY, T(EvHeight));
Eigen::Matrix<T, 3, 1> RightRota;
ceres::AngleAxisRotatePoint(rvec_j, pGeoR.data(), RightRota.data());
// Project into the right image plane
Eigen::Matrix<T, 3, 1> pUVR = K_1* RightRota;
// Normalize by depth
T uR = pUVR[0] / pUVR[2];
T vR = pUVR[1] / pUVR[2];
// Compute the reprojection error
residual[0] = uR - T(keypoint_j_.pt.x);
residual[1] = vR - T(keypoint_j_.pt.y);
return true;
}
private:
const cv::KeyPoint keypoint_i_; // feature point in frame i
const cv::KeyPoint keypoint_j_; // feature point in frame j
cv::Mat_<double> K; // intrinsics
double EvHeight; // height above the ground plane, used for the planar triangulation
};
BA_Task::BA_Task(FileCache<FrameCache>* cache)
{
_cache = cache;
_FeaMatcher = new FeatureMatcher(DetectorType::SIFT, MatcherType::FLANN);
//google::InitGoogleLogging("ceres");
}
BA_Task::~BA_Task()
{
}
void BA_Task::InitTask()
{
_imgVec.clear();
_frameInd.clear();
_origMatrix.clear();
_currMatrix.clear();
_polygon.clear();
_FeaPtVec.clear();
_FeaDespVec.clear();
_paraVec.clear();
_origRtK.clear();
_currRtK.clear();
_origse3.clear();
_currse3.clear();
}
void BA_Task::OptFrame(vector<KeyType> frameInd,cv::Mat H_map)
{
// Initialize the task containers
InitTask();
// Read frame information
readFrameInfo(frameInd);
// Compute adjacency (overlap) relations
CalMatchMat();
// Start BA
// Convert cv::Mat into the raw-pointer arrays Ceres expects
std::vector<double*> h_list(_currMatrix.size());
for (int i = 0; i < _currMatrix.size(); i++)
{
h_list[i] = (double*)_currMatrix[i].data;
}
// Pack the 6-DoF (se3) parameters
std::vector<double*> se3_list(_currse3.size());
for (int i = 0; i < _currse3.size(); i++)
{
se3_list[i] = (double*)_currse3[i].data;
}
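// Intrinsics K as a 3x3 view over the tail of the 21-element RtK vector
// (assuming an R(9) | t(3) | K(9) layout); note this header shares memory with _origRtK[0].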
constK = cv::Mat(3, 3, CV_64FC1, ((double*)_origRtK[0].data) + 12);
// Create the Ceres problems
ceres::Problem problemH;
ceres::Problem problemSE3;
// Add residual blocks
int nParaCnt = 0; // number of parameter groups
for (int i = 0; i < _MatchMat.cols; i++)
{
for (int j = i + 1; j < _MatchMat.rows; j++)
{
// Only attempt feature matching when the IOU is high enough
if (_IOUMat.at<float>(i, j) < 0.3)
{
continue;
}
std::vector<cv::DMatch> matches;
//_FeaMatcher->matchFeatures(_FeaDespVec[i],_FeaDespVec[j],matches);
_FeaMatcher->matchFeatures_WithH(_FeaPtVec[i], _FeaDespVec[i], _FeaPtVec[j], _FeaDespVec[j], _origMatrix[i], _origMatrix[j], matches);
// An image pair is considered valid only if it has more than N matched feature points
if (matches.size() > 50)
{
_MatchMat.at<int>(i, j) = matches.size();
_MatchMat.at<int>(j, i) = matches.size();
}
else
{
continue;
}
#ifdef SHOW_MATCH
// Draw the matching result
drawMatch(i, j, matches, H_map);
#endif
// Add a residual block for each matched point pair
for (int m = 0; m < matches.size(); m++)
{
auto mc = matches[m];
// NOTE: this is not right, we should look up the matched points!! TODO: finish after the holiday
cv::KeyPoint keypoint_i = _FeaPtVec[i][mc.queryIdx];
cv::KeyPoint keypoint_j = _FeaPtVec[j][mc.trainIdx];
//ceres::LossFunction* loss_function = new ceres::HuberLoss(1.0);
ceres::LossFunction* scale_loss = new ceres::ScaledLoss(
new ceres::HuberLoss(1.0),
1.0, // loss scale (reduce to down-weight the scale constraint)
ceres::TAKE_OWNERSHIP);
cv::Mat Hi0 = _origMatrix[i];
cv::Mat Hj0 = _origMatrix[j];
#ifdef OPT_H
ceres::CostFunction* cost_function =
new ceres::AutoDiffCostFunction<HomographyResidual, 8, 8, 8>(
new HomographyResidual(keypoint_i, keypoint_j, Hi0, Hj0));
problemH.AddResidualBlock(cost_function, scale_loss, h_list[i], h_list[j]);
#else
ceres::CostFunction* cost_function =
new ceres::AutoDiffCostFunction<SE3Residual,2,6,6>(
new SE3Residual(keypoint_i, keypoint_j, constK, constHeight));
problemSE3.AddResidualBlock(cost_function, scale_loss, se3_list[i],se3_list[j]);
#endif
}
nParaCnt += matches.size();
}
}
// Configure the solver
ceres::Solver::Options options;
options.max_num_iterations = 10; // maximum number of iterations
options.function_tolerance = 1e-5; // function-value tolerance
options.gradient_tolerance = 1e-5; // gradient tolerance
options.parameter_tolerance = 1e-5; // parameter tolerance
options.minimizer_progress_to_stdout = true;
//options.linear_solver_type = ceres::SPARSE_NORMAL_CHOLESKY; // use a sparse linear solver
options.num_threads = 12; // multi-threaded solve
ceres::Solver::Summary summary;
// Solve
#ifdef OPT_H
ceres::Solve(options, &problemH, &summary);
#else
ceres::Solve(options, &problemSE3, &summary);
#endif
//for (int i = 0; i < _origse3.size(); i++)
//{
// std::cout << "------------" << std::endl;
// std::cout << _origse3[i] << std::endl;
// std::cout << _currse3[i] << std::endl;
// std::cout << "------------" << std::endl;
//}
// Print the optimization summary
//std::cout << summary.BriefReport() << std::endl;
#ifndef OPT_H
// Write the optimized se3 back into H
updateSe3();
#endif
//for (int i = 0; i < _origMatrix.size(); i++)
//{
// std::cout << "------------" << std::endl;
// std::cout << _origMatrix[i] << std::endl;
// std::cout << _currMatrix[i] << std::endl;
// std::cout << "------------" << std::endl;
//}
//
// Write the optimized H to the file cache
writeFrameInfo();
}
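// Minimal call-site sketch (hypothetical names; the cache, keys and H_map come from the
// surrounding mosaicing pipeline):
//   BA_Task ba(&frameCache);
//   ba.OptFrame({key0, key1, key2}, H_map); // refines the cached H of these frames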
bool BA_Task::updateCacheH(KeyType Key, cv::Mat H)
{
auto _t_frame_cache = std::make_shared<FrameCache>();
if (_cache->get(Key, _t_frame_cache))
{
// Update H
memcpy(_t_frame_cache->H, H.data, sizeof(double) * 9);
// Store back
_cache->set(Key, _t_frame_cache);
return true;
}
return false;
}
int BA_Task::readFrameInfo(vector<KeyType> frameInd)
{
auto _t_frame_cache = std::make_shared<FrameCache>();
_cache->get(frameInd[0], _t_frame_cache);
constHeight = _t_frame_cache->_para.nEvHeight;
for (size_t i = 0; i < frameInd.size(); i++)
{
KeyType key = frameInd[i];
if (_cache->get(key, _t_frame_cache))
{
// Record the key
_frameInd.push_back(key);
// Beware of the cv::Mat shallow-copy pitfall!!!
// Feature points
vector<cv::KeyPoint> keypoints(_t_frame_cache->_pt, _t_frame_cache->_pt + _t_frame_cache->ptNum);
_FeaPtVec.push_back(keypoints);
// Descriptors
cv::Mat descriptors(_t_frame_cache->ptNum, FEA_DES_SIZE, CV_8UC1, _t_frame_cache->_desp);
_FeaDespVec.push_back(descriptors.clone());
// Original exterior (pose) parameters
_paraVec.push_back(_t_frame_cache->_para);
// Initial H
cv::Mat H = cv::Mat(3, 3, CV_64FC1, _t_frame_cache->H);
_origMatrix.push_back(H.clone());
_currMatrix.push_back(H.clone());
// Initial SE3
_origRtK.push_back(cv::Mat(21, 1, CV_64F, _t_frame_cache->RtK).clone());
_currRtK.push_back(cv::Mat(21, 1, CV_64F, _t_frame_cache->RtK).clone());
_origse3.push_back(cv::Mat(6, 1, CV_64F, _t_frame_cache->se3).clone());
_currse3.push_back(cv::Mat(6, 1, CV_64F, _t_frame_cache->se3).clone());
// Cache the bounding polygon (frame footprint under H)
_polygon.push_back(warpRectWithH(H, cv::Size(_t_frame_cache->_frame_info.u32Width, _t_frame_cache->_frame_info.u32Height)));
#ifdef SHOW_MATCH
// Read the image
cv::Mat img = getRGBAMatFromGDFrame(_t_frame_cache->_frame_info, _t_frame_cache->_data);
_imgVec.push_back(img.clone());
#endif
}
}
return 0;
}
void BA_Task::updateSe3()
{
for (size_t i = 0; i < _origse3.size(); i++)
{
// Rebuild H from the optimized parameters; _currse3 holds the values Ceres updated, _origse3 the initial ones
_currMatrix[i] = Transe3ToH((double*)_currse3[i].data, constK, constHeight);
}
}
int BA_Task::writeFrameInfo()
{
for (size_t i = 0; i < _currMatrix.size(); i++)
{
auto Key = _frameInd[i];
// Update the cache
updateCacheH(Key, _currMatrix[i]);
}
return 0;
}
SINT32 BA_Task::CalMatchMat()
{
_IOUMat = cv::Mat::zeros(_polygon.size(), _polygon.size(), CV_32FC1);
_MatchMat = cv::Mat::zeros(_polygon.size(), _polygon.size(), CV_32SC1);
// Compute the IOU matrix first
for (size_t i = 0; i < _polygon.size(); i++)
{
vector<cv::Point2f> poly_i = _polygon[i];
_IOUMat.at<float>(i, i) = 1; // IOU of a polygon with itself (the i == j case)
for (size_t j = i + 1; j < _polygon.size(); j++)
{
vector<cv::Point2f> poly_j = _polygon[j];
float fiou = computeQuadrilateralIOU(poly_i, poly_j);
_IOUMat.at<float>(i, j) = fiou;
_IOUMat.at<float>(j, i) = fiou; // symmetric matrix
}
}
return 0;
}
void BA_Task::drawMatch(int i, int j, std::vector<cv::DMatch> matches, cv::Mat H_map)
{
//cv::Mat image(1000, 1000, CV_8UC3, cv::Scalar(0, 0, 0));
//cv::Mat imagetmp(1000, 1000, CV_8UC3, cv::Scalar(0, 0, 0));
//vector<vector<cv::Point2f>> tmpPoly;
//tmpPoly.push_back(_polygon[i]);
//tmpPoly.push_back(_polygon[j]);
//cv::warpPerspective(_imgVec[i], imagetmp, H_map * _origMatrix[i], imagetmp.size());
//// Build a mask (an all-white image marking the valid region)
//cv::Mat mask1 = cv::Mat::ones(_imgVec[i].size(), CV_8UC1) * 255;
//cv::Mat warped_mask1;
//cv::warpPerspective(mask1, warped_mask1, H_map * _origMatrix[i], image.size());
//imagetmp.copyTo(image, warped_mask1);
//cv::warpPerspective(_imgVec[j], imagetmp, H_map * _origMatrix[j], imagetmp.size());
//cv::Mat mask2 = cv::Mat::ones(_imgVec[i].size(), CV_8UC1) * 255;
//cv::Mat warped_mask2;
//cv::warpPerspective(mask2, warped_mask2, H_map * _origMatrix[j], image.size());
//imagetmp.copyTo(image, warped_mask2);
//drawPolygons(image, tmpPoly);
//// Show the drawing result
//cv::imshow("Polygons", image);
//cv::waitKey(1);
// Visualize the matching result
cv::Mat img_matches;
cv::drawMatches(
_imgVec[i], _FeaPtVec[i], // first image and its keypoints
_imgVec[j], _FeaPtVec[j], // second image and its keypoints
matches, // matches
img_matches, // output image
cv::Scalar::all(-1), // match line color (random by default)
cv::Scalar::all(-1), // single-keypoint color (default)
std::vector<char>(), // mask (optional, to filter matches)
cv::DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS // do not draw unmatched keypoints
);
cv::resize(img_matches, img_matches, cv::Size(1280, 512));
// Show the matching result
cv::imshow("Feature Matches", img_matches);
cv::waitKey(0);
}
// Function: draw polygons
void drawPolygons(cv::Mat& image, const std::vector<std::vector<cv::Point2f>>& polygons)
{
// Color list
std::vector<cv::Scalar> colors = {
cv::Scalar(255, 0, 0), // blue
cv::Scalar(0, 255, 0), // green
cv::Scalar(0, 0, 255), // red
cv::Scalar(255, 255, 0), // cyan
cv::Scalar(255, 0, 255), // magenta
cv::Scalar(0, 255, 255) // yellow
};
// Draw each polygon
for (size_t i = 0; i < polygons.size(); ++i) {
const auto& polygon = polygons[i];
cv::Scalar color = colors[i % colors.size()]; // cycle through the colors
// Convert the polygon points to integer coordinates
std::vector<cv::Point> intPolygon;
for (const auto& pt : polygon) {
intPolygon.push_back(cv::Point(static_cast<int>(pt.x), static_cast<int>(pt.y)));
}
// Draw the polygon
cv::polylines(image, intPolygon, true, color, 2);
}
}