Completed the BA (bundle adjustment) framework; revisions still pending.

main
wangchongwu 6 months ago
parent 46203ae469
commit 50929e6945

@@ -4,6 +4,12 @@ project(stitch VERSION 0.1.0 LANGUAGES C CXX)
SET(ArithStitchDir stitch)
IF(WIN32)
set(OpenCV_DIR "D:/opencv410_vc17")
set(CMAKE_TOOLCHAIN_FILE "D:/wangchongwu_gitea_2023/vcpkg-2025.01.13/scripts/buildsystems/vcpkg.cmake")
ELSE(WIN32)
set(OpenCV_DIR "/home/wcw/opencv-3.4.16/install/share/OpenCV")
ENDIF(WIN32)
find_package(OpenCV REQUIRED)
@@ -11,6 +17,7 @@ include_directories(${OpenCV_INCLUDE_DIRS} public_include ${ArithStitchDir}/src)
#
SET(LIB_STITCH GuideStitch)

@@ -455,7 +455,7 @@ void PointMatcher::drawMatches(int imgIndex1, int imgIndex2, vector<Point2d> poi
CvFont font;
double hScale = 1;
double vScale = 1;
cvInitFont(&font, CV_FONT_HERSHEY_PLAIN, hScale, vScale, 0, 1); // set up the label font
for (i = 0; i < pointSet1.size(); i ++)
{
Point2d tempPt1 = pointSet1[i];

@@ -10,13 +10,24 @@ if(MSVC)
add_compile_options(/wd4996)
endif()
find_package(OpenMP REQUIRED)
IF(WIN32)
set(OpenCV_DIR "D:/opencv410_vc17")
set(CMAKE_TOOLCHAIN_FILE "D:/wangchongwu_gitea_2023/vcpkg-2025.01.13/scripts/buildsystems/vcpkg.cmake")
ELSE(WIN32)
set(OpenCV_DIR "/home/wcw/opencv-3.4.16/install/share/OpenCV")
ENDIF(WIN32)
find_package(Ceres REQUIRED)
include_directories(${CERES_INCLUDE_DIRS})
find_package(OpenMP REQUIRED)
include_directories(${CERES_INCLUDE_DIRS})
#
SET(ArithTrkPubInc ${CMAKE_SOURCE_DIR}/public_include)
@@ -48,7 +59,8 @@ target_include_directories(${LIB_STITCH} PUBLIC
target_link_libraries(${LIB_STITCH}
OpenMP::OpenMP_CXX
${OpenCV_LIBS}
${CERES_LIBRARIES})
${CERES_LIBRARIES}
)
# # gcc0
if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")

@@ -1,13 +1,52 @@
#include "Arith_BATask.h"
#include "ceres/ceres.h"
using namespace ceres;
struct CostFunctor {
// Residual functor for the homography bundle adjustment
struct HomographyResidual
{
HomographyResidual(const cv::KeyPoint& keypoint_i, const cv::KeyPoint& keypoint_j)
: keypoint_i_(keypoint_i), keypoint_j_(keypoint_j)
{
}
template <typename T>
bool operator()(const T* const x, T* residual) const {
residual[0] = 10.0 - x[0];
bool operator()(const T* const h_i, const T* const h_j, T* residual) const {
// Residual computation
T H_i[9] = { h_i[0], h_i[1], h_i[2],
h_i[3], h_i[4], h_i[5],
h_i[6], h_i[7], T(1.0) };
T H_j[9] = { h_j[0], h_j[1], h_j[2],
h_j[3], h_j[4], h_j[5],
h_j[6], h_j[7], T(1.0) };
T p_i[3] = { T(keypoint_i_.pt.x), T(keypoint_i_.pt.y), T(1.0) };
T p_j[3] = { T(keypoint_j_.pt.x), T(keypoint_j_.pt.y), T(1.0) };
T P_i[3] = { T(0), T(0), T(0) };
T P_j[3] = { T(0), T(0), T(0) };
for (int row = 0; row < 3; row++) {
for (int col = 0; col < 3; col++) {
P_i[row] += H_i[row * 3 + col] * p_i[col];
P_j[row] += H_j[row * 3 + col] * p_j[col];
}
}
P_i[0] /= P_i[2];
P_i[1] /= P_i[2];
P_j[0] /= P_j[2];
P_j[1] /= P_j[2];
residual[0] = P_i[0] - P_j[0];
residual[1] = P_i[1] - P_j[1];
return true;
}
private:
const cv::KeyPoint keypoint_i_; // feature point in frame i
const cv::KeyPoint keypoint_j_; // feature point in frame j
};
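Editor's note (not part of the commit): the residual above measures the disagreement between the mosaic-plane projections of a matched keypoint pair,

r_{ij} = \pi(H_i \, p_i) - \pi(H_j \, p_j), \qquad \pi([x,\, y,\, w]^\top) = [x/w,\, y/w]^\top,

with h_{33} pinned to 1 in both homographies, which is why each parameter block exposes only 8 free entries.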
@@ -15,7 +54,7 @@ struct CostFunctor {
BA_Task::BA_Task(GeoStitcher * pGeoTrans)
{
_GeoStitcher = pGeoTrans;
_FeaMatcher = new FeatureMatcher(DetectorType::ORB, MatcherType::BF);
_FeaMatcher = new FeatureMatcher(DetectorType::SIFT, MatcherType::FLANN);
_imgVec.reserve(100);
}
@@ -52,9 +91,6 @@ SINT32 BA_Task::addFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
// Cache the bounding polygon of the frame footprint
_polygon.push_back(warpRectWithH(H,cv::Size(img.u32Width,img.u32Height)));
return _imgVec.size();
}
@@ -64,10 +100,6 @@ void BA_Task::setPanPara(PanInfo info)
}
void BA_Task::Test()
{
// google::InitGoogleLogging("ceres");
@@ -100,21 +132,124 @@ void BA_Task::Test()
void BA_Task::optimizeBA()
{
//for (int i = 0; i < _currMatrix.size(); i++)
//{
// std::cout << "------------" << std::endl;
// std::cout << _origMatrix[i] << std::endl;
// std::cout << _currMatrix[i] << std::endl;
// std::cout << "------------" << std::endl;
//}
// Compute the match matrix
CalMatchMat(0.3);
google::InitGoogleLogging("ceres");
// Convert each cv::Mat into the raw double array Ceres expects
std::vector<double*> h_list(_origMatrix.size());
for (int i = 0; i < _origMatrix.size(); i++)
{
h_list[i] = (double*)_origMatrix[i].data;
}
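// Note: h_list[i] aliases _origMatrix[i].data, so Ceres updates _origMatrix in place during Solve()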
// Build the Ceres problem
ceres::Problem problem;
// Add residual blocks
for (int i = 0; i < _MatchMat.cols; i++)
{
for (int j = i + 1; j < _MatchMat.rows; j++)
{
int M = _MatchMat.at<int>(i, j); // number of matched point pairs
if (M > 0)
{
// Add a residual block for each matched point pair
for (int m = 0; m < M; m++)
{
// NOTE: this is wrong - the keypoints must be paired through the actual matches, not by raw index m. TODO: fix after the holiday (see the sketch after this loop)
cv::KeyPoint keypoint_i = _FeaPtVec[i][m];
cv::KeyPoint keypoint_j = _FeaPtVec[j][m];
ceres::CostFunction* cost_function =
new ceres::AutoDiffCostFunction<HomographyResidual, 2, 8, 8>(
new HomographyResidual(keypoint_i, keypoint_j));
problem.AddResidualBlock(cost_function, nullptr, h_list[i], h_list[j]);
}
}
}
}
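Editor's sketch of the fix flagged in the TODO above, assuming the matches found in CalMatchMat() were cached in a hypothetical member such as std::map<std::pair<int,int>, std::vector<cv::DMatch>> _MatchPairs (not part of this commit):

    // Pair keypoints through the stored matches instead of by raw index m
    for (const cv::DMatch& mc : _MatchPairs[{i, j}])
    {
        const cv::KeyPoint& keypoint_i = _FeaPtVec[i][mc.queryIdx];
        const cv::KeyPoint& keypoint_j = _FeaPtVec[j][mc.trainIdx];
        ceres::CostFunction* cost_function =
            new ceres::AutoDiffCostFunction<HomographyResidual, 2, 8, 8>(
                new HomographyResidual(keypoint_i, keypoint_j));
        // A robust loss (e.g. Huber) keeps outlier matches from dominating the solve
        problem.AddResidualBlock(cost_function, new ceres::HuberLoss(1.0), h_list[i], h_list[j]);
    }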
// Configure the solver
ceres::Solver::Options options;
options.minimizer_progress_to_stdout = true;
//options.linear_solver_type = ceres::SPARSE_NORMAL_CHOLESKY; // use a sparse linear solver
options.num_threads = 1; // single-threaded for now
ceres::Solver::Summary summary;
// Solve
ceres::Solve(options, &problem, &summary);
// Copy the optimized parameters back into cv::Mat
for (int i = 0; i < _currMatrix.size(); i++)
{
std::cout << "------------" << std::endl;
std::cout << _origMatrix[i] << std::endl;
std::cout << _currMatrix[i] << std::endl;
std::cout << "------------" << std::endl;
_currMatrix[i].at<double>(0, 0) = h_list[i][0];
_currMatrix[i].at<double>(0, 1) = h_list[i][1];
_currMatrix[i].at<double>(0, 2) = h_list[i][2];
_currMatrix[i].at<double>(1, 0) = h_list[i][3];
_currMatrix[i].at<double>(1, 1) = h_list[i][4];
_currMatrix[i].at<double>(1, 2) = h_list[i][5];
_currMatrix[i].at<double>(2, 0) = h_list[i][6];
_currMatrix[i].at<double>(2, 1) = h_list[i][7];
_currMatrix[i].at<double>(2, 2) = 1.0; // keep h_33 fixed at 1
}
// Print the optimization summary
std::cout << summary.BriefReport() << std::endl;
remap();
}
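Editor's sketch (not part of this commit): with every homography left free, the problem has a gauge ambiguity (a common warp applied to all frames leaves every residual unchanged), so the mosaic can drift as a whole. Assuming frame 0 is the reference and appears in at least one residual block, it can be pinned before calling ceres::Solve:

    // Keep the reference frame's homography fixed during optimization
    problem.SetParameterBlockConstant(h_list[0]);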
void BA_Task::remap()
{
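// Composite every frame onto a fixed 1000x1000 canvas using the current (optimized) homographies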
cv::Mat image(1000, 1000, CV_8UC3, cv::Scalar(0, 0, 0));
for (size_t i = 0; i < _imgVec.size(); i++)
{
cv::Mat imagetmp(1000, 1000, CV_8UC3, cv::Scalar(0, 0, 0));
cv::warpPerspective(_imgVec[i], imagetmp, _currMatrix[i], imagetmp.size());
cv::Mat mask = cv::Mat::ones(_imgVec[i].size(), CV_8UC1) * 255;
cv::Mat warped_mask;
cv::warpPerspective(mask, warped_mask, _currMatrix[i], image.size());
imagetmp.copyTo(image, warped_mask);
}
imshow("remap", image);
cv::waitKey(0);
}
//#define SHOW_MATCH
SINT32 BA_Task::CalMatchMat(float fiou_thre)
{
_IOUMat = cv::Mat::zeros(_polygon.size(),_polygon.size(),CV_32FC1);
_MatchMat = cv::Mat::zeros(_polygon.size(), _polygon.size(), CV_32SC1);
// First compute the IOU matrix
for (size_t i = 0; i < _polygon.size(); i++)
{
vector<cv::Point2f> poly_i = _polygon[i];
for (size_t j = 0; j < _polygon.size(); j++)
for (size_t j = i + 1; j < _polygon.size(); j++)
{
if (i == j)
{
@@ -133,32 +268,64 @@ SINT32 BA_Task::CalMatchMat(float fiou_thre)
// Feature matching is only attempted when the IOU exceeds the threshold (0.3 here)
for (size_t i = 0; i < _polygon.size(); i++)
{
for (size_t j = 0; j < _polygon.size(); j++)
for (size_t j = i+1; j < _polygon.size(); j++)
{
if (i == j)
{
continue;
}
// Match feature points only when the IOU condition is met
if (_IOUMat.at<float>(i,j) < fiou_thre)
{
continue;
}
cv::Mat image(1000, 1000, CV_8UC3, cv::Scalar(255, 255, 255));
#ifdef SHOW_MATCH
cv::Mat image(1000, 1000, CV_8UC3, cv::Scalar(0, 0, 0));
cv::Mat imagetmp(1000, 1000, CV_8UC3, cv::Scalar(0, 0, 0));
vector<vector<cv::Point2f>> tmpPoly;
tmpPoly.push_back(_polygon[i]);
tmpPoly.push_back(_polygon[j]);
cv::warpPerspective(_imgVec[i], imagetmp, _origMatrix[i], imagetmp.size());
// Build a mask (all white marks the valid region)
cv::Mat mask1 = cv::Mat::ones(_imgVec[i].size(), CV_8UC1) * 255;
cv::Mat warped_mask1;
cv::warpPerspective(mask1, warped_mask1, _origMatrix[i], image.size());
imagetmp.copyTo(image, warped_mask1);
cv::warpPerspective(_imgVec[j], imagetmp, _origMatrix[j], imagetmp.size());
cv::Mat mask2 = cv::Mat::ones(_imgVec[j].size(), CV_8UC1) * 255;
cv::Mat warped_mask2;
cv::warpPerspective(mask2, warped_mask2, _origMatrix[j], image.size());
imagetmp.copyTo(image, warped_mask2);
drawPolygons(image, tmpPoly);
// Show the drawing
cv::imshow("Polygons", image);
cv::waitKey(1);
#endif
std::vector<cv::DMatch> matches;
_FeaMatcher->matchFeatures(_FeaDespVec[i],_FeaDespVec[j],matches);
//_FeaMatcher->matchFeatures(_FeaDespVec[i],_FeaDespVec[j],matches);
_FeaMatcher->matchFeatures_WithH(_FeaPtVec[i],_FeaDespVec[i], _FeaPtVec[j], _FeaDespVec[j], _origMatrix[i], _origMatrix[j], matches);
// Only treat the image pair as valid when there are enough feature matches (more than 50 here)
if (matches.size() > 50)
{
_MatchMat.at<int>(i, j) = matches.size();
_MatchMat.at<int>(j, i) = matches.size();
}
else
{
continue;
}
#ifdef SHOW_MATCH
// Visualize the matches
cv::Mat img_matches;
cv::drawMatches(
@@ -172,12 +339,12 @@ SINT32 BA_Task::CalMatchMat(float fiou_thre)
cv::DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS // do not draw unmatched keypoints
);
cv::resize(img_matches,img_matches,cv::Size(1280,512));
cv::resize(img_matches, img_matches, cv::Size(1280, 512));
// Show the matching result
cv::imshow("Feature Matches", img_matches);
cv::waitKey(0);
#endif
}
}

@@ -38,11 +38,12 @@ public:
// BA optimization
void optimizeBA();
// Reproject all frames using the H matrices
void remap();
private:
SINT32 CalMatchMat(float fiou_thre);// compute the match matrix (constrained by IOU)
SINT32 GuideMatch(int i,int j);// matching guided by exterior orientation
private:
GeoStitcher* _GeoStitcher;// exterior-orientation computation
FeatureMatcher* _FeaMatcher;// feature matching
@@ -52,8 +53,10 @@ private:
Mat_<int> _MatchMat;// adjacency table of match counts between frames
Mat_<float> _IOUMat;// adjacency table of footprint IOUs
vector<cv::Mat> _origMatrix;// initial H matrices
vector<cv::Mat> _currMatrix;// current H matrices
vector<cv::Mat_<double>> _origMatrix;// initial H matrices
vector<cv::Mat_<double>> _currMatrix;// current H matrices
// for very large mosaics, consider a file-backed cache - todo
private:

@@ -1,4 +1,7 @@
#include "Arith_FeaMatch.h"
#include "Arith_GeoStitcher.h"
using std::vector;
using cv::KeyPoint;
// Constructor
FeatureMatcher::FeatureMatcher(DetectorType detectorType, MatcherType matcherType)
@@ -27,6 +30,49 @@ void FeatureMatcher::matchFeatures(cv::Mat& descriptors1, cv::Mat& descriptors2,
}
}
void FeatureMatcher::matchFeatures_WithH(vector<KeyPoint> keypoints1, cv::Mat& descriptors1, vector<KeyPoint> keypoints2, cv::Mat& descriptors2, cv::Mat H1, cv::Mat H2, std::vector<cv::DMatch>& matches)
{
std::vector<cv::DMatch> _matches;
if (matcherType_ == FLANN)
{
flannMatcher_->match(descriptors1, descriptors2, _matches);
}
else
{
bfMatcher_->match(descriptors1, descriptors2, _matches);
}
// Find the smallest descriptor distance among the raw matches
double minDist = 99999;
for (size_t i = 0; i < _matches.size(); i++)
{
double dist = _matches[i].distance;
if (dist < minDist)
{
minDist = dist;
}
}
// Keep only matches whose distance is within 10x the best one
double fittedThreshold = minDist * 10;
for (size_t i = 0; i < _matches.size(); i++)
{
cv::DMatch mc = _matches[i];
if (mc.distance > fittedThreshold)
{
continue;
}
// Geometric gate: both keypoints, projected by their initial H, must land close together
auto warp_pt1 = warpPointWithH(H1, keypoints1[mc.queryIdx].pt);
auto warp_pt2 = warpPointWithH(H2, keypoints2[mc.trainIdx].pt);
if (fabs(warp_pt1.x - warp_pt2.x) + fabs(warp_pt1.y - warp_pt2.y) < 15)
{
matches.push_back(mc);
}
}
}
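Editor's sketch (not part of this commit): a Lowe-style ratio test is a common alternative to the min-distance * 10 pre-filter used above; the helper name and the 0.75 ratio here are assumptions:

    #include <opencv2/features2d.hpp>

    // Hypothetical helper: keep a match only if its best distance is clearly
    // better than the second-best distance (Lowe ratio test).
    static void ratioTestFilter(const cv::Mat& desc1, const cv::Mat& desc2,
                                std::vector<cv::DMatch>& good, float ratio = 0.75f)
    {
        cv::FlannBasedMatcher matcher;
        std::vector<std::vector<cv::DMatch>> knn;
        matcher.knnMatch(desc1, desc2, knn, 2); // two nearest neighbours per query descriptor
        for (const auto& k : knn)
        {
            if (k.size() == 2 && k[0].distance < ratio * k[1].distance)
                good.push_back(k[0]);
        }
    }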
// Compute the homography matrix
cv::Mat FeatureMatcher::computeHomography(std::vector<cv::KeyPoint>& keypoints1, std::vector<cv::KeyPoint>& keypoints2,
std::vector<cv::DMatch>& matches, double ransacReprojThreshold)
@@ -46,7 +92,7 @@ void FeatureMatcher::initDetector()
switch (detectorType_)
{
case SIFT:
detector_ = cv::SIFT::create(100);
detector_ = cv::SIFT::create(500);
break;
case SURF:
//detector_ = cv::xfeatures2d::SURF::create();
@@ -57,7 +103,7 @@ void FeatureMatcher::initDetector()
#endif
break;
case ORB:
detector_ = cv::ORB::create(1000);
detector_ = cv::ORB::create(500);
break;
default:
throw std::invalid_argument("Unsupported feature detector type");

@@ -35,6 +35,12 @@ public:
// Match feature points
void matchFeatures(cv::Mat& descriptors1, cv::Mat& descriptors2, std::vector<cv::DMatch>& matches);
// Matching supervised by initial H matrices
void matchFeatures_WithH(std::vector<cv::KeyPoint> keypoints1, cv::Mat& descriptors1, std::vector<cv::KeyPoint> keypoints2, cv::Mat& descriptors2,
cv::Mat H1, cv::Mat H2, std::vector<cv::DMatch>& matches);
// Compute the homography matrix
cv::Mat computeHomography(std::vector<cv::KeyPoint>& keypoints1, std::vector<cv::KeyPoint>& keypoints2,
std::vector<cv::DMatch>& matches, double ransacReprojThreshold = 3.0);
