wangchongwu 6 months ago
parent 2d2a6d1e02
commit 46203ae469

@ -3,12 +3,6 @@ project(stitch VERSION 0.1.0 LANGUAGES C CXX)
SET(ArithStitchDir stitch)
IF(WIN32)
set(OpenCV_DIR "C:/opencv/build/x64/vc14/lib")
set(CMAKE_TOOLCHAIN_FILE "C:/Users/75929/vcpkg/scripts/buildsystems/vcpkg.cmake")
ELSE(WIN32)
set(OpenCV_DIR "/home/wcw/opencv-3.4.16/install/share/OpenCV")
ENDIF(WIN32)
find_package(OpenCV REQUIRED)

@ -94,16 +94,17 @@ int main(int, char**)
cv::Mat mat_pan;//全景显示
//return 0;
FILE* file = fopen("../20241219152917_4.xraw","rb");
//FILE* file = fopen("/home/wang/data62/729dataset/ir_1280_1024_para40_y16/5.xraw","rb");
for (size_t i = 0; i < 900; i++)
{
fread(pFrameIR,2,IMAGE_WIDTH_IR*(IMAGE_HEIGHT_IR + PARA_IR_LINE),file);
S729paras_IR Paras_IR = { 0 };
memcpy(&Paras_IR, (unsigned char*)(pFrameIR + IMAGE_WIDTH_IR*IMAGE_HEIGHT_IR), sizeof(S729paras_IR));
FrameInfo info = {0};
info.nFrmID = i;
@ -132,13 +133,6 @@ int main(int, char**)
cv::Mat mat_src(IMAGE_HEIGHT_IR, IMAGE_WIDTH_IR, CV_8UC1, pImageIR);
//auto detector_ = cv::ORB::create();
//std::vector<cv::KeyPoint> keypoints;
//cv::Mat descriptors;
//detector_->detectAndCompute(mat_src, cv::noArray(), keypoints, descriptors);
frame.enPixelFormat = GD_PIXEL_FORMAT_GRAY_Y8;
frame.u32Width = IMAGE_WIDTH_IR;
frame.u32Height = IMAGE_HEIGHT_IR;
@ -158,23 +152,39 @@ int main(int, char**)
}
else
{
//cout << info.craft.stPos.B << " " <<info.craft.stPos.L << " " << info.craft.stPos.H << " "
//<< info.craft.stAtt.fYaw << " " << info.craft.stAtt.fPitch << " " << info.craft.stAtt.fRoll<< " "
//<< info.servoInfo.fServoAz << " " << info.servoInfo.fServoPt
//<< endl;
if(i % 20 != 0)
{
continue;
}
cout << info.craft.stPos.B << " " <<info.craft.stPos.L << " " << info.craft.stPos.H << " "
<< info.craft.stAtt.fYaw << " " << info.craft.stAtt.fPitch << " " << info.craft.stAtt.fRoll<< " "
<< info.servoInfo.fServoAz << " " << info.servoInfo.fServoPt
<< endl;
// 基于地理系的拼接
//stitcher->GeoStitch(frame,info);
stitcher->GeoStitch(frame,info);
// 基于地理+BA拼接
auto nNum = stitcher->BAStitch(frame,info);
//cout << nNum << endl;
//imshow("src", mat_src);
//waitKey(0);
}
//imshow("src", mat_src);
imshow("pan", mat_pan);
waitKey(1);
// imshow("pan", mat_pan);
// waitKey(1);
}
waitKey(0);
// 读入所有帧执行一次全局BA
stitcher->BAOpt();
//waitKey(0);
}

@ -28,8 +28,10 @@ public:
// 几何校正快拼
virtual BYTE8 GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
// 基于特征点拼接,全局光束平差方法
virtual BYTE8 BAStitch(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
// 基于特征点拼接,全局光束平差方法,分为更新和优化调整两个接口
virtual SINT32 BAStitch(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
virtual SINT32 BAOpt() = 0;
// 获取全景图
virtual GD_VIDEO_FRAME_S ExportPanAddr() = 0;

@ -10,9 +10,12 @@ struct CostFunctor {
}
};
// Constructor: wires the BA task to the shared geo-transform engine.
// NOTE(review): diff-view residue — the first signature line below is the
// removed default constructor; the live overload takes GeoStitcher*.
BA_Task::BA_Task()
BA_Task::BA_Task(GeoStitcher * pGeoTrans)
{
// Non-owning pointer to the exterior-orientation / projection engine.
_GeoStitcher = pGeoTrans;
// ORB features matched with a brute-force matcher.
_FeaMatcher = new FeatureMatcher(DetectorType::ORB, MatcherType::BF);
// Pre-reserve room for roughly 100 cached frames.
_imgVec.reserve(100);
}
@ -22,17 +25,44 @@ BA_Task::~BA_Task()
// Add one frame to the bundle-adjustment cache.
// Caches the image, its exterior parameters (para), the initial geo-derived
// homography, the extracted features, and the frame's footprint polygon in
// pan coordinates. All caches are index-aligned: element i of each vector
// refers to the same frame.
// Returns the number of cached frames, or -1 on an unsupported pixel format.
SINT32 BA_Task::addFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
{
    // Only 8-bit grayscale is supported. Reject up front so the parallel
    // caches (_imgVec/_paraVec/_origMatrix/_FeaPtVec/_polygon/...) can never
    // fall out of sync — the original code skipped only _imgVec, leaving the
    // other vectors one element longer and corrupting later index lookups.
    if (img.enPixelFormat != GD_PIXEL_FORMAT_E::GD_PIXEL_FORMAT_GRAY_Y8)
    {
        return -1;
    }
    // Wrap the caller-owned buffer (no copy yet).
    cv::Mat frame = Mat(img.u32Height, img.u32Width, CV_8UC1, (void*)img.u64VirAddr[0]);
    // Cache a deep copy — `frame` only aliases memory we do not own.
    _imgVec.emplace_back(frame.clone());
    // Cache the exterior orientation.
    _paraVec.push_back(para);
    // Cache the initial homography computed from the geometric model.
    Proj t_Proj = _GeoStitcher->AnlayseTform(para);
    auto H = _GeoStitcher->findHomography(t_Proj, _panPara);
    _origMatrix.push_back(H);
    _currMatrix.push_back(H);
    // Extract and cache features for later pairwise matching.
    std::vector<cv::KeyPoint> keypoints;
    cv::Mat descriptors;
    _FeaMatcher->extractFeatures(frame, keypoints, descriptors);
    _FeaPtVec.push_back(keypoints);
    _FeaDespVec.push_back(descriptors);
    // Cache the frame footprint (image rectangle warped into pan space).
    _polygon.push_back(warpRectWithH(H, cv::Size(img.u32Width, img.u32Height)));
    // Explicit cast: size() is size_t, the API returns SINT32.
    return static_cast<SINT32>(_imgVec.size());
}
// Record the panorama configuration; used by addFrame() when deriving each
// frame's initial homography and footprint polygon.
void BA_Task::setPanPara(PanInfo info)
{
_panPara = info;
}
@ -40,28 +70,151 @@ SINT32 BA_Task::addFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
// Ceres "hello world" smoke test (auto-diff solve of a 1-D cost from x=0.5).
// NOTE(review): diff-view residue — the uncommented body below is the removed
// version; the current revision keeps the identical example fully commented
// out, leaving Test() as a no-op. Confirm against the repository.
void BA_Task::Test()
{
google::InitGoogleLogging("ceres");
// The variable to solve for with its initial value. It will be
// mutated in place by the solver.
double x = 0.5;
const double initial_x = x;
// Build the problem.
ceres::Problem problem;
// Set up the only cost function (also known as residual). This uses
// auto-differentiation to obtain the derivative (jacobian).
ceres::CostFunction* cost_function =
new ceres::AutoDiffCostFunction<CostFunctor, 1, 1>(new CostFunctor);
problem.AddResidualBlock(cost_function, nullptr, &x);
// Run the solver!
ceres::Solver::Options options;
options.minimizer_progress_to_stdout = true;
ceres::Solver::Summary summary;
ceres::Solve(options, &problem, &summary);
std::cout << summary.BriefReport() << "\n";
std::cout << "x : " << initial_x << " -> " << x << "\n";
// google::InitGoogleLogging("ceres");
// // The variable to solve for with its initial value. It will be
// // mutated in place by the solver.
// double x = 0.5;
// const double initial_x = x;
// // Build the problem.
// ceres::Problem problem;
// // Set up the only cost function (also known as residual). This uses
// // auto-differentiation to obtain the derivative (jacobian).
// ceres::CostFunction* cost_function =
// new ceres::AutoDiffCostFunction<CostFunctor, 1, 1>(new CostFunctor);
// problem.AddResidualBlock(cost_function, nullptr, &x);
// // Run the solver!
// ceres::Solver::Options options;
// options.minimizer_progress_to_stdout = true;
// ceres::Solver::Summary summary;
// ceres::Solve(options, &problem, &summary);
// std::cout << summary.BriefReport() << "\n";
// std::cout << "x : " << initial_x << " -> " << x << "\n";
}
// Run the bundle-adjustment pipeline over all cached frames.
// Currently only builds the pairwise match matrix; the optimization step
// itself is not implemented yet.
void BA_Task::optimizeBA()
{
// Build the match matrix: pairs with footprint IOU >= 0.3 get feature matching.
CalMatchMat(0.3);
}
// Build the pairwise overlap/match structures for the cached frames.
// Step 1: fill the symmetric IOU matrix from the cached footprint polygons.
// Step 2: run feature matching only for pairs whose IOU exceeds fiou_thre.
// fiou_thre: minimum footprint overlap for a pair to be worth matching.
// Returns 0. (Matches are currently only visualized, not stored.)
SINT32 BA_Task::CalMatchMat(float fiou_thre)
{
    const size_t nFrames = _polygon.size();
    _IOUMat = cv::Mat::zeros(nFrames, nFrames, CV_32FC1);
    // The IOU matrix is symmetric with a unit diagonal, so each unordered
    // pair is computed exactly once (the original looped over the full
    // i x j square, doubling the polygon-intersection work).
    for (size_t i = 0; i < nFrames; i++)
    {
        _IOUMat.at<float>(i, i) = 1;
        for (size_t j = i + 1; j < nFrames; j++)
        {
            float fiou = computeQuadrilateralIOU(_polygon[i], _polygon[j]);
            _IOUMat.at<float>(i, j) = fiou;
            _IOUMat.at<float>(j, i) = fiou; // symmetric matrix
        }
    }
    // Feature matching is likewise symmetric: process each pair once (j > i)
    // instead of matching (i,j) and (j,i) separately as before.
    for (size_t i = 0; i < nFrames; i++)
    {
        for (size_t j = i + 1; j < nFrames; j++)
        {
            if (_IOUMat.at<float>(i, j) < fiou_thre)
            {
                continue;
            }
            // Debug view: draw the two overlapping footprints.
            cv::Mat image(1000, 1000, CV_8UC3, cv::Scalar(255, 255, 255));
            vector<vector<cv::Point2f>> tmpPoly;
            tmpPoly.push_back(_polygon[i]);
            tmpPoly.push_back(_polygon[j]);
            drawPolygons(image, tmpPoly);
            cv::imshow("Polygons", image);
            cv::waitKey(1);
            // Match descriptors of the overlapping pair.
            std::vector<cv::DMatch> matches;
            _FeaMatcher->matchFeatures(_FeaDespVec[i], _FeaDespVec[j], matches);
            // Debug view: visualize the raw matches side by side.
            cv::Mat img_matches;
            cv::drawMatches(
                _imgVec[i], _FeaPtVec[i],  // first image and its keypoints
                _imgVec[j], _FeaPtVec[j],  // second image and its keypoints
                matches,                   // match result
                img_matches,               // output canvas
                cv::Scalar::all(-1),       // match line color (random)
                cv::Scalar::all(-1),       // single point color (default)
                std::vector<char>(),       // optional mask
                cv::DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
            cv::resize(img_matches, img_matches, cv::Size(1280, 512));
            cv::imshow("Feature Matches", img_matches);
            // NOTE(review): blocks until a key press per pair — debug only.
            cv::waitKey(0);
        }
    }
    return 0;
}
// 函数:绘制多边形
// Draw every polygon onto `image` as a closed outline, cycling through a
// fixed six-color palette (blue, green, red, cyan, magenta, yellow).
void drawPolygons(cv::Mat& image, const std::vector<std::vector<cv::Point2f>>& polygons)
{
    // Palette is reused modulo its size when there are more polygons.
    static const std::vector<cv::Scalar> kPalette = {
        cv::Scalar(255, 0, 0),   // blue
        cv::Scalar(0, 255, 0),   // green
        cv::Scalar(0, 0, 255),   // red
        cv::Scalar(255, 255, 0), // cyan
        cv::Scalar(255, 0, 255), // magenta
        cv::Scalar(0, 255, 255)  // yellow
    };
    size_t idx = 0;
    for (const auto& poly : polygons)
    {
        // polylines expects integer vertices; truncate each float coordinate.
        std::vector<cv::Point> vertices;
        vertices.reserve(poly.size());
        for (const auto& pt : poly)
        {
            vertices.emplace_back(static_cast<int>(pt.x), static_cast<int>(pt.y));
        }
        // Closed outline, 2 px thick.
        cv::polylines(image, vertices, true, kPalette[idx % kPalette.size()], 2);
        ++idx;
    }
}

@ -11,7 +11,10 @@
*****************************************************************************************/
#pragma once
#include "StitchStruct.h"
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include "Arith_GeoStitcher.h"
#include "Arith_FeaMatch.h"
using cv::Mat_;
@ -21,24 +24,47 @@ using cv::Mat_;
// Bundle-adjustment task: caches frames, exterior parameters, initial
// homographies and features, then refines the homographies globally.
// NOTE(review): diff-view residue below — stacked old/new constructor
// declarations, `_similarityMat` vs `_MatchMat`, and a duplicated
// `_FeaPtVec` member (Point2d vs KeyPoint). Confirm against the repository.
class BA_Task
{
public:
BA_Task();
BA_Task(GeoStitcher* pGeoTrans);
~BA_Task();
// Add a frame to the optimization set; returns the cached-frame count.
SINT32 addFrame(GD_VIDEO_FRAME_S img, FrameInfo para);
// Set the panorama configuration (must precede addFrame).
void setPanPara(PanInfo info);
void Test();
// Run the BA optimization over all cached frames.
void optimizeBA();
private:
SINT32 CalMatchMat(float fiou_thre);// build the match matrix (IOU-constrained)
SINT32 GuideMatch(int i,int j);// matching guided by exterior parameters
private:
Mat_<int> _similarityMat;// match-point adjacency table
GeoStitcher* _GeoStitcher;// exterior-orientation / projection engine
FeatureMatcher* _FeaMatcher;// feature matcher
PanInfo _panPara;// panorama configuration
private:
Mat_<int> _MatchMat;// match-point adjacency table
Mat_<float> _IOUMat;// pairwise footprint-IOU table
vector<cv::Mat> _origMatrix;// initial H matrices
vector<cv::Mat> _currMatrix;// current (refined) H matrices
// TODO: consider a file-backed cache for very large sequences
private:
vector<cv::Mat> _imgVec;// image cache
vector<cv::Point2d> _FeaPtVec;// feature-point cache
vector<vector<cv::KeyPoint>> _FeaPtVec;// feature-point cache
vector<cv::Mat> _FeaDespVec;// feature descriptors
vector<FrameInfo> _paraVec;
vector<vector<cv::Point2f>> _polygon;// per-frame bounding quadrilateral
};
void drawPolygons(cv::Mat& image, const std::vector<std::vector<cv::Point2f>>& polygons);

@ -57,7 +57,7 @@ void FeatureMatcher::initDetector()
#endif
break;
case ORB:
detector_ = cv::ORB::create();
detector_ = cv::ORB::create(1000);
break;
default:
throw std::invalid_argument("Unsupported feature detector type");

@ -26,7 +26,6 @@ class FeatureMatcher
{
public:
// 构造函数
FeatureMatcher(DetectorType detectorType, MatcherType matcherType);

@ -28,14 +28,14 @@ PanInfo GeoStitcher::InitMap(FrameInfo info)
PanInfo panPara = {0};
panPara.m_pan_width = 1000;//全景宽
panPara.m_pan_height = 1000;//全景高
panPara.scale = 0.5;//比例尺,1m = ?pix
panPara.scale = 0.3;//比例尺,1m = ?pix
// 直接无平移解算
auto cur = Trans_Geo2pan(ct_geo, panPara);
// 计算平移到全景图固定点的平移量,从此处开始拼接
int planX = 200;
int planY = 100;
int planX = 500;
int planY = 500;
panPara.map_shiftX = planX - (cur.x);//平移X
panPara.map_shiftY = planY - (cur.y);//平移Y
@ -212,6 +212,35 @@ Proj GeoStitcher::AnlayseTform(FrameInfo info)
return projection;
}
// Build the homography that maps image pixels into pan coordinates by
// back-projecting four reference points and fitting H to the resulting
// correspondences (verified equivalent to the per-pixel back-projection).
cv::Mat GeoStitcher::findHomography(Proj proj, PanInfo pan)
{
    // Four reference points in the image plane.
    const std::vector<cv::Point2f> srcPoints = {
        cv::Point2f(0, 0),       // top-left
        cv::Point2f(1000, 0),    // top-right
        cv::Point2f(1000, 1000), // bottom-right
        cv::Point2f(0, 1000)     // bottom-left
    };
    // Their counterparts in the panorama via the geometric back-projection.
    std::vector<cv::Point2f> dstPoints;
    dstPoints.reserve(srcPoints.size());
    for (const auto& pt : srcPoints)
    {
        dstPoints.push_back(back_project(pt, proj, pan));
    }
    // Fit H from the four point pairs.
    return cv::findHomography(srcPoints, dstPoints);
}
// 计算多边形的面积
double polygonArea(const vector<cv::Point2f>& points)
@ -245,4 +274,25 @@ double computeQuadrilateralIOU(const vector<cv::Point2f>& quad1, const vector<cv
// 计算 IOU
if (unionArea == 0) return 0.0; // 避免除零错误
return intersectionArea / unionArea;
}
// Apply homography H to a single point: homogeneous multiply, then divide
// by the projective component.
cv::Point2f warpPointWithH(cv::Mat H, cv::Point2f srcPt)
{
    const Mat src = (Mat_<double>(3, 1) << srcPt.x, srcPt.y, 1);
    const Mat dst = H * src;
    const double w = dst.at<double>(2, 0);
    return cv::Point2f(dst.at<double>(0, 0) / w, dst.at<double>(1, 0) / w);
}
// Warp the four corners of the rectangle (0,0)-(size.width,size.height)
// through H; corners are emitted clockwise starting at the top-left.
vector<cv::Point2f> warpRectWithH(cv::Mat H,cv::Size size)
{
    const cv::Point2f corners[4] = {
        cv::Point2f(0, 0),
        cv::Point2f(size.width, 0),
        cv::Point2f(size.width, size.height),
        cv::Point2f(0, size.height)
    };
    vector<cv::Point2f> warped;
    warped.reserve(4);
    for (const auto& corner : corners)
    {
        warped.push_back(warpPointWithH(H, corner));
    }
    return warped;
}

@ -1,6 +1,6 @@
/*********版权所有C2025武汉高德红外股份有限公司***************************************
* Arith_ExtrinsicStitcher.h
* Arith_GeoStitcher.h
*
* ,,
*
@ -51,6 +51,9 @@ public:
// 计算当前帧像方-地理坐标系R t反投影关系
Proj AnlayseTform(FrameInfo info);
// 建立H矩阵,经验证与上述反投影过程完全等效,该过程应该可以通过解析直接获得,此处不再深入研究
cv::Mat findHomography(Proj proj,PanInfo pan);
public:
@ -85,3 +88,9 @@ double polygonArea(const vector<cv::Point2f>& points);
// 计算两个四边形的 IOU
double computeQuadrilateralIOU(const vector<cv::Point2f>& quad1, const vector<cv::Point2f>& quad2);
// H映射点
cv::Point2f warpPointWithH(cv::Mat H,cv::Point2f srcPt);
// H映射多边形按照顺时针
vector<cv::Point2f> warpRectWithH(cv::Mat H,cv::Size size);

@ -23,9 +23,9 @@ void API_VideoStitch::Destroy(API_VideoStitch * obj)
// Constructor: creates the geo engine and the BA task that shares it.
// NOTE(review): raw `new` with no visible matching delete in this view —
// confirm the destructor releases _GeoStitcher/_FeaMatcher/_BATask.
VideoStitch::VideoStitch(SINT32 nWidth, SINT32 nHeight)
{
_GeoStitcher = new GeoStitcher();
_FeaMatcher = new FeatureMatcher(DetectorType::ORB, MatcherType::FLANN);
// NOTE(review): memset over GD_VIDEO_FRAME_S; the header in this diff
// declares `cv::Mat _pan`, which must never be memset (UB) — this line
// looks like residue from when _pan was a GD_VIDEO_FRAME_S. Confirm.
memset(&_pan, 0, sizeof(GD_VIDEO_FRAME_S));
_BATask = new BA_Task(_GeoStitcher);
}
VideoStitch::~VideoStitch()
@ -43,6 +43,8 @@ PanInfo VideoStitch::Init(FrameInfo info)
{
_panPara = _GeoStitcher->InitMap(info);
_BATask->setPanPara(_panPara);
_pan = cv::Mat::zeros(_panPara.m_pan_height, _panPara.m_pan_width, CV_8UC1);
return _panPara;
@ -59,6 +61,12 @@ BYTE8 VideoStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
cv::Point2f leftBottom_map = _GeoStitcher->back_project(cv::Point2f(0,img.u32Height), t_Proj, _panPara);
auto H = _GeoStitcher->findHomography(t_Proj,_panPara);
leftTop_map = warpPointWithH(H,cv::Point2f(0,0));
rightTop_map = warpPointWithH(H,cv::Point2f(img.u32Width,0));
rightBottom_map = warpPointWithH(H,cv::Point2f(img.u32Width,img.u32Height));
leftBottom_map = warpPointWithH(H,cv::Point2f(0,img.u32Height));
// 计算全景图的范围
int right = max(max(max(leftTop_map.x, leftBottom_map.x), rightTop_map.x), rightBottom_map.x);
int left = min(min(min(leftTop_map.x, leftBottom_map.x), rightTop_map.x), rightBottom_map.x);
@ -83,6 +91,7 @@ BYTE8 VideoStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
//转换为pixel坐标
cv::Point2f p_img = _GeoStitcher->project(Point2f(j, i), t_Proj, _panPara);
p_img = warpPointWithH(H.inv(),Point2f(j, i));
if (p_img.x >= 0 && p_img.y >= 0 && p_img.x < img.u32Width && p_img.y < img.u32Height)
{
_pan.data[i * _pan.rows + j] =
@ -95,10 +104,19 @@ BYTE8 VideoStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
return 0;
}
// Queue one frame into the BA task.
// NOTE(review): diff-view residue — the BYTE8 signature and `return BYTE8()`
// are the removed version; the live overload returns SINT32.
BYTE8 VideoStitch::BAStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
SINT32 VideoStitch::BAStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
{
// Frame count from the BA cache; currently unused by the caller.
SINT32 nFrameCnt = _BATask->addFrame(img,para);
return BYTE8();
return 0;
}
// Trigger the global bundle-adjustment pass over all frames queued via
// BAStitch(). Always returns 0.
SINT32 VideoStitch::BAOpt()
{
_BATask->optimizeBA();
return 0;
}
GD_VIDEO_FRAME_S VideoStitch::ExportPanAddr()

@ -3,6 +3,7 @@
#include "Arith_GeoStitcher.h"
#include "Arith_FeaMatch.h"
#include "StitchStruct.h"
#include "Arith_BATask.h"
class VideoStitch:public API_VideoStitch
{
@ -18,7 +19,8 @@ public:
BYTE8 GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para);
BYTE8 BAStitch(GD_VIDEO_FRAME_S img, FrameInfo para);
SINT32 BAStitch(GD_VIDEO_FRAME_S img, FrameInfo para);
SINT32 BAOpt();
GD_VIDEO_FRAME_S ExportPanAddr();
private:
@ -29,10 +31,9 @@ private:
GeoStitcher* _GeoStitcher;//外参计算
FeatureMatcher* _FeaMatcher;//特征匹配
BA_Task* _BATask;//BA
PanInfo _panPara;//全景图配置
cv::Mat _pan;

Loading…
Cancel
Save