diff --git a/main.cpp b/main.cpp
index 5ed4b65..d49c544 100644
--- a/main.cpp
+++ b/main.cpp
@@ -114,7 +114,7 @@ int main(int, char**)
     //FILE* file = fopen("/media/wang/WORK/wangchongwu_gitea_2023/20241219152917_4.xraw","rb");
     FILE* file = fopen("D:/wangchongwu_gitea_2023/20241219152917_4.xraw", "rb");
     //FILE* file = fopen("/home/wang/data62/729dataset/ir_1280_1024_para40_y16/5.xraw","rb");
-    GaussianRandom gr(0.0, 1, 0.2);
+    GaussianRandom gr(0.0, 1, 0.0);
 
     for (size_t i = 0; i < 600; i++)
     {
@@ -190,15 +190,15 @@ int main(int, char**)
         //stitcher->GeoStitch(frame,info);
 
         // Geographic + BA based stitching
-        //auto nNum = stitcher->BAStitch(frame,info);
+        auto nNum = stitcher->BAStitch(frame,info);
 
         // test interface
         //if (info.nFrmID == 30 || info.nFrmID == 40)
         {
-            auto a = stitcher->ReceiveFrame(frame,info);
+            //auto a = stitcher->ReceiveFrame(frame,info);
         }
 
-        stitcher->ProcFrame();
+        //stitcher->ProcFrame();
 
         //cout << nNum << endl;
 
@@ -215,7 +215,7 @@ int main(int, char**)
 
     // After all frames have been read in, run one global BA
-    //stitcher->BAOpt();
+    stitcher->BAOpt();
 
     //waitKey(0);
 
diff --git a/stitch/src/API_VideoStitch.h b/stitch/src/API_VideoStitch.h
index 2dedac0..ce517ec 100644
--- a/stitch/src/API_VideoStitch.h
+++ b/stitch/src/API_VideoStitch.h
@@ -28,6 +28,8 @@ public:
 
     // Fast stitching based on geometric rectification
     virtual BYTE8 GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
+
+    // Feature-point based stitching with global bundle adjustment, split into an update interface and an optimization interface
    virtual SINT32 BAStitch(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
    virtual SINT32 BAOpt() = 0;
 
diff --git a/stitch/src/Arith_BATask.cpp b/stitch/src/Arith_BATask.cpp
index bc8cd56..f606907 100644
--- a/stitch/src/Arith_BATask.cpp
+++ b/stitch/src/Arith_BATask.cpp
@@ -96,7 +96,7 @@ SINT32 BA_Task::addFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
 
     // Cache the initial H
     Proj t_Proj = _GeoStitcher->AnlayseTform(para);
-    auto H = _GeoStitcher->findHomography(t_Proj,_panPara);
+    auto H = _GeoStitcher->findHomography(t_Proj);
     _origMatrix.push_back(H.clone());
     _currMatrix.push_back(H.clone());
 
@@ -126,9 +126,10 @@ void BA_Task::Test()
 }
 
 //#define SHOW_MATCH
-void BA_Task::optimizeBA()
+void BA_Task::optimizeBA(cv::Mat H_pan)
 {
-    //remap(_origMatrix, "_origMatrix");
+
+    remap(_origMatrix, H_pan, "_origMatrix");
 
     // Compute the match matrix
     CalMatchMat(0.3);
@@ -192,7 +193,7 @@ void BA_Task::optimizeBA()
             _FeaMatcher->matchFeatures_WithH(_FeaPtVec[i], _FeaDespVec[i], _FeaPtVec[j], _FeaDespVec[j], _origMatrix[i], _origMatrix[j], matches);
 
             // An image pair is considered valid only if it has more than N feature matches
-            if (matches.size() > 20)
+            if (matches.size() > 50)
             {
                 _MatchMat.at<int>(i, j) = matches.size();
                 _MatchMat.at<int>(j, i) = matches.size();
@@ -249,7 +250,7 @@ void BA_Task::optimizeBA()
 
     // Configure the solver
     ceres::Solver::Options options;
-    options.max_num_iterations = 6; // maximum number of iterations
+    options.max_num_iterations = 3; // maximum number of iterations
     options.function_tolerance = 1e-8; // stricter function-value tolerance
     options.gradient_tolerance = 1e-10; // stricter gradient tolerance
     options.parameter_tolerance = 1e-10; // stricter parameter tolerance
@@ -274,24 +275,23 @@ void BA_Task::optimizeBA()
 
     std::cout << summary.BriefReport() << std::endl;
 
-    //remap(_currMatrix, "_currMatrix");
+    remap(_currMatrix, H_pan, "_currMatrix");
 
     cv::waitKey(0);
 }
 
-void BA_Task::remap(vector<cv::Mat_<double>> H_Vec, std::string winname)
+void BA_Task::remap(vector<cv::Mat_<double>> H_Vec, cv::Mat_<double> H_pan, std::string winname)
 {
     cv::Mat image(1500, 1500, CV_8UC3, cv::Scalar(0, 0, 0));
 
     for (size_t i = 0; i < _imgVec.size(); i++)
     {
         cv::Mat imagetmp(1500, 1500, CV_8UC3, cv::Scalar(0, 0, 0));
-        cv::warpPerspective(_imgVec[i], imagetmp, H_Vec[i], imagetmp.size());
-
+        cv::warpPerspective(_imgVec[i], imagetmp, H_pan * H_Vec[i], imagetmp.size());
 
         cv::Mat mask = cv::Mat::ones(_imgVec[i].size(), CV_8UC1) * 255;
         cv::Mat warped_mask;
-        cv::warpPerspective(mask, warped_mask, H_Vec[i], image.size());
+        cv::warpPerspective(mask, warped_mask, H_pan * H_Vec[i], image.size());
         imagetmp.copyTo(image, warped_mask);
     }
 
diff --git a/stitch/src/Arith_BATask.h b/stitch/src/Arith_BATask.h
index 18ecce7..9145a87 100644
--- a/stitch/src/Arith_BATask.h
+++ b/stitch/src/Arith_BATask.h
@@ -36,10 +36,10 @@ public:
     void Test();
 
     // BA optimization
-    void optimizeBA();
+    void optimizeBA(cv::Mat H_pan);
 
-    // Reproject all frames using the H matrices
-    void remap(vector<cv::Mat_<double>> H_Vec, std::string winname);
+    // Reproject all frames using the geographic-frame H matrices and the panorama H
+    void remap(vector<cv::Mat_<double>> H_Vec, cv::Mat_<double> H_pan, std::string winname);
 
 private:
     SINT32 CalMatchMat(float fiou_thre); // compute the match matrix (with an IOU constraint)
 
diff --git a/stitch/src/Arith_FeaMatch.cpp b/stitch/src/Arith_FeaMatch.cpp
index 471d6ed..b084c06 100644
--- a/stitch/src/Arith_FeaMatch.cpp
+++ b/stitch/src/Arith_FeaMatch.cpp
@@ -92,7 +92,7 @@ void FeatureMatcher::initDetector()
     switch (detectorType_)
     {
     case SIFT:
-        detector_ = cv::SIFT::create(500);
+        detector_ = cv::SIFT::create(FEA_NUM_MAX);
         break;
     case SURF:
         //detector_ = cv::xfeatures2d::SURF::create();
@@ -103,7 +103,7 @@
 #endif
         break;
     case ORB:
-        detector_ = cv::ORB::create(500);
+        detector_ = cv::ORB::create(FEA_NUM_MAX);
         break;
     default:
         throw std::invalid_argument("Unsupported feature detector type");
 
diff --git a/stitch/src/Arith_GeoStitcher.cpp b/stitch/src/Arith_GeoStitcher.cpp
index 8c4aee3..341f71e 100644
--- a/stitch/src/Arith_GeoStitcher.cpp
+++ b/stitch/src/Arith_GeoStitcher.cpp
@@ -241,6 +241,33 @@ cv::Mat GeoStitcher::findHomography(Proj proj, PanInfo pan)
     return H;
 }
 
+cv::Mat GeoStitcher::findHomography(Proj proj)
+{
+    std::vector<cv::Point2f> srcPoints;
+    srcPoints.push_back(cv::Point2f(0, 0));
+    srcPoints.push_back(cv::Point2f(1000, 0));
+    srcPoints.push_back(cv::Point2f(1000, 1000));
+    srcPoints.push_back(cv::Point2f(0, 1000));
+
+    // Corresponding points, from image space to the panorama
+    cv::Point2f leftTop_map = Trans_uv2Geo(srcPoints[0], proj.tf_p2g);
+    cv::Point2f rightTop_map = Trans_uv2Geo(srcPoints[1], proj.tf_p2g);
+    cv::Point2f rightBottom_map = Trans_uv2Geo(srcPoints[2], proj.tf_p2g);
+    cv::Point2f leftBottom_map = Trans_uv2Geo(srcPoints[3], proj.tf_p2g);
+
+    // The four corner coordinates of the target (panorama) image
+    std::vector<cv::Point2f> dstPoints;
+    dstPoints.push_back(leftTop_map);     // top-left
+    dstPoints.push_back(rightTop_map);    // top-right
+    dstPoints.push_back(rightBottom_map); // bottom-right
+    dstPoints.push_back(leftBottom_map);  // bottom-left
+
+    // Compute the homography matrix H
+    cv::Mat H = cv::findHomography(srcPoints, dstPoints);
+
+    return H;
+}
+
 
 // Compute the area of a polygon
 double polygonArea(const vector<cv::Point2f>& points)
 
diff --git a/stitch/src/Arith_GeoStitcher.h b/stitch/src/Arith_GeoStitcher.h
index 7a8ff30..964da87 100644
--- a/stitch/src/Arith_GeoStitcher.h
+++ b/stitch/src/Arith_GeoStitcher.h
@@ -48,12 +48,16 @@ public:
     // Back-projection: from the frame to the panorama
     cv::Point2f back_project(cv::Point2f pos_frame, Proj m_Proj, PanInfo pan);
 
+    // Compute the image-to-geographic R,t of the current frame (the back-projection relationship)
     Proj AnlayseTform(FrameInfo info);
 
     // Build the H matrix; verified to be fully equivalent to the back-projection above. It could likely be obtained analytically; not investigated further here
     cv::Mat findHomography(Proj proj,PanInfo pan);
 
+    // Build the H matrix between the panorama and the normalized geographic frame
+    cv::Mat findHomography(Proj proj);
+
 
 public:
 
diff --git a/stitch/src/Arith_VideoStitch.cpp b/stitch/src/Arith_VideoStitch.cpp
index 7cdd3de..52db3ec 100644
--- a/stitch/src/Arith_VideoStitch.cpp
+++ b/stitch/src/Arith_VideoStitch.cpp
@@ -23,7 +23,6 @@ void API_VideoStitch::Destroy(API_VideoStitch * obj)
 
 VideoStitch::VideoStitch(SINT32 nWidth, SINT32 nHeight)
 {
     _GeoStitcher = new GeoStitcher();
-    memset(&_pan, 0, sizeof(GD_VIDEO_FRAME_S));
 
     _BATask = new BA_Task(_GeoStitcher);
 
@@ -52,58 +51,85 @@ PanInfo VideoStitch::Init(FrameInfo info)
 
     return _panPara;
 }
 
+//BYTE8 VideoStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
+//{
+//    Proj t_Proj = _GeoStitcher->AnlayseTform(para);
+//
+//    // Compute the frame's extent on the map
+//    cv::Point2f leftTop_map = _GeoStitcher->back_project(cv::Point2f(0,0), t_Proj, _panPara);
+//    cv::Point2f rightTop_map = _GeoStitcher->back_project(cv::Point2f(img.u32Width,0), t_Proj, _panPara);
+//    cv::Point2f rightBottom_map = _GeoStitcher->back_project(cv::Point2f(img.u32Width,img.u32Height), t_Proj, _panPara);
+//    cv::Point2f leftBottom_map = _GeoStitcher->back_project(cv::Point2f(0,img.u32Height), t_Proj, _panPara);
+//
+//
+//    auto H = _GeoStitcher->findHomography(t_Proj,_panPara);
+//    leftTop_map = warpPointWithH(H,cv::Point2f(0,0));
+//    rightTop_map = warpPointWithH(H,cv::Point2f(img.u32Width,0));
+//    rightBottom_map = warpPointWithH(H,cv::Point2f(img.u32Width,img.u32Height));
+//    leftBottom_map = warpPointWithH(H,cv::Point2f(0,img.u32Height));
+//
+//    // Compute the extent of the panorama
+//    int right = max(max(max(leftTop_map.x, leftBottom_map.x), rightTop_map.x), rightBottom_map.x);
+//    int left = min(min(min(leftTop_map.x, leftBottom_map.x), rightTop_map.x), rightBottom_map.x);
+//    int top = min(min(min(leftTop_map.y, leftBottom_map.y), rightTop_map.y), rightBottom_map.y);
+//    int bottom = max(max(max(leftTop_map.y, leftBottom_map.y), rightTop_map.y), rightBottom_map.y);
+//
+//
+//    int xRange = right - left;
+//    int yRnage = bottom - top;
+//
+//    // Map back to pixel coordinates
+//    int valid_top = std::max(0, top);
+//    int valid_bottom = std::min(_pan.cols, bottom);
+//    int valid_left = std::max(0, left);
+//    int valid_right = std::min(_pan.rows, right);
+//
+//#pragma omp parallel for
+//    for (int i = valid_top; i < valid_bottom; i++)
+//    {
+//        for (int j = valid_left; j < valid_right; j++)
+//        {
+//            // Convert to pixel coordinates
+//            cv::Point2f p_img = _GeoStitcher->project(Point2f(j, i), t_Proj, _panPara);
+//
+//            p_img = warpPointWithH(H.inv(),Point2f(j, i));
+//            if (p_img.x >= 0 && p_img.y >= 0 && p_img.x < img.u32Width && p_img.y < img.u32Height)
+//            {
+//                _pan.data[i * _pan.rows + j] =
+//                    FourPointInterpolation(img.u64VirAddr[0], img.u32Width, img.u32Height, p_img.x, p_img.y);
+//            }
+//        }
+//    }
+//
+//
+//    return 0;
+//}
+
 BYTE8 VideoStitch::GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
 {
     Proj t_Proj = _GeoStitcher->AnlayseTform(para);
 
-    // Compute the frame's extent on the map
-    cv::Point2f leftTop_map = _GeoStitcher->back_project(cv::Point2f(0,0), t_Proj, _panPara);
-    cv::Point2f rightTop_map = _GeoStitcher->back_project(cv::Point2f(img.u32Width,0), t_Proj, _panPara);
-    cv::Point2f rightBottom_map = _GeoStitcher->back_project(cv::Point2f(img.u32Width,img.u32Height), t_Proj, _panPara);
-    cv::Point2f leftBottom_map = _GeoStitcher->back_project(cv::Point2f(0,img.u32Height), t_Proj, _panPara);
-
-
-    auto H = _GeoStitcher->findHomography(t_Proj,_panPara);
-    leftTop_map = warpPointWithH(H,cv::Point2f(0,0));
-    rightTop_map = warpPointWithH(H,cv::Point2f(img.u32Width,0));
-    rightBottom_map = warpPointWithH(H,cv::Point2f(img.u32Width,img.u32Height));
-    leftBottom_map = warpPointWithH(H,cv::Point2f(0,img.u32Height));
+    // uv2Geo H
+    cv::Mat H1 = _GeoStitcher->findHomography(t_Proj);
 
-    // Compute the extent of the panorama
-    int right = max(max(max(leftTop_map.x, leftBottom_map.x), rightTop_map.x), rightBottom_map.x);
-    int left = min(min(min(leftTop_map.x, leftBottom_map.x), rightTop_map.x), rightBottom_map.x);
-    int top = min(min(min(leftTop_map.y, leftBottom_map.y), rightTop_map.y), rightBottom_map.y);
-    int bottom = max(max(max(leftTop_map.y, leftBottom_map.y), rightTop_map.y), rightBottom_map.y);
+    // Geo2Pan H
+    cv::Mat H2 = getHFromGeo2Pan(_panPara);
 
+    cv::Mat H = H2 * H1;
 
-    int xRange = right - left;
-    int yRnage = bottom - top;
+    // Project the current frame onto the panorama with H
+    cv::Mat imagetmp(_pan.size(), CV_8UC3, cv::Scalar(0, 0, 0));
 
-    // Map back to pixel coordinates
-    int valid_top = std::max(0, top);
-    int valid_bottom = std::min(_pan.cols, bottom);
-    int valid_left = std::max(0, left);
-    int valid_right = std::min(_pan.rows, right);
+    cv::Mat src(img.u32Height, img.u32Width, CV_8UC1, img.u64VirAddr[0]);
 
-#pragma omp parallel for
-    for (int i = valid_top; i < valid_bottom; i++)
-    {
-        for (int j = valid_left; j < valid_right; j++)
-        {
-            // Convert to pixel coordinates
-            cv::Point2f p_img = _GeoStitcher->project(Point2f(j, i), t_Proj, _panPara);
-
-            p_img = warpPointWithH(H.inv(),Point2f(j, i));
-            if (p_img.x >= 0 && p_img.y >= 0 && p_img.x < img.u32Width && p_img.y < img.u32Height)
-            {
-                _pan.data[i * _pan.rows + j] =
-                    FourPointInterpolation(img.u64VirAddr[0], img.u32Width, img.u32Height, p_img.x, p_img.y);
-            }
-        }
-    }
+    cv::warpPerspective(src, imagetmp, H, imagetmp.size());
 
+    cv::Mat mask = cv::Mat::ones(src.size(), CV_8UC1) * 255;
+    cv::Mat warped_mask;
+    cv::warpPerspective(mask, warped_mask, H, imagetmp.size());
+    imagetmp.copyTo(_pan, warped_mask);
 
-    return 0;
+    return 0;
 }
 
 SINT32 VideoStitch::BAStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
@@ -117,7 +143,7 @@ SINT32 VideoStitch::BAStitch(GD_VIDEO_FRAME_S img, FrameInfo para)
 
 SINT32 VideoStitch::BAOpt()
 {
-    _BATask->optimizeBA();
+    _BATask->optimizeBA(getHFromGeo2Pan(_panPara));
 
     return 0;
 }
@@ -145,8 +171,10 @@ SINT32 VideoStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
 
     // Deep-copy the image data
     memcpy(_t_frame_cache->_data,img.u64VirAddr[0],imgSize);
+
+
+
 
-    auto size = sizeof(_t_frame_cache);
     // Add to the file cache
     _cache->set(para.nFrmID,_t_frame_cache);
 
@@ -158,26 +186,26 @@
 SINT32 VideoStitch::ProcFrame()
 {
-    printf("size:%d\n",_cache->getMemSize());
-    std::shared_ptr<FrameCache> loadedFrame1;
-    bool bflag1 = _cache->get(30, loadedFrame1);
-
-    std::shared_ptr<FrameCache> loadedFrame2;
-    auto bflag2 = _cache->get(40,loadedFrame2);
-
-    if (bflag1 && loadedFrame1->_frame_info.u32Height > 0)
-    {
-        cv::Mat src1(loadedFrame1->_frame_info.u32Height, loadedFrame1->_frame_info.u32Width, CV_8UC1, loadedFrame1->_data);
-        imshow("src30",src1);
-        waitKey(1);
-    }
-
-    if (bflag2 && loadedFrame2->_frame_info.u32Height > 0)
-    {
-        cv::Mat src2(loadedFrame2->_frame_info.u32Height, loadedFrame2->_frame_info.u32Width, CV_8UC1, loadedFrame2->_data);
-        imshow("src40",src2);
-        waitKey(0);
-    }
+    //printf("size:%d\n",_cache->getMemSize());
+    //std::shared_ptr<FrameCache> loadedFrame1;
+    //bool bflag1 = _cache->get(30, loadedFrame1);
+
+    //std::shared_ptr<FrameCache> loadedFrame2;
+    //auto bflag2 = _cache->get(40,loadedFrame2);
+
+    //if (bflag1 && loadedFrame1->_frame_info.u32Height > 0)
+    //{
+    //    cv::Mat src1(loadedFrame1->_frame_info.u32Height, loadedFrame1->_frame_info.u32Width, CV_8UC1, loadedFrame1->_data);
+    //    imshow("src30",src1);
+    //    waitKey(1);
+    //}
+    //
+    //if (bflag2 && loadedFrame2->_frame_info.u32Height > 0)
+    //{
+    //    cv::Mat src2(loadedFrame2->_frame_info.u32Height, loadedFrame2->_frame_info.u32Width, CV_8UC1, loadedFrame2->_data);
+    //    imshow("src40",src2);
+    //    waitKey(0);
+    //}
 
     return 0;
 }
@@ -198,4 +226,13 @@ GD_VIDEO_FRAME_S VideoStitch::ExportPanAddr()
 
     return pan_out;
 }
 
+cv::Mat VideoStitch::getHFromGeo2Pan(PanInfo _pan)
+{
+    Mat H = (Mat_<double>(3, 3) << _pan.scale, 0, _pan.map_shiftX,
+        0, -_pan.scale, _panPara.m_pan_height + _pan.map_shiftY,
+        0, 0, 1
+        );
+    return H;
+}
+
diff --git a/stitch/src/Arith_VideoStitch.h b/stitch/src/Arith_VideoStitch.h
index f0e5395..6396bdd 100644
--- a/stitch/src/Arith_VideoStitch.h
+++ b/stitch/src/Arith_VideoStitch.h
@@ -14,17 +14,18 @@ public:
 
     void Test();
 
-
-public:
     PanInfo Init(FrameInfo info);
+
+    // Receive a frame
+    SINT32 ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para);
+
+public:
     BYTE8 GeoStitch(GD_VIDEO_FRAME_S img, FrameInfo para);
 
     SINT32 BAStitch(GD_VIDEO_FRAME_S img, FrameInfo para);
     SINT32 BAOpt();
 
-    // Receive a frame
-    SINT32 ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para);
 
     // Process frames
     SINT32 ProcFrame();
@@ -45,5 +46,9 @@ private:
     BA_Task* _BATask; // BA
 
     PanInfo _panPara; // panorama configuration
+
+    cv::Mat getHFromGeo2Pan(PanInfo _pan); // compute the panorama projection, from the geographic frame to the panorama map, in one place
+
+    cv::Mat _pan;
 };
\ No newline at end of file
diff --git a/stitch/src/StitchStruct.h b/stitch/src/StitchStruct.h
index 39f730a..48daa76 100644
--- a/stitch/src/StitchStruct.h
+++ b/stitch/src/StitchStruct.h
@@ -41,11 +41,16 @@ struct Match_Net
 };
 
-// Frame cache
+#define IMG_CACHE_SIZE (1920 * 1080 * 3) // image cache size
+#define FEA_NUM_MAX 500                  // maximum number of feature points per frame
+#define FEA_DES_SIZE 128                 // feature descriptor dimension
+
+// Frame cache: laid out with fixed sizes to simplify management
 struct FrameCache
 {
     FrameInfo _para;
     GD_VIDEO_FRAME_S _frame_info;
-    BYTE8 _data[1920*1080];
-    cv::Point2f _pt[500];
+    BYTE8 _data[IMG_CACHE_SIZE];
+    cv::Point2f _pt[FEA_NUM_MAX];
+    FLOAT32 _desp[FEA_NUM_MAX * FEA_DES_SIZE];
 };
\ No newline at end of file
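
Note on the projection chain introduced by this patch: the new GeoStitch builds H1 (frame pixels to the normalized geographic frame, via findHomography(Proj)) and H2 (geographic frame to panorama pixels, via getHFromGeo2Pan), then warps each frame and its mask with the composite H = H2 * H1. The standalone sketch below illustrates only that composition using OpenCV; the corner correspondences, the scale/shift numbers, and the helper name makeGeo2Pan are illustrative placeholders, not values or symbols from this repository.

// Sketch of the uv -> geo -> panorama homography chain (assumptions noted above).
#include <opencv2/opencv.hpp>
#include <vector>

// Stand-in for getHFromGeo2Pan: scale plus shift from geographic coordinates to panorama
// pixels, with the Y axis flipped because panorama rows grow downward. Placeholder only.
static cv::Mat makeGeo2Pan(double scale, double shiftX, double shiftY, double panHeight)
{
    return (cv::Mat_<double>(3, 3) << scale, 0.0, shiftX,
                                      0.0, -scale, panHeight + shiftY,
                                      0.0, 0.0, 1.0);
}

int main()
{
    // H1: frame pixels -> geographic frame, estimated from four corner correspondences
    // (stand-in for GeoStitcher::findHomography(Proj); the geo corners are made up).
    std::vector<cv::Point2f> uv  = { {0, 0}, {1000, 0}, {1000, 1000}, {0, 1000} };
    std::vector<cv::Point2f> geo = { {12.f, 70.f}, {55.f, 72.f}, {52.f, 30.f}, {10.f, 34.f} };
    cv::Mat H1 = cv::findHomography(uv, geo);

    // H2: geographic frame -> panorama pixels (placeholder scale/shift values).
    cv::Mat H2 = makeGeo2Pan(10.0, 100.0, 50.0, 1500.0);

    // Composite homography, applied the same way the new GeoStitch warps a frame and its mask.
    cv::Mat H = H2 * H1;

    cv::Mat frame(1000, 1000, CV_8UC1, cv::Scalar(128));
    cv::Mat pan(1500, 1500, CV_8UC1, cv::Scalar(0));

    cv::Mat warped, warpedMask;
    cv::warpPerspective(frame, warped, H, pan.size());
    cv::Mat mask = cv::Mat::ones(frame.size(), CV_8UC1) * 255;
    cv::warpPerspective(mask, warpedMask, H, pan.size());
    warped.copyTo(pan, warpedMask); // paste only where the warped frame has valid pixels

    return 0;
}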