#include "API_FrontStitch.h" #include "Arith_FrontStitch.h" #include "Arith_Utils.h" #include "Arith_CoordModule.h" #include "Arith_FeaMatch.h" #include "Arith_FrontProj.h" #include #include using namespace std; using namespace cv; API_FrontStitch* API_FrontStitch::Create(SINT32 nWidth, SINT32 nHeight) { return new FrontStitch(nWidth, nHeight); } void API_FrontStitch::Destroy(API_FrontStitch* obj) { delete obj; } FrontStitch::FrontStitch(SINT32 nWidth, SINT32 nHeight) { _GeoSolver = new GeoSolver(); } FrontStitch::~FrontStitch() { cudaFree(_cuda_mem.global_cuda_Frame); cudaFree(_cuda_mem.global_cuda_Pan); cudaFree(_cuda_mem.global_cuda_H_inv_data); cudaFree(_cuda_mem.global_cuda_pan_mask); } FPanInfo FrontStitch::Init(FrameInfo info, float AzRange, float PtRange) { _panPara.fAglRes = 0.02; // 设置当前拼接起始位置 _GeoSolver->SetOriginPoint(info); cv::Mat H = _GeoSolver->findHomography(info); // 计算中心点的极坐标 Pole centPole = getPoleFromImgWithH(H, cv::Point2f(info.nWidth / 2, info.nHeight / 2), info.nEvHeight); // 可以正反验证 //cv::Point2f imgpos = getImgPosFromPole(H, centPole, info.nEvHeight); _panPara.center.fAz = centPole.beta; _panPara.center.fPt = centPole.alpha; _panPara.range.fAz = AzRange; _panPara.range.fPt = PtRange; // 全景图规模 _panPara.m_pan_width = AzRange / _panPara.fAglRes; _panPara.m_pan_height = PtRange / _panPara.fAglRes; _panImage = cv::Mat::zeros(_panPara.m_pan_height, _panPara.m_pan_width, CV_8UC3); cudaMalloc((void**)&_cuda_mem.global_cuda_Frame, info.nWidth * info.nHeight * 3);//rgb cudaMalloc((void**)&_cuda_mem.global_cuda_Pan, _panPara.m_pan_width * _panPara.m_pan_height * 3);//rgb cudaMalloc((void**)&_cuda_mem.global_cuda_H_inv_data, 9 * sizeof(double));// cudaMalloc((void**)&_cuda_mem.global_cuda_pan_mask, _panPara.m_pan_width * _panPara.m_pan_height * 1);//gray return _panPara; } BYTE8 FrontStitch::PoleStitch(GD_VIDEO_FRAME_S img, FrameInfo info) { cv::Mat H = _GeoSolver->findHomography(info); float dep = info.nEvHeight; // 获取rgb源图像 cv::Mat rgb = getRGBMatFromGDFrame(img, img.u64VirAddr[0]); Pole CT_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width/2, img.u32Height/2), info.nEvHeight); // 计算帧的map四至 Pole leftTop_map = getPoleFromImgWithH(H,cv::Point2f(0,0), info.nEvHeight); Pole rightTop_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width,0), info.nEvHeight); Pole rightBottom_map = getPoleFromImgWithH(H, cv::Point2f(img.u32Width,img.u32Height), info.nEvHeight); Pole leftBottom_map = getPoleFromImgWithH(H, cv::Point2f(0,img.u32Height), info.nEvHeight); // 转全景图的像方 cv::Point2f p_leftTop_map = getFPanFromPole(leftTop_map,_panPara); cv::Point2f p_rightTop_map = getFPanFromPole(rightTop_map, _panPara); cv::Point2f p_rightBottom_map = getFPanFromPole(rightBottom_map, _panPara); cv::Point2f p_leftBottom_map = getFPanFromPole(leftBottom_map, _panPara); // 计算全景图上范围 int right = max(max(max(p_leftTop_map.x, p_rightTop_map.x), p_rightBottom_map.x), p_leftBottom_map.x); int left = min(min(min(p_leftTop_map.x, p_rightTop_map.x), p_rightBottom_map.x), p_leftBottom_map.x); int top = min(min(min(p_leftTop_map.y, p_rightTop_map.y), p_rightBottom_map.y), p_leftBottom_map.y); int bottom = max(max(max(p_leftTop_map.y, p_rightTop_map.y), p_rightBottom_map.y), p_leftBottom_map.y); int xRange = right - left; int yRnage = bottom - top; //反映射到像素坐标 MINMAXRECT32S RANGES = { 0 }; RANGES.minY = std::max(0, top); RANGES.maxY = std::min(_panImage.cols, bottom); RANGES.minX = std::max(0, left); RANGES.maxX = std::min(_panImage.rows, right); cv::Rect2d roi(RANGES.minX, RANGES.minY, RANGES.maxX - RANGES.minX, 
        RANGES.maxY - RANGES.minY);

    cv::Mat H_inv = H.inv();

    //UpdatePan_CPU(rgb, _panImage, roi, H_inv, _panPara, dep);
    UpdatePan_CUDA(rgb, _panImage, roi, H_inv, _panPara, dep, _cuda_mem);

    return 0;
}

SINT32 FrontStitch::ReceiveFrame(GD_VIDEO_FRAME_S img, FrameInfo para)
{
    return SINT32();
}

SINT32 FrontStitch::ProcessFrame()
{
    return SINT32();
}

GD_VIDEO_FRAME_S FrontStitch::ExportPanAddr()
{
    GD_VIDEO_FRAME_S pan_out;
    pan_out.enPixelFormat = GD_PIXEL_FORMAT_RGB_PACKED;
    pan_out.u32Width = _panPara.m_pan_width;
    pan_out.u32Height = _panPara.m_pan_height;
    pan_out.u64VirAddr[0] = _panImage.data;

    return pan_out;
}

void UpdatePan_CPU(cv::Mat rgbFrame, cv::Mat pan, cv::Rect2d roi, cv::Mat H_inv, FPanInfo _panPara, float dep)
{
    cv::Mat subImg = pan(roi);

    // Iterate over the whole sub-image
#pragma omp parallel for
    for (int i = 0; i < (int)roi.height; i++)
    {
        for (int j = 0; j < (int)roi.width; j++)
        {
            // Back-project the panorama pixel into the source frame
            cv::Point2f p_img = getImgPosFromPole(H_inv, getPoleFromFPan(Point2f(j + roi.x, i + roi.y), _panPara), dep);

            // Keep a 200-pixel margin away from the frame borders
            if (p_img.x >= 200 && p_img.y >= 200 && p_img.x < rgbFrame.cols - 200 && p_img.y < rgbFrame.rows - 200)
            {
                auto rgbVal = Interpolation_RGB24(rgbFrame.data, rgbFrame.cols, rgbFrame.rows, p_img.x, p_img.y);
                uchar* row = subImg.ptr(i);
                row[3 * j + 0] = rgbVal.R;
                row[3 * j + 1] = rgbVal.G;
                row[3 * j + 2] = rgbVal.B;
            }
        }
    }
}
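
// A minimal, hypothetical usage sketch of the front-stitch module above, kept out of
// the build with #if 0. It assumes that Init, PoleStitch and ExportPanAddr are exposed
// on the API_FrontStitch interface, and that the caller fills GD_VIDEO_FRAME_S / FrameInfo
// elsewhere; the azimuth/pitch coverage values (60 x 30 degrees) are example numbers only.
#if 0
static void FrontStitchDemo(GD_VIDEO_FRAME_S frame, FrameInfo info)
{
    // Create the stitcher for the input frame size
    API_FrontStitch* stitcher = API_FrontStitch::Create(info.nWidth, info.nHeight);

    // Initialize the panorama; with fAglRes = 0.02 deg/px this yields a 3000 x 1500 canvas
    FPanInfo panInfo = stitcher->Init(info, 60.0f, 30.0f);

    // Map each incoming frame onto the panorama via its pose homography
    stitcher->PoleStitch(frame, info);

    // Fetch the panorama buffer (points into the stitcher's internal cv::Mat)
    GD_VIDEO_FRAME_S pan = stitcher->ExportPanAddr();

    API_FrontStitch::Destroy(stitcher);
}
#endif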