#include "API_VideoStitch.h"
#include "Arith_VideoStitch.h"
#include "Arith_Utils.h"
#include "Arith_CoordModule.h"
#include <opencv2/opencv.hpp>
#include <omp.h>
using namespace std;
using namespace cv;
API_VideoStitch * API_VideoStitch::Create(SINT32 nWidth, SINT32 nHeight)
{
    return new VideoStitch(nWidth, nHeight);
}
void API_VideoStitch::Destroy(API_VideoStitch * obj)
{
    delete obj;
}
VideoStitch::VideoStitch(SINT32 nWidth, SINT32 nHeight)
{
}
VideoStitch::~VideoStitch()
{
}
void VideoStitch::Test()
{
    FrameInfo info = { 0 };
    info.camInfo.nFocus = 200;
    info.camInfo.fPixelSize = 25;
    info.craft.stAtt.fYaw = 1;
    info.craft.stAtt.fPitch = 2;
    info.craft.stAtt.fRoll = 3;
    info.nEvHeight = 1000;
    info.servoInfo.fServoAz = 4;
    info.servoInfo.fServoPt = 5;
    info.nWidth = 1280;
    info.nHeight = 1024;
    Mat R = Mat_TransENG2uv(info);
    Mat R_inv = R.inv();
    //auto point = getWarpPoint(540, 340, (double*)R_inv.data);
    //std::cout << point.x << " " << point.y << std::endl;
}
BBOOL VideoStitch::Init(FrameInfo info)
{
    // Reference (origin) frame info
    originPoint = info;
    // Compute the projection transforms for this frame
    AnlayseTform(info);
    // Geographic coordinate of the frame centre; the panorama is then shifted so this point maps to its centre
    cv::Point2f ct_geo = Trans_uv2Geo(cv::Point2f(info.nWidth / 2, info.nHeight / 2), m_Proj.tf_p2g);
    // Panorama initialisation
    m_Proj.panPara.m_pan_width = 1000;   // panorama width
    m_Proj.panPara.m_pan_height = 1000;  // panorama height
    m_Proj.panPara.scale = 0.3;          // scale: 1 m = ? pix
    // Solve once without any shift, then derive the shift that centres the first frame
    auto cur = Trans_Geo2pan(ct_geo, m_Proj.panPara);
    m_Proj.panPara.map_shiftX = m_Proj.panPara.m_pan_width / 2 - (cur.x);   // shift in X
    m_Proj.panPara.map_shiftY = m_Proj.panPara.m_pan_height / 2 - (cur.y);  // shift in Y
    // Recompute to verify the frame centre now lands at the panorama centre (debug check)
    //auto cur2 = Trans_Geo2pan(ct_geo, m_Proj.panPara);
    m_pan = Mat::zeros(m_Proj.panPara.m_pan_height, m_Proj.panPara.m_pan_width, CV_8UC1);
    return true;
}
BYTE8 VideoStitch::Updata(Mat img, FrameInfo para)
{
    AnlayseTform(para);
    // Project the frame's four corners into panorama (map) coordinates
    cv::Point2f leftTop_map     = back_project(cv::Point2f(0, 0), m_Proj);
    cv::Point2f rightTop_map    = back_project(cv::Point2f(img.cols, 0), m_Proj);
    cv::Point2f rightBottom_map = back_project(cv::Point2f(img.cols, img.rows), m_Proj);
    cv::Point2f leftBottom_map  = back_project(cv::Point2f(0, img.rows), m_Proj);
    // Bounding box of the frame footprint in the panorama
    int right  = max(max(max(leftTop_map.x, leftBottom_map.x), rightTop_map.x), rightBottom_map.x);
    int left   = min(min(min(leftTop_map.x, leftBottom_map.x), rightTop_map.x), rightBottom_map.x);
    int top    = min(min(min(leftTop_map.y, leftBottom_map.y), rightTop_map.y), rightBottom_map.y);
    int bottom = max(max(max(leftTop_map.y, leftBottom_map.y), rightTop_map.y), rightBottom_map.y);
    int xRange = right - left;
    int yRange = bottom - top;
    // Inverse-map each panorama pixel inside the footprint back to frame pixel coordinates
    for (int i = top; i < bottom; i++)
    {
        for (int j = left; j < right; j++)
        {
            if (i < 0 || j < 0 || i >= m_pan.rows || j >= m_pan.cols)
                continue;
            // Convert panorama pixel (j, i) to frame pixel coordinates
            cv::Point2f p_img = project(Point2f(j, i), m_Proj);
            if (p_img.x < 0 || p_img.y < 0 || p_img.x >= img.cols - 1 || p_img.y >= img.rows - 1)
            {
                continue;  // keep the 2x2 interpolation neighbourhood inside the frame
            }
            // Bilinear (four-point) interpolation from the source frame
            m_pan.data[i * m_pan.cols + j] =
                FourPointInterpolation(img.data, img.cols, img.rows, p_img.x, p_img.y);
        }
    }
    cv::imshow("pan", m_pan);
    cv::waitKey(1);
    return BYTE8();
}
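// project / back_project map between panorama pixels and frame pixels through the local
// geographic plane: project = pan -> geo -> uv, back_project = uv -> geo -> pan.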
cv::Point2f VideoStitch::project(cv::Point2f pos_pan, Proj m_Proj)
{
    cv::Point2f pos_geo = Trans_pan2Geo(pos_pan, m_Proj.panPara);
    cv::Point2f pos_frame = Trans_Geo2uv(pos_geo, m_Proj.tf_g2p);
    return pos_frame;
}
cv::Point2f VideoStitch::back_project(cv::Point2f pos_frame, Proj m_Proj)
{
    cv::Point2f pos_geo = Trans_uv2Geo(pos_frame, m_Proj.tf_p2g);
    cv::Point2f pos_pan = Trans_Geo2pan(pos_geo, m_Proj.panPara);
    return pos_pan;
}
cv::Point2f VideoStitch::Trans_uv2Geo(cv::Point2f pos_frame, TForm form)
{
    Mat point = (Mat_<double>(3, 1) << pos_frame.x, pos_frame.y, 1);
    Mat result = form.R * point;
    // Into the local geographic frame (normalise by the homogeneous component)
    double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
    double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
    // Translate into the origin geographic frame
    warpedX += form.T.at<double>(0, 0);
    warpedY += form.T.at<double>(1, 0);
    return cv::Point2f(warpedX, warpedY);
}
cv::Point2f VideoStitch::Trans_Geo2uv(cv::Point2f pos_geo, TForm form_inv)
{
    // First translate to the current camera position
    cv::Point2f pos_cam = pos_geo;
    pos_cam.x = pos_geo.x + form_inv.T.at<double>(0, 0);
    pos_cam.y = pos_geo.y + form_inv.T.at<double>(1, 0);
    Mat point = (Mat_<double>(3, 1) << pos_cam.x, pos_cam.y, 1);
    Mat result = form_inv.R * point;
    // Into image (pixel) space (normalise by the homogeneous component)
    double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
    double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
    return cv::Point2f(warpedX, warpedY);
}
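// Trans_pan2Geo / Trans_Geo2pan convert between panorama pixel coordinates and the local
// geographic plane; they are mutual inverses. The Y axis is flipped through m_pan_height
// because panorama rows grow downward while geographic Y grows upward (inferred from the
// formulas below, not stated elsewhere in the code).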
cv::Point2f VideoStitch::Trans_pan2Geo(cv::Point2f pos_pan, PanInfo panPara)
{
    double x = (pos_pan.x - panPara.map_shiftX) / panPara.scale;
    double y = (panPara.m_pan_height - (pos_pan.y - panPara.map_shiftY)) / panPara.scale;
    return cv::Point2f(x, y);
}
cv::Point2f VideoStitch::Trans_Geo2pan(cv::Point2f pos_geo, PanInfo panPara)
{
    double pan_x = pos_geo.x * panPara.scale + panPara.map_shiftX;
    double pan_y = (panPara.m_pan_height - pos_geo.y * panPara.scale) + panPara.map_shiftY;
    return cv::Point2f(pan_x, pan_y);
}
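// Mat_TransENGMove: translation of the current platform position relative to the origin
// frame, expressed in the local geographic plane. The component reordering
// (diff.Z, diff.X, diff.Y) presumably maps the NUE (North-Up-East) result of
// getNUEXYZFromCGCSXYZ onto the (X, Y, height) ordering used by the transforms above;
// this interpretation is an assumption based only on the function names.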
Mat VideoStitch::Mat_TransENGMove(FrameInfo info)
{
    PointXYZ ptCurr = getXYZFromBLH(info.craft.stPos);
    PointXYZ ptOrig = getXYZFromBLH(originPoint.craft.stPos);
    PointXYZ diff = getNUEXYZFromCGCSXYZ(ptCurr, ptOrig);
    Mat move = Mat::zeros(3, 1, CV_64F);
    move.at<double>(0, 0) = diff.Z;
    move.at<double>(1, 0) = diff.X;
    move.at<double>(2, 0) = diff.Y;
    return move;
}
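// Mat_TransENG2uv builds the 3x3 mapping from the local geographic plane to frame pixel
// coordinates: intrinsics * servo rotations (alaph, beta) * attitude (roll, pitch, yaw) * height.
// Worked check for fd, the focal length in pixels: assuming nFocus is in millimetres and
// fPixelSize in micrometres (units are not stated in this file), the Test() values
// nFocus = 200 and fPixelSize = 25 give fd = 200 / 25 * 1000 = 8000.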
Mat VideoStitch::Mat_TransENG2uv(FrameInfo info)
{
    // Geographic coordinates -> pixel coordinates
    // [u,v,1]' = Z * M * [X,Y,DH]' = Z * M * [1,0,0; 0,1,0; 0,0,DH] * [X,Y,1]'
    // [u,v,1]' = Z * M(intrinsics) * M(alaph) * M(beta) * M(roll) * M(pitch) * M(yaw) * [X,Y,DH]'
    // Depth (height) matrix
    Mat M_het = (Mat_<double>(3, 3) << 1, 0, 0,
                                       0, 1, 0,
                                       0, 0, info.nEvHeight);
    float yaw = info.craft.stAtt.fYaw;
    Mat M_yaw = (Mat_<double>(3, 3) << cosd(yaw), -sind(yaw), 0,
                                       sind(yaw),  cosd(yaw), 0,
                                       0, 0, 1);
    float pit = info.craft.stAtt.fPitch;
    Mat M_pitch = (Mat_<double>(3, 3) << 1, 0, 0,
                                         0, cosd(pit), -sind(pit),
                                         0, sind(pit),  cosd(pit));
    /* 1    0    0
       0  cos  sin
       0 -sin  cos */
    float roll = info.craft.stAtt.fRoll;
    Mat M_roll = (Mat_<double>(3, 3) << cosd(roll), 0, sind(roll),
                                        0, 1, 0,
                                        -sind(roll), 0, cosd(roll));
    float beta = info.servoInfo.fServoAz;
    Mat M_beta = (Mat_<double>(3, 3) << cosd(beta), -sind(beta), 0,
                                        sind(beta),  cosd(beta), 0,
                                        0, 0, 1);
    float alaph = info.servoInfo.fServoPt;
    Mat M_alaph = (Mat_<double>(3, 3) << 1, 0, 0,
                                         0, cosd(alaph), -sind(alaph),
                                         0, sind(alaph),  cosd(alaph));
    // Intrinsics: focal length in pixels, principal point at the image centre
    FLOAT32 fd = info.camInfo.nFocus / info.camInfo.fPixelSize * 1000;
    Mat M_cam = (Mat_<double>(3, 3) << fd, 0, info.nWidth / 2,
                                       0, -fd, info.nHeight / 2,
                                       0, 0, 1);
    Mat M = M_cam * M_alaph * M_beta * M_roll * M_pitch * M_yaw * M_het;
    //cout << M_cam * M_alaph * M_beta * M_roll * M_pitch * M_yaw * M_het;
    return M;
}
void VideoStitch::AnlayseTform(FrameInfo info)
{
    // Image (pixel) space -> geographic
    m_Proj.tf_p2g.R = Mat_TransENG2uv(info).inv();
    m_Proj.tf_p2g.T = Mat_TransENGMove(info);
    // Geographic -> image (pixel) space
    m_Proj.tf_g2p.R = Mat_TransENG2uv(info);
    m_Proj.tf_g2p.T = -m_Proj.tf_p2g.T;
}
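// Usage sketch (illustrative only; the frame source and the way FrameInfo is filled are
// assumptions, not part of this file):
//
//   API_VideoStitch* stitcher = API_VideoStitch::Create(1280, 1024);
//   FrameInfo first = { 0 };
//   // ... fill craft position/attitude, servo angles, camera intrinsics, nWidth/nHeight ...
//   stitcher->Init(first);
//   while (/* frames remain */)
//   {
//       cv::Mat frame;       // 8-bit single-channel image, matching m_pan (CV_8UC1)
//       FrameInfo para;      // per-frame telemetry
//       // ... acquire frame and telemetry ...
//       stitcher->Updata(frame, para);
//   }
//   API_VideoStitch::Destroy(stitcher);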