整理结构

main
wangchongwu 5 months ago
parent 632dd9026b
commit cdc98d9952

@ -5,9 +5,10 @@ SET(ArithStitchDir stitch)
IF(WIN32)
set(OpenCV_DIR "C:/opencv/build/x64/vc14/lib")
set(CMAKE_TOOLCHAIN_FILE "C:/Users/75929/vcpkg/scripts/buildsystems/vcpkg.cmake")
ELSE(WIN32)
set(OpenCV_DIR "/home/wcw/opencv-3.4.16/install/share/OpenCV")
ENDIF(WIN32)
ENDIF(WIN32)
find_package(OpenCV REQUIRED)

@ -1,6 +1,7 @@
#include <iostream>
#include "API_VideoStitch.h"
#include "S729.h"
#include "PlatformDefine.h"
#include <string.h>
#include "opencv2/opencv.hpp"
using namespace std;
@ -87,7 +88,11 @@ int main(int, char**)
auto stitcher = API_VideoStitch::Create(IMAGE_WIDTH_IR,IMAGE_HEIGHT_IR);
//stitcher->Test();
GD_VIDEO_FRAME_S frame = {0};//输入帧
GD_VIDEO_FRAME_S pan = { 0 };//输出全景
cv::Mat mat_pan;//全景显示
//return 0;
FILE* file = fopen("../20241219152917_4.xraw","rb");
@ -96,7 +101,7 @@ int main(int, char**)
fread(pFrameIR,2,IMAGE_WIDTH_IR*(IMAGE_HEIGHT_IR + PARA_IR_LINE),file);
if(i % 3 != 0)
if(i % 2 != 0)
{
continue;
}
@ -127,35 +132,43 @@ int main(int, char**)
Map16BitTo8Bit(pFrameIR,IMAGE_WIDTH_IR*IMAGE_HEIGHT_IR,pImageIR);
cv::Mat src(IMAGE_HEIGHT_IR,IMAGE_WIDTH_IR,CV_8UC1,pImageIR);
cv::Mat mat_src(IMAGE_HEIGHT_IR, IMAGE_WIDTH_IR, CV_8UC1, pImageIR);
frame.enPixelFormat = GD_PIXEL_FORMAT_GRAY_Y8;
frame.u32Width = IMAGE_WIDTH_IR;
frame.u32Height = IMAGE_HEIGHT_IR;
frame.u64VirAddr[0] = pImageIR;
if(i == 0)
{
stitcher->Init(info);
pan = stitcher->ExportPanAddr();
mat_pan = cv::Mat(pan.u32Height, pan.u32Width, CV_8UC1, pan.u64VirAddr[0]);
}
else
{
// info.craft.stAtt.fYaw = 0;
// info.craft.stAtt.fPitch = 0;
// info.craft.stAtt.fRoll = 0;
// info.servoInfo.fServoAz = 0;
//info.servoInfo.fServoPt = 0;
//cout << info.craft.stPos.B << " " <<info.craft.stPos.L << " " << info.craft.stPos.H << " "
//<< info.craft.stAtt.fYaw << " " << info.craft.stAtt.fPitch << " " << info.craft.stAtt.fRoll<< " "
//<< info.servoInfo.fServoAz << " " << info.servoInfo.fServoPt
//<< endl;
cout << info.craft.stPos.B << " " <<info.craft.stPos.L << " " << info.craft.stPos.H << " "
<< info.craft.stAtt.fYaw << " " << info.craft.stAtt.fPitch << " " << info.craft.stAtt.fRoll<< " "
<< info.servoInfo.fServoAz << " " << info.servoInfo.fServoPt
<< endl;
stitcher->Updata(src,info);
stitcher->Updata(frame,info);
}
imshow("src", src);
}
//imshow("src", mat_src);
imshow("pan", mat_pan);
waitKey(1);
}
waitKey(0);
}

@ -1,274 +0,0 @@
/*
* @Author: turboLIU
* @Date: 2022-11-02 15:04:08
* @LastEditTime: 2024-10-25 09:17:48
* @Description: Do not edit
* @FilePath: /Algo_ACL_Common/API/AI_API.h
*/
#ifndef AI_API
#define AI_API
#include <string>
#include "detection_type_api.h"
using namespace AIGO;
#ifdef __cplusplus
extern "C"{
#endif
/**
* @brief NPU
*
* @return int 0
*/
int Init_ACL();
/**
* @brief Set the deque size object
*
* @param irdetin
* @param visdetin
* @param irsegin
* @param vissegin
* @param irdetout
* @param visdetout
* @param irsegout
* @param vissegout
* @param irdetres
* @param visdetres
* @param irsegres
* @param vissegres
* @return int
*/
int set_deque_size(int irdetin=3, int visdetin=3, int irsegin=3, int vissegin=3, int irdetout=5, int visdetout = 5, int irsegout = 5, int vissegout = 5,
int irdetres = 10, int visdetres = 10, int irsegres = 10, int vissegres = 10);
int get_deque_size(int &irdetin, int &visdetin, int &irsegin, int &vissegin, int &irdetout, int &visdetout, int &irsegout, int &vissegout,
int &irdetres, int &visdetres, int &irsegres, int &vissegres);
int reset_dequeu_size(int irdetin=30, int visdetin=30, int irsegin=30, int vissegin=30, int irdetout=50, int visdetout = 50, int irsegout = 50, int vissegout = 50,
int irdetres = 100, int visdetres = 100, int irsegres = 100, int vissegres = 100);
/**
* @brief Set the stream size object
* init 1280 1024
*
* @param width 1280
* @param height 1024
* @return int
*/
int set_stream_size(int width, int height, MODE mode);
int get_stream_size(int &width, int &height, MODE mode);
int reset_stream_size(int width, int height, MODE mode);
/**
* @brief
*
* @param irdetmodel
* @param visdetmodel
* @param irsegmodel
* @param vissegmodel
* @return int
*/
int Init_AI(const char* irdetmodel, const char* visdetmodel, const char* irsegmodel, const char* vissegmodel);
int Det_VL(ImgMat frame, std::vector<objinfo> &outputs);
int Det_IR(ImgMat frame, std::vector<objinfo> &outputs);
int Seg_VL(ImgMat frame, TenMat* output);
int Seg_IR(ImgMat frame, TenMat* output);
/**
* @brief get_result
*
* @param img
* @param GoDet
* @param GoSeg
* @param cropFlag set_ROIArea
* @return int
*/
int runAsync(ImgMat img, bool GoDet=true, bool GoSeg=true, bool cropFlag=false);
/**
* @brief Get the result object
*
* @param timestamp
* @param dets
* @param maskImg
* @param front: true: false:
* @param single true false
* @return int -1: 1: ; 0:
*/
int get_result(long long &timestamp, std::vector<objinfo> &dets, TenMat * maskImg, bool front=true, MODE mode=VIS); // async
/**
* @brief
*
* @return int
*/
int switch_vis_to_ir();
/**
* @brief
*
* @return int
*/
int switch_ir_to_vis();
/**
* @brief Set the dropout param
*
* @param drop true: drop deque front data, false: not drop data, : false
* @return int
*/
int set_dropout_param(bool drop);
/**
* @brief Get the dropout param
*
* @param drop
* @return int
*/
int get_dropout_param(bool &drop);
/**
* @brief
*
* @param mode IR or VIS
* @param road segment road or not
* @param bridge segment bridge or not
* @param water segment water or not
* @param building segment building or not
* @return int
*/
int set_seg_classes(MODE mode, bool road, bool bridge, bool water, bool building);
/**
* @brief Set the nms threshold object
*
* @param nms
* @return int
*/
int set_nms_threshold(float nms);
/**
* @brief Get the nms threshold object
*
* @param nms
* @return int
*/
int get_nms_threshold(float& nms);
/**
* @brief Set the det threshold object
*
* @param threshold
* @param idx 115CLASSTYPE
* @return int
*/
int set_det_threshold(float threshold, int idx);
/**
* @brief Get the det threshold object
*
* @param threshold
* @param idx 115CLASSTYPE
* @return int
*/
int get_det_threshold(float &threshold, int idx);
/**
* @brief Set the seg threshold object
*
* @param thr
* @return int
*/
int set_seg_threshold(float thr);
/**
* @brief Get the seg threshold object
*
* @return float
*/
float get_seg_threshold();
/**
* @brief : BGRRGBNV12 NV21 RGB
*
* @param m
* @return int
*/
int set_StreamMode(INPUT_LAYOUT m);
/**
* @brief Get the StreamMode object
*
* @return INPUT_LAYOUT
*/
INPUT_LAYOUT get_StreamMode();
/**
* @brief
*
* @return int
*/
int Uninit_AI();
/**
* @brief only deinit hardware
*
* @return int
*/
int Uninit_ACL();
/**
* @brief Set the ROIArea object
*
* @param x1 lefttop
* @param y1 lefttop
* @param x2 botright
* @param y2 botright
* @return int
*/
int set_ROIArea(int x1, int y1, int x2, int y2);
int setLabelMap(uint32_t A[], uint32_t B[], uint32_t len);
// Thin C++ wrapper around one detection model instance.
// Lifecycle: AIGOinit -> (AIGOpreprocess -> AIGOinfer -> AIGOpostprocess)* -> AIGOdeinit.
// All AIGO* methods return an int status code (0 presumably means success — TODO confirm).
class DETECTION_API
{
private:
/* data */
public:
DETECTION_API(/* args */);
~DETECTION_API();
public:
// Network input width/height in pixels; 0 until AIGOinit fills them in — TODO confirm.
int netWidth=0;
int netHeight = 0;
// Number of detection classes the loaded model outputs.
int clsnum = 0;
// Per-scale anchor boxes (w,h pairs); example shape in the commented default below.
std::vector<std::vector<float>> anchors;// = {{7,11, 9,21, 21,15},{16,39, 31,24, 57,27},{43,59, 90,59, 131,116}};
// Opaque handle to the underlying runtime session; owned by this object.
void* AIGOhandle=NULL;
// Load the model file and bind it to the given device.
int AIGOinit(const char* modename, int deviceID=0);
// Stage an input frame; doCopy=false presumably lets the runtime borrow the buffer — TODO confirm lifetime contract.
int AIGOpreprocess(ImgMat img, bool doCopy=true);
// Run inference on the staged input.
int AIGOinfer();
// Decode raw network output into detection boxes appended to `results`.
int AIGOpostprocess(std::vector<objinfo> &results);
// Release the model/session resources held by AIGOhandle.
int AIGOdeinit();
};
// Thin C++ wrapper around one segmentation model instance.
// Mirrors DETECTION_API's lifecycle: init -> (preprocess -> infer -> postprocess)* -> deinit.
// All AIGO* methods return an int status code (0 presumably means success — TODO confirm).
class SEGMENTION_API
{
private:
/* data */
public:
SEGMENTION_API(/* args */);
~SEGMENTION_API();
// Network input width/height in pixels; 0 until AIGOinit fills them in — TODO confirm.
int netWidth = 0;
int netHeight = 0;
// Number of segmentation classes the loaded model outputs.
int clsnum = 0;
// Opaque handle to the underlying runtime session; owned by this object.
void* AIGOhandle = NULL;
// Load the model file and bind it to the given device.
int AIGOinit(const char* modename, int deviceID=0);
// Stage an input frame; doCopy=false presumably lets the runtime borrow the buffer — TODO confirm lifetime contract.
int AIGOpreprocess(ImgMat img, bool doCopy=true);
// Run inference on the staged input.
int AIGOinfer();
// Decode raw network output (note: no output parameter here, unlike DETECTION_API).
int AIGOpostprocess();
// Release the model/session resources held by AIGOhandle.
int AIGOdeinit();
};
#ifdef __cplusplus
}
#endif
#endif

@ -1,131 +0,0 @@
/*
* @Author: turboLIU
* @Date: 2022-07-27 14:12:19
* @LastEditTime: 2024-09-27 09:18:16
* @Description: Do not edit
* @FilePath: /C/include/detection_type_api.h
*/
#ifndef DETECTION_TYPE_API
#define DETECTION_TYPE_API
#define NMS_UNION 1
#define NMS_MIN 2
#include <string>
#include <vector>
#include "PlatformDefine.h"
// Shared vocabulary types for the AIGO detection/segmentation API.
namespace AIGO{
// Sensor modality of a stream/frame.
typedef enum{
IR = 1,
VIS = 2
}MODE;
// Memory layout of tensor data (channel/height/width ordering).
typedef enum{
CHW = 1,
HWC = 2,
WHC = 3,
}DATA_LAYOUT;
// Pixel channel order / packing of an input image buffer.
typedef enum{
RGB = 1,
BGR = 2,
RGBA = 3,
BGRA = 4,
NV12 = 5, // uv
NV21 = 6, // vu
}INPUT_LAYOUT;
// Element data type of a tensor; values mirror a runtime's type enum — TODO confirm which.
typedef enum{
AI_DT_UNDEFINED = -1, // Unknown data type; the default value.
AI_FLOAT = 0,
AI_FLOAT16 = 1,
AI_INT8 = 2,
AI_INT32 = 3,
AI_UINT8 = 4,
AI_INT16 = 6,
AI_UINT16 = 7,
AI_UINT32 = 8,
AI_INT64 = 9,
AI_UINT64 = 10,
AI_DOUBLE = 11,
AI_BOOL = 12,
AI_STRING = 13,
AI_COMPLEX64 = 16,
AI_COMPLEX128 = 17,
AI_BF16 = 27
}DATATYPE;
// Integer grid location with a confidence score.
typedef struct tag_ID_idx
{
/* data */
int x_loc;
int y_loc;
int z_loc;
float score;
}ID_idx;
// Floating-point location with a confidence score.
typedef struct tag_IDX
{
/* data */
float x;
float y;
float z;
float score;
}IDX;
// One detection result: axis-aligned box corners, score, class and track IDs.
typedef struct tag_Objinfo
{
/* data */
float x1;
float y1;
float x2;
float y2;
float score;
int classNum;
int ID;
int trackID;
float area;
}objinfo;
// Non-owning view of one input image plus its metadata.
// `data` is a borrowed pointer; the caller keeps the buffer alive — TODO confirm.
typedef struct tag_ImgMat
{
/* data */
unsigned char* data=NULL;
int width=0;
int height=0;
int depth=0;
DATA_LAYOUT layout=HWC;
INPUT_LAYOUT inlayout = RGB;
int frameID=0;
MODE mode = IR;
// bool relayout=false;
long long timestamp = 0;
long long cfg = 0; // address of GD_VIDEO_FRAME_S structure
}ImgMat;
// Non-owning view of one output tensor plus its metadata.
typedef struct tag_TenMat
{
/* data */
void * data = nullptr;
int width=0;
int height=0;
int depth=0;
size_t dataByte=0; // total payload size in bytes
DATATYPE type;
DATA_LAYOUT layout;
int elementNum = 0;
long long timestamp = 0;
}TenMat;
}
#endif

@ -12,6 +12,11 @@ endif()
find_package(OpenMP REQUIRED)
find_package(Ceres REQUIRED)
include_directories(${CERES_INCLUDE_DIRS})
#
SET(ArithTrkPubInc ${CMAKE_SOURCE_DIR}/public_include)
@ -29,22 +34,21 @@ set(ArithSrcDIR_MAIN "src") # 库源文件路径
# 使Common
file(GLOB libsrcs ${ArithSrcDIR_MAIN}/*.cpp ${ArithSrcDIR_MAIN}/*.c ${ArithSrcDIR_MAIN}/*.h ${ArithSrcDIR_MAIN}/*.hpp)
file(GLOB CommonSrc ${ArithSrcDIR_MAIN}/utils/*.cpp ${ArithSrcDIR_MAIN}/utils/*.c ${ArithSrcDIR_MAIN}/utils/*.h ${ArithSrcDIR_MAIN}/utils/*.hpp)
file(GLOB utmsrc ${ArithSrcDIR_MAIN}/utils/*.cpp ${ArithSrcDIR_MAIN}/utils/*.c ${ArithSrcDIR_MAIN}/utils/*.h ${ArithSrcDIR_MAIN}/utils/*.hpp)
add_library(${LIB_STITCH} SHARED ${libsrcs} ${CommonSrc} ${utmsrc}) #
add_library(${LIB_STITCH} SHARED ${libsrcs} ${CommonSrc}) #
#
target_include_directories(${LIB_STITCH} PUBLIC
${ArithTrkPubInc}
${ArithSrcDIR_MAIN}
${ArithSrcDIR_MAIN}/utils
${ArithSrcDIR_MAIN}/utm
)
#
target_link_libraries(${LIB_STITCH}
OpenMP::OpenMP_CXX
${OpenCV_LIBS})
${OpenCV_LIBS}
${CERES_LIBRARIES})
# # gcc0
if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")

@ -17,6 +17,7 @@
#include "Arith_CommonDef.h"
#include "opencv2/opencv.hpp"
#include "PlatformDefine.h"
// 帧内外方位元素
struct FrameInfo
{
@ -28,17 +29,31 @@ struct FrameInfo
int nHeight;
};
// 全景图配置
struct PanInfo
{
int m_pan_width;
int m_pan_height;
float scale;// 比例尺
float map_shiftX;// 平移X
float map_shiftY;// 平移Y
};
class STD_STITCH_API API_VideoStitch
{
public:
virtual ~API_VideoStitch() = default;
virtual BBOOL Init(FrameInfo info) = 0;
virtual BYTE8 Updata(cv::Mat img, FrameInfo para) = 0;
// 初始化拼接
virtual PanInfo Init(FrameInfo info) = 0;
// 输入帧
virtual BYTE8 Updata(GD_VIDEO_FRAME_S img, FrameInfo para) = 0;
// 获取全景图
virtual GD_VIDEO_FRAME_S ExportPanAddr() = 0;
virtual void Test() = 0;
public:
static API_VideoStitch* Create(SINT32 nWidth, SINT32 nHeight);
static void Destroy(API_VideoStitch* obj);

@ -0,0 +1,216 @@
#include "Arith_ExtrinsicStitcher.h"
#include "Arith_VideoStitch.h"
#include "Arith_Utils.h"
#include "Arith_CoordModule.h"
#include "Arith_SysStruct.h"
using namespace cv;
// Default constructor: no resources acquired; origin pose is set later via InitMap.
ExtrinsicStitcher::ExtrinsicStitcher()
{
}
// Default destructor: nothing to release.
ExtrinsicStitcher::~ExtrinsicStitcher()
{
}
// Initialize the geographic mapping and panorama parameters.
// Records `info` as the stitching origin, projects the frame centre into
// the origin-relative geographic frame, and derives the panorama shift so
// that stitching starts at a fixed anchor point of the canvas.
PanInfo ExtrinsicStitcher::InitMap(FrameInfo info)
{
    // Remember the origin pose; all later frames are expressed relative to it.
    SetOriginPoint(info);

    // Image <-> geographic projection for this pose.
    Proj proj = AnlayseTform(info);

    // Geographic position of the frame centre.
    cv::Point2f centerGeo =
        Trans_uv2Geo(cv::Point2f(info.nWidth / 2, info.nHeight / 2), proj.tf_p2g);

    // Panorama canvas configuration.
    PanInfo panPara = {0};
    panPara.m_pan_width = 1000;  // panorama width, pixels
    panPara.m_pan_height = 1000; // panorama height, pixels
    panPara.scale = 0.5;         // scale: 1 m == `scale` pixels

    // Where the frame centre would land with zero shift applied.
    auto unshifted = Trans_Geo2pan(centerGeo, panPara);

    // Shift so the first frame's centre lands on the fixed anchor point,
    // from which stitching grows.
    int anchorX = 200;
    int anchorY = 100;
    panPara.map_shiftX = anchorX - (unshifted.x); // shift X
    panPara.map_shiftY = anchorY - (unshifted.y); // shift Y

    return panPara;
}
// Forward projection: panorama pixel -> source-frame pixel.
// Composes panorama->geographic with geographic->image.
cv::Point2f ExtrinsicStitcher::project(cv::Point2f pos_pan, Proj m_Proj, PanInfo pan)
{
    return Trans_Geo2uv(Trans_pan2Geo(pos_pan, pan), m_Proj.tf_g2p);
}
// Back projection: source-frame pixel -> panorama pixel.
// Composes image->geographic with geographic->panorama.
cv::Point2f ExtrinsicStitcher::back_project(cv::Point2f pos_frame, Proj m_Proj, PanInfo pan)
{
    return Trans_Geo2pan(Trans_uv2Geo(pos_frame, m_Proj.tf_p2g), pan);
}
// Record the pose of the first stitched frame; later frames' translations
// (Mat_TransENGMove) are computed relative to this origin.
void ExtrinsicStitcher::SetOriginPoint(FrameInfo info)
{
originPoint = info;
}
// Map a frame pixel into the origin-relative geographic frame.
// Applies the homography form.R with a perspective divide, then translates
// by form.T to move from the camera-local frame to the origin frame.
cv::Point2f ExtrinsicStitcher::Trans_uv2Geo(cv::Point2f pos_frame, TForm form)
{
    Mat uv1 = (Mat_<double>(3, 1) << pos_frame.x, pos_frame.y, 1);
    Mat geo = form.R * uv1;

    // Perspective division into the local geographic plane.
    double w = geo.at<double>(2, 0);
    double gx = geo.at<double>(0, 0) / w;
    double gy = geo.at<double>(1, 0) / w;

    // Translate to the origin geographic frame.
    gx += form.T.at<double>(0, 0);
    gy += form.T.at<double>(1, 0);

    return cv::Point2f(gx, gy);
}
// Map a point in the origin-relative geographic frame back to frame pixels.
// Inverse of Trans_uv2Geo: translate into the current camera position first,
// then apply the forward homography form_inv.R with a perspective divide.
cv::Point2f ExtrinsicStitcher::Trans_Geo2uv(cv::Point2f pos_geo, TForm form_inv)
{
    // Translate to the current camera position.
    double camX = pos_geo.x + form_inv.T.at<double>(0, 0);
    double camY = pos_geo.y + form_inv.T.at<double>(1, 0);

    Mat xy1 = (Mat_<double>(3, 1) << camX, camY, 1);
    Mat uvw = form_inv.R * xy1;

    // Perspective division into image coordinates.
    double w = uvw.at<double>(2, 0);
    return cv::Point2f(uvw.at<double>(0, 0) / w, uvw.at<double>(1, 0) / w);
}
// Panorama pixel -> geographic coordinates.
// Undoes the map shift and scale; the panorama y axis points down while the
// geographic y axis points up, hence the m_pan_height flip.
cv::Point2f ExtrinsicStitcher::Trans_pan2Geo(cv::Point2f pos_pan, PanInfo panPara)
{
    double geoX = (pos_pan.x - panPara.map_shiftX) / panPara.scale;
    double geoY = (panPara.m_pan_height - (pos_pan.y - panPara.map_shiftY)) / panPara.scale;
    return cv::Point2f(geoX, geoY);
}
// Geographic coordinates -> panorama pixel.
// Inverse of Trans_pan2Geo: scale, flip the y axis, then apply the map shift.
cv::Point2f ExtrinsicStitcher::Trans_Geo2pan(cv::Point2f pos_geo, PanInfo panPara)
{
    double panX = pos_geo.x * panPara.scale + panPara.map_shiftX;
    double panY = (panPara.m_pan_height - pos_geo.y * panPara.scale) + panPara.map_shiftY;
    return cv::Point2f(panX, panY);
}
// Translation (3x1, CV_64F) of the current craft position relative to the
// stitching origin, in the local frame used by the projection chain.
// getNUEXYZFromCGCSXYZ presumably returns a North-Up-East offset; the
// components are reordered below as (Z, X, Y) to match the axis convention
// expected by Trans_uv2Geo/Trans_Geo2uv — TODO confirm the exact convention.
Mat ExtrinsicStitcher::Mat_TransENGMove(FrameInfo info)
{
PointXYZ ptCurr = getXYZFromBLH(info.craft.stPos); // current position, earth-fixed XYZ
PointXYZ ptOrig = getXYZFromBLH(originPoint.craft.stPos); // origin position, earth-fixed XYZ
PointXYZ diff = getNUEXYZFromCGCSXYZ(ptCurr, ptOrig); // offset of current w.r.t. origin
Mat move = Mat::zeros(3, 1, CV_64F);
move.at<double>(0, 0) = diff.Z;
move.at<double>(1, 0) = diff.X;
move.at<double>(2, 0) = diff.Y;
return move;
}
// Build the 3x3 projection from the local geographic (ENG) frame to pixel
// coordinates for the given pose. The chain (right to left) is: depth scale,
// craft attitude (yaw, pitch, roll), servo/gimbal angles (beta = azimuth,
// alaph = pitch), then the camera intrinsics. Statement order must not change:
// the final product depends on it.
Mat ExtrinsicStitcher::Mat_TransENG2uv(FrameInfo info)
{
// Geographic coordinates -> pixel coordinates:
//[u,v,1]'=Z*M*[X,Y,DH]' = Z*M*[1,0,0;0,1,0;0,0,DH]'*[X,Y,1]'
//[u,v,1]'=Z*M(intrinsics)*M(alaph)*M(beta)*M(roll)*M(pitch)*M(yaw)*[X,Y,DH]
// Depth matrix: folds the flight height above ground into the homogeneous term.
Mat M_het = (Mat_<double>(3, 3) << 1, 0, 0,
0, 1, 0,
0, 0, info.nEvHeight
);
// Craft yaw: rotation about the vertical axis (degrees; cosd/sind take degrees).
float yaw = info.craft.stAtt.fYaw;
Mat M_yaw = (Mat_<double>(3, 3) << cosd(yaw), -sind(yaw), 0,
sind(yaw), cosd(yaw), 0,
0, 0, 1
);
// Craft pitch: rotation about the lateral axis.
float pit = info.craft.stAtt.fPitch;
Mat M_pitch = (Mat_<double>(3, 3) << 1, 0, 0,
0, cosd(pit), -sind(pit),
0, sind(pit), cosd(pit)
);
/* 1 0 0
0 cos sin
0 -sin cos
*/
// Craft roll: rotation about the longitudinal axis.
float roll = info.craft.stAtt.fRoll;
Mat M_roll = (Mat_<double>(3, 3) << cosd(roll), 0, sind(roll),
0, 1, 0,
-sind(roll), 0, cosd(roll)
);
// Servo azimuth.
float beta = info.servoInfo.fServoAz;
Mat M_beta = (Mat_<double>(3, 3) << cosd(beta), -sind(beta), 0,
sind(beta), cosd(beta), 0,
0, 0, 1
);
// Servo pitch.
float alaph = info.servoInfo.fServoPt;
Mat M_alaph = (Mat_<double>(3, 3) << 1, 0, 0,
0, cosd(alaph), -sind(alaph),
0, sind(alaph), cosd(alaph)
);
// Intrinsics: fd = focal length / pixel size (the *1000 presumably converts
// mm to um or similar — TODO confirm units of nFocus and fPixelSize).
// Principal point is assumed at the image centre; -fd flips the v axis.
FLOAT32 fd = info.camInfo.nFocus / info.camInfo.fPixelSize * 1000;
Mat M_cam = (Mat_<double>(3, 3) << fd, 0, info.nWidth / 2,
0, -fd, info.nHeight / 2,
0, 0, 1
);
// Full chain, applied right to left.
Mat M = M_cam * M_alaph * M_beta * M_roll * M_pitch * M_yaw * M_het;
//cout << M_cam * M_alaph * M_beta * M_roll * M_pitch * M_yaw * M_het;
return M;
}
// Build the forward/backward projections between the image plane and the
// origin-relative geographic frame for the given frame pose.
// Returns a Proj holding both directions:
//   tf_p2g: image -> geographic (inverse rotation, forward translation)
//   tf_g2p: geographic -> image (forward rotation, negated translation)
Proj ExtrinsicStitcher::AnlayseTform(FrameInfo info)
{
	Proj projection;

	// ENG->pixel matrix for this pose. Computed once and reused; the previous
	// version rebuilt the full 6-matrix product a second time for tf_g2p.R.
	Mat M = Mat_TransENG2uv(info);

	// Image -> geographic.
	projection.tf_p2g.R = M.inv();
	projection.tf_p2g.T = Mat_TransENGMove(info);

	// Geographic -> image.
	projection.tf_g2p.R = M;
	projection.tf_g2p.T = -projection.tf_p2g.T;

	return projection;
}

@ -0,0 +1,79 @@
/*********版权所有C2025武汉高德红外股份有限公司***************************************
* Arith_ExtrinsicStitcher.h
*
* ,,
*
* V0.5
* 04046wcw
* 2025/01/15
*
*****************************************************************************************/
#pragma once
#include <opencv2/opencv.hpp>
#include "Arith_CommonDef.h"
#include "API_VideoStitch.h"
// Image-plane <-> ground transform: rotation/homography R plus translation T.
struct TForm
{
cv::Mat R;
cv::Mat T;
};
// Projection pair describing both directions of the frame<->ground mapping.
struct Proj
{
TForm tf_p2g;// R|t from the frame to the geographic frame
TForm tf_g2p;// R|t from the geographic frame to the frame
};
// Computes the geometric relationship between camera frames and a panorama
// canvas from platform extrinsics (position, attitude, servo angles).
class ExtrinsicStitcher
{
public:
// Constructor
ExtrinsicStitcher();
~ExtrinsicStitcher();
// Initialize the geographic origin and the panorama parameters.
PanInfo InitMap(FrameInfo info);
// Projection: from the panorama to a frame.
cv::Point2f project(cv::Point2f pos_pan, Proj m_Proj, PanInfo pan);
// Back-projection: from a frame to the panorama.
cv::Point2f back_project(cv::Point2f pos_frame, Proj m_Proj, PanInfo pan);
// Compute the current frame's image<->map R|t (back-projection) relationship.
Proj AnlayseTform(FrameInfo info);
public:
// Frame image plane -> geographic coordinates.
cv::Point2f Trans_uv2Geo(cv::Point2f pos_frame, TForm form);
// Geographic coordinates -> frame image plane.
cv::Point2f Trans_Geo2uv(cv::Point2f pos_geo, TForm form);
// Panorama -> geographic.
cv::Point2f Trans_pan2Geo(cv::Point2f pos_pan, PanInfo panPara);
// Geographic -> panorama.
cv::Point2f Trans_Geo2pan(cv::Point2f pos_geo, PanInfo panPara);
// Translation matrix: current position expressed in the origin's geographic
// frame; every point of the current frame is shifted by this amount.
cv::Mat Mat_TransENGMove(FrameInfo info);
// Rotation from the body ENG (east-north-ground) frame to the image plane.
cv::Mat Mat_TransENG2uv(FrameInfo info);
private:
// Record the extrinsics of the initial stitching point.
void SetOriginPoint(FrameInfo info);
FrameInfo originPoint;// initial map point, used as the stitching reference
};

@ -22,80 +22,71 @@ void API_VideoStitch::Destroy(API_VideoStitch * obj)
VideoStitch::VideoStitch(SINT32 nWidth, SINT32 nHeight)
{
m_GeoStitcher = new ExtrinsicStitcher();
memset(&m_pan, 0, sizeof(GD_VIDEO_FRAME_S));
}
VideoStitch::~VideoStitch()
{
delete m_GeoStitcher;
}
#include "ceres/ceres.h"
using namespace ceres;
struct CostFunctor {
template <typename T>
bool operator()(const T* const x, T* residual) const {
residual[0] = 10.0 - x[0];
return true;
}
};
void VideoStitch::Test()
{
FrameInfo info = {0};
info.camInfo.nFocus = 200;
info.camInfo.fPixelSize = 25;
info.craft.stAtt.fYaw = 1;
info.craft.stAtt.fPitch = 2;
info.craft.stAtt.fRoll = 3;
info.nEvHeight = 1000;
google::InitGoogleLogging("ceres");
info.servoInfo.fServoAz = 4;
info.servoInfo.fServoPt = 5;
// The variable to solve for with its initial value. It will be
// mutated in place by the solver.
double x = 0.5;
const double initial_x = x;
info.nWidth = 1280;
info.nHeight = 1024;
// Build the problem.
ceres::Problem problem;
Mat R = Mat_TransENG2uv(info);
// Set up the only cost function (also known as residual). This uses
// auto-differentiation to obtain the derivative (jacobian).
ceres::CostFunction* cost_function =
new ceres::AutoDiffCostFunction<CostFunctor, 1, 1>(new CostFunctor);
problem.AddResidualBlock(cost_function, nullptr, &x);
Mat R_inv = R.inv();
//auto point = getWarpPoint(540,340, (double*)R_inv.data);
//std::cout << point.x << " " << point.y << std::endl;
// Run the solver!
ceres::Solver::Options options;
options.minimizer_progress_to_stdout = true;
ceres::Solver::Summary summary;
ceres::Solve(options, &problem, &summary);
std::cout << summary.BriefReport() << "\n";
std::cout << "x : " << initial_x << " -> " << x << "\n";
}
BBOOL VideoStitch::Init(FrameInfo info)
PanInfo VideoStitch::Init(FrameInfo info)
{
// 原点信息
originPoint = info;
// 转换关系计算
AnlayseTform(info);
// 帧中心的大地坐标,将其平移到全景图中心
cv::Point2f ct_geo = Trans_uv2Geo(cv::Point2f(info.nWidth / 2, info.nHeight / 2), m_Proj.tf_p2g);
// 全景图初始化
m_Proj.panPara.m_pan_width = 1000;//全景宽
m_Proj.panPara.m_pan_height = 1000;//全景高
m_Proj.panPara.scale = 0.3;//比例尺,1m = ?pix
m_panPara = m_GeoStitcher->InitMap(info);
// 直接无平移解算
auto cur = Trans_Geo2pan(ct_geo, m_Proj.panPara);
m_pan = cv::Mat::zeros(m_panPara.m_pan_height, m_panPara.m_pan_width, CV_8UC1);
m_Proj.panPara.map_shiftX = m_Proj.panPara.m_pan_width / 2 - (cur.x);//平移X
m_Proj.panPara.map_shiftY = m_Proj.panPara.m_pan_height / 2 - (cur.y);//平移Y
// 重新计算,验证已平移到中心
//auto cur2 = Trans_Geo2pan(ct_geo, m_Proj.panPara);
m_pan = Mat::zeros(m_Proj.panPara.m_pan_height, m_Proj.panPara.m_pan_width,CV_8UC1);
return true;
return m_panPara;
}
BYTE8 VideoStitch::Updata(Mat img, FrameInfo para)
BYTE8 VideoStitch::Updata(GD_VIDEO_FRAME_S img, FrameInfo para)
{
AnlayseTform(para);
Proj t_Proj = m_GeoStitcher->AnlayseTform(para);
// 计算帧的map四至
cv::Point2f leftTop_map = back_project(cv::Point2f(0,0), m_Proj);
cv::Point2f rightTop_map = back_project(cv::Point2f(img.cols,0), m_Proj);
cv::Point2f rightBottom_map = back_project(cv::Point2f(img.cols,img.rows), m_Proj);
cv::Point2f leftBottom_map = back_project(cv::Point2f(0,img.rows), m_Proj);
cv::Point2f leftTop_map = m_GeoStitcher->back_project(cv::Point2f(0,0), t_Proj, m_panPara);
cv::Point2f rightTop_map = m_GeoStitcher->back_project(cv::Point2f(img.u32Width,0), t_Proj, m_panPara);
cv::Point2f rightBottom_map = m_GeoStitcher->back_project(cv::Point2f(img.u32Width,img.u32Height), t_Proj, m_panPara);
cv::Point2f leftBottom_map = m_GeoStitcher->back_project(cv::Point2f(0,img.u32Height), t_Proj, m_panPara);
// 计算全景图的范围
@ -109,194 +100,41 @@ BYTE8 VideoStitch::Updata(Mat img, FrameInfo para)
int yRnage = bottom - top;
//反映射到像素坐标
int valid_top = std::max(0, top);
int valid_bottom = std::min(m_pan.cols, bottom);
int valid_left = std::max(0, left);
int valid_right = std::min(m_pan.rows, right);
for (int i = top; i < bottom; i++)
#pragma omp parallel for
for (int i = valid_top; i < valid_bottom; i++)
{
for (int j = left; j < right; j++)
for (int j = valid_left; j < valid_right; j++)
{
if (i < 0 || j < 0 || i >= m_pan.rows || j >= m_pan.cols)
continue;
//转换为pixel坐标
cv::Point2f p_img = project(Point2f(j, i), m_Proj);
cv::Point2f p_img = m_GeoStitcher->project(Point2f(j, i), t_Proj, m_panPara);
if (p_img.x < 0 || p_img.y < 0 || p_img.x > img.cols || p_img.y > img.rows)
if (p_img.x >= 0 && p_img.y >= 0 && p_img.x < img.u32Width && p_img.y < img.u32Height)
{
continue;
m_pan.data[i * m_pan.rows + j] =
FourPointInterpolation(img.u64VirAddr[0], img.u32Width, img.u32Height, p_img.x, p_img.y);
}
//线性插值
m_pan.data[i* m_pan.cols + j] =
FourPointInterpolation(img.data, img.cols, img.rows, p_img.x, p_img.y);
}
}
cv::imshow("pan", m_pan);
cv::waitKey(1);
return BYTE8();
}
cv::Point2f VideoStitch::project(cv::Point2f pos_pan, Proj m_Proj)
{
cv::Point2f pos_geo = Trans_pan2Geo(pos_pan, m_Proj.panPara);
cv::Point2f pos_frame = Trans_Geo2uv(pos_geo, m_Proj.tf_g2p);
return pos_frame;
}
cv::Point2f VideoStitch::back_project(cv::Point2f pos_frame, Proj m_Proj)
{
cv::Point2f pos_geo = Trans_uv2Geo(pos_frame, m_Proj.tf_p2g);
cv::Point2f pos_pan = Trans_Geo2pan(pos_geo, m_Proj.panPara);
return pos_pan;
}
cv::Point2f VideoStitch::Trans_uv2Geo(cv::Point2f pos_frame, TForm form)
{
Mat point = (Mat_<double>(3, 1) << pos_frame.x, pos_frame.y, 1);
Mat result = form.R * point;
// 转局部地理系
double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
// 平移到原点地理系
warpedX += form.T.at<double>(0, 0);
warpedY += form.T.at<double>(1, 0);
return cv::Point2f(warpedX, warpedY);
}
cv::Point2f VideoStitch::Trans_Geo2uv(cv::Point2f pos_geo, TForm form_inv)
{
// 先平移到当前相机位置
cv::Point2f pos_cam = pos_geo;
pos_cam.x = pos_geo.x + form_inv.T.at<double>(0, 0);
pos_cam.y = pos_geo.y + form_inv.T.at<double>(1, 0);
Mat point = (Mat_<double>(3, 1) << pos_cam.x, pos_cam.y, 1);
Mat result = form_inv.R * point;
// 转像方
double warpedX = result.at<double>(0, 0) / result.at<double>(2, 0);
double warpedY = result.at<double>(1, 0) / result.at<double>(2, 0);
return cv::Point2f(warpedX, warpedY);
}
cv::Point2f VideoStitch::Trans_pan2Geo(cv::Point2f pos_pan, PanInfo panPara)
{
double x = (pos_pan.x - panPara.map_shiftX)/ panPara.scale;
double y = (panPara.m_pan_height - (pos_pan.y - panPara.map_shiftY)) / panPara.scale;
return cv::Point2f(x, y);
return 0;
}
cv::Point2f VideoStitch::Trans_Geo2pan(cv::Point2f pos_geo, PanInfo panPara)
GD_VIDEO_FRAME_S VideoStitch::ExportPanAddr()
{
double pan_x = pos_geo.x * panPara.scale + panPara.map_shiftX;
double pan_y = (panPara.m_pan_height - pos_geo.y * panPara.scale) + panPara.map_shiftY;
return cv::Point2f(pan_x, pan_y);
}
GD_VIDEO_FRAME_S pan_out;
pan_out.enPixelFormat = GD_PIXEL_FORMAT_GRAY_Y8;
pan_out.u32Width = m_panPara.m_pan_width;
pan_out.u32Height = m_panPara.m_pan_height;
pan_out.u64VirAddr[0] = m_pan.data;
Mat VideoStitch::Mat_TransENGMove(FrameInfo info)
{
PointXYZ ptCurr = getXYZFromBLH(info.craft.stPos);
PointXYZ ptOrig = getXYZFromBLH(originPoint.craft.stPos);
PointXYZ diff = getNUEXYZFromCGCSXYZ(ptCurr, ptOrig);
Mat move = Mat::zeros(3, 1, CV_64F);
move.at<double>(0, 0) = diff.Z;
move.at<double>(1, 0) = diff.X;
move.at<double>(2, 0) = diff.Y;
return move;
return pan_out;
}
Mat VideoStitch::Mat_TransENG2uv(FrameInfo info)
{
//从地理坐标系转像素坐标
//[u,v,1]'=Z*M*[X,Y,DH]' = Z*M*[1,0,0;0,1,0;0,0,DH]'*[X,Y,1]'
//[u,v,1]'=Z*M(内参)*M(alaph)*M(beta)*M(roll)*M(pitch)*M(yaw)*[X,Y,DH]
// 深度矩阵
Mat M_het = (Mat_<double>(3, 3) << 1,0,0,
0,1,0,
0,0,info.nEvHeight
);
float yaw = info.craft.stAtt.fYaw;
Mat M_yaw = (Mat_<double>(3, 3) << cosd(yaw), -sind(yaw),0,
sind(yaw), cosd(yaw),0,
0,0,1
);
float pit = info.craft.stAtt.fPitch;
Mat M_pitch = (Mat_<double>(3, 3) << 1,0,0,
0,cosd(pit),-sind(pit),
0, sind(pit), cosd(pit)
);
/* 1 0 0
0 cos sin
0 -sin cos
*/
float roll = info.craft.stAtt.fRoll;
Mat M_roll = (Mat_<double>(3, 3) << cosd(roll),0, sind(roll),
0,1,0,
-sind(roll),0,cosd(roll)
);
float beta = info.servoInfo.fServoAz;
Mat M_beta = (Mat_<double>(3, 3) << cosd(beta), -sind(beta),0,
sind(beta),cosd(beta),0,
0,0,1
);
float alaph = info.servoInfo.fServoPt;
Mat M_alaph = (Mat_<double>(3, 3) << 1,0,0,
0, cosd(alaph),-sind(alaph),
0,sind(alaph),cosd(alaph)
);
// 内参
FLOAT32 fd = info.camInfo.nFocus /info.camInfo.fPixelSize * 1000;
Mat M_cam = (Mat_<double>(3, 3) << fd,0,info.nWidth/2,
0,-fd,info.nHeight/2,
0,0,1
);
Mat M = M_cam*M_alaph*M_beta*M_roll*M_pitch*M_yaw * M_het;
//cout << M_cam * M_alaph * M_beta * M_roll * M_pitch * M_yaw * M_het;
return M;
}
void VideoStitch::AnlayseTform(FrameInfo info)
{
// 从像方->地理
m_Proj.tf_p2g.R = Mat_TransENG2uv(info).inv();
m_Proj.tf_p2g.T = Mat_TransENGMove(info);
// 从地理->像方
m_Proj.tf_g2p.R = Mat_TransENG2uv(info);
m_Proj.tf_g2p.T = -m_Proj.tf_p2g.T;
}

@ -1,33 +1,8 @@
#include "API_VideoStitch.h"
#include "opencv2/opencv.hpp"
#include "Arith_ExtrinsicStitcher.h"
// 全景图配置
struct PanInfo
{
int m_pan_width;
int m_pan_height;
float scale;// 比例尺
float map_shiftX;// 平移X
float map_shiftY;// 平移Y
};
// 像方-物方转换关系
struct TForm
{
cv::Mat R;
cv::Mat T;
};
// 投影关系,描述反算过程
struct Proj
{
TForm tf_p2g;//从帧到地理坐标系的Rt矩阵
TForm tf_g2p;//从地理坐标系到帧的Rt矩阵
PanInfo panPara;//全景图配置
};
class VideoStitch:public API_VideoStitch
{
public:
@ -38,47 +13,18 @@ public:
public:
BBOOL Init(FrameInfo info);
BYTE8 Updata(cv::Mat img, FrameInfo para);
// 投影:从全景图到帧
cv::Point2f project(cv::Point2f pos_pan, Proj m_Proj);
PanInfo Init(FrameInfo info);
// 反投影:从帧到全景图
cv::Point2f back_project(cv::Point2f pos_frame, Proj m_Proj);
BYTE8 Updata(GD_VIDEO_FRAME_S img, FrameInfo para);
GD_VIDEO_FRAME_S ExportPanAddr();
private:
FrameInfo originPoint;//成图初始点,作为拼接参考
// 帧像方-地理坐标
cv::Point2f Trans_uv2Geo(cv::Point2f pos_frame, TForm form);
// 地理坐标-帧像方
cv::Point2f Trans_Geo2uv(cv::Point2f pos_geo, TForm form);
// 从全景图->地理
cv::Point2f Trans_pan2Geo(cv::Point2f pos_pan, PanInfo panPara);
// 地理->全景图
cv::Point2f Trans_Geo2pan(cv::Point2f pos_geo, PanInfo panPara);
// 平移矩阵,以初始化点为基准,计算当前位置在初始点的地理坐标,那么当前帧所有点的坐标做此平移
cv::Mat Mat_TransENGMove(FrameInfo info);
// 机体ENG(东北地)到像方的 旋转矩阵
cv::Mat Mat_TransENG2uv(FrameInfo info);
// 计算当前帧像方-成图坐标系R t反投影关系
void AnlayseTform(FrameInfo info);
private:
Proj m_Proj;// 当前帧投影矩阵集合
cv::Mat m_pan;//全景图
ExtrinsicStitcher* m_GeoStitcher;//外参计算
PanInfo m_panPara;//全景图配置
cv::Mat m_pan;
};

@ -320,6 +320,7 @@ unsigned char FourPointInterpolation(unsigned char* pSrcImage, int iWidth, int i
j1 = fmax((int)y, 0);
j2 = fmin(j1 + 1, iHeight - 1);
float t, s; //t = x - [x], s = y- [y];
t = x - i1;

Loading…
Cancel
Save