#include "decodedata.h"
#include <windows.h>
#include <functional>
#include <iostream>
#include "opencv2/opencv.hpp"
using namespace cv;
DecodeData::DecodeData()
{
}
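// Read up to chunkSize bytes from filename starting at startPos; returns the number of
// bytes actually read and advances startPos for the next call.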
size_t DecodeData::readFileChunk(const std::string &filename, size_t chunkSize, std::streamoff &startPos, char *buffer)
{
if (!buffer) {
std::cerr << "Buffer pointer must not be null" << std::endl;
return 0;
}
// Open the file in binary mode
std::ifstream file(filename, std::ios::in | std::ios::binary);
if (!file.is_open()) {
std::cerr << "Failed to open file: " << filename << std::endl;
return 0;
}
// Seek to the requested start position
file.seekg(startPos, std::ios::beg);
// Nothing more to read if we are already at the end of the file
if (file.eof()) {
return 0;
}
// Read up to chunkSize bytes into the buffer
file.read(buffer, chunkSize);
// Number of bytes actually read
std::streamsize bytesRead = file.gcount();
// Advance the position for the next read
startPos += bytesRead;
return static_cast<size_t>(bytesRead);
}
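// Read the whole file in fixed-size chunks and feed each chunk to the UDP protocol parser,
// sleeping 20 ms per chunk to emulate the average stream bitrate.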
void DecodeData::processFileInChunks(const std::string& filename, size_t chunkSize) {
std::streamoff currentPos = 0;
size_t chunkNumber = 0;
// Allocate the read buffer (nothrow, so the null check below is meaningful)
char* buffer = new (std::nothrow) char[chunkSize];
if (!buffer) {
std::cerr << "Memory allocation failed" << std::endl;
return;
}
while (true) {
// Read one chunk
size_t bytesRead = readFileChunk(filename, chunkSize, currentPos, buffer);
// Exit the loop when no data was read
if (bytesRead == 0) {
break;
}
// Process the data of the current chunk
chunkNumber++;
//std::cout << "Processing chunk " << chunkNumber << ", size: " << bytesRead << " bytes" << std::endl;
DealWithUDPData(buffer, bytesRead);
std::this_thread::sleep_for(std::chrono::milliseconds(20)); // read ~10 KB of data every 20 ms to emulate the average bitrate
}
std::cout << "File processing finished, " << chunkNumber << " chunks processed" << std::endl;
// Release the buffer
delete[] buffer;
buffer = nullptr;
}
void DecodeData::DealWithUDPData(char *Buffer, int Length)
{
for (int i = 0; i < Length; i++)
{
BYTE ch = (uchar)Buffer[i];
if(m_iUDPPoint >= 4096) // wrap before writing past the end of the 4096-byte UDP buffer
{
m_iUDPPoint = 0;
}
*(m_cUDPBuffer + m_iUDPPoint++) = ch;
//protocol sync: 0xEB 0x90 packet header
if (m_iUDPState == 0 && ch == 0xEB)//0xEB
{
m_iUDPState = 1001;
}
else if (m_iUDPState == 1001 && ch == 0x90)//0x90
{
m_iUDPState = 1002;
}
else if (m_iUDPState == 1002)
{
//parse the packet header once a full 512-byte packet has been buffered
if (m_iUDPPoint >= 512)
{
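// Packet header layout (as parsed below): bytes 4-7 frame number, bytes 8-9 total packet
// count, bytes 10-11 packet index, bytes 12-13 payload length (all low byte first);
// the payload itself starts at offset 32 of the 512-byte packet.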
m_iImgPacketLen = (((m_cUDPBuffer[13])<<8) | m_cUDPBuffer[12]);
m_iImgPacketLenTest = ((m_cUDPBuffer[7]<<24) |(m_cUDPBuffer[6]<<16) |(m_cUDPBuffer[5]<<8) | m_cUDPBuffer[4]);
UdpParam up;
up.nFrameNum = m_iImgPacketLenTest;
up.nDataSize = m_iImgPacketLen;
up.nPackNum = (((m_cUDPBuffer[9])<<8) | m_cUDPBuffer[8]);
up.nPackIndex = (((m_cUDPBuffer[11])<<8) | m_cUDPBuffer[10]);
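// Lost-packet detection: m_lIndex tracks the expected packet index; a mismatch
// with the index carried in the header is counted in m_lLostNum.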
if(up.nPackIndex == 0)
{
m_lIndex = 0;
}
else
{
++m_lIndex;
if(m_lIndex != up.nPackIndex)
{
m_lLostNum++;
//qDebug() << "LostPacket,FrameNo:" << m_iImgPacketLenTest << "TotalPacket:"<<up.nPackNum << "PacketRecvCount:"<< m_lIndex << "PacketNo:"<<up.nPackIndex;
m_lIndex = up.nPackIndex;
}
}
// qDebug() << "当前数据包长度: " << m_iImgPacketLen << "总包数:" << up.nPackNum << "index:" << up.nPackIndex;
if((m_iImgPacketLen>0) && (m_iImgPacketLen<=(512-32)))
{
processData((char*)(m_cUDPBuffer+32), m_iImgPacketLen);//H.264/H.265 payload
// qDebug() << "===== " << m_iImgPacketLen << "---" << getStr((char*)m_cUDPBuffer+32,10);
}
m_iUDPState = m_iUDPPoint = 0;
}
}
else
{
m_iUDPState = m_iUDPPoint = 0;
}
}
}
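// Copy one extracted payload into a FRAME_LIST_ELEMENT and append it to ImageCapQueue;
// when the queue reaches UDP_BUFFER_LENGTHIMG elements the queued frames are dropped first.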
void DecodeData::processData(char *cH264Data, int iLength)
{
if(iLength<=0) return; // validate before allocating so the list element is not leaked
FRAME_LIST_ELEMENT *pListEle = new FRAME_LIST_ELEMENT;
pListEle->FrameID=0;
pListEle->len = iLength;
pListEle->pFrameInfo = new uchar[iLength];
ZeroMemory(pListEle->pFrameInfo,iLength);
memcpy(pListEle->pFrameInfo, cH264Data, iLength);
m_UDPdataMutex.lock();
if(ImageCapQueue.size() >= UDP_BUFFER_LENGTHIMG)//queue full: drop the queued frames starting from the front
{
for(int m=0;m<UDP_BUFFER_LENGTHIMG;m++)
{
PFRAME_LIST_ELEMENT pTempListEle = (PFRAME_LIST_ELEMENT)ImageCapQueue.front();
ImageCapQueue.remove(pTempListEle);
delete[] (pTempListEle->pFrameInfo); // allocated with new[], so release with delete[]
delete pTempListEle;
pTempListEle = NULL;
}
}
ImageCapQueue.push_back(pListEle);
m_UDPdataMutex.unlock();
}
void DecodeData::Init()
{
m_pBuffer = nullptr;
m_FrameNo = 0;
m_cUDPBuffer = new uchar[4096];
memset(m_cUDPBuffer,0,4096);
m_cDecodeBuffer = new uint8_t [UDPIMGWIDTH * UDPIMGHEIGHT * 4];
m_pBufferIR = new unsigned char[UDPIMGWIDTH * UDPIMGHEIGHT * 4];
InitDecode();
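// Start the worker thread that drains ImageCapQueue and decodes the buffered bitstream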
m_bDataThreadRunning = true;
m_ptrThread_DataDealwith = new std::thread(std::bind(&DecodeData::ThreadEntry, &DecodeData::ThreadFunData, (void*)this));
m_frontStitcher = API_FrontStitch::Create();
m_underStitcher = API_UnderStitch::Create("E:/google_tiles");
}
void DecodeData::UnInit()
{
if (m_pFmtOpts)
{
av_dict_free(&m_pFmtOpts);
m_pFmtOpts = NULL;
}
if (m_pCodecOpts)
{
av_dict_free(&m_pCodecOpts);
m_pCodecOpts = NULL;
}
if (m_pCodecCtx)
{
avcodec_free_context(&m_pCodecCtx); // closes the codec and frees the context itself
m_pCodecCtx = NULL;
}
if (m_pFmtCtx)
{
avformat_close_input(&m_pFmtCtx);
m_pFmtCtx = NULL;
}
if (m_pFrame)
{
av_frame_free(&m_pFrame);
m_pFrame = NULL;
}
if (m_pPacket)
{
av_packet_free(&m_pPacket); // av_packet_unref alone would leak the AVPacket struct
m_pPacket = NULL;
}
if (m_pSwsCtx)
{
sws_freeContext(m_pSwsCtx);
m_pSwsCtx = NULL;
}
if (m_pBuffer)
{
av_free(m_pBuffer);
m_pBuffer = NULL;
}
if(m_cDecodeBuffer)
{
delete[] m_cDecodeBuffer;
m_cDecodeBuffer = nullptr;
}
if(m_pBufferIR)
{
delete[] m_pBufferIR;
m_pBufferIR = nullptr;
}
}
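// Allocate and open the FFmpeg HEVC decoder, enabling low-resolution / fast decoding flags.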
int DecodeData::InitDecode()
{
int ret = 0;
m_pFmtOpts = NULL;
m_pCodecOpts = NULL;
m_pFmtCtx = NULL;
m_pCodecCtx = NULL;
m_pPacket = NULL;
m_pFrame = NULL;
m_pSwsCtx = NULL;
m_pVideoCodec = NULL;
if (!s_bFFMpegInit)
{
av_register_all(); //1.
avformat_network_init(); //2.
s_bFFMpegInit = true;
}
//select the decoder (HEVC)
m_pVideoCodec = avcodec_find_decoder(AV_CODEC_ID_H265);
if (!m_pVideoCodec)
{
printf("video decoder not found");
return ret;
}
m_pCodecCtx = avcodec_alloc_context3(m_pVideoCodec);
if (!m_pCodecCtx) {
return ret;
}
//enable accelerated (low-resolution / fast) decoding
m_pCodecCtx->lowres = m_pVideoCodec->max_lowres;
m_pCodecCtx->flags2 |= AV_CODEC_FLAG2_FAST;
//open the video decoder
ret = avcodec_open2(m_pCodecCtx, m_pVideoCodec, NULL);
if (ret < 0)
{
ret = -14;
printf("open video codec error");
return ret;
}
m_pPacket = av_packet_alloc();
m_pFrame = av_frame_alloc();
return ret;
}
//void DecodeData::getPanoSourceData(unsigned char *dst, unsigned char *src)
//{
// for(int i=0;i<1024;++i)
// {
// memcpy(dst+1280*3*i,src+(28+i)*1920*3+320*3,1280*3);
// }
//}
void DecodeData::ThreadEntry(DecodeData::ThreadRunFunPtr pRunFun, void *pOwner)
{
DecodeData* pRunClass = reinterpret_cast<DecodeData*>(pOwner);
if (!pRunClass)
{
return;
}
(pRunClass->*pRunFun)();
}
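// Worker thread body: pop queued payloads, feed them to the start-code splitter/decoder,
// and sleep briefly when the queue is empty.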
void DecodeData::ThreadFunData()
{
while(m_bDataThreadRunning)
{
if(!ImageCapQueue.empty())
{
m_UDPdataMutex.lock();
PFRAME_LIST_ELEMENT pListEle = NULL;
pListEle = (ImageCapQueue.front());//take the oldest queued element
ImageCapQueue.remove(pListEle);
m_UDPdataMutex.unlock();
DecodeUDPData((uint8_t *)(pListEle->pFrameInfo), pListEle->len);
if((pListEle != NULL) && (pListEle->len>0))
{
delete[] (pListEle->pFrameInfo); // allocated with new[] in processData
delete pListEle;
pListEle = NULL;
}
}
else
{
std::this_thread::sleep_for(std::chrono::milliseconds(5));
}
}
}
void DecodeData::DecodeUDPData(uint8_t *cData, long lLength)
{
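// Scan for Annex-B start codes (00 00 00 01). Bytes accumulate in m_cDecodeBuffer until the
// next start code appears; the completed NAL unit is then passed to DecodeH264Data and the
// new start code is moved to the front of the buffer.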
for (int i = 0; i < lLength; ++i)
{
uint8_t ch = cData[i];
m_cDecodeBuffer[m_lDecodePoint++] = ch;
if(ch == 0x00 && m_nRes == 0 )
{
m_nRes = 1;
}
else if(m_nRes == 1 && ch == 0x00)
{
m_nRes = 2;
}
else if(m_nRes == 2 && ch == 0x00)
{
m_nRes = 3;
}
else if(m_nRes == 3 && ch == 0x01)
{
m_nRes = 4;
}
else if(m_nRes == 4)// && ((ch == 0x67) || (ch == 0x68) || (ch == 0x06) || (ch == 0x65) || (ch == 0x61)))
{
m_nRes = 5;
printf("aabbccdd:%d,\n",ch);
}
else if(m_nRes == 5)
{
if(m_lDecodePoint>=10)
{
/* if(m_cDecodeBuffer[m_lDecodePoint-5] == 0x00 && m_cDecodeBuffer[m_lDecodePoint-4] == 0x00 && m_cDecodeBuffer[m_lDecodePoint-3] == 0x00 && m_cDecodeBuffer[m_lDecodePoint-2] == 0x01 \
&& (m_cDecodeBuffer[m_lDecodePoint-1] == 0x67 || m_cDecodeBuffer[m_lDecodePoint-1] == 0x68 || \
m_cDecodeBuffer[m_lDecodePoint-1] == 0x06 || m_cDecodeBuffer[m_lDecodePoint-1] == 0x65 || \
m_cDecodeBuffer[m_lDecodePoint-1] == 0x61) )*/
if(m_cDecodeBuffer[m_lDecodePoint-5] == 0x00 && m_cDecodeBuffer[m_lDecodePoint-4] == 0x00 && m_cDecodeBuffer[m_lDecodePoint-3] == 0x00 && m_cDecodeBuffer[m_lDecodePoint-2] == 0x01)
{
DecodeH264Data(m_cDecodeBuffer,m_lDecodePoint-5);
memcpy(m_cDecodeBuffer,m_cDecodeBuffer+m_lDecodePoint-5,5);
memset(m_cDecodeBuffer+5,0,m_lDecodePoint-5);
m_lDecodePoint = 5;
}
}
}
else
{
m_nRes = 0;
m_lDecodePoint = 0;
}
}
}
void DecodeData::DecodeH264Data(uint8_t *h264Data, long dataLength)
{
m_pPacket->data = h264Data;
m_pPacket->size = dataLength;
/*
* Extract the SEI parameter NAL unit (if present)
*/
{
uint8_t* pData = m_pPacket->data;
uint8_t* pBuff = nullptr;
int i = 0, j = 0, SEI_Size = 0;
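// An SEI NAL unit is detected by the Annex-B start code 00 00 00 01 followed by NAL type 0x06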
if((0x00 == pData[0]) && (0x00 == pData[1]) && (0x00 == pData[2]) && (0x01 == pData[3]) && (0x06 == pData[4]) )
{
/* Compute the SEI payload size (length bytes are 0xFF-extended) */
i = 6;
do
{
SEI_Size += pData[i];
}while(0xFF == pData[i++]);
if(SEI_Size>128)
{
if(SEI_Size > 2048 || SEI_Size < 1024)
SEI_Size = 2048;
pBuff = (uint8_t*)malloc(sizeof(uint8_t)*SEI_Size); //SEI data buffer
/* The bytes after i are the SEI payload, which must be de-escaped here: when two consecutive
   0x00 bytes occur, H.264 inserts an emulation prevention byte (0x00 0x00 becomes 0x00 0x00 0x03),
   so the 0x03 has to be removed. */
for(j = 0; j < SEI_Size; i++)
{
/* If pData[i] == 0x03 and the two preceding bytes are both 0x00, do not copy pData[i] into pBuff */
if((0x03 == pData[i]) && (0x00 == pData[i-1]) && (0x00 == pData[i-2]) )
{
continue;
}
/* No 0x00 0x00 0x03 sequence: copy pData[i] into pBuff[j] */
pBuff[j] = pData[i];
j++;
}
memcpy(&SEI_Data, pBuff, sizeof(H264SEI_Para_732) );
// memcpy(ImageParaData, pBuff, sizeof(H264SEI_Para_732) );
free(pBuff);
pBuff = nullptr;
return;
}
else
{
}
}
}
// Decode the H.264/H.265 frame
int ret = avcodec_send_packet(m_pCodecCtx, m_pPacket);
if (ret != 0)
{
return;
}
while(ret == 0)
{
ret = avcodec_receive_frame(m_pCodecCtx, m_pFrame);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
break; // need more data or reached end of stream; exit normally
}
else if (ret < 0) {
// av_log(NULL, AV_LOG_ERROR, "failed to receive frame: %s\n", av_err2str(ret));
return;
}
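// (Re)create the swscale context and output buffer whenever the decoded frame geometry or pixel format changes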
if(m_nWidth!=m_pFrame->width || m_nHeight != m_pFrame->height || m_emPixFmtSrc != static_cast<AVPixelFormat>(m_pFrame->format) )
{
m_nWidth = m_pFrame->width;
m_nHeight = m_pFrame->height;
m_emPixFmtSrc = static_cast<AVPixelFormat>(m_pFrame->format);
m_emPixFmtDst = AV_PIX_FMT_RGB24;
if (m_pSwsCtx)
{
sws_freeContext(m_pSwsCtx); // release any previous context before recreating it
m_pSwsCtx = NULL;
}
m_pSwsCtx = sws_getContext(m_nWidth, m_nHeight, m_emPixFmtSrc, m_nWidth, m_nHeight, m_emPixFmtDst, SWS_BICUBIC, NULL, NULL, NULL);
if (m_pBuffer)
{
av_free(m_pBuffer);
m_pBuffer = NULL;
}
int nSize = avpicture_get_size(AV_PIX_FMT_RGB24, m_nWidth, m_nHeight);
m_pBuffer = (uint8_t *)av_malloc(nSize * sizeof(uint8_t));
if (m_emPixFmtDst == AV_PIX_FMT_YUV420P)
{
int y_size = m_nWidth * m_nHeight;
int nLen = y_size * 3 / 2;
m_pArrayData[0] = (uint8_t*)m_pBuffer;
m_pArrayData[1] = m_pArrayData[0] + y_size;
m_pArrayData[2] = m_pArrayData[1] + y_size / 4;
m_nArrayLinesize[0] = m_nWidth;
m_nArrayLinesize[1] = m_nArrayLinesize[2] = m_nWidth / 2;
}
else
{
m_emPixFmtDst = AV_PIX_FMT_RGB24;
m_pArrayData[0] = (uint8_t*)m_pBuffer;
m_nArrayLinesize[0] = m_nWidth * 3;
}
}
if (!m_pSwsCtx)
{
printf("sws_getContext failed\n");
return ;
}
if (m_pSwsCtx)
{
int h = sws_scale(m_pSwsCtx, m_pFrame->data, m_pFrame->linesize, 0, m_pFrame->height, m_pArrayData, m_nArrayLinesize);
if (h <= 0 || h != m_pFrame->height)
{
return;
}
}
DecodeRGBImageAndPara(m_pBuffer,SEI_Data,m_nWidth,m_nHeight);
}
}
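// Package one decoded RGB frame and its SEI telemetry into a FrameInfo, display it,
// and feed it to the down-looking stitcher (the front-view stitching path is kept commented out below).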
int DecodeData::DecodeRGBImageAndPara(unsigned char *RGBBuffer, H264SEI_Para_732 m_ImageParaData,int nwidth,int nHeight)
{
//stitching parameters
FrameInfo para;
para.nFrmID = m_FrameNo++;
para.craft.nPlaneID = 0;
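// Convert raw SEI telemetry to physical units (position presumably scaled from 1e-7 deg
// and millimetres, attitude from a signed 16-bit range to degrees)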
para.craft.stPos.L = m_ImageParaData.PLon*(1e-7);
para.craft.stPos.B = m_ImageParaData.PLat*(1e-7);
para.craft.stPos.H = m_ImageParaData.PH*(1e-3);
para.craft.stAtt.fYaw = m_ImageParaData.P_Az*180.0/32767;
para.craft.stAtt.fPitch = m_ImageParaData.P_Pt*180.0/32767;
para.craft.stAtt.fRoll = m_ImageParaData.P_Roll*180.0/32767;
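// m_CurrentVideo selects the source channel: 0 = long-wave IR (center crop fed to the stitcher), 1 = visible light (full frame)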
if( m_CurrentVideo == 0)
{
para.camInfo.nFocus = m_ImageParaData.IRFocalValue*0.1;
para.camInfo.fPixelSize = 12;
para.camInfo.unVideoType = GLB_VIDEO_IR_LW;
para.camInfo.dCamx = 0;
para.camInfo.dCamy = 0;
if(nwidth == 1920)
{
para.nWidth = 1280;
para.nHeight = 1024;
}
else //720p IR: crop a smaller window and scale the pixel size accordingly
{
para.camInfo.fPixelSize = 12*1.5;
para.nWidth = 852;
para.nHeight = 682;
}
// getPanoSourceData(m_pBufferIR,RGBBuffer);//crop the central 1280x1024 RGB region from the IR frame
getPanoSourceData(m_pBufferIR,RGBBuffer,para.nWidth,para.nHeight);
}
else if( m_CurrentVideo == 1)
{
para.camInfo.nFocus = m_ImageParaData.VLFocalValue*0.1;
para.camInfo.fPixelSize = 3.45 * 1920/nwidth;
para.camInfo.unVideoType = GLB_VIDEO_VL;
para.camInfo.dCamx = 0;
para.camInfo.dCamy = 0;
para.nWidth = nwidth;
para.nHeight = nHeight;
}
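// Angular resolution per pixel: pixel pitch over focal length, apparently converted to
// degrees via the 57.3 (rad->deg) and 0.001 (um->mm) factors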
para.camInfo.fAglReso = (para.camInfo.fPixelSize/para.camInfo.nFocus) * 57.3*0.001;
para.servoInfo.fServoAz = m_ImageParaData.Servo_Az*360.0/65535;
para.servoInfo.fServoPt = m_ImageParaData.Servo_Pt*360.0/65535;
para.servoInfo.fServoAzSpeed = m_ImageParaData.Servo_AzSpeed*360.0/4294967295;
para.servoInfo.fServoPtSpeed = m_ImageParaData.Servo_PtSpeed*360.0/4294967295;
para.nEvHeight = para.craft.stPos.H - m_LocalHeight;
std::cout << m_FrameNo <<nwidth <<"*"<< nHeight <<",F:"<<para.camInfo.nFocus<< ",video:" << m_CurrentVideo <<",Az:"<< para.servoInfo.fServoAz <<",Pt:"<< para.servoInfo.fServoPt << ",Lon:" << para.craft.stPos.L <<",Lat:"<< para.craft.stPos.B <<",H:"<< para.craft.stPos.H << std::endl;
// cv::Mat rgb_mat(para.nHeight,para.nWidth,CV_8UC3,(void*)RGBBuffer);
cv::Mat rgb_mat;
if(m_CurrentVideo == 0)
{
rgb_mat = cv::Mat(para.nHeight,para.nWidth,CV_8UC3,(void*)m_pBufferIR);
}
else
{
rgb_mat = cv::Mat(para.nHeight,para.nWidth,CV_8UC3,(void*)RGBBuffer);
}
cv::Mat bgr_mat;
cv::cvtColor(rgb_mat, bgr_mat, cv::COLOR_RGB2BGR);
cv::namedWindow("RGB Image", cv::WINDOW_NORMAL);
cv::imshow("RGB Image", bgr_mat);
cv::waitKey(2);
//// Front-view stitching initialization
//if (m_FrameNo == 1)
//{
// ScanRange scanAz = { -30,30 };
// ScanRange scanPt = { -10,-10 };
// m_frontStitcher->Init(para, scanAz, scanPt);
//}
//else
//{
// // Fast stitching based on exterior orientation parameters
// GD_VIDEO_FRAME_S frame = { 0 };//input frame
// frame.enPixelFormat = GD_PIXEL_FORMAT_RGB_PACKED;
// frame.u32Width = bgr_mat.cols;
// frame.u32Height = bgr_mat.rows;
// frame.u64VirAddr[0] = bgr_mat.data;
// GD_VIDEO_FRAME_S pan = { 0 };//output panorama
// //m_frontStitcher->Run(frame, para);
// AI_Target tt = { 0 };
// tt.score = 0.6;
// tt.x1 = 100;
// tt.x2 = 110;
// tt.y1 = 200;
// tt.y2 = 250;
// m_frontStitcher->Run(frame, para, &tt, 1);
// pan = m_frontStitcher->ExportPanAddr();
// cv::Mat mat_pan = cv::Mat(pan.u32Height, pan.u32Width, CV_8UC3, pan.u64VirAddr[0]);
// // Display the panorama
// cv::Mat res;
// cv::resize(mat_pan, res, cv::Size(pan.u32Width / 2, pan.u32Height / 2));
// imshow("pan", res);
// cv::waitKey(1);
//}
if (m_FrameNo == 1)
{
m_underStitcher->Init(para);
UPanConfig cfg = { 0 };
cfg.bOutGoogleTile = true;
m_underStitcher->SetConfig(cfg);
m_underStitcher->SetOutput("baotou", "E:/google_tiles");
}
else
{
// Fast stitching based on exterior orientation parameters
GD_VIDEO_FRAME_S frame = { 0 };//input frame
frame.enPixelFormat = GD_PIXEL_FORMAT_RGB_PACKED;
frame.u32Width = bgr_mat.cols;
frame.u32Height = bgr_mat.rows;
frame.u64VirAddr[0] = bgr_mat.data;
GD_VIDEO_FRAME_S pan = { 0 };//output panorama
cv::TickMeter tm;
tm.start();
m_underStitcher->Run(frame, para);
tm.stop();
printf("time cost:%f ms\n", tm.getTimeMilli());
pan = m_underStitcher->ExportPanAddr();
cv::Mat mat_pan = cv::Mat(pan.u32Height, pan.u32Width, CV_8UC4, pan.u64VirAddr[0]);
// Display the panorama
cv::Mat res;
cv::resize(mat_pan, res, cv::Size(pan.u32Width / 8, pan.u32Height / 8));
imshow("pan", res);
cv::waitKey(1);
}
return 0;
}
void DecodeData::getPanoSourceData(uchar *dst, uchar *src, int w, int h)
{
if(w == 1280)
{
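// Crop a 1280x1024 window from the 1920-wide RGB source: skip 28 rows at the top and 320 columns on the left (3 bytes per pixel)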
for(int i=0;i<1024;++i)
{
memcpy(dst+1280*3*i,src+(28+i)*1920*3+320*3,1280*3);
}
}
else if(w < 1280) //720p source: the IR crop is scaled down by 1.5
{
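// Crop an 852x682 window from a 1280-wide source: skip 19 rows and 214 columns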
for(int i=0;i<682;++i)
{
memcpy(dst+852*3*i,src+(19+i)*1280*3+214*3,852*3);
}
}
}