#ifndef _TS_PACKER_H_
#define _TS_PACKER_H_
#include <string>
#include <ctime>
#include <chrono>
#include <sstream>
#include <iomanip>
#include <iostream>
#include <thread>
#include <math.h>
#include <stdlib.h>
#include <atomic>
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavdevice/avdevice.h"
#include "libavutil/mathematics.h"
#include "libavutil/avstring.h"
#include "libavutil/mem.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
}
#include <stdio.h>
#include <fcntl.h>
//#include <unistd.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
//#include <sys/time.h>
#include <stdint.h>
#include <QDebug>
#include "commondefine.h"
#include "Arith_zhryp.h"
#include <QJsonDocument>
#include <QJsonObject>
#include <QFile>
#define JADER_360(x) ((x) * 360.0 / 65535.0)
#define JADER_180(x) ((x) * 180.0 / 65535.0)
#define L2B_16Bit(data) (((data) & 0xFF)<<8 | ((data)>>8 & 0xFF))
#define L2B_32Bit(data) ((((data) & 0xFF) << 24) | ((((data) >> 8) & 0xFF) << 16) | ((((data) >> 16) & 0xFF) << 8) | (((data) >> 24) & 0xFF))
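// Illustrative byte-swap examples (values chosen here for documentation, not taken
// from the protocol): L2B_16Bit(0x1234) == 0x3412 and L2B_32Bit(0x12345678) == 0x78563412,
// i.e. the macros swap between big-endian wire order and little-endian host order.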
#pragma pack(push, 1)
// Infrared overlay parameter struct
typedef struct {
// Time information
uint16_t u16Year; // Year, bytes 7-8
uint8_t u8Month; // Month, byte 9
uint8_t u8Day; // Day, byte 10
uint8_t u8Hour; // Hour, byte 11
uint8_t u8Minute; // Minute, byte 12
uint8_t u8Second; // Second, byte 13
uint16_t u16Minisecond; // Millisecond, bytes 14-15
// Platform information (i.e. inertial navigation data)
uint8_t u8PlatType; // Platform type, byte 16
uint16_t u16PlatNumber; // Platform number, bytes 17-18
int16_t s16PlatYaw; // Platform yaw angle, bytes 19-20
int16_t s16PlatPitch; // Platform pitch angle, bytes 21-22
int16_t s16PlatRoll; // Platform roll angle, bytes 23-24
int32_t s32PlatLongitude; // Platform longitude, bytes 25-28
int32_t s32PlatLatitude; // Platform latitude, bytes 29-32
int16_t s16PlatHeight; // Platform altitude, bytes 33-34
int16_t s16PlatEastV; // Platform east velocity, bytes 35-36
int16_t s16PlatNorthV; // Platform north velocity, bytes 37-38
int16_t s16PlatUpV; // Platform vertical (up) velocity, bytes 39-40
// Sensor information
uint8_t u8SensorType; // Sensor type, byte 41
uint16_t u16SensorNumber; // Sensor number, bytes 42-43
uint8_t u8SensorSelect; // Sensor selection report code (optional), byte 44
uint16_t u16SensorHView; // Sensor horizontal field of view, bytes 45-46
uint16_t u16SensorVView; // Sensor vertical field of view, bytes 47-48
uint8_t u8SensorWorkMode; // Sensor work mode, byte 49
int16_t mSensorYaw; // Bytes 50-51: servo azimuth, yaw of the sensor optical axis relative to the mounting plane
int16_t mSensorPitch; // Bytes 52-53: servo pitch, pitch of the sensor optical axis relative to the mounting plane
int16_t mSensorRoll; // Bytes 54-55: servo roll, roll of the sensor optical axis relative to the mounting plane
uint16_t u16LsDistance; // Bytes 56-57: laser ranging value
int32_t s32MarkLongitude; // Bytes 58-61: target position longitude
int32_t s32MarkLatitude; // Bytes 62-65: target position latitude
int16_t s16MarkHeight; // Bytes 66-67: target position altitude
uint16_t mFrameYaw; // Bytes 68-69: sensor frame angle compensation in yaw
int16_t mFramePitch; // Bytes 70-71: sensor frame angle compensation in pitch
int16_t mFrameRoll; // Bytes 72-73: sensor frame angle compensation in roll
uint16_t s16SensorSynYaw; // Bytes 74-75: sensor composite yaw angle
int16_t s16SensorSynPitch; // Bytes 76-77: sensor composite pitch angle
int16_t s16SensorSynRoll; // Bytes 78-79: sensor composite roll angle
// Camera information
int16_t s16CameraYaw; // Bytes 80-81: yaw measured by the INS on the camera mounting plane
int16_t s16CameraPitch; // Bytes 82-83: pitch measured by the INS on the camera mounting plane
int16_t s16CameraRoll; // Bytes 84-85: roll measured by the INS on the camera mounting plane
// Seven infrared fields of view
uint16_t u16IRViewFeild[7]; // Bytes 86-99: field of view for the 7 infrared channels
// Frame counter
uint8_t u8FrameCount; // Frame counter, byte 100
} SendOutTOFC_t;
#pragma pack(pop)
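// Sanity check (a sketch added for documentation, derived from the byte offsets noted above):
// with 1-byte packing the struct spans frame bytes 7-100, i.e. 94 bytes.
static_assert(sizeof(SendOutTOFC_t) == 94, "SendOutTOFC_t should span frame bytes 7-100 (94 bytes)");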
class TsPacker
{
public:
enum PACKER_TYPE{
BINDATA = 0,
VIDEO,
};
TsPacker(std::string &dst){
int nRet = 0;
avformat_network_init();
printf("ts packer --- %s\n", dst.c_str());
// open network format
nRet = avformat_alloc_output_context2(&_format_ctx, nullptr, "mpegts", dst.c_str());
if (nRet < 0 || !_format_ctx)
{
std::cout << "Cannot create output multicast stream." << std::endl;
return;
}
AVOutputFormat *ofmt = _format_ctx->oformat;
AVCodec *codec = avcodec_find_encoder(ofmt->video_codec);
av_opt_set(_format_ctx->priv_data, "mpegts_flags", "pat_pmt_at_frames", 0);
_video_stream = avformat_new_stream(_format_ctx, codec);
_video_stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
_video_stream->codecpar->codec_id = AV_CODEC_ID_H264;
_video_stream->codecpar->format = AV_PIX_FMT_YUV420P;
_video_stream->codecpar->bit_rate = _vbit_rate;
_video_stream->codecpar->width = 1280;
_video_stream->codecpar->height = 1024;
_video_stream->id = mvstream_id;
_video_stream->avg_frame_rate = {25, 1};
_video_stream->sample_aspect_ratio.den = 1;
_video_stream->sample_aspect_ratio.num = 1;
_video_stream->start_time = 0;
_video_stream->r_frame_rate.den = 1;
_video_stream->r_frame_rate.num = 25;
_video_stream->nb_frames = 0;
_format_ctx->bit_rate = _vbit_rate;
_format_ctx->duration_estimation_method = AVFMT_DURATION_FROM_PTS;
if (_format_ctx->oformat->flags & AVFMT_GLOBALHEADER)
_format_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
#if 1
// telemetry stream
_telem_stream = avformat_new_stream(_format_ctx, 0);
_telem_stream->codecpar->codec_id = AV_CODEC_ID_BIN_DATA;
_telem_stream->codecpar->codec_type = AVMEDIA_TYPE_DATA;
_telem_stream->id = mtstream_id;
_telem_stream->time_base = {1, 25};
#endif
nRet = avio_open(&_format_ctx->pb, dst.c_str(), AVIO_FLAG_WRITE);
if (nRet < 0)
{
std::cout << "Cannot open URL for output." << std::endl;
avformat_free_context(_format_ctx);
_format_ctx = nullptr;
return;
}
nRet = avformat_write_header(_format_ctx, nullptr);
if (nRet < 0)
{
std::cout << "Failed to write MPEG-TS header." << std::endl;
}
_video_pkg = av_packet_alloc();
_telem_pkg = av_packet_alloc();
QFile file("./config.json");
if(file.open(QIODevice::ReadOnly)){
QByteArray data = file.readAll();
std::cout << data.constData() << std::endl;
QJsonDocument js = QJsonDocument::fromJson(data);
QJsonObject obj = js.object();
_type = obj.value("type").toInt();
_roll = obj.value("roll").toDouble();
_pitch = obj.value("pitch").toDouble();
_yaw = obj.value("yaw").toDouble();
if(_type){
QJsonObject ztobj = obj.value("zt").toObject();
_zt_yaw = ztobj.value("yaw").toDouble();
_zt_pitch = ztobj.value("pitch").toDouble();
_zt_roll = ztobj.value("roll").toDouble();
std::cout << "zt pitch: " << _zt_pitch << ", roll: " << _zt_roll << ", yaw: " << _zt_yaw << std::endl;
}
std::cout << "pitch: " << _pitch << ", roll: " << _roll << ", yaw: " << _yaw << std::endl;
}
file.close();
}
~TsPacker(){
if (_format_ctx){
av_write_trailer(_format_ctx);
avio_closep(&_format_ctx->pb);
avformat_free_context(_format_ctx);
}
av_packet_free(&_video_pkg);
av_packet_free(&_telem_pkg);
// release
avformat_network_deinit();
}
int init(int width, int height){
return 0;
}
static bool translatPackage(SendOutTOFC_t * src, std::vector<unsigned char> &result, unsigned int &resultSize){
telemetryData telem;
getTelemFromParam(src, telem);
return telemCodec::telemEncode(telem, result, resultSize);
}
static int large_2_little(unsigned short* src, int size){
for(int i=0; i<size; i++){
src[i] = L2B_16Bit(src[i]);
}
return 0;
}
int parseAngle(telemetryData *telem){
sRPYCacl::stTrackPlatInfo track;
track.fPlatAz = telem->mDroneYaw; // Aircraft INS yaw angle
track.fPlatPt = telem->mDronePitch; // Payload-2 tilt-sensor pitch angle
track.fPlatRoll = telem->mDroneRoll; // Payload-2 tilt-sensor roll angle
track.fServoAz = telem->mFrameYaw + _yaw; // Payload-2 servo azimuth angle
track.fServoPt = telem->mFramePitch + _pitch; // Payload-2 servo pitch angle
track.fRoll = telem->mFrameRoll + _roll; // Payload-2 servo roll angle
sRPYCacl::EulerRPY S_RPY1 = Zh_dRPYDown(track);
telem->mInnerInertiaYaw = S_RPY1.Yaw;
telem->mInnerInertiaPitch = S_RPY1.Pitch;
telem->mInnerInertiaRoll = S_RPY1.Roll;
return 0;
}
int demuxer(AVPacket *packet, int type){
// std::cout<<"demuxer:"<<type<<std::endl;
if(type == VIDEO){
av_init_packet(_video_pkg);
_video_pkg->pts = packet->pts;
_video_pkg->duration = 40;
_video_pkg->stream_index = _video_stream->index;
_video_pkg->data = packet->buf->data + 6;
_video_pkg->size = packet->buf->size - 6;
int ret = av_interleaved_write_frame(_format_ctx, _video_pkg);
if(ret){
printf("failed to write frame -----------%d img size= %d\n", ret, packet->buf->size);
}
av_packet_unref(_video_pkg);
}else if(type == BINDATA){
//decode
telemetryData aTelem;
memset(&aTelem, 0, sizeof(telemetryData));
telemCodec::telemDecode(packet->buf->data, packet->buf->size, aTelem);
if(_type == 0){
parseAngle(&aTelem);
}else{
aTelem.mInnerInertiaYaw += _zt_yaw;
aTelem.mInnerInertiaPitch += _zt_pitch;
aTelem.mInnerInertiaRoll += _zt_roll;
}
//encode
std::vector<unsigned char> telemData;
unsigned int dataSize = 0;
if(telemCodec::telemEncode(aTelem, telemData, dataSize))
{
av_init_packet(_telem_pkg);
_telem_pkg->stream_index = _telem_stream->index;
_telem_pkg->data = &telemData[0];
_telem_pkg->size = dataSize;
_telem_pkg->pts = packet->pts;
_telem_pkg->duration = 40;
av_interleaved_write_frame(_format_ctx, _telem_pkg); // keep all writes on this context interleaved
av_packet_unref(_telem_pkg);
}
}
return 0;
}
int demuxer(uint8_t *img, int img_size, int64_t pts, int type){
if(type == VIDEO){
av_init_packet(_video_pkg);
_video_pkg->pts = pts;
_video_pkg->duration = 40;
_video_pkg->stream_index = _video_stream->index;
_video_pkg->data = img;
_video_pkg->size = img_size;
int ret = av_write_frame(_format_ctx, _video_pkg);
if(ret){
printf("failed to write frame -----------%d img size= %d\n", ret, img_size);
}
av_packet_unref(_video_pkg);
}else if(type == BINDATA){
telemetryData aTelem;
memset(&aTelem, 0, sizeof(telemetryData));
telemCodec::telemDecode(img, img_size, aTelem);
if(_type == 0){
parseAngle(&aTelem);
}else{
aTelem.mInnerInertiaYaw += _zt_yaw;
aTelem.mInnerInertiaPitch += _zt_pitch;
aTelem.mInnerInertiaRoll += _zt_roll;
}
//encode
std::vector<unsigned char> telemData;
unsigned int dataSize = 0;
if(telemCodec::telemEncode(aTelem, telemData, dataSize))
{
av_init_packet(_telem_pkg);
_telem_pkg->stream_index = _telem_stream->index;
_telem_pkg->data = &telemData[0];
_telem_pkg->size = dataSize; // encoded telemetry size, not the raw input size
_telem_pkg->pts = pts;
_telem_pkg->duration = 40;
av_write_frame(_format_ctx, _telem_pkg);
av_packet_unref(_telem_pkg);
}
}
mBase += 40;
_frame_index++;
return 0;
}
private:
std::string date_time(std::time_t posix)
{
char buf[20];
std::tm tp = *std::localtime(&posix);
return{ buf, std::strftime(buf, sizeof(buf),"%F %T",&tp) };
}
int writeFrame(AVStream * stream, AVPacket *pkg, uint8_t *frame, int size){
av_init_packet(pkg);
pkg->stream_index = stream->index;
pkg->data = frame;
pkg->size = size;
pkg->pts = mBase;
pkg->duration = 40;
av_interleaved_write_frame(_format_ctx, pkg);
av_packet_unref(pkg);
return 0;
}
static bool getTelemFromParam(SendOutTOFC_t * src, telemetryData &telem)
{
float cam_angle[7] = {0, 307.5, 322, 20, 340, 38, 52.5};
int plan_id[7] = {4, 1, 2, 5, 3, 17, 18};
// Only sensor types 0-6 have a fixed mounting yaw; avoid indexing past the table.
float cam_yaw = (src->u8SensorType <= 6) ? cam_angle[src->u8SensorType] : 0.0f;
telem.mFlags.setTimestamp(true);
telem.mYear = src->u16Year;
telem.mMonth = src->u8Month;
telem.mDay = src->u8Day;
telem.mHour = src->u8Hour;
telem.mMinute = src->u8Minute;
telem.mSecond = src->u8Second;
telem.mMillSecond = src->u16Minisecond;
telem.mFlags.setPlatformType(true);
telem.mDroneType = src->u8PlatType;
telem.mFlags.setPlatformID(true);
telem.mDroneNum = src->u16PlatNumber;
telem.mFlags.setPlatformRotation(true);
telem.mDroneYaw = JADER_360(src->s16PlatYaw);
if(telem.mDroneYaw < 0){
telem.mDroneYaw += 360;
}
telem.mDronePitch = JADER_360(src->s16PlatPitch);
telem.mDroneRoll = JADER_360(src->s16PlatRoll);
telem.mFlags.setPlatformPosition(true);
telem.mDroneLongtitude = src->s32PlatLongitude / 10000000.0;
telem.mDroneLatitude = src->s32PlatLatitude / 10000000.0;
telem.mDroneAltitude = src->s16PlatHeight / 10.0;
telem.mDroneSpeedEast = src->s16PlatEastV * 1024.0 / 32767.0;
telem.mDroneSpeedNorth = src->s16PlatNorthV * 1024.0 / 32767.0;
telem.mDroneSpeedSky = src->s16PlatUpV * 1024.0 / 32767.0;
telem.mFlags.setSensorType(true);
telem.mSensorType = 0;
telem.mFlags.setSensorID(true);
if(src->u8SensorType <= 6){
telem.mSensorID = plan_id[src->u8SensorType];
}else{
telem.mSensorID = src->u16SensorNumber;
}
telem.mSensorSelectionCB = src->u8SensorSelect;
telem.mFlags.setSensorFOV(true);//
if(src->u8SensorType > 6){
telem.mSensorHFOV = JADER_180(src->u16SensorHView);
telem.mSensorVFOV = JADER_180(src->u16SensorVView);
}else{
telem.mSensorHFOV = src->u16SensorHView / 10.0;
telem.mSensorVFOV = src->u16SensorVView / 10.0;
}
telem.mFlags.setSensorMod(true);
telem.mSensorMod = src->u8SensorWorkMode;
telem.mFlags.setSensorRotation(true);
telem.mFrameYaw = JADER_360(src->mFrameYaw);
if(telem.mFrameYaw < 0){
telem.mFrameYaw += 360;
}
telem.mFramePitch = JADER_360(src->mFramePitch);
telem.mFrameRoll = JADER_360(src->mFrameRoll);
telem.mSensorYaw = cam_yaw; // Fixed mounting yaw from the table above (sensor types 0-6)
if(src->u8SensorType > 6){
telem.mSensorYaw = JADER_360(src->mSensorYaw);
}
telem.mSensorPitch = JADER_360(src->mSensorPitch); // Bytes 52-53: servo pitch angle
telem.mSensorRoll = JADER_360(src->mSensorRoll); // Bytes 54-55: servo roll angle
telem.mComposedYaw = JADER_360(src->s16SensorSynYaw); // Bytes 74-75: sensor composite yaw angle
telem.mComposedPitch = JADER_360(src->s16SensorSynPitch); // Bytes 76-77: sensor composite pitch angle
telem.mComposedRoll = JADER_360(src->s16SensorSynRoll); // Bytes 78-79: sensor composite roll angle
telem.mLaserDistance = 0;//src->u16LsDistance * 10000.0 / 65535;
telem.mMarkLongtitude = 0;//src->s32MarkLongitude / 10000000;
telem.mMarkLatitude = 0;//src->s32MarkLatitude / 10000000;
telem.mMarkAltitude = 0;//src->s16MarkHeight / 10;
telem.mInnerInertiaYaw = JADER_360(src->s16CameraYaw);
if(telem.mInnerInertiaYaw < 0){
telem.mInnerInertiaYaw += 360;
}
telem.mInnerInertiaPitch = JADER_360(src->s16CameraPitch);
telem.mInnerInertiaRoll = JADER_360(src->s16CameraRoll);
return true;
}
const uint64_t _vbit_rate = 2000000;
const int mvstream_id = 301;
const int mtstream_id = 8;
uint64_t mBase = 0;
int _type = 0;
double _roll = 0.0;
double _pitch = 0.0;
double _yaw = 0.0;
double _zt_roll = 0.0;
double _zt_pitch = 0.0;
double _zt_yaw = 0.0;
AVStream *_video_stream = nullptr;
AVStream *_telem_stream = nullptr;
AVFormatContext *_format_ctx = nullptr;
AVPacket *_video_pkg = nullptr;
AVPacket *_telem_pkg = nullptr;
uint32_t _frame_index = 0;
};
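// Usage sketch (illustrative only; the multicast URL and the h264Data/telemData/pts
// variables are assumptions standing in for caller-provided values, not part of this project):
//
//   std::string dst = "udp://239.0.0.1:5000";
//   TsPacker packer(dst);
//   // Feed one H.264 access unit and the matching telemetry frame with the same pts.
//   packer.demuxer(h264Data, h264Size, pts, TsPacker::VIDEO);
//   packer.demuxer(telemData, telemSize, pts, TsPacker::BINDATA);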
#endif