1. Add a configuration file to control the streaming parameters;

2. Fix the frame-order confusion seen when the remote controller decodes the stream (gop_size = 1; see the sketch below);
3. Clean up the code;
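Point 2 relies on the keyframe interval: with gop_size = 1 every encoded frame is an intra frame, so the decoder never has to reorder anything. A minimal sketch of a config-driven setup is shown below, assuming a QSettings-readable INI file and the AVCodecContext fields used in Encode::initffmpeg; the file path and key names are hypothetical, not the project's actual configuration.

// Hypothetical sketch: pull the streaming parameters from an INI file and
// apply them to the encoder context before avcodec_open2() is called.
#include <QSettings>
extern "C" {
#include <libavcodec/avcodec.h>
}
static void applyStreamConfig(AVCodecContext *ctx)
{
    QSettings cfg("/media/nvme/stream.ini", QSettings::IniFormat); // assumed path
    ctx->bit_rate     = cfg.value("encoder/bit_rate", 400000).toInt();
    // gop_size = 1: every frame is an I-frame, so decode order == display order
    ctx->gop_size     = cfg.value("encoder/gop_size", 1).toInt();
    // B-frames also reorder output; 0 keeps the stream strictly in order
    ctx->max_b_frames = cfg.value("encoder/max_b_frames", 0).toInt();
    int fps           = cfg.value("encoder/fps", 25).toInt();
    ctx->time_base    = (AVRational){1, fps};
    ctx->framerate    = (AVRational){fps, 1};
}

An all-I-frame stream costs extra bitrate, but it removes the frame-order confusion described in point 2.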
tangchao0503
2024-01-29 17:21:07 +08:00
parent 2e4679aaef
commit a91f5f5b04
6 changed files with 445 additions and 385 deletions


@@ -4,138 +4,16 @@
#include "../Header_Files/rgbImage.h"
Encode::Encode()
{
fp= fopen("/media/nvme/delete/av.h264","wb");
index1 = 0;
}
void Encode::initffmpeg(int width, int height)
{
const AVCodec *codec;
int i, ret, x, y, got_output;
std::cout<<"init ok";
codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!codec)
{
fprintf(stderr, "Codec not found\n");
exit(1);
}
// Allocate the codec context that matches the chosen encoder
avcodeccontext = avcodec_alloc_context3(codec);
if (!avcodeccontext) {
fprintf(stderr, "Could not allocate video codec context\n");
exit(1);
}
avcodeccontext->bit_rate = 400000;
avcodeccontext->width = width;
avcodeccontext->height = height;
/* frames per second */
//Time base: at 25 fps each tick is 1/25 s (the time base follows the frame rate)
avcodeccontext->time_base = (AVRational){1, 25};
//Frame rate
avcodeccontext->framerate = (AVRational){25, 1};
/* emit one intra frame every ten frames
* check frame pict_type before passing frame
* to encoder, if frame->pict_type is AV_PICTURE_TYPE_I
* then gop_size is ignored and the output of encoder
* will always be I frame irrespective to gop_size
*/
//Number of frames in a GOP (how often a keyframe is emitted)
avcodeccontext->gop_size = 10;
//Maximum number of B-frames between reference frames
avcodeccontext->max_b_frames = 1;
//YUV pixel format of the raw frames fed to the encoder
avcodeccontext->pix_fmt = AV_PIX_FMT_YUV420P;
//If the encoder is H.264
if (codec->id == AV_CODEC_ID_H264)
//"preset" selects a predefined x264 parameter set; "slow" favours quality over encoding speed, faster presets reduce quality
av_opt_set(avcodeccontext->priv_data, "preset", "slow", 0);
/* open it */
//Open the encoder
if (avcodec_open2(avcodeccontext, codec, NULL) < 0) {
fprintf(stderr, "Could not open codec\n");
exit(1);
}
// avcodeccontext=c;
std::cout<<"init ok";
inpic = av_frame_alloc();
outpic = av_frame_alloc();
//avpicture_fill sets all of the data pointers in the AVFrame structures
//to the right places in the data buffers. It does not copy the data so
//the QImage and out_buffer still need to live after calling these.
inpic->width=width;
inpic->height=height;
inpic->format=AV_PIX_FMT_ARGB;
inpic->linesize[0]=width;
outpic->width=width;
outpic->height=height;
outpic->format=AV_PIX_FMT_YUV420P;
outpic->linesize[0]=width;
isinit= true;
}
void Encode::savedata(AVFrame *frame)
{
AVPacket pkt;
av_init_packet(&pkt);
pkt.data = NULL; // packet data will be allocated by the encoder
pkt.size = 0;
frame->pts = index1;
AVCodecInternal *avci = avcodeccontext->internal;
// if (avci->draining)
// return AVERROR_EOF;
// if (avci->buffer_frame->data[0])
// return AVERROR(EAGAIN);
encode(avcodeccontext,frame,&pkt,fp);
av_packet_unref(&pkt);
index1++;
}
void Encode::encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, FILE *outfile)
{
int ret;
/* send the frame to the encoder */
// if (frame)
// printf("Send frame %3\"PRId64\"\n", frame->pts);
//frame == NULL flushes the encoder, so the send call must run unconditionally
ret = avcodec_send_frame(enc_ctx, frame);
if (ret < 0) {
fprintf(stderr, "Error sending a frame for encoding\n");
exit(1);
}
while (ret >= 0) {
ret = avcodec_receive_packet(enc_ctx, pkt);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
return;
else if (ret < 0) {
fprintf(stderr, "Error during encoding\n");
exit(1);
}
// printf("Write packet %3\"PRId64\" (size=%5d)\n", pkt->pts, pkt->size);
fwrite(pkt->data, 1, pkt->size, outfile);
QByteArray buf;
buf.append((char *)pkt->data,pkt->size);
// emit senddata(buf);
av_packet_unref(pkt);
}
}
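// For orientation, a minimal driving sketch for the Encode class above (commented
// out because it is an illustration, not project code). It assumes the frames are
// YUV420P AVFrames matching the configured size; grabNextFrame() is a hypothetical
// capture call, not an existing function.
// Encode enc;
// enc.initffmpeg(1920, 1080);                 // example dimensions
// while (AVFrame *frame = grabNextFrame()) {  // hypothetical capture loop
//     enc.savedata(frame);                    // sets pts, runs the send/receive loop, appends packets to av.h264
//     av_frame_free(&frame);
// }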
rgbImage::rgbImage(QWidget* pParent)
{
m_QRgbImage = nullptr;
m_matRgbImage = nullptr;
m_matFocusGrayImage = nullptr;
m_qimageFocusGrayImage = nullptr;
m_iRedBandNumber = 0;
m_iGreenBandNumber = 0;
m_iBlueBandNumber = 0;
}
rgbImage::~rgbImage()
@@ -143,8 +21,18 @@ rgbImage::~rgbImage()
}
void rgbImage::SetRgbBandNumber(int redBandNumber, int greenBandNumber, int blueBandNumber)
{
m_iRedBandNumber = redBandNumber;
m_iGreenBandNumber = greenBandNumber;
m_iBlueBandNumber = blueBandNumber;
void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNumber)
// std::cout<<"rgbImage::SetRgbBandNumber红波段的波段号"<< redBandNumber <<std::endl;
// std::cout<<"rgbImage::SetRgbBandNumber绿波段的波段号"<< greenBandNumber <<std::endl;
// std::cout<<"rgbImage::SetRgbBandNumber蓝波段的波段号"<< blueBandNumber <<std::endl;
}
void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Width, int height)
{
using namespace cv;
@@ -152,21 +40,21 @@ void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNum
{
delete m_QRgbImage;//FIXME: this delete looks problematic
}
//m_QRgbImage = new QImage(Sample, FrameNumber, QImage::Format_RGB888);
//m_QRgbImage = new QImage(Width, height, QImage::Format_RGB888);
if (m_matRgbImage != nullptr)
{
delete m_matRgbImage;
}
m_matRgbImage = new Mat(FrameNumber, Sample, CV_8UC3, Scalar(0, 0, 0));
m_matRgbImage = new Mat(height, Width, CV_8UC3, Scalar(0, 0, 0));
int codec = VideoWriter::fourcc('H', '2', '6', '4'); // select desired codec (must be available at runtime)
double fps = 20.0;// framerate of the created video stream
std::string filename = "appsrc ! autovideoconvert ! filesink location=/media/nvme/live.mp4";//https://blog.csdn.net/ancientapesman/article/details/117324638
// std::string filename = "/media/nvme/live.mp4";
auto ddddd=m_matRgbImage->size();
m_VideoWriter.open(filename, codec, fps, Size(20, 1368), true);
// int codec = VideoWriter::fourcc('H', '2', '6', '4'); // select desired codec (must be available at runtime)
// double fps = 20.0;// framerate of the created video stream
// std::string filename = "appsrc ! autovideoconvert ! filesink location=/media/nvme/live.mp4";//https://blog.csdn.net/ancientapesman/article/details/117324638
//// std::string filename = "/media/nvme/live.mp4";
// auto ddddd=m_matRgbImage->size();
// m_VideoWriter.open(filename, codec, fps, Size(20, 1368), true);
// VideoWriter video("test.avi", CV_FOURCC('M', 'J', 'P', 'G'), 25.0, Size(640, 480));
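// The commented-out appsrc pipeline above feeds raw frames straight into filesink,
// so the resulting .mp4 has no real container. If the cv::VideoWriter route is
// revisited, a pipeline that encodes and muxes might look like the sketch below;
// it assumes an OpenCV build with GStreamer support and the x264enc/mp4mux elements,
// so treat the element chain as an assumption to verify rather than a tested setup.
// std::string pipeline = "appsrc ! videoconvert ! x264enc tune=zerolatency ! mp4mux ! filesink location=/media/nvme/live.mp4";
// cv::VideoWriter writer(pipeline, cv::CAP_GSTREAMER, 0 /*fourcc ignored*/, 20.0, m_matRgbImage->size(), true);
// // every Mat passed to writer.write() must match that frame size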
@@ -178,33 +66,26 @@ void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNum
if (m_qimageFocusGrayImage == nullptr)
{
m_qimageFocusGrayImage = new QImage(Sample, BandCount, QImage::Format_RGB32);
m_qimageFocusGrayImage = new QImage(Width, BandCount, QImage::Format_RGB32);
}
if (m_matFocusGrayImage == nullptr)
{
m_matFocusGrayImage = new Mat(BandCount, Sample, CV_16U, Scalar(0));
m_matFocusGrayImage = new Mat(BandCount, Width, CV_16U, Scalar(0));
//cv::Mat matAdjustPreview = Mat::zeros(BandCount, Sample, CV_16U);
//cv::Mat matAdjustPreview = Mat::zeros(BandCount, Width, CV_16U);
}
//cv::Mat matAdjustPreview = Mat::zeros(BandCount, Sample, CV_16U);
//cv::Mat matAdjustPreview = Mat::zeros(BandCount, Width, CV_16U);
//m_matFocusGrayImage = matAdjustPreview;
std::cout << "设置帧数:" << FrameNumber << std::endl;
std::cout << "高光谱rgb图像设置高度" << height << std::endl;
m_iFrameCounter = 0;//reset to 0 each time
m_iSampleNumber = Sample;
m_iSampleNumber = Width;
m_iBandNumber = BandCount;
m_iFrameNumber = FrameNumber;
m_iFrameNumber = height;
//std::cout << "rgb影像内存地址为" << m_QRgbImage << std::endl;
}
@@ -216,13 +97,13 @@ void rgbImage::FillOnerowofRgbImage(cv::Mat * matRgbImage, int rowNumber, unsign
// for (int j = 0; j < m_iSampleNumber; j++)
// {
// //Read the RGB pixel values of one frame from left to right
// r = *(datacube + 121 * m_iSampleNumber + j)*255/4096;
// g = *(datacube + 79 * m_iSampleNumber + j)*255/4096;
// b = *(datacube + 40 * m_iSampleNumber + j)*255/4096;
// r = *(datacube + m_iRedBandNumber * m_iSampleNumber + j)*255/4096;
// g = *(datacube + m_iGreenBandNumber * m_iSampleNumber + j)*255/4096;
// b = *(datacube + m_iBlueBandNumber * m_iSampleNumber + j)*255/4096;
//
//// r = *(datacube + 121 * m_iSampleNumber + j);
//// g = *(datacube + 79 * m_iSampleNumber + j);
//// b = *(datacube + 40 * m_iSampleNumber + j);
//// r = *(datacube + m_iRedBandNumber * m_iSampleNumber + j);
//// g = *(datacube + m_iGreenBandNumber * m_iSampleNumber + j);
//// b = *(datacube + m_iBlueBandNumber * m_iSampleNumber + j);
//
// //Write the pixel values into the cv::Mat (pixel access: https://zhuanlan.zhihu.com/p/51842288)
// //int dataType = m_matRgbImage->type();//returns 18 when the data type is CV_16UC3
@@ -257,10 +138,10 @@ void rgbImage::FillOnerowofRgbImage(cv::Mat * matRgbImage, int rowNumber, unsign
for (int j = 0; j < m_iSampleNumber; j++)
{
//Read the RGB pixel values of one frame from left to right
r = *(datacube + 121 * m_iSampleNumber + j)*255/4096;
g = *(datacube + 79 * m_iSampleNumber + j)*255/4096;
b = *(datacube + 40 * m_iSampleNumber + j)*255/4096;
//Read the RGB pixel values of one frame from left to right, linearly stretched from 12-bit (0-4095) to 8-bit (0-255)
r = *(datacube + m_iRedBandNumber * m_iSampleNumber + j)*255/4096;
g = *(datacube + m_iGreenBandNumber * m_iSampleNumber + j)*255/4096;
b = *(datacube + m_iBlueBandNumber * m_iSampleNumber + j)*255/4096;
*p_row0_b = b;
*p_row0_g = g;
@@ -320,25 +201,15 @@ QImage rgbImage::Mat2QImage(cv::Mat cvImg)//https://www.cnblogs.com/annt/p/ant00
void rgbImage::FillRgbImage(unsigned short *datacube)
{
//Shift the first m_iFrameNumber-1 rows up by one row via row assignment (https://blog.csdn.net/u014686356/article/details/65937750)
//Verified by tc: this line misbehaves
// m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1).copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));
//
// QString savePath_cv_3 = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "full0_cv.jpg";
// cv::imwrite(savePath_cv_3.toStdString(), *m_matRgbImage);
//
// cv::Mat upperPart = m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1);
// QString savePath_cv_ = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "upperPart_cv.jpg";
// cv::imwrite(savePath_cv_.toStdString(), upperPart);
//
// //Copy the upper part of the image down by one row
// upperPart.copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));//questionable
// QString savePath_cv_2 = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "full_cv.jpg";
// cv::imwrite(savePath_cv_2.toStdString(), *m_matRgbImage);
//Starting from the second row, shift everything down by one row (https://blog.csdn.net/u014686356/article/details/65937750)
// m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1).copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));//verified by tc: misbehaves; why does it fail without .clone()?
// m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1).clone().copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));//with this approach, frames start dropping after about 1 min at a 130 Hz Ximea frame rate
for (int i = m_matRgbImage->rows - 2; i >= 0; --i)
// cv::Mat upperPart = m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1).clone();//with this approach, frames start dropping after about 1 min at a 130 Hz Ximea frame rate
// upperPart.copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));
for (int i = m_matRgbImage->rows - 2; i >= 0; --i)//with this approach frames only start dropping after about 4.5 min at 130 Hz, making it the most efficient of the three
{
// std::cout << "大于:" << i << std::endl;
m_matRgbImage->row(i).copyTo(m_matRgbImage->row(i+1));
}
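// The bottom-up loop above works without .clone() because each source row is copied
// into the row below it before it is itself overwritten. A tiny standalone check of
// that behaviour (illustration only, not project code):
// #include <opencv2/core.hpp>
// #include <iostream>
// int main() {
//     cv::Mat m = (cv::Mat_<int>(3, 1) << 1, 2, 3);
//     for (int i = m.rows - 2; i >= 0; --i)
//         m.row(i).copyTo(m.row(i + 1));   // shift every row down by one
//     std::cout << m << std::endl;         // prints [1; 1; 2]
//     return 0;
// }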