Implemented feature: extract the RGB bands and encode them with H.264.

This commit is contained in:
tangchao0503
2024-01-23 09:35:01 +08:00
parent 061e1f83bd
commit 6298083fd5
5 changed files with 485 additions and 81 deletions


@@ -4,6 +4,131 @@
#include "../Header_Files/rgbImage.h"
Encode::Encode()
{
fp= fopen("/media/nvme/delete/av.h264","wb");
index1 = 0;
}
void Encode::initffmpeg(int width, int height)
{
const AVCodec *codec;
int i, ret, x, y, got_output;
std::cout<<"init ok";
codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!codec)
{
fprintf(stderr, "Codec not found\n");
exit(1);
}
// create the codec context that matches the chosen encoder
avcodeccontext = avcodec_alloc_context3(codec);
if (!avcodeccontext) {
fprintf(stderr, "Could not allocate video codec context\n");
exit(1);
}
avcodeccontext->bit_rate = 400000;
avcodeccontext->width = width;
avcodeccontext->height = height;
/* frames per second */
//time base: 25 frames per second, so each tick is 1/25 s (the time base changes with the frame rate)
avcodeccontext->time_base = (AVRational){1, 25};
//frame rate
avcodeccontext->framerate = (AVRational){25, 1};
/* emit one intra frame every ten frames
* check frame pict_type before passing frame
* to encoder, if frame->pict_type is AV_PICTURE_TYPE_I
* then gop_size is ignored and the output of encoder
* will always be I frame irrespective to gop_size
*/
//number of frames per group of pictures (keyframe interval)
avcodeccontext->gop_size = 10;
//maximum number of B frames between reference frames
avcodeccontext->max_b_frames = 1;
//pixel (YUV) format of the raw frames fed to the encoder
avcodeccontext->pix_fmt = AV_PIX_FMT_YUV420P;
//if the encoder id is h264
if (codec->id == AV_CODEC_ID_H264)
// "preset" selects a predefined set of x264 encoding parameters; "slow" trades encoding speed for better video quality (faster presets lower quality)
av_opt_set(avcodeccontext->priv_data, "preset", "slow", 0);
/* open it */
//open the encoder
if (avcodec_open2(avcodeccontext, codec, NULL) < 0) {
fprintf(stderr, "Could not open codec\n");
exit(1);
}
// avcodeccontext=c;
std::cout<<"init ok";
inpic = av_frame_alloc();
outpic = av_frame_alloc();
//avpicture_fill sets all of the data pointers in the AVFrame structures
//to the right places in the data buffers. It does not copy the data so
//the QImage and out_buffer still need to live after calling these.
inpic->width=width;
inpic->height=height;
inpic->format=AV_PIX_FMT_ARGB;
inpic->linesize[0]=width;
outpic->width=width;
outpic->height=height;
outpic->format=AV_PIX_FMT_YUV420P;
outpic->linesize[0]=width;
isinit= true;
}
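/* A possible bridge between the two frames (sketch only, not part of this commit):
 * initffmpeg() prepares an ARGB input frame and a YUV420P output frame, but the hunk
 * shows neither the buffer allocation nor the colour-space conversion. libswscale can
 * do the conversion roughly as below; the helper name and the explicit ARGB stride are
 * assumptions, while inpic/outpic/width/height are the members and parameters used above. */
extern "C" {
#include <libswscale/swscale.h>
}
static void convertArgbToYuv(AVFrame *inpic, AVFrame *outpic, int width, int height)
{
    // in real code the context would be created once and cached
    SwsContext *sws = sws_getContext(width, height, AV_PIX_FMT_ARGB,
                                     width, height, AV_PIX_FMT_YUV420P,
                                     SWS_BILINEAR, NULL, NULL, NULL);
    // allocate the three YUV planes; width/height/format were set in initffmpeg()
    if (!outpic->data[0])
        av_frame_get_buffer(outpic, 0);
    // packed ARGB is one plane of 4 bytes per pixel
    const int argbStride[1] = { 4 * width };
    sws_scale(sws, (const uint8_t *const *)inpic->data, argbStride,
              0, height, outpic->data, outpic->linesize);
    sws_freeContext(sws);
}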
void Encode::savedata(AVFrame *frame)
{
AVPacket pkt;
av_init_packet(&pkt);
pkt.data = NULL; // packet data will be allocated by the encoder
pkt.size = 0;
frame->pts = index1;
AVCodecInternal *avci = avcodeccontext->internal;
// if (avci->draining)
// return AVERROR_EOF;
// if (avci->buffer_frame->data[0])
// return AVERROR(EAGAIN);
encode(avcodeccontext,frame,&pkt,fp);
av_packet_unref(&pkt);
index1++;
}
void Encode::encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, FILE *outfile)
{
int ret;
/* send the frame to the encoder */
// if (frame)
//     printf("Send frame %3"PRId64"\n", frame->pts);
//a NULL frame puts the encoder into draining (flush) mode, so send it unconditionally
ret = avcodec_send_frame(enc_ctx, frame);
if (ret < 0) {
fprintf(stderr, "Error sending a frame for encoding\n");
exit(1);
}
while (ret >= 0) {
ret = avcodec_receive_packet(enc_ctx, pkt);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
return;
else if (ret < 0) {
fprintf(stderr, "Error during encoding\n");
exit(1);
}
// printf("Write packet %3\"PRId64\" (size=%5d)\n", pkt->pts, pkt->size);
fwrite(pkt->data, 1, pkt->size, outfile);
QByteArray buf;
buf.append((char *)pkt->data,pkt->size);
// emit senddata(buf);
av_packet_unref(pkt);
}
}
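/* Possible flush step (sketch only, not shown in this commit): sending a NULL frame
 * puts the encoder into draining mode so the last buffered packets can be written.
 * A closeffmpeg() member like this is an assumption; avcodeccontext, fp, inpic and
 * outpic are the members used above. */
void Encode::closeffmpeg()
{
    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = NULL;
    pkt.size = 0;
    // NULL frame == flush request; encode() already drains avcodec_receive_packet()
    encode(avcodeccontext, NULL, &pkt, fp);
    av_packet_unref(&pkt);
    fclose(fp);
    avcodec_free_context(&avcodeccontext);
    av_frame_free(&inpic);
    av_frame_free(&outpic);
}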
rgbImage::rgbImage(QWidget* pParent)
{
@@ -11,9 +136,6 @@ rgbImage::rgbImage(QWidget* pParent)
m_matRgbImage = nullptr;
m_matFocusGrayImage = nullptr;
m_qimageFocusGrayImage = nullptr;
}
rgbImage::~rgbImage()
@@ -39,10 +161,12 @@ void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNum
}
m_matRgbImage = new Mat(FrameNumber, Sample, CV_8UC3, Scalar(0, 0, 0));
int codec = VideoWriter::fourcc('M', 'P', '4', '2'); // select desired codec (must be available at runtime)
double fps = 20.0; // framerate of the created video stream
std::string filename = "appsrc ! autovideoconvert ! filesink location=/media/nvme/delete/live.avi";//https://blog.csdn.net/ancientapesman/article/details/117324638
m_VideoWriter.open(filename, codec, fps, m_matRgbImage->size(), true);
int codec = VideoWriter::fourcc('H', '2', '6', '4'); // select desired codec (must be available at runtime)
double fps = 20.0;// framerate of the created video stream
std::string filename = "appsrc ! autovideoconvert ! filesink location=/media/nvme/live.mp4";//https://blog.csdn.net/ancientapesman/article/details/117324638
// std::string filename = "/media/nvme/live.mp4";
auto ddddd=m_matRgbImage->size();
m_VideoWriter.open(filename, codec, fps, Size(20, 1368), true);
// VideoWriter video("test.avi", CV_FOURCC('M', 'J', 'P', 'G'), 25.0, Size(640, 480));
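// Sketch only (not in this commit): the pipeline above ends in "autovideoconvert !
// filesink", which writes raw converted frames, so the output file is not actually
// H.264/MP4. For that, the pipeline needs an encoder and a muxer; the element names
// below assume the x264enc and mp4mux GStreamer plugins are installed and that
// OpenCV was built with GStreamer support.
// std::string pipeline =
//     "appsrc ! videoconvert ! x264enc tune=zerolatency bitrate=4000 "
//     "! mp4mux ! filesink location=/media/nvme/live.mp4";
// m_VideoWriter.open(pipeline, cv::CAP_GSTREAMER, 0 /*fourcc unused*/, fps,
//                    m_matRgbImage->size(), true);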
@@ -87,7 +211,50 @@ void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNum
void rgbImage::FillOnerowofRgbImage(cv::Mat * matRgbImage, int rowNumber, unsigned short *datacube)
{
//Method 1: modify pixel by pixel
// unsigned short r, g, b;
// for (int j = 0; j < m_iSampleNumber; j++)
// {
// //read the r, g, b pixel values of one frame, from left to right
// r = *(datacube + 121 * m_iSampleNumber + j)*255/4096;
// g = *(datacube + 79 * m_iSampleNumber + j)*255/4096;
// b = *(datacube + 40 * m_iSampleNumber + j)*255/4096;
//
//// r = *(datacube + 121 * m_iSampleNumber + j);
//// g = *(datacube + 79 * m_iSampleNumber + j);
//// b = *(datacube + 40 * m_iSampleNumber + j);
//
// //write the pixel values into the cv::Mat (pixel access: https://zhuanlan.zhihu.com/p/51842288)
// //int dataType = m_matRgbImage->type();//returns 18 when the type is CV_16UC3
// //std::cout << "m_matRgbImage type is " << dataType << std::endl;
// if (matRgbImage->type() == CV_8UC3)
// {
//// std::cout << "操作像素值!" << std::endl;
// matRgbImage->at<cv::Vec3b>(rowNumber, j)[2] = r;
// matRgbImage->at<cv::Vec3b>(rowNumber, j)[1] = g;
// matRgbImage->at<cv::Vec3b>(rowNumber, j)[0] = b;
//
//// QString savePath_cv = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_cv.png";
//// cv::imwrite(savePath_cv.toStdString(), *matRgbImage);
// }
//
// int column = 800;
// if(j == column)
// {
// std::cout << "行:" << rowNumber << "提取:第 " << column << " 列的 r g b 分别为 " << r << " " << g << " " << b << std::endl;
// std::cout << "mat第 " << column << " 列的 r g b 分别为 " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[2] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[1] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[0] << std::endl;
// }
// }
//Method 2: faster, via direct pointer access
unsigned short r, g, b;
const int cols = matRgbImage->cols;
const int step = matRgbImage->channels();
unsigned char *p_row0_b = matRgbImage->ptr(rowNumber);
unsigned char *p_row0_g = matRgbImage->ptr(rowNumber) + 1;
unsigned char *p_row0_r = matRgbImage->ptr(rowNumber) + 2;
for (int j = 0; j < m_iSampleNumber; j++)
{
//read the r, g, b pixel values of one frame, from left to right
@@ -95,86 +262,102 @@
g = *(datacube + 79 * m_iSampleNumber + j)*255/4096;
b = *(datacube + 40 * m_iSampleNumber + j)*255/4096;
//write the pixel values into the cv::Mat (pixel access: https://zhuanlan.zhihu.com/p/51842288)
//int dataType = m_matRgbImage->type();//returns 18 when the type is CV_16UC3
//std::cout << "m_matRgbImage type is " << dataType << std::endl;
if (matRgbImage->type() == CV_16UC3)
{
//std::cout << "操作像素值!" << std::endl;
matRgbImage->at<cv::Vec3w>(rowNumber, j)[2] = r;
matRgbImage->at<cv::Vec3w>(rowNumber, j)[1] = g;
matRgbImage->at<cv::Vec3w>(rowNumber, j)[0] = b;
}
*p_row0_b = b;
*p_row0_g = g;
*p_row0_r = r;
// int column = 800;
// if(j == column)
// {
// std::cout << "行:" << rowNumber << "提取:第 " << column << " 列的 r g b 分别为 " << r << " " << g << " " << b << std::endl;
//// std::cout << "修改后" << rowNumber << "提取:第 " << column << " 列的 r g b 分别为 " << (unsigned short)*p_row0_r << " " << (unsigned short)*p_row0_g << " " << (unsigned short)*p_row0_b << std::endl;
//// std::cout << "mat第 " << column << " 列的 r g b 分别为 " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[2] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[1] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[0] << std::endl;
// }
p_row0_b += step;
p_row0_g += step;
p_row0_r += step;
}
//Method 3: extract RGB quickly via memory copy (only the placeholder below; see the sketch after this function)
// if (matRgbImage->isContinuous())// check mat is continuous or not
// matRgbImage->reshape(1, matRgbImage->rows * matRgbImage->cols).col(0).setTo(Scalar(value));
// else
// {
// for (int i = 0; i < matRgbImage->rows; i++)
// matRgbImage->row(i).reshape(1, matRgbImage->cols).col(0).setTo(Scalar(value));
// }
}
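/* Sketch of "Method 3" (not implemented in this commit): build the row with whole-row
 * OpenCV operations instead of a per-pixel loop. The band offsets (121/79/40) and the
 * 255/4096 scaling are taken from the function above; the helper name and signature
 * are assumptions. */
static void fillRowByMerge(cv::Mat *matRgbImage, int rowNumber,
                           unsigned short *datacube, int sampleNumber)
{
    // wrap the three band rows without copying (1 x sampleNumber, 16-bit)
    cv::Mat r16(1, sampleNumber, CV_16UC1, datacube + 121 * sampleNumber);
    cv::Mat g16(1, sampleNumber, CV_16UC1, datacube + 79 * sampleNumber);
    cv::Mat b16(1, sampleNumber, CV_16UC1, datacube + 40 * sampleNumber);
    // scale 12-bit values to 8-bit, same x*255/4096 factor as above
    cv::Mat bgr8[3];
    b16.convertTo(bgr8[0], CV_8U, 255.0 / 4096.0);
    g16.convertTo(bgr8[1], CV_8U, 255.0 / 4096.0);
    r16.convertTo(bgr8[2], CV_8U, 255.0 / 4096.0);
    // interleave into one BGR row and copy it into the target row
    cv::Mat row;
    cv::merge(bgr8, 3, row);
    row.copyTo(matRgbImage->row(rowNumber));
}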
QImage rgbImage::Mat2QImage(cv::Mat cvImg)//https://www.cnblogs.com/annt/p/ant003.html
{
QImage qImg;
if (cvImg.channels() == 3)//3 channels color image
{
cv::cvtColor(cvImg, cvImg, CV_BGR2RGB);
qImg = QImage((const unsigned char*)(cvImg.data),
cvImg.cols, cvImg.rows,
cvImg.cols*cvImg.channels(),
QImage::Format_RGB888);
}
else if (cvImg.channels() == 1)//grayscale image
{
qImg = QImage((const unsigned char*)(cvImg.data),
cvImg.cols, cvImg.rows,
cvImg.cols*cvImg.channels(),
QImage::Format_Indexed8);
}
else
{
qImg = QImage((const unsigned char*)(cvImg.data),
cvImg.cols, cvImg.rows,
cvImg.cols*cvImg.channels(),
QImage::Format_RGB888);
}
return qImg;
}
void rgbImage::FillRgbImage(unsigned short *datacube)
{
unsigned short *r_row, *g_row, *b_row;
if(m_iFrameCounter<m_iFrameNumber)
{
FillOnerowofRgbImage(m_matRgbImage, m_iFrameCounter, datacube);
// std::cout << "小于:" << m_iFrameNumber << std::endl;
}
else
{
// std::cout << "大于:" << m_iFrameNumber << std::endl;
//通过行赋值将前m_iFrameNumber-1行向上移动一行https://blog.csdn.net/u014686356/article/details/65937750
// m_matRgbImage->rowRange(1, m_matRgbImage->rows).copyTo(m_matRgbImage->rowRange(0, m_matRgbImage->rows-1));
for (int i = 1; i < m_matRgbImage->rows; ++i)
{
// std::cout << "大于:" << i << std::endl;
m_matRgbImage->col(i).copyTo(m_matRgbImage->col(i-1));
// std::cout << "--------------" << i << std::endl;
}
// std::cout << "1111111111111111111111111111"<< std::endl;
//fill the last row via FillOnerowofRgbImage
FillOnerowofRgbImage(m_matRgbImage, m_iFrameNumber-1, datacube);
// std::cout << "22222222222222222"<< std::endl;
// //assign the pixels of each row in a loop
// r_row = datacube + 121 * m_iSampleNumber;
// g_row = datacube + 79 * m_iSampleNumber;
// b_row = datacube + 40 * m_iSampleNumber;
// for (int j = 0; j < m_iFrameNumber; j++)
// {
// p = m_matRgbImage.ptr<uchar>(j);
// for ( j = 0; j < nCols; ++j){
// p[j] = table[p[j]];
// }
//shift the first m_iFrameNumber-1 rows up by one row via row assignment: https://blog.csdn.net/u014686356/article/details/65937750
//verified by tc: the line below does not work correctly (the source and destination row ranges overlap in memory; a non-overlapping alternative is sketched at the end of this section)
// m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1).copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));
//
// }
// QString savePath_cv_3 = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "full0_cv.jpg";
// cv::imwrite(savePath_cv_3.toStdString(), *m_matRgbImage);
//
// cv::Mat upperPart = m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1);
// QString savePath_cv_ = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "upperPart_cv.jpg";
// cv::imwrite(savePath_cv_.toStdString(), upperPart);
//
// // copy the upper part down by one row
// upperPart.copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));//?????????????????????????????????????????
// QString savePath_cv_2 = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "full_cv.jpg";
// cv::imwrite(savePath_cv_2.toStdString(), *m_matRgbImage);
//save the RGB image
if (m_iFrameCounter % m_iFramerate == 0 || m_iFrameCounter == m_iFrameNumber - 1)
{
////save the file
//FileOperation * fileOperation = new FileOperation();
//string directory = fileOperation->getDirectoryOfExe();
//string rgbFilePathStrech = "/media/nvme/300TC/config/" + "\\tmp_image_strech.png";//unstretched image
// std::string rgbFilePathNoStrech = "/media/nvme/300TC/config/" + std::to_string(m_iFrameCounter) + "ctmp_image_no_strech.png";
//m_QRgbImage->save(QString::fromStdString(rgbFilePathNoStrech), "PNG");
// cv::imwrite(rgbFilePathNoStrech, *m_matRgbImage);
//cv::imwrite(rgbFilePathStrech, CStretch(*m_matRgbImage, 0.01));
}
m_VideoWriter.write(*m_matRgbImage);
std::string rgbFilePathNoStrech = "/media/nvme/delete/" + std::to_string(m_iFrameCounter) + "ctmp_image_no_strech.png";
cv::imwrite(rgbFilePathNoStrech, *m_matRgbImage);
for (int i = m_matRgbImage->rows - 2; i >= 0; --i)
{
// std::cout << "大于:" << i << std::endl;
m_matRgbImage->row(i).copyTo(m_matRgbImage->row(i+1));
}
FillOnerowofRgbImage(m_matRgbImage, 0, datacube);
// m_Qphoto = Mat2QImage(*m_matRgbImage);
//save the RGB image
// if (m_iFrameCounter % m_iFramerate == 0 || m_iFrameCounter == m_iFrameNumber - 1)
// {
//// QString savePath = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_qt.jpg";
//// m_Qphoto.save(savePath);
//
// QString savePath_cv = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_cv.jpg";
// cv::imwrite(savePath_cv.toStdString(), *m_matRgbImage);
// }
// m_VideoWriter.write(*m_matRgbImage);
// std::string rgbFilePathNoStrech = "/media/nvme/delete/" + std::to_string(m_iFrameCounter) + "ctmp_image_no_strech.png";
// cv::imwrite(rgbFilePathNoStrech, *m_matRgbImage);
m_iFrameCounter++;
}
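/* Alternative row shift (sketch only, not in this commit): the note above records that
 * copying rowRange(0, rows-1) onto rowRange(1, rows) directly misbehaves, which is
 * expected because the two ranges share the same buffer. Cloning the overlapping block
 * first avoids the aliasing at the cost of one extra copy; the helper name is an
 * assumption. */
static void shiftRowsDownByOne(cv::Mat *img)
{
    // clone() detaches the data, so the subsequent copy has no overlap
    cv::Mat upper = img->rowRange(0, img->rows - 1).clone();
    upper.copyTo(img->rowRange(1, img->rows));
}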