diff --git a/CMakeLists.txt b/CMakeLists.txt
index 1d58c0d..53ba6cf 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -23,6 +23,9 @@ find_package(OpenCV 4.2.0 REQUIRED)
 include_directories(/usr/local/include/opencv4/)
 link_directories(/usr/local/lib)
 
+include_directories(/home/300tc/library/ffmpeg_build/include)
+link_directories(/home/300tc/library/ffmpeg_build/lib)
+
 add_executable(${CMAKE_PROJECT_NAME}
         Source_Files/fileoperation.cpp
         Header_Files/fileoperation.h
@@ -53,4 +56,8 @@ target_link_libraries(${CMAKE_PROJECT_NAME}
         irisXimeaImager
         libconfig.so
         libconfig++.so
-        ${OpenCV_LIBS})
+        ${OpenCV_LIBS}
+        avformat
+        avcodec
+        swscale
+        avutil)
diff --git a/Header_Files/rgbImage.h b/Header_Files/rgbImage.h
index 004cbc2..4a53f21 100644
--- a/Header_Files/rgbImage.h
+++ b/Header_Files/rgbImage.h
@@ -11,6 +11,40 @@
 #include
 #include
 #include // pulls in everything, so compilation is slow
+#include "opencv2/imgproc/types_c.h"
+
+extern "C"
+{
+    #include <libavcodec/avcodec.h>
+    #include <libavformat/avformat.h>
+    #include "libavutil/pixfmt.h"
+    #include "libswscale/swscale.h"
+    #include <libavutil/opt.h>
+    #include <libavutil/imgutils.h>
+    #include "libavdevice/avdevice.h"
+}
+
+class Encode
+{
+public:
+    Encode();
+    void initffmpeg(int width, int height);
+
+    FILE *fp;
+    AVCodecContext *avcodeccontext;
+    void savedata(AVFrame *frame);
+    void encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, FILE *outfile);
+    AVPacket m_avpacket;
+    AVFrame *inpic;
+    AVFrame *outpic;
+
+    bool isinit;
+    int index1;
+
+private:
+};
+
+
 using namespace cv;
 
 class rgbImage :public QObject
@@ -27,9 +61,11 @@ public:
     void FillFocusGrayQImage(unsigned short * datacube);
     void FillOnerowofRgbImage(cv::Mat * matRgbImage, int rowNumber, unsigned short *datacube);
 
+    QImage Mat2QImage(cv::Mat cvImg);//https://www.cnblogs.com/annt/p/ant003.html
     QImage *m_QRgbImage;
     cv::Mat *m_matRgbImage;
+    QImage m_Qphoto;
 
     QImage *m_qimageFocusGrayImage;
     cv::Mat *m_matFocusGrayImage;// grayscale image of a single frame, shown while focusing
@@ -61,6 +97,8 @@ private:
 
     int m_iFrameNumber;//
 
+    void initffmpeg();
+
 public slots:
 
 signals :
diff --git a/Header_Files/ximeaimager.h b/Header_Files/ximeaimager.h
index 6170bd0..ab1353b 100644
--- a/Header_Files/ximeaimager.h
+++ b/Header_Files/ximeaimager.h
@@ -42,6 +42,7 @@
 #include "MemoryPool.h"
 #include
 #include
+#include
 #include "rgbImage.h"
 
@@ -152,6 +153,8 @@ public:
     int getMaxValueOfOneFrame(unsigned short * data, int numberOfPixel);
 
     int getImagerState() const;
+
+    Encode ffmpegEncode;
 private:
     // 0-61: official ximea error codes; 99: an official ximea error occurred but was not handled; 100: not opened; 101: opened; 102: frame rate set; 103: auto exposure; 104: acquiring
     int m_iImagerState;
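The Encode class added to rgbImage.h above is a thin wrapper around a raw H.264 file writer: initffmpeg() finds and opens the libx264 encoder, and savedata() pushes one AVFrame per call into the av.h264 file opened by the constructor. A minimal usage sketch under assumptions not stated in the patch (the 1280x720 size and the frame-filling step are placeholders; the caller has to allocate and fill a YUV420P frame itself):

    Encode enc;
    enc.initffmpeg(1280, 720);          // must match the frames pushed below

    AVFrame *f = av_frame_alloc();
    f->width  = 1280;
    f->height = 720;
    f->format = AV_PIX_FMT_YUV420P;
    av_frame_get_buffer(f, 0);          // allocate the Y/U/V planes

    for (int i = 0; i < 100; ++i)
    {
        av_frame_make_writable(f);
        // ... fill f->data[0..2] with one 1280x720 YUV420P picture ...
        enc.savedata(f);                // savedata() assigns pts from its internal counter
    }
    av_frame_free(&f);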
diff --git a/Source_Files/rgbImage.cpp b/Source_Files/rgbImage.cpp
index 8cdb7e6..13dbdd9 100644
--- a/Source_Files/rgbImage.cpp
+++ b/Source_Files/rgbImage.cpp
@@ -4,6 +4,131 @@
 
 #include "../Header_Files/rgbImage.h"
 
+Encode::Encode()
+{
+    fp= fopen("/media/nvme/delete/av.h264","wb");
+    index1 = 0;
+}
+
+void Encode::initffmpeg(int width, int height)
+{
+    const AVCodec *codec;
+
+    int i, ret, x, y, got_output;
+
+    std::cout<<"init ok";
+    codec = avcodec_find_encoder(AV_CODEC_ID_H264);
+    if (!codec)
+    {
+        fprintf(stderr, "Codec not found\n");
+        exit(1);
+    }
+
+    // allocate the codec context that matches this encoder
+    avcodeccontext = avcodec_alloc_context3(codec);
+    if (!avcodeccontext) {
+        fprintf(stderr, "Could not allocate video codec context\n");
+        exit(1);
+    }
+    avcodeccontext->bit_rate = 400000;
+
+    avcodeccontext->width = width;
+    avcodeccontext->height = height;
+    /* frames per second */
+    // time base: 25 frames per second, one tick = 1/25 s (the time base follows the frame rate)
+    avcodeccontext->time_base = (AVRational){1, 25};
+    // frame rate
+    avcodeccontext->framerate = (AVRational){25, 1};
+
+    /* emit one intra frame every ten frames
+     * check frame pict_type before passing frame
+     * to encoder, if frame->pict_type is AV_PICTURE_TYPE_I
+     * then gop_size is ignored and the output of encoder
+     * will always be I frame irrespective to gop_size
+     */
+    // number of frames per GOP (one key frame per group)
+    avcodeccontext->gop_size = 10;
+    // number of B frames (reference frames)
+    avcodeccontext->max_b_frames = 1;
+    // pixel format of the raw data fed to the encoder
+    avcodeccontext->pix_fmt = AV_PIX_FMT_YUV420P;
+
+    // if the codec id is h264
+    if (codec->id == AV_CODEC_ID_H264)
+        // "preset" selects a predefined x264 parameter set; "slow" trades encoding speed for quality, faster presets lower the quality
+        av_opt_set(avcodeccontext->priv_data, "preset", "slow", 0);
+
+    /* open it */
+    // open the encoder
+    if (avcodec_open2(avcodeccontext, codec, NULL) < 0) {
+        fprintf(stderr, "Could not open codec\n");
+        exit(1);
+    }
+    // avcodeccontext=c;
+    std::cout<<"init ok";
+    inpic = av_frame_alloc();
+    outpic = av_frame_alloc();
+
+//avpicture_fill sets all of the data pointers in the AVFrame structures
+//to the right places in the data buffers. It does not copy the data so
+//the QImage and out_buffer still need to live after calling these.
+    inpic->width=width;
+    inpic->height=height;
+    inpic->format=AV_PIX_FMT_ARGB;
+    inpic->linesize[0]=width;
+    outpic->width=width;
+    outpic->height=height;
+    outpic->format=AV_PIX_FMT_YUV420P;
+    outpic->linesize[0]=width;
+    isinit= true;
+}
+
+void Encode::savedata(AVFrame *frame)
+{
+    AVPacket pkt;
+    av_init_packet(&pkt);
+    pkt.data = NULL;    // packet data will be allocated by the encoder
+    pkt.size = 0;
+    frame->pts = index1;
+
+    AVCodecInternal *avci = avcodeccontext->internal;
+//    if (avci->draining)
+//        return AVERROR_EOF;
+//    if (avci->buffer_frame->data[0])
+//        return AVERROR(EAGAIN);
+    encode(avcodeccontext,frame,&pkt,fp);
+
+    av_packet_unref(&pkt);
+    index1++;
+}
+
+void Encode::encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, FILE *outfile)
+{
+    int ret;
+    /* send the frame to the encoder; frame may be NULL, which switches the encoder into draining mode */
+    // if (frame)
+    //     printf("Send frame %3"PRId64"\n", frame->pts);
+    ret = avcodec_send_frame(enc_ctx, frame);
+    if (ret < 0) {
+        fprintf(stderr, "Error sending a frame for encoding\n");
+        exit(1);
+    }
+    while (ret >= 0) {
+        ret = avcodec_receive_packet(enc_ctx, pkt);
+        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
+            return;
+        else if (ret < 0) {
+            fprintf(stderr, "Error during encoding\n");
+            exit(1);
+        }
+        // printf("Write packet %3"PRId64" (size=%5d)\n", pkt->pts, pkt->size);
+        fwrite(pkt->data, 1, pkt->size, outfile);
+        QByteArray buf;
+        buf.append((char *)pkt->data,pkt->size);
+//        emit senddata(buf);
+        av_packet_unref(pkt);
+    }
+}
 
 rgbImage::rgbImage(QWidget* pParent)
 {
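One thing Encode never does is drain the encoder: with max_b_frames = 1 and the slow preset, libx264 buffers frames internally, and whatever is still queued when the program stops never reaches the av.h264 file. A possible helper is sketched below; Encode::flush() is not part of the patch (it would also need a declaration in the class in rgbImage.h) and simply reuses the existing encode() loop with a null frame, which is how FFmpeg signals end of stream:

    void Encode::flush()
    {
        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.data = NULL;
        pkt.size = 0;
        encode(avcodeccontext, NULL, &pkt, fp);   // NULL frame = drain the remaining packets
        av_packet_unref(&pkt);
        fflush(fp);
    }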
@@ -11,9 +136,6 @@ rgbImage::rgbImage(QWidget* pParent)
     m_matRgbImage = nullptr;
     m_matFocusGrayImage = nullptr;
     m_qimageFocusGrayImage = nullptr;
-
-
-
 }
 
 rgbImage::~rgbImage()
 {
@@ -39,10 +161,12 @@ void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNum
     }
     m_matRgbImage = new Mat(FrameNumber, Sample, CV_8UC3, Scalar(0, 0, 0));
 
-    int codec = VideoWriter::fourcc('M', 'P', '4', '2');  // select desired codec (must be available at runtime)
-    double fps = 20.0;                                     // framerate of the created video stream
-    std::string filename = "appsrc ! autovideoconvert ! filesink location=/media/nvme/delete/live.avi";//https://blog.csdn.net/ancientapesman/article/details/117324638
-    m_VideoWriter.open(filename, codec, fps, m_matRgbImage->size(), true);
+    int codec = VideoWriter::fourcc('H', '2', '6', '4');  // select desired codec (must be available at runtime)
+    double fps = 20.0;// framerate of the created video stream
+    std::string filename = "appsrc ! autovideoconvert ! filesink location=/media/nvme/live.mp4";//https://blog.csdn.net/ancientapesman/article/details/117324638
+//    std::string filename = "/media/nvme/live.mp4";
+    auto ddddd=m_matRgbImage->size();
+    m_VideoWriter.open(filename, codec, fps, Size(20, 1368), true);
 
     // VideoWriter video("test.avi", CV_FOURCC('M', 'J', 'P', 'G'), 25.0, Size(640, 480));
 
@@ -87,7 +211,50 @@ void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNum
 
 void rgbImage::FillOnerowofRgbImage(cv::Mat * matRgbImage, int rowNumber, unsigned short *datacube)
 {
+    // Approach 1: modify pixel by pixel
+//    unsigned short r, g, b;
+//    for (int j = 0; j < m_iSampleNumber; j++)
+//    {
+//        // read the rgb pixel values of one frame, left to right
+//        r = *(datacube + 121 * m_iSampleNumber + j)*255/4096;
+//        g = *(datacube + 79 * m_iSampleNumber + j)*255/4096;
+//        b = *(datacube + 40 * m_iSampleNumber + j)*255/4096;
+//
+////        r = *(datacube + 121 * m_iSampleNumber + j);
+////        g = *(datacube + 79 * m_iSampleNumber + j);
+////        b = *(datacube + 40 * m_iSampleNumber + j);
+//
+//        // write the pixel values into the cv::Mat; pixel access: https://zhuanlan.zhihu.com/p/51842288
+//        //int dataType = m_matRgbImage->type();// returns 18 when the type is CV_16UC3
+//        //std::cout << "m_matRgbImage type: " << dataType << std::endl;
+//        if (matRgbImage->type() == CV_8UC3)
+//        {
+////            std::cout << "writing pixel values!" << std::endl;
+//            matRgbImage->at<Vec3b>(rowNumber, j)[2] = r;
+//            matRgbImage->at<Vec3b>(rowNumber, j)[1] = g;
+//            matRgbImage->at<Vec3b>(rowNumber, j)[0] = b;
+//
+////            QString savePath_cv = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_cv.png";
+////            cv::imwrite(savePath_cv.toStdString(), *matRgbImage);
+//        }
+//
+//        int column = 800;
+//        if(j == column)
+//        {
+//            std::cout << "row " << rowNumber << ", extracted r g b of column " << column << ": " << r << " " << g << " " << b << std::endl;
+//            std::cout << "mat, r g b of column " << column << ": " << (unsigned short)matRgbImage->at<Vec3b>(rowNumber, j)[2] << " " << (unsigned short)matRgbImage->at<Vec3b>(rowNumber, j)[1] << " " << (unsigned short)matRgbImage->at<Vec3b>(rowNumber, j)[0] << std::endl;
+//        }
+//    }
+
+    // Approach 2: faster, through raw row pointers
     unsigned short r, g, b;
+    const int cols = matRgbImage->cols;
+    const int step = matRgbImage->channels();
+
+    unsigned char *p_row0_b = matRgbImage->ptr(rowNumber);
+    unsigned char *p_row0_g = matRgbImage->ptr(rowNumber) + 1;
+    unsigned char *p_row0_r = matRgbImage->ptr(rowNumber) + 2;
+
     for (int j = 0; j < m_iSampleNumber; j++)
     {
         // read the rgb pixel values of one frame, left to right
         r = *(datacube + 121 * m_iSampleNumber + j)*255/4096;
@@ -95,86 +262,102 @@ void rgbImage::FillOnerowofRgbImage(cv::Mat * matRgbImage, int rowNumber, unsigned short *datacube)
         g = *(datacube + 79 * m_iSampleNumber + j)*255/4096;
         b = *(datacube + 40 * m_iSampleNumber + j)*255/4096;
 
-        // write the pixel values into the cv::Mat; pixel access: https://zhuanlan.zhihu.com/p/51842288
-        //int dataType = m_matRgbImage->type();// returns 18 when the type is CV_16UC3
-        //std::cout << "m_matRgbImage type: " << dataType << std::endl;
-        if (matRgbImage->type() == CV_16UC3)
-        {
-            //std::cout << "writing pixel values!" << std::endl;
-            matRgbImage->at<Vec3w>(rowNumber, j)[2] = r;
-            matRgbImage->at<Vec3w>(rowNumber, j)[1] = g;
-            matRgbImage->at<Vec3w>(rowNumber, j)[0] = b;
-        }
+        *p_row0_b = b;
+        *p_row0_g = g;
+        *p_row0_r = r;
+
+//        int column = 800;
+//        if(j == column)
+//        {
+//            std::cout << "row " << rowNumber << ", extracted r g b of column " << column << ": " << r << " " << g << " " << b << std::endl;
+////            std::cout << "after writing, row " << rowNumber << ", column " << column << " r g b: " << (unsigned short)*p_row0_r << " " << (unsigned short)*p_row0_g << " " << (unsigned short)*p_row0_b << std::endl;
+////            std::cout << "mat, r g b of column " << column << ": " << (unsigned short)matRgbImage->at<Vec3b>(rowNumber, j)[2] << " " << (unsigned short)matRgbImage->at<Vec3b>(rowNumber, j)[1] << " " << (unsigned short)matRgbImage->at<Vec3b>(rowNumber, j)[0] << std::endl;
+//        }
+
+        p_row0_b += step;
+        p_row0_g += step;
+        p_row0_r += step;
     }
+
+    // Approach 3: fast rgb extraction via memory copies
+//    if (matRgbImage->isContinuous())// check mat is continuous or not
+//        matRgbImage->reshape(1, matRgbImage->rows * matRgbImage->cols).col(0).setTo(Scalar(value));
+//    else
+//    {
+//        for (int i = 0; i < matRgbImage->rows; i++)
+//            matRgbImage->row(i).reshape(1, matRgbImage->cols).col(0).setTo(Scalar(value));
+//    }
+}
+
+QImage rgbImage::Mat2QImage(cv::Mat cvImg)//https://www.cnblogs.com/annt/p/ant003.html
+{
+    QImage qImg;
+    if (cvImg.channels() == 3)//3 channels color image
+    {
+        cv::cvtColor(cvImg, cvImg, CV_BGR2RGB);
+        qImg = QImage((const unsigned char*)(cvImg.data),
+                      cvImg.cols, cvImg.rows,
+                      cvImg.cols*cvImg.channels(),
+                      QImage::Format_RGB888);
+    }
+    else if (cvImg.channels() == 1)//grayscale image
+    {
+        qImg = QImage((const unsigned char*)(cvImg.data),
+                      cvImg.cols, cvImg.rows,
+                      cvImg.cols*cvImg.channels(),
+                      QImage::Format_Indexed8);
+    }
+    else
+    {
+        qImg = QImage((const unsigned char*)(cvImg.data),
+                      cvImg.cols, cvImg.rows,
+                      cvImg.cols*cvImg.channels(),
+                      QImage::Format_RGB888);
+    }
+
+    return qImg;
 }
 
 void rgbImage::FillRgbImage(unsigned short *datacube)
 {
-    unsigned short *r_row, *g_row, *b_row;
-
-    if(m_iFrameCounter<
-        m_matRgbImage->rowRange(1, m_matRgbImage->rows).copyTo(m_matRgbImage->rowRange(0, m_matRgbImage->rows-1));
-        for (int i = 1; i < m_matRgbImage->rows; ++i)
-        {
-//            std::cout << "greater than: " << i << std::endl;
-            m_matRgbImage->col(i).copyTo(m_matRgbImage->col(i-1));
-//            std::cout << "--------------" << i << std::endl;
-        }
-
-
-//        std::cout << "1111111111111111111111111111"<< std::endl;
-        // fill row m_iFrameNumber through FillOnerowofRgbImage
-        FillOnerowofRgbImage(m_matRgbImage, m_iFrameNumber-1, datacube);
-//        std::cout << "22222222222222222"<< std::endl;
-
-
-
-
-
-//        // loop to assign the pixel values of every row
-//        r_row = datacube + 121 * m_iSampleNumber;
-//        g_row = datacube + 79 * m_iSampleNumber;
-//        b_row = datacube + 40 * m_iSampleNumber;
-//        for (int j = 0; j < m_iFrameNumber; j++)
-//        {
-//            p = m_matRgbImage.ptr(j);
-//            for ( j = 0; j < nCols; ++j){
-//                p[j] = table[p[j]];
-//            }
+    // shift the first m_iFrameNumber-1 rows up by one row through row assignment, https://blog.csdn.net/u014686356/article/details/65937750
+    // verified by tc: this line does not work correctly,
+//    m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1).copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));
 //
-//        }
+//    QString savePath_cv_3 = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "full0_cv.jpg";
+//    cv::imwrite(savePath_cv_3.toStdString(), *m_matRgbImage);
+//
+//    cv::Mat upperPart = m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1);
+//    QString savePath_cv_ = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "upperPart_cv.jpg";
+//    cv::imwrite(savePath_cv_.toStdString(), upperPart);
+//
+//    // copy the upper part down by one row
+//    upperPart.copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));//?????????????????????????????????????????????
+//    QString savePath_cv_2 = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "full_cv.jpg";
+//    cv::imwrite(savePath_cv_2.toStdString(), *m_matRgbImage);
 
-
-
-
-        // save the rgb image
-        if (m_iFrameCounter % m_iFramerate == 0 || m_iFrameCounter == m_iFrameNumber - 1)
-        {
-            //// save the file
-            //FileOperation * fileOperation = new FileOperation();
-            //string directory = fileOperation->getDirectoryOfExe();
-            //string rgbFilePathStrech = "/media/nvme/300TC/config/" + "\\tmp_image_strech.png";// image without stretching
-//            std::string rgbFilePathNoStrech = "/media/nvme/300TC/config/" + std::to_string(m_iFrameCounter) + "ctmp_image_no_strech.png";
-
-            //m_QRgbImage->save(QString::fromStdString(rgbFilePathNoStrech), "PNG");
-
-//            cv::imwrite(rgbFilePathNoStrech, *m_matRgbImage);
-            //cv::imwrite(rgbFilePathStrech, CStretch(*m_matRgbImage, 0.01));
-
-        }
-        m_VideoWriter.write(*m_matRgbImage);
-        std::string rgbFilePathNoStrech = "/media/nvme/delete/" + std::to_string(m_iFrameCounter) + "ctmp_image_no_strech.png";
-        cv::imwrite(rgbFilePathNoStrech, *m_matRgbImage);
+    for (int i = m_matRgbImage->rows - 2; i >= 0; --i)
     {
+//        std::cout << "greater than: " << i << std::endl;
+        m_matRgbImage->row(i).copyTo(m_matRgbImage->row(i+1));
     }
+    FillOnerowofRgbImage(m_matRgbImage, 0, datacube);
+
+//    m_Qphoto = Mat2QImage(*m_matRgbImage);
+
+    // save the rgb image
+//    if (m_iFrameCounter % m_iFramerate == 0 || m_iFrameCounter == m_iFrameNumber - 1)
+//    {
+////        QString savePath = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_qt.jpg";
+////        m_Qphoto.save(savePath);
+//
+//        QString savePath_cv = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_cv.jpg";
+//        cv::imwrite(savePath_cv.toStdString(), *m_matRgbImage);
+//    }
+//    m_VideoWriter.write(*m_matRgbImage);
+//    std::string rgbFilePathNoStrech = "/media/nvme/delete/" + std::to_string(m_iFrameCounter) + "ctmp_image_no_strech.png";
+//    cv::imwrite(rgbFilePathNoStrech, *m_matRgbImage);
 
     m_iFrameCounter++;
 }
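A note on the rolling display implemented in FillRgbImage() above: the single overlapping copyTo that is commented out ("verified by tc: does not work correctly") fails because source and destination rows share the same buffer, so rows are overwritten before they are read and one line gets smeared down the image. The bottom-up per-row loop that replaced it is safe because each source row is copied before it is overwritten. An equivalent one-liner through a temporary copy, sketched here as an alternative (not part of the patch, and it allocates a temporary Mat every frame):

    // shift the whole image down one row via an explicit temporary
    cv::Mat upper = m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1).clone();
    upper.copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));
    FillOnerowofRgbImage(m_matRgbImage, 0, datacube);   // then write the newest line into row 0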
diff --git a/Source_Files/ximeaimager.cpp b/Source_Files/ximeaimager.cpp
index 3d38eaa..22b53a0 100644
--- a/Source_Files/ximeaimager.cpp
+++ b/Source_Files/ximeaimager.cpp
@@ -101,7 +101,7 @@ void XimeaImager::openImger()
     if (ret)
     {
         m_imager.setEffectiveWindow(offsetx, width, offsety, height);
-        m_rgbImage->SetRgbImageWidthAndHeight(height, width, 20);
+        m_rgbImage->SetRgbImageWidthAndHeight(height, width, 720);
         std::cout<<"height:"<< height <<std::endl;

(The hunk header and the opening lines of the XimeaImager::startRecord() additions — the code that creates formatContext, finds the H.264 codec, adds videoStream and defines outputVideoPath and framerateVideo — are missing from this copy of the patch; a reconstruction sketch follows the diff.)

+    videoStream->codecpar->width = width;
+    videoStream->codecpar->height = height;
+    videoStream->codecpar->codec_id = AV_CODEC_ID_H264;   // use the H.264 codec
+    videoStream->codecpar->format = AV_PIX_FMT_YUV420P;   // YUV420P pixel format
+
+    // configure the video stream / encoder
+    AVCodecContext* codecContext = avcodec_alloc_context3(codec);
+    if (!codecContext)
+    {
+        qDebug() << "Error: Failed to allocate codec context";
+        avformat_free_context(formatContext);
+        return;
+    }
+
+    // set the stream parameters, e.g. resolution and frame rate
+    codecContext->width = width;
+    codecContext->height = height;
+    codecContext->time_base = {1, framerateVideo};   // framerateVideo frames per second
+    codecContext->pix_fmt = AV_PIX_FMT_YUV420P;      // YUV420P format
+//    codecContext->bit_rate = 1000000;               // set the bit rate to 1000000
+
+    // open the video encoder
+    if (avcodec_open2(codecContext, codec, nullptr) < 0)
+    {
+        qDebug() << "Error: Failed to open codec";
+        avcodec_free_context(&codecContext);
+        avformat_free_context(formatContext);
+        return;
+    }
+
+    // open the output file
+    if (avio_open(&formatContext->pb, outputVideoPath, AVIO_FLAG_WRITE) < 0)
+    {
+        qDebug() << "Error: Failed to open output file";
+        avcodec_close(codecContext);
+        avcodec_free_context(&codecContext);
+        avformat_free_context(formatContext);
+        return;
+    }
+
+    // write the file header
+    avformat_write_header(formatContext, nullptr);
+
+    // colour-space conversion context for sws_scale (BGR24 -> YUV420P)
+    SwsContext* swsContext = sws_getContext(width, height, AV_PIX_FMT_BGR24,
+                                            width, height, AV_PIX_FMT_YUV420P,
+                                            SWS_BICUBIC, nullptr, nullptr, nullptr);
+    if (!swsContext)
+    {
+        qDebug() << "Error: Failed to create sws context";
+        avio_closep(&formatContext->pb);
+        avcodec_close(codecContext);
+        avcodec_free_context(&codecContext);
+        avformat_free_context(formatContext);
+        return;
+    }
+
+    // create an AVFrame as the destination image
+    AVFrame* dstFrame = av_frame_alloc();
+    av_image_alloc(dstFrame->data, dstFrame->linesize, width, height, AV_PIX_FMT_YUV420P, 1);
+
+    // set the destination image parameters
+    dstFrame->width = width;
+    dstFrame->height = height;
+    dstFrame->format = AV_PIX_FMT_YUV420P;
+
+    AVFrame* frame = av_frame_alloc();
+    av_image_alloc(frame->data, frame->linesize, width, height, AV_PIX_FMT_BGR24, 1);
+
+    double framerate = getFramerate();
+    int pushFlowFactor = framerate/framerateVideo;
+
+    FILE * fp= fopen("/media/nvme/delete/300tc_fp.h264","wb");
+
+    QUdpSocket * m_udpSocket = new QUdpSocket();
+    m_udpSocket->bind(666, QUdpSocket::ShareAddress);
+//    QHostAddress m_clientIpAddress=QHostAddress(QHostAddress::LocalHost);
+    QHostAddress m_clientIpAddress("192.168.1.21");
+    int udpSendCounter=0;
+    int encodeCounter=0;
+    //tc--------------------------------------------------------------------------------------------------------------------------
+
     m_imager.start();
 
     struct timeval timeStart, timeEnd;
     double runTime=0;
@@ -606,6 +721,46 @@ void XimeaImager::startRecord(double TimeDifferenceBetweensOSAndSbg,QString base
                 timeDifferenceBetweenSbgAndXimea = calculateTimeDifferenceBetweenSbgAndximea(&m_imager.m_image, TimeDifferenceBetweensOSAndSbg);
             }
             fwrite(m_imager.m_image.bp,1,m_iFrameSizeInByte, hFile);
+            // build the rgb image used for streaming to the M300 remote controller
+            m_rgbImage->FillRgbImage((unsigned short *)m_imager.m_image.bp);
+
+
+            if (m_iFrameCounter % pushFlowFactor == 0)
+            {
+                memcpy(frame->data[0], m_rgbImage->m_matRgbImage->data, m_rgbImage->m_matRgbImage->rows * m_rgbImage->m_matRgbImage->step[0]);
+//                memcpy(frame->data[0], m_rgbImage->m_Qphoto.bits(), m_rgbImage->m_Qphoto.byteCount());
+
+                // colour-space conversion with sws_scale
+                sws_scale(swsContext, frame->data, frame->linesize, 0, height,
+                          dstFrame->data, dstFrame->linesize);
+                dstFrame->pts = encodeCounter;
+
+                // encode the AVFrame into a video packet
+                AVPacket pkt;
+                av_init_packet(&pkt);
+                pkt.data = nullptr;
+                pkt.size = 0;
+
+                if (avcodec_send_frame(codecContext, dstFrame) == 0 &&
+                    avcodec_receive_packet(codecContext, &pkt) == 0)
+                {
+                    fwrite(pkt.data, 1, pkt.size, fp);
+//
+                    m_udpSocket->writeDatagram((const char *)pkt.data,pkt.size,m_clientIpAddress, 666);
+//
+                    std::cout<< "frame " << m_iFrameCounter << ": encoded packet " << udpSendCounter << ", size: " << pkt.size << std::endl;
+                    udpSendCounter++;
+
+                    // write the encoded packet into the container
+//                    pkt.stream_index = videoStream->index;
+//                    av_interleaved_write_frame(formatContext, &pkt);
+//                    av_write_frame(formatContext, &pkt);
+
+                    av_packet_unref(&pkt);
+                }
+                encodeCounter++;
+            }
+
 
             indexofbuff = m_iFrameCounter % number_WriteDisk;
 
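In the capture-loop hunk above, a frame only produces output when avcodec_send_frame() and avcodec_receive_packet() both succeed in the same pass. An H.264 encoder with lookahead or B-frames legitimately returns AVERROR(EAGAIN) from avcodec_receive_packet() for the first few frames, so those frames are silently dropped from both the .h264 file and the UDP stream. A sketch of the usual pattern — drain every packet that is ready after each send — using the same variable names as the patch (a suggested restructuring, not part of it):

    if (avcodec_send_frame(codecContext, dstFrame) == 0)
    {
        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.data = nullptr;
        pkt.size = 0;
        while (avcodec_receive_packet(codecContext, &pkt) == 0)   // loop until EAGAIN/EOF
        {
            fwrite(pkt.data, 1, pkt.size, fp);
            m_udpSocket->writeDatagram((const char *)pkt.data, pkt.size, m_clientIpAddress, 666);
            av_packet_unref(&pkt);
        }
    }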
@@ -641,6 +796,24 @@ void XimeaImager::startRecord(double TimeDifferenceBetweensOSAndSbg,QString base
     writeData2Disk->exitWriteData2Disk();
 
     writeHdr();
+    fclose(fp);
+    // write the file trailer
+    av_write_trailer(formatContext);
+
+    // free the AVFrames and related resources
+    av_freep(&frame->data[0]);
+    av_frame_free(&frame);
+
+    // free the remaining resources
+    sws_freeContext(swsContext);
+    av_freep(&dstFrame->data[0]);
+    av_frame_free(&dstFrame);
+//    av_packet_free(&pkt);
+    avcodec_close(codecContext);
+    avcodec_free_context(&codecContext);
+    avio_closep(&formatContext->pb);
+    avformat_free_context(formatContext);
+
 
     delete[] sbgTimeBuffer;
 
     double frameInTheory=runTime * getFramerate();
@@ -1025,7 +1198,7 @@ void WriteData2Disk::write2Disk()
             r_qtx.unlock();
 
             // build the rgb image used for streaming to the M300 remote controller
-//            m_rgbImage->FillRgbImage(dataBuffer);
+            m_rgbImage->FillRgbImage(dataBuffer);
 
 //            std::cout<<"WriteData2Disk::write2Disk-----------------------writing to disk!" << m_pool->max_size() << std::endl;
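As noted inside the ximeaimager.cpp section, the opening lines of the startRecord() additions were lost from this copy of the diff. The surviving code tells what they must have provided: formatContext, codec, videoStream, outputVideoPath, framerateVideo, width and height. A sketch of a standard setup that matches those uses — the values marked as assumed and the exact error handling are guesses, not the original code:

    const int framerateVideo = 25;                                  // assumed value
    const char *outputVideoPath = "/media/nvme/delete/out.mp4";     // assumed path

    AVFormatContext *formatContext = nullptr;
    avformat_alloc_output_context2(&formatContext, nullptr, nullptr, outputVideoPath);
    if (!formatContext)
        return;

    const AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec)
    {
        avformat_free_context(formatContext);
        return;
    }

    AVStream *videoStream = avformat_new_stream(formatContext, codec);
    if (!videoStream)
    {
        avformat_free_context(formatContext);
        return;
    }
    // width and height presumably come from the imager's effective window, as in openImger()

Two further observations on this block: the muxer opened through avio_open()/avformat_write_header() never receives a packet, because the av_interleaved_write_frame()/av_write_frame() calls stay commented out, so the file behind formatContext ends up holding only a header and trailer; and codecContext is never drained with a NULL frame before av_write_trailer()/fclose(), so frames still buffered in the encoder are lost, the same issue as in the Encode class.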