1. Add configuration-file control of the push-flow (streaming) parameters;
2. Fix the scrambled frame order seen when the remote controller decodes the stream (gop_size = 1);
3. Clean up the code;
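Background for item 2: with gop_size = 1 every encoded frame is a keyframe, so no frame depends on an earlier one and the packets sent over UDP can be decoded as they arrive. The snippet below is only a minimal sketch of that encoder configuration, not code from this commit; the helper name openIntraOnlyH264 is invented here, and it additionally assumes max_b_frames = 0 (no B-frames) so that decode order always matches presentation order, whereas the diff itself keeps max_b_frames = 1.

// Minimal sketch (assumed helper, not part of this commit): an H.264 encoder
// configured so that every frame is an independent keyframe.
extern "C" {
#include <libavcodec/avcodec.h>
}

static AVCodecContext *openIntraOnlyH264(int width, int height, int fps)
{
    const AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec)
        return nullptr;

    AVCodecContext *ctx = avcodec_alloc_context3(codec);
    if (!ctx)
        return nullptr;

    ctx->width = width;
    ctx->height = height;
    ctx->time_base = {1, fps};
    ctx->pix_fmt = AV_PIX_FMT_YUV420P;
    ctx->gop_size = 1;      // every frame starts a new GOP, i.e. every frame is a keyframe
    ctx->max_b_frames = 0;  // assumption: no B-frames, so decode order equals presentation order

    if (avcodec_open2(ctx, codec, nullptr) < 0) {
        avcodec_free_context(&ctx);
        return nullptr;
    }
    return ctx;
}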
@@ -47,6 +47,12 @@ XimeaImager::XimeaImager()
    writeData2DiskThread->start(QThread::HighestPriority);
    connect(this, SIGNAL(startWriteDiskSignal()), writeData2Disk, SLOT(write2Disk()));

    m_pushFlowThread=new QThread();
    m_pushFlow = new PushFlow();
    m_pushFlow->moveToThread(m_pushFlowThread);
    m_pushFlowThread->start();
    connect(this, SIGNAL(startPushFlowSignal()), m_pushFlow, SLOT(encodePushFlow()));

    m_pool = new MemoryPool<DataBuffer>;
    q = new queue<DataBuffer *>;
    m_qFrameCounter = new queue<int>;
@@ -100,8 +106,21 @@ void XimeaImager::openImger()
    ret = m_configfile.getEffectiveWindow(width, offsetx, height, offsety);
    if (ret)
    {
        int rgbHeight;
        int framerateVideo;
        m_configfile.getPushFlowParam(m_iFlowSwitch, rgbHeight, framerateVideo);
        std::cout <<"rgbHeight:" << rgbHeight << ", framerateVideo:" << framerateVideo << std::endl;

        m_imager.setEffectiveWindow(offsetx, width, offsety, height);
        m_rgbImage->SetRgbImageWidthAndHeight(height, width, 720);
        m_rgbImage->SetRgbImageWidthAndHeight(height, width, rgbHeight);
        m_pushFlow->setParm(m_rgbImage,width,rgbHeight,framerateVideo);

        int redBandNumber;
        int greenBandNumber;
        int blueBandNumber;
        getRgbBandNumber(redBandNumber, greenBandNumber, blueBandNumber);
        m_rgbImage->SetRgbBandNumber(redBandNumber, greenBandNumber, blueBandNumber);

        std::cout<<"height:"<< height <<std::endl;
        std::cout<<"width:"<< width <<std::endl;
        std::cout<<"bytes per frame:"<< width * height * 2 <<std::endl;
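The getPushFlowParam call above is where item 1 of the commit message takes effect: the stream enable switch, the RGB preview height, and the video frame rate are now read from the configuration file instead of being hard-coded. The actual ConfigFile implementation is not part of this hunk, so the following is only a hypothetical sketch of such a getter; the QSettings-based INI format, the key names, and the default values are all assumptions.

// Hypothetical sketch of a config-file getter; key names and defaults are assumptions.
#include <QSettings>
#include <QString>

void getPushFlowParamSketch(const QString &iniPath,
                            int &flowSwitch, int &rgbHeight, int &framerateVideo)
{
    QSettings settings(iniPath, QSettings::IniFormat);
    flowSwitch     = settings.value("PushFlow/switch", 0).toInt();      // 1 = streaming enabled
    rgbHeight      = settings.value("PushFlow/rgbHeight", 720).toInt(); // height of the preview RGB image
    framerateVideo = settings.value("PushFlow/framerate", 10).toInt();  // encoded video frame rate
}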
@@ -487,6 +506,65 @@ double XimeaImager::geWavelengthAtBand(int x)
    }
}

void XimeaImager::getRgbBandNumber(int &redBandNumber, int &greenBandNumber, int &blueBandNumber)
{
    vector<double> wavelengths;
    if (m_imager.getSpectralBin() == 1)
    {
        for (int i = getWindowStartBand(); i < getWindowEndBand(); i++)
        {
            wavelengths.push_back(geWavelengthAtBand(i));
        }
    }
    else if (m_imager.getSpectralBin() == 2)
    {
        for (int i = m_iOffsetyOfSpectralBin2; i < m_iOffsetyOfSpectralBin2 + m_iHeightOfSpectralBin2; i++)
        {
            if (i*2 + 1 > m_iOffsetyOfSpectralBin1 + m_iHeightOfSpectralBin1)
            {
                printf("XimeaImager::writeHdr error: in the window, the number of spectral bin1 bands is less than twice the number of bin2 bands.\n");
                break;
            }
            wavelengths.push_back((geWavelengthAtBand(i*2) + geWavelengthAtBand(i*2 + 1)) / 2);
        }
    }

    // red/green/blue wavelengths (nm) that ENVI uses when opening a file
    int r_envi = 640;
    int g_envi = 550;
    int b_envi = 470;
    redBandNumber = findClosestIndex(wavelengths, r_envi);
    greenBandNumber = findClosestIndex(wavelengths, g_envi);
    blueBandNumber = findClosestIndex(wavelengths, b_envi);

    // std::cout<<"red band index:"<< redBandNumber <<std::endl;
    // std::cout<<"green band index:"<< greenBandNumber <<std::endl;
    // std::cout<<"blue band index:"<< blueBandNumber <<std::endl;
}

int XimeaImager::findClosestIndex(const std::vector<double>& numbers, double target)
{
    if (numbers.empty()) {
        // handle the empty-vector case
        return -1;
    }

    double minDifference = std::abs(numbers[0] - target);
    int closestIndex = 0;

    for (int i = 1; i < numbers.size(); ++i)
    {
        double currentDifference = std::abs(numbers[i] - target);
        if (currentDifference < minDifference)
        {
            minDifference = currentDifference;
            closestIndex = i;
        }
    }

    return closestIndex;
}

int XimeaImager::getMaxValueOfOneFrame(unsigned short * data, int numberOfPixel)
{
    // sort
@@ -592,124 +670,13 @@ void XimeaImager::startRecord(double TimeDifferenceBetweensOSAndSbg,QString base
    FILE *hFile=fopen(imageFileName.toStdString().c_str(),"w+b");
    double * imageBuffer = new double[number_WriteDisk];

    //tc+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    // create the AVFormatContext for the output video
    const char* outputVideoPath = "/media/nvme/delete/300tc.h264";
    int width = 1368;
    int height = 720;
    int framerateVideo = 10;

    AVFormatContext* formatContext = nullptr;
    avformat_alloc_output_context2(&formatContext, nullptr, "mp4", outputVideoPath);
    if (!formatContext)
    QString vedioFileName=m_baseFileName+".h264";
    m_pushFlow->setVedioFilePath(vedioFileName);
    if(m_iFlowSwitch == 1)
    {
        qDebug() << "Error: Failed to allocate output context";
        return;
        emit startPushFlowSignal();
    }

    // look up the H.264 encoder
    const AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec)
    {
        qDebug() << "Error: Codec not found";
        avformat_free_context(formatContext);
        return;
    }

    // create the output video stream
    AVStream* videoStream = avformat_new_stream(formatContext, codec);
    if (!videoStream)
    {
        qDebug() << "Error: Failed to create video stream";
        avformat_free_context(formatContext);
        return;
    }
    // set the video stream parameters, e.g. resolution and frame rate
    videoStream->codecpar->width = width;
    videoStream->codecpar->height = height;
    videoStream->codecpar->codec_id = AV_CODEC_ID_H264; // use the H.264 codec
    videoStream->codecpar->format = AV_PIX_FMT_YUV420P; // use the YUV420P pixel format

    // allocate and configure the codec context
    AVCodecContext* codecContext = avcodec_alloc_context3(codec);
    if (!codecContext)
    {
        qDebug() << "Error: Failed to allocate codec context";
        avformat_free_context(formatContext);
        return;
    }

    // set the encoder parameters, e.g. resolution and frame rate
    codecContext->width = width;
    codecContext->height = height;
    codecContext->time_base = {1, framerateVideo}; // framerateVideo frames per second
    codecContext->pix_fmt = AV_PIX_FMT_YUV420P; // YUV420P pixel format
    codecContext->gop_size = 1; // frames per keyframe group (1 = every frame is a keyframe)
    codecContext->max_b_frames = 1; // B-frames (bidirectional reference frames)
    // codecContext->bit_rate = 1000000; // set the bit rate to 1000000

    // open the video encoder
    if (avcodec_open2(codecContext, codec, nullptr) < 0)
    {
        qDebug() << "Error: Failed to open codec";
        avcodec_free_context(&codecContext);
        avformat_free_context(formatContext);
        return;
    }

    // open the output file
    if (avio_open(&formatContext->pb, outputVideoPath, AVIO_FLAG_WRITE) < 0)
    {
        qDebug() << "Error: Failed to open output file";
        avcodec_close(codecContext);
        avcodec_free_context(&codecContext);
        avformat_free_context(formatContext);
        return;
    }

    // write the file header
    avformat_write_header(formatContext, nullptr);

    // color-space conversion with sws_scale
    SwsContext* swsContext = sws_getContext(width, height, AV_PIX_FMT_BGR24,
                                            width, height, AV_PIX_FMT_YUV420P,
                                            SWS_BICUBIC, nullptr, nullptr, nullptr);
    if (!swsContext)
    {
        qDebug() << "Error: Failed to create sws context";
        avio_closep(&formatContext->pb);
        avcodec_close(codecContext);
        avcodec_free_context(&codecContext);
        avformat_free_context(formatContext);
        return;
    }

    // create an AVFrame for the destination image
    AVFrame* dstFrame = av_frame_alloc();
    av_image_alloc(dstFrame->data, dstFrame->linesize, width, height, AV_PIX_FMT_YUV420P, 1);

    // set the destination image parameters
    dstFrame->width = width;
    dstFrame->height = height;
    dstFrame->format = AV_PIX_FMT_YUV420P;

    AVFrame* frame = av_frame_alloc();
    av_image_alloc(frame->data, frame->linesize, width, height, AV_PIX_FMT_BGR24, 1);

    double framerate = getFramerate();
    int pushFlowFactor = framerate/framerateVideo;

    FILE * fp= fopen("/media/nvme/delete/300tc_fp.h264","wb");

    QUdpSocket * m_udpSocket = new QUdpSocket();
    m_udpSocket->bind(PUSH_FLOW_PORT, QUdpSocket::ShareAddress);
    QHostAddress m_clientIpAddress=QHostAddress(QHostAddress::LocalHost);
    // QHostAddress m_clientIpAddress("192.168.1.21");
    // QHostAddress m_clientIpAddress("192.168.111.1");
    int udpSendCounter=0;
    int encodeCounter=0;
    //tc--------------------------------------------------------------------------------------------------------------------------

    m_imager.start();
    struct timeval timeStart, timeEnd;
    double runTime=0;
@@ -725,48 +692,11 @@ void XimeaImager::startRecord(double TimeDifferenceBetweensOSAndSbg,QString base
        }
        fwrite(m_imager.m_image.bp,1,m_iFrameSizeInByte, hFile);
        // build the RGB image used for streaming to the M300 remote controller
        m_rgbImage->FillRgbImage((unsigned short *)m_imager.m_image.bp);

        if (m_iFrameCounter % pushFlowFactor == 0)
        if(m_iFlowSwitch == 1)
        {
            memcpy(frame->data[0], m_rgbImage->m_matRgbImage->data, m_rgbImage->m_matRgbImage->rows * m_rgbImage->m_matRgbImage->step[0]);
            // memcpy(frame->data[0], m_rgbImage->m_Qphoto.bits(), m_rgbImage->m_Qphoto.byteCount());

            // color-space conversion with sws_scale
            sws_scale(swsContext, frame->data, frame->linesize, 0, height,
                      dstFrame->data, dstFrame->linesize);
            dstFrame->pts = encodeCounter;

            // encode the AVFrame into a video frame
            AVPacket pkt;
            av_init_packet(&pkt);
            pkt.data = nullptr;
            pkt.size = 0;

            if (avcodec_send_frame(codecContext, dstFrame) == 0 &&
                avcodec_receive_packet(codecContext, &pkt) == 0)
            {
                // fwrite(pkt.data, 1, pkt.size, fp);
                //
                m_udpSocket->writeDatagram((const char *)pkt.data,pkt.size,m_clientIpAddress, PUSH_FLOW_PORT);
                //
                // std::cout<< "frame " << m_iFrameCounter << ", encoded frame " << udpSendCounter << " packet size: " << pkt.size << std::endl;
                // std::cout<< "pkt.pts: " << pkt.pts << std::endl;
                // std::cout<< "pkt.dts: " << pkt.dts << std::endl << std::endl;
                udpSendCounter++;

                // write the encoded frame to the file
                // pkt.stream_index = videoStream->index;
                // av_interleaved_write_frame(formatContext, &pkt);
                // av_write_frame(formatContext, &pkt);

                av_packet_unref(&pkt);
            }
            encodeCounter++;
            m_rgbImage->FillRgbImage((unsigned short *)m_imager.m_image.bp);
        }

        indexofbuff = m_iFrameCounter % number_WriteDisk;

        if (indexofbuff == 0)
@@ -801,23 +731,7 @@ void XimeaImager::startRecord(double TimeDifferenceBetweensOSAndSbg,QString base
    writeData2Disk->exitWriteData2Disk();
    writeHdr();

    fclose(fp);
    // write the file trailer
    av_write_trailer(formatContext);

    // free the AVFrame and related resources
    av_freep(&frame->data[0]);
    av_frame_free(&frame);

    // free the remaining resources
    sws_freeContext(swsContext);
    av_freep(&dstFrame->data[0]);
    av_frame_free(&dstFrame);
    // av_packet_free(&pkt);
    avcodec_close(codecContext);
    avcodec_free_context(&codecContext);
    avio_closep(&formatContext->pb);
    avformat_free_context(formatContext);
    m_pushFlow->exitPushFlow();

    delete[] sbgTimeBuffer;
@@ -1239,3 +1153,213 @@ void WriteData2Disk::setParm(queue<DataBuffer *> * q, queue<int> * qFrameCounter

    m_rgbImage = rgbImage;
}

PushFlow::PushFlow()
{
    m_iWidth = 1368;
    m_iHeight = 720;
    m_iFramerateVideo = 5;
}

void PushFlow::setParm(rgbImage * img, int width, int height, int framerateVideo)
{
    m_rgbImage = img;

    m_iWidth = width;
    m_iHeight = height;
    m_iFramerateVideo = framerateVideo;
}

void PushFlow::setVedioFilePath(QString path)
{
    m_QVedioFilePath = path;
}

void PushFlow::exitPushFlow()
{
    isExitPushFlow = true;
}
void PushFlow::encodePushFlow()
{
    // create the AVFormatContext for the output video
    const char* outputVideoPath = "/media/nvme/delete/300tc.h264";

    FILE *fp = fopen(m_QVedioFilePath.toStdString().c_str(),"w+b");

    AVFormatContext* formatContext = nullptr;
    avformat_alloc_output_context2(&formatContext, nullptr, "mp4", outputVideoPath);
    if (!formatContext)
    {
        qDebug() << "Error: Failed to allocate output context";
        return;
    }

    // look up the H.264 encoder
    const AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec)
    {
        qDebug() << "Error: Codec not found";
        avformat_free_context(formatContext);
        return;
    }

    // create the output video stream
    AVStream* videoStream = avformat_new_stream(formatContext, codec);
    if (!videoStream)
    {
        qDebug() << "Error: Failed to create video stream";
        avformat_free_context(formatContext);
        return;
    }
    // set the video stream parameters, e.g. resolution and frame rate
    videoStream->codecpar->width = m_iWidth;
    videoStream->codecpar->height = m_iHeight;
    videoStream->codecpar->codec_id = AV_CODEC_ID_H264; // use the H.264 codec
    videoStream->codecpar->format = AV_PIX_FMT_YUV420P; // use the YUV420P pixel format

    // allocate and configure the codec context
    AVCodecContext* codecContext = avcodec_alloc_context3(codec);
    if (!codecContext)
    {
        qDebug() << "Error: Failed to allocate codec context";
        avformat_free_context(formatContext);
        return;
    }

    // set the encoder parameters, e.g. resolution and frame rate
    codecContext->width = m_iWidth;
    codecContext->height = m_iHeight;
    codecContext->time_base = {1, m_iFramerateVideo};
    codecContext->pix_fmt = AV_PIX_FMT_YUV420P; // YUV420P pixel format
    codecContext->gop_size = 1; // frames per keyframe group (1 = every frame is a keyframe)
    codecContext->max_b_frames = 1; // B-frames (bidirectional reference frames)
    // codecContext->bit_rate = 1000000; // set the bit rate to 1000000

    // open the video encoder
    if (avcodec_open2(codecContext, codec, nullptr) < 0)
    {
        qDebug() << "Error: Failed to open codec";
        avcodec_free_context(&codecContext);
        avformat_free_context(formatContext);
        return;
    }

    // open the output file
    if (avio_open(&formatContext->pb, outputVideoPath, AVIO_FLAG_WRITE) < 0)
    {
        qDebug() << "Error: Failed to open output file";
        avcodec_close(codecContext);
        avcodec_free_context(&codecContext);
        avformat_free_context(formatContext);
        return;
    }

    // write the file header
    avformat_write_header(formatContext, nullptr);

    // color-space conversion with sws_scale
    SwsContext* swsContext = sws_getContext(m_iWidth, m_iHeight, AV_PIX_FMT_BGR24,
                                            m_iWidth, m_iHeight, AV_PIX_FMT_YUV420P,
                                            SWS_BICUBIC, nullptr, nullptr, nullptr);
    if (!swsContext)
    {
        qDebug() << "Error: Failed to create sws context";
        avio_closep(&formatContext->pb);
        avcodec_close(codecContext);
        avcodec_free_context(&codecContext);
        avformat_free_context(formatContext);
        return;
    }

    // create an AVFrame for the destination image
    AVFrame* dstFrame = av_frame_alloc();
    av_image_alloc(dstFrame->data, dstFrame->linesize, m_iWidth, m_iHeight, AV_PIX_FMT_YUV420P, 1);

    // set the destination image parameters
    dstFrame->width = m_iWidth;
    dstFrame->height = m_iHeight;
    dstFrame->format = AV_PIX_FMT_YUV420P;

    AVFrame* frame = av_frame_alloc();
    av_image_alloc(frame->data, frame->linesize, m_iWidth, m_iHeight, AV_PIX_FMT_BGR24, 1);

    QUdpSocket * m_udpSocket = new QUdpSocket();
    m_udpSocket->bind(PUSH_FLOW_PORT, QUdpSocket::ShareAddress);
    QHostAddress m_clientIpAddress=QHostAddress(QHostAddress::LocalHost);
    // QHostAddress m_clientIpAddress("192.168.1.30");
    // QHostAddress m_clientIpAddress("192.168.111.1");
    int udpSendCounter=0;
    int encodeCounter=0;

    isExitPushFlow = false;

    unsigned long sleepTime = 1/(float)m_iFramerateVideo * 1000;
    std::cout<< "push-flow frame rate: " << m_iFramerateVideo << ", sleepTime:" << sleepTime << "ms." << std::endl;
    while(true)
    {
        QThread::msleep(sleepTime);

        memcpy(frame->data[0], m_rgbImage->m_matRgbImage->data, m_rgbImage->m_matRgbImage->rows * m_rgbImage->m_matRgbImage->step[0]);
        // memcpy(frame->data[0], m_rgbImage->m_Qphoto.bits(), m_rgbImage->m_Qphoto.byteCount());

        // color-space conversion with sws_scale
        sws_scale(swsContext, frame->data, frame->linesize, 0, m_iHeight,
                  dstFrame->data, dstFrame->linesize);
        dstFrame->pts = encodeCounter;

        // encode the AVFrame into a video frame
        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.data = nullptr;
        pkt.size = 0;

        if (avcodec_send_frame(codecContext, dstFrame) == 0 &&
            avcodec_receive_packet(codecContext, &pkt) == 0)
        {
            fwrite(pkt.data, 1, pkt.size, fp);

            m_udpSocket->writeDatagram((const char *)pkt.data,pkt.size,m_clientIpAddress, PUSH_FLOW_PORT);

            // std::cout << "encoded frame " << udpSendCounter << " packet size: " << pkt.size << std::endl;
            // std::cout<< "pkt.pts: " << pkt.pts << std::endl;
            // std::cout<< "pkt.dts: " << pkt.dts << std::endl << std::endl;
            udpSendCounter++;

            // write the encoded frame to the file
            // pkt.stream_index = videoStream->index;
            // av_interleaved_write_frame(formatContext, &pkt);
            // av_write_frame(formatContext, &pkt);

            av_packet_unref(&pkt);
        }
        encodeCounter++;

        if(isExitPushFlow)
        {
            std::cout<<"PushFlow::encodePushFlow----------------------- the push-flow thread is about to exit!"<<std::endl;
            break;
        }
    }

    fclose(fp);
    // write the file trailer
    av_write_trailer(formatContext);

    // free the AVFrame and related resources
    av_freep(&frame->data[0]);
    av_frame_free(&frame);

    // free the remaining resources
    sws_freeContext(swsContext);
    av_freep(&dstFrame->data[0]);
    av_frame_free(&dstFrame);
    // av_packet_free(&pkt);
    avcodec_close(codecContext);
    avcodec_free_context(&codecContext);
    avio_closep(&formatContext->pb);
    avformat_free_context(formatContext);

    std::cout<<"PushFlow::encodePushFlow----------------------- the push-flow thread has exited!" << std::endl;
}