#include "Header_Files/ximeaimager.h" XimeaImager::XimeaImager() { m_buffer=nullptr; m_bRecordControl=false; m_iFrameCounter=0; m_iImagerState=100; QString ximeaCfgFile = "/media/nvme/300TC/config/ximea.cfg"; m_configfile.setConfigfilePath(ximeaCfgFile.toStdString()); if(!m_configfile.isConfigfileExist()) m_configfile.createConfigFile(); m_configfile.parseConfigfile(); m_configfile.getWindowOffsety_HeightOfSpectral(m_iOffsetyOfSpectralBin1, m_iHeightOfSpectralBin1, "bin1"); m_configfile.getWindowOffsety_HeightOfSpectral(m_iOffsetyOfSpectralBin2, m_iHeightOfSpectralBin2, "bin2"); //检查 ximea.cfg 是否满足要求 if(m_iOffsetyOfSpectralBin2 != m_iOffsetyOfSpectralBin1 / 2) { std::cout<<"ximea.cfg 错误:m_iOffsetyOfSpectralBin2 != m_iOffsetyOfSpectralBin1 / 2!"<moveToThread(m_recordTempThread); m_recordTempThread->start(); connect(this, SIGNAL(recordXimeaTemperatureSignal(QString)),m_ximeaTemperature, SLOT(recordTemperature(QString))); writeData2DiskThread = new QThread(); writeData2Disk = new WriteData2Disk(); writeData2Disk->moveToThread(writeData2DiskThread); writeData2DiskThread->start(QThread::HighestPriority); connect(this, SIGNAL(startWriteDiskSignal()), writeData2Disk, SLOT(write2Disk())); m_pushFlowThread=new QThread(); m_pushFlow = new PushFlow(); m_pushFlow->moveToThread(m_pushFlowThread); m_pushFlowThread->start(); connect(this, SIGNAL(startPushFlowSignal()), m_pushFlow, SLOT(encodePushFlow())); m_pool = new MemoryPool; q = new queue; m_qFrameCounter = new queue; m_rgbImage = new rgbImage(); } XimeaImager::~XimeaImager() { } void XimeaImager::openImger() { if(m_iImagerState != 100)//如果相机已经打开或者已经出错,就直接返回 { emit ximeaImageStatus(m_iImagerState); return; } try { //std::cout<<"XimeaImager::openImger111111111111111111111:正在打开相机!"<SetRgbImageWidthAndHeight(height, width, rgbHeight); m_pushFlow->setParm(m_rgbImage,width,rgbHeight,framerateVideo); int redBandNumber; int greenBandNumber; int blueBandNumber; getRgbBandNumber(redBandNumber, greenBandNumber, blueBandNumber); m_rgbImage->SetRgbBandNumber(redBandNumber, greenBandNumber, blueBandNumber); std::cout<<"height:"<< height <stopRecordTemperature(); if(m_iImagerState==100) { emit ximeaImageStatus(m_iImagerState); return; } try { m_imager.disconnect(); m_iImagerState=100; emit ximeaImageStatus(m_iImagerState); } catch(int xiApiErrorCodes) { std::cout<<"XimeaImager::closeImger-------------------!"< maxExposureTimeInUs) { wrapSetExposureTime(maxExposureTimeInUs); } m_iImagerState=102; emit ximeaImageStatus(m_iImagerState); emit frameRateSignal(framerate); m_parameterConfigfile.setFrameRate(framerate); } catch(int xiApiErrorCodes) { processXiApiErrorCodes(xiApiErrorCodes); } } double XimeaImager::getExposureTime() { double exposureTime; try { exposureTime=m_imager.get_integration_time(); return exposureTime; } catch(int xiApiErrorCodes) { processXiApiErrorCodes(xiApiErrorCodes); return -1; } } double XimeaImager::setExposureTime(float exposureTime_in_us) { double integrationTime2Return; try { //计算最大积分时间 float currentFramerate=getFramerate(); float maxExposureTime_in_us=1/currentFramerate*1000000; //确保设置的积分时间比最大积分时间小 if(exposureTime_in_us wavelengths; if (m_imager.getSpectralBin() == 1) { for (int i = getWindowStartBand(); i < getWindowEndBand(); i++) { wavelengths.push_back(geWavelengthAtBand(i)); } } else if (m_imager.getSpectralBin() == 2) { for (int i = m_iOffsetyOfSpectralBin2; i < m_iOffsetyOfSpectralBin2 + m_iHeightOfSpectralBin2; i++) { if (i*2 + 1 > m_iOffsetyOfSpectralBin1 + m_iHeightOfSpectralBin1) { printf("XimeaImager::writeHdr 出现错误:窗口中,光谱 
            if (i*2 + 1 > m_iOffsetyOfSpectralBin1 + m_iHeightOfSpectralBin1) {
                printf("XimeaImager::writeHdr error: in the window, the number of spectral bin1 bands is less than twice the number of bin2 bands.\n");
                break;
            }
            wavelengths.push_back((geWavelengthAtBand(i*2) + geWavelengthAtBand(i*2 + 1)) / 2);
        }
    }

    // Red/green/blue wavelengths (nm) that ENVI uses when it opens a file
    int r_envi = 640;
    int g_envi = 550;
    int b_envi = 470;
    redBandNumber = findClosestIndex(wavelengths, r_envi);
    greenBandNumber = findClosestIndex(wavelengths, g_envi);
    blueBandNumber = findClosestIndex(wavelengths, b_envi);
    // std::cout<<"Band number of the red band: "<< redBandNumber <<std::endl;
    /* ... */
}

int XimeaImager::findClosestIndex(const std::vector<double>& numbers, double target)
{
    if (numbers.empty()) {
        // Handle the empty-vector case
        return -1;
    }
    double minDifference = std::abs(numbers[0] - target);
    int closestIndex = 0;
    for (int i = 1; i < numbers.size(); ++i) {
        double currentDifference = std::abs(numbers[i] - target);
        if (currentDifference < minDifference) {
            minDifference = currentDifference;
            closestIndex = i;
        }
    }
    return closestIndex;
}

int XimeaImager::getMaxValueOfOneFrame(unsigned short * data, int numberOfPixel)
{
    // Sort
    //bubbleSort(data,1000);
    // Compute the average of the largest 10% of the values
    unsigned short maxValue = 0;
    for (int i = 0; i < numberOfPixel; i++) {
        if (*(data + i) > maxValue) {
            //std::cout<<"pixel value: "<< *(data + i) <<std::endl;
            maxValue = *(data + i);
        }
    }
    return maxValue;
}

double XimeaImager::calculateTimeDifferenceBetweenSbgAndximea(XI_IMG * image, double timeDifferenceBetweenSbgAndOS)
{
    double ximeaTime = image->tsSec + image->tsUSec / 1000000.0;
    printf("XimeaImager::calculateTimeDifferenceBetweenSystemAndximea--ximeaTime: %f s\n", ximeaTime);

    // Get the system time (nanoseconds)
    struct timespec systemTime;
    clock_gettime(CLOCK_REALTIME, &systemTime);
    tm systemTime_rili;
    localtime_r(&systemTime.tv_sec, &systemTime_rili);
    double secondSystem = (systemTime_rili.tm_mday-1)*24*60*60 + systemTime_rili.tm_hour*60*60 + systemTime_rili.tm_min*60 + systemTime_rili.tm_sec;
    double timeOS = secondSystem + static_cast<double>(systemTime.tv_nsec) / 1000000000;
    printf("XimeaImager::calculateTimeDifferenceBetweenSystemAndximea--osTime: %f s\n", timeOS);

    // Compute the gap between system time and GPS time
    double timeDifferenceBetweenSbgAndximea = timeOS - timeDifferenceBetweenSbgAndOS - ximeaTime;
    printf("XimeaImager::calculateTimeDifferenceBetweenSystemAndximea--timeDifferenceBetweenSbgAndximea: %f s\n", timeDifferenceBetweenSbgAndximea);
    return timeDifferenceBetweenSbgAndximea;
}

void XimeaImager::startRecord(double TimeDifferenceBetweensOSAndSbg, QString baseFileName)
{
    try {
        if (m_iImagerState <= 99 || m_iImagerState == 100 || m_iImagerState == 104) {
            printf("Acquisition has already started----------------------------!\n");
            return;
        }
        m_iImagerStateTemp = m_iImagerState;
        m_iImagerState = 104;
        emit ximeaImageStatus(m_iImagerState);

        char * timeFormat = "%Y%m%d_%H%M%S";
        QString timeStr = formatTimeStr(timeFormat);
        printf("Start acquisition: %s!\n", timeStr.toStdString().c_str());

        m_iFrameCounter = 0;
        m_bRecordControl = true;
        m_baseFileName = baseFileName;
        std::cout << "Frame rate: " << getFramerate() << "hz" << std::endl;
        /* ... */
        writeData2Disk->setParm(q, m_qFrameCounter, m_baseFileName, m_iFrameSizeInByte, number_WriteDisk, m_pool, m_rgbImage);
        // emit startWriteDiskSignal();

        int indexofbuff;
        DataBuffer * buffer;
        QString timesFileName = m_baseFileName + ".times";
        FILE *hHimesFile = fopen(timesFileName.toStdString().c_str(), "w+");
        // ofstream timesFile(timesFileName.toStdString());
        double timeDifferenceBetweenSbgAndXimea;
        double * sbgTimeBuffer = new double[number_WriteDisk];
        QString imageFileName = m_baseFileName + ".bil";
        FILE *hFile = fopen(imageFileName.toStdString().c_str(), "w+b");
        double * imageBuffer = new double[number_WriteDisk];
        QString vedioFileName = m_baseFileName + ".h264";
        m_pushFlow->setVedioFilePath(vedioFileName);
        if (m_iFlowSwitch == 1) {
            emit startPushFlowSignal();
        }

        m_imager.start();
        struct timeval timeStart, timeEnd;
        double runTime = 0;
        gettimeofday(&timeStart, NULL);

        while (m_bRecordControl) {
            unsigned short *x = m_imager.get_frame(m_buffer);
            m_iFrameCounter += 1;
            if (m_iFrameCounter == 1) {
                timeDifferenceBetweenSbgAndXimea = calculateTimeDifferenceBetweenSbgAndximea(&m_imager.m_image, TimeDifferenceBetweensOSAndSbg);
            }
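            // (Added note) The SBG(GPS)-to-camera clock offset is measured once, on the first
            // frame, and treated as constant for the rest of the acquisition; getSbgTime()
            // presumably applies this fixed offset to every later camera timestamp.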
            fwrite(m_imager.m_image.bp, 1, m_iFrameSizeInByte, hFile);

            // Build the RGB image used for streaming to the M300 remote controller
            if (m_iFlowSwitch == 1) {
                m_rgbImage->FillRgbImage((unsigned short *)m_imager.m_image.bp);
            }

            indexofbuff = m_iFrameCounter % number_WriteDisk;
            if (indexofbuff == 0) {
                sbgTimeBuffer[number_WriteDisk - 1] = getSbgTime(&m_imager.m_image, timeDifferenceBetweenSbgAndXimea);
            }
            else {
                sbgTimeBuffer[indexofbuff - 1] = getSbgTime(&m_imager.m_image, timeDifferenceBetweenSbgAndXimea);
            }
            if (indexofbuff == 0) {
                for (int i = 0; i < number_WriteDisk; ++i) {
                    fprintf(hHimesFile, "%f\n", sbgTimeBuffer[i]);
                }
            }
        }

        if (indexofbuff != 0) {
            for (int i = 0; i < indexofbuff; ++i) {
                fprintf(hHimesFile, "%f\n", sbgTimeBuffer[i]);
            }
            std::cout << "Last batch was not full: " << indexofbuff << std::endl;
        }
        /* ... */
        writeData2Disk->exitWriteData2Disk();
        writeHdr();
        m_pushFlow->exitPushFlow();
        delete[] sbgTimeBuffer;

        double frameInTheory = runTime * getFramerate();
        double frameLossed = m_imager.m_image.acq_nframe - m_iFrameCounter;
        double frameLossRate = frameLossed / m_imager.m_image.acq_nframe;
        std::cout << "Current acquisition file: " << /* ... */ std::endl;
        /* ... */
    }
    catch (int xiApiErrorCodes) {
        processXiApiErrorCodes(xiApiErrorCodes);
    }
}

void XimeaImager::writeHdr()
{
    /* ... */
            if (i*2 + 1 > m_iOffsetyOfSpectralBin1 + m_iHeightOfSpectralBin1) {
                printf("XimeaImager::writeHdr error: in the window, the number of spectral bin1 bands is less than twice the number of bin2 bands.\n");
                break;
            }
            hdrFileHandle << (geWavelengthAtBand(i*2) + geWavelengthAtBand(i*2 + 1)) / 2;
            counter++;
            if (i < m_iOffsetyOfSpectralBin2 + m_iHeightOfSpectralBin2 - 1)
                hdrFileHandle << ", ";
            else {
                printf("Number of bands written to the header file: %d\n", counter);
            }
        }
    }
    hdrFileHandle << "}\n";
    hdrFileHandle.close();
}
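// --- Illustrative sketch (added; not part of the original source) --------------------------------
// A rough sketch of the ENVI header fields that typically accompany the wavelength list written by
// writeHdr() above. writeEnviHeaderSketch(), its parameters, and the literal values are assumptions,
// not project code; only the .bil interleave and unsigned short pixel type follow from startRecord().
static void writeEnviHeaderSketch(const std::string &hdrPath, int samples, int lines, int bands)
{
    std::ofstream hdr(hdrPath);
    hdr << "ENVI\n";
    hdr << "samples = " << samples << "\n";   // pixels per line
    hdr << "lines = "   << lines   << "\n";   // frames recorded
    hdr << "bands = "   << bands   << "\n";   // spectral bands in the window
    hdr << "interleave = bil\n";              // matches the .bil file written in startRecord()
    hdr << "data type = 12\n";                // ENVI code 12 = unsigned 16-bit, matching the unsigned short frames
    hdr << "byte order = 0\n";                // little-endian
    // ...followed by "wavelength = { ... }", as the surviving part of writeHdr() shows.
}
// --------------------------------------------------------------------------------------------------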
/*
#define MM40_OK 0 //!< Function call succeeded
#define MM40_INVALID_HANDLE 1 //!< Invalid handle
#define MM40_READREG 2 //!< Register read error
#define MM40_WRITEREG 3 //!< Register write error
#define MM40_FREE_RESOURCES 4 //!< Freeing resources error
#define MM40_FREE_CHANNEL 5 //!< Freeing channel error
#define MM40_FREE_BANDWIDTH 6 //!< Freeing bandwith error
#define MM40_READBLK 7 //!< Read block error
#define MM40_WRITEBLK 8 //!< Write block error
#define MM40_NO_IMAGE 9 //!< No image
#define MM40_TIMEOUT 10 //!< Timeout
#define MM40_INVALID_ARG 11 //!< Invalid arguments supplied
#define MM40_NOT_SUPPORTED 12 //!< Not supported
#define MM40_ISOCH_ATTACH_BUFFERS 13 //!< Attach buffers error
#define MM40_GET_OVERLAPPED_RESULT 14 //!< Overlapped result
#define MM40_MEMORY_ALLOCATION 15 //!< Memory allocation error
#define MM40_DLLCONTEXTISNULL 16 //!< DLL context is NULL
#define MM40_DLLCONTEXTISNONZERO 17 //!< DLL context is non zero
#define MM40_DLLCONTEXTEXIST 18 //!< DLL context exists
#define MM40_TOOMANYDEVICES 19 //!< Too many devices connected
#define MM40_ERRORCAMCONTEXT 20 //!< Camera context error
#define MM40_UNKNOWN_HARDWARE 21 //!< Unknown hardware
#define MM40_INVALID_TM_FILE 22 //!< Invalid TM file
#define MM40_INVALID_TM_TAG 23 //!< Invalid TM tag
#define MM40_INCOMPLETE_TM 24 //!< Incomplete TM
#define MM40_BUS_RESET_FAILED 25 //!< Bus reset error
#define MM40_NOT_IMPLEMENTED 26 //!< Not implemented
#define MM40_SHADING_TOOBRIGHT 27 //!< Shading is too bright
#define MM40_SHADING_TOODARK 28 //!< Shading is too dark
#define MM40_TOO_LOW_GAIN 29 //!< Gain is too low
#define MM40_INVALID_BPL 30 //!< Invalid sensor defect correction list
#define MM40_BPL_REALLOC 31 //!< Error while sensor defect correction list reallocation
#define MM40_INVALID_PIXEL_LIST 32 //!< Invalid pixel list
#define MM40_INVALID_FFS 33 //!< Invalid Flash File System
#define MM40_INVALID_PROFILE 34 //!< Invalid profile
#define MM40_INVALID_CALIBRATION 35 //!< Invalid calibration
#define MM40_INVALID_BUFFER 36 //!< Invalid buffer
#define MM40_INVALID_DATA 38 //!< Invalid data
#define MM40_TGBUSY 39 //!< Timing generator is busy
#define MM40_IO_WRONG 40 //!< Wrong operation open/write/read/close
#define MM40_ACQUISITION_ALREADY_UP 41 //!< Acquisition already started
#define MM40_OLD_DRIVER_VERSION 42 //!< Old version of device driver installed to the system.
#define MM40_GET_LAST_ERROR 43 //!< To get error code please call GetLastError function.
#define MM40_CANT_PROCESS 44 //!< Data cannot be processed
#define MM40_ACQUISITION_STOPED 45 //!< Acquisition is stopped. It needs to be started to perform operation.
#define MM40_ACQUISITION_STOPED_WERR 46 //!< Acquisition has been stopped with an error.
#define MM40_INVALID_INPUT_ICC_PROFILE 47 //!< Input ICC profile missing or corrupted
#define MM40_INVALID_OUTPUT_ICC_PROFILE 48 //!< Output ICC profile missing or corrupted
#define MM40_DEVICE_NOT_READY 49 //!< Device not ready to operate
#define MM40_SHADING_TOOCONTRAST 50 //!< Shading is too contrast
#define MM40_ALREADY_INITIALIZED 51 //!< Module already initialized
#define MM40_NOT_ENOUGH_PRIVILEGES 52 //!< Application does not have enough privileges (one or more app)
#define MM40_NOT_COMPATIBLE_DRIVER 53 //!< Installed driver is not compatible with current software
#define MM40_TM_INVALID_RESOURCE 54 //!< TM file was not loaded successfully from resources
#define MM40_DEVICE_HAS_BEEN_RESETED 55 //!< Device has been reset, abnormal initial state
#define MM40_NO_DEVICES_FOUND 56 //!< No Devices Found
#define MM40_RESOURCE_OR_FUNCTION_LOCKED 57 //!< Resource (device) or function locked by mutex
#define MM40_BUFFER_SIZE_TOO_SMALL 58 //!< Buffer provided by user is too small
#define MM40_COULDNT_INIT_PROCESSOR 59 //!< Could not initialize processor.
#define MM40_NOT_INITIALIZED 60 //!< The object/module/procedure/process being referred to has not been started.
#define MM40_RESOURCE_NOT_FOUND 61 //!< Resource not found (could be processor, file, item...).
*/

void XimeaImager::processXiApiErrorCodes(int xiApiErrorCodes)
{
    using namespace std;
    switch (xiApiErrorCodes) {
    case 1:
        std::cout << "XimeaImager::processXiApiErrorCodes-----------:Invalid handle!" << std::endl;
        /* ... */
    }
}

/* ... */

void RecordXimeaTemperature::recordTemperature(QString filePath)
{
    /* ... */
    std::vector<QString> fileInfo = getFileInfo(filePath);
    bool ret = createDir(fileInfo[0]);
    ofstream ximeaTemperatureFile(filePath.toStdString().c_str(), ios::app);
    int counter = 0;
    while (m_bIsRecord) {
        counter++;
        float temp = m_imager->getTemperature();
        if (temp > 80 && m_ximeaImager->getImagerState() == 104) {
            ximeaTemperatureFile.flush();
            system("/home/300tc/projects/udpClient/udpClient 127.0.0.1 9,0");
        }
        if (temp > 90) {
            ximeaTemperatureFile.flush();
            system("/home/300tc/projects/udpClient/udpClient 127.0.0.1 2");
        }

        // Get remaining disk space and remaining acquisition time
        char buffer[128];
        FILE *fp = popen("cat /sys/devices/virtual/thermal/thermal_zone1/temp", "r");
        unsigned long long temper = 0;
        if (fp == NULL) {
            perror("popen");
        }
        else {
            // Read the output and parse it
            if (fgets(buffer, sizeof(buffer), fp) != NULL) {
                temper = strtoull(buffer, NULL, 10);
                // printf("CPU temperature: %.2f;\n", (float)temper/1000);
            }
            pclose(fp);
        }

        QDateTime curDateTime = QDateTime::currentDateTime();
        QString currentTime = curDateTime.toString("yyyy/MM/dd hh:mm:ss");
        ximeaTemperatureFile << currentTime.toStdString() << "," << temp << "," << (float)temper/1000 << "\n";
        // std::cout<<"RecordXimeaTemperature::recordTemperature----------------: ximea temperature is "<< temp << /* ... */ << m_ximeaImager->getImagerState() <<std::endl;
        /* ... */
    }
}
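// --- Illustrative sketch (added; not part of the original source) --------------------------------
// recordTemperature() above reads the CPU temperature by shelling out through popen("cat ...").
// A lighter-weight alternative is to read the sysfs node directly; readCpuTempC() and the default
// path are assumptions, not project code.
static float readCpuTempC(const char *path = "/sys/devices/virtual/thermal/thermal_zone1/temp")
{
    std::ifstream f(path);
    long milliCelsius = 0;
    if (f >> milliCelsius)              // the kernel reports the temperature in milli-degrees Celsius
        return milliCelsius / 1000.0f;
    return -1.0f;                       // read failed
}
// --------------------------------------------------------------------------------------------------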
/* ... */

void WriteData2Disk::write2Disk()
{
    /* ... */
    while (true) {
        r_qtx.lock();
        bool bempty = m_q->empty();
        r_qtx.unlock();
        if (bempty && isExitWriteData2Disk) {
            std::cout << "WriteData2Disk::write2Disk----------------------- the queue is empty and the acquisition thread has exited!" << std::endl;
            break;
        }
        /* ... */
        r_qtx.lock();
        DataBuffer * buffer = m_q->front();
        int frameNumber = m_qFrameCounter->front();
        memcpy(dataBuffer, buffer->data, m_iFrameSizeInByte * frameNumber);
        // m_pool->destroy(m_q->front());
        m_pool->deleteElement(buffer);
        m_q->pop();
        m_qFrameCounter->pop();
        r_qtx.unlock();

        // Build the RGB image used for streaming to the M300 remote controller
        m_rgbImage->FillRgbImage(dataBuffer);
        // std::cout<<"WriteData2Disk::write2Disk----------------------- writing to disk!" << m_pool->max_size() <<std::endl;
        fwrite(dataBuffer, 1, m_iFrameSizeInByte * frameNumber, hFile);
        /* ... */
    }

    m_rgbImage->m_VideoWriter.release();
    fclose(hFile);
    delete[] dataBuffer;
    std::cout << "WriteData2Disk::write2Disk----------------------- the disk-writing thread is about to exit; maximum number of elements the memory pool reached: " << m_pool->max_size() << std::endl;
}

/* ... */

void WriteData2Disk::setParm(queue<DataBuffer*> * q, queue<int> * qFrameCounter, QString baseFileName, int frameSizeInByte, int number_WriteDisk, MemoryPool * pool, rgbImage * rgbImage)
{
    m_q = q;
    m_qFrameCounter = qFrameCounter;
    m_QbaseFileName = baseFileName;
    m_iFrameSizeInByte = frameSizeInByte;
    m_iNumber_WriteDisk = number_WriteDisk;
    m_pool = pool;
    m_rgbImage = rgbImage;
}

PushFlow::PushFlow()
{
    m_iWidth = 1368;
    m_iHeight = 720;
    m_iFramerateVideo = 5;
}

void PushFlow::setParm(rgbImage * img, int width, int height, int framerateVideo)
{
    m_rgbImage = img;
    m_iWidth = width;
    m_iHeight = height;
    m_iFramerateVideo = framerateVideo;
}

void PushFlow::setVedioFilePath(QString path)
{
    m_QVedioFilePath = path;
}

void PushFlow::exitPushFlow()
{
    isExitPushFlow = true;
}

void PushFlow::encodePushFlow()
{
    // Create the AVFormatContext for the output video
    const char* outputVideoPath = "/media/nvme/delete/300tc.h264";
    FILE *fp = fopen(m_QVedioFilePath.toStdString().c_str(), "w+b");
    AVFormatContext* formatContext = nullptr;
    avformat_alloc_output_context2(&formatContext, nullptr, "mp4", outputVideoPath);
    if (!formatContext) {
        qDebug() << "Error: Failed to allocate output context";
        return;
    }

    // Find the H.264 encoder
    const AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec) {
        qDebug() << "Error: Codec not found";
        avformat_free_context(formatContext);
        return;
    }

    // Create the output video stream
    AVStream* videoStream = avformat_new_stream(formatContext, codec);
    if (!videoStream) {
        qDebug() << "Error: Failed to create video stream";
        avformat_free_context(formatContext);
        return;
    }

    // Set the video stream parameters, e.g. resolution and pixel format
    videoStream->codecpar->width = m_iWidth;
    videoStream->codecpar->height = m_iHeight;
    videoStream->codecpar->codec_id = AV_CODEC_ID_H264;    // H.264 codec
    videoStream->codecpar->format = AV_PIX_FMT_YUV420P;    // YUV420P pixel format

    // Configure the codec context
    AVCodecContext* codecContext = avcodec_alloc_context3(codec);
    if (!codecContext) {
        qDebug() << "Error: Failed to allocate codec context";
        avformat_free_context(formatContext);
        return;
    }
    // Set the encoder parameters, e.g. resolution and frame rate
    codecContext->width = m_iWidth;
    codecContext->height = m_iHeight;
    codecContext->time_base = {1, m_iFramerateVideo};
    codecContext->pix_fmt = AV_PIX_FMT_YUV420P;   // YUV420P format
    codecContext->gop_size = 1;                   // how many frames per group of pictures (keyframe interval)
    codecContext->max_b_frames = 1;               // B-frames (reference frames)
    // codecContext->bit_rate = 1000000;          // set the bitrate to 1000000

    // Open the video encoder
    if (avcodec_open2(codecContext, codec, nullptr) < 0) {
        qDebug() << "Error: Failed to open codec";
        avcodec_free_context(&codecContext);
        avformat_free_context(formatContext);
        return;
    }

    // Open the output file
    if (avio_open(&formatContext->pb, outputVideoPath, AVIO_FLAG_WRITE) < 0) {
        qDebug() << "Error: Failed to open output file";
        avcodec_close(codecContext);
        avcodec_free_context(&codecContext);
        avformat_free_context(formatContext);
        return;
    }

    // Write the file header
    avformat_write_header(formatContext, nullptr);

    // Use sws_scale for the color-space conversion
    SwsContext* swsContext = sws_getContext(m_iWidth, m_iHeight, AV_PIX_FMT_BGR24,
                                            m_iWidth, m_iHeight, AV_PIX_FMT_YUV420P,
                                            SWS_BICUBIC, nullptr, nullptr, nullptr);
    if (!swsContext) {
        qDebug() << "Error: Failed to create sws context";
        avio_closep(&formatContext->pb);
        avcodec_close(codecContext);
        avcodec_free_context(&codecContext);
        avformat_free_context(formatContext);
        return;
    }

    // Create an AVFrame as the destination image
    AVFrame* dstFrame = av_frame_alloc();
    av_image_alloc(dstFrame->data, dstFrame->linesize, m_iWidth, m_iHeight, AV_PIX_FMT_YUV420P, 1);
    // Set the destination image parameters
    dstFrame->width = m_iWidth;
    dstFrame->height = m_iHeight;
    dstFrame->format = AV_PIX_FMT_YUV420P;
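    // (Added note) Two frames are used deliberately: "frame" below stages the BGR24 pixels copied
    // from the OpenCV Mat held by rgbImage, and sws_scale() converts them into "dstFrame"
    // (YUV420P), which is the layout the H.264 encoder expects.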
    AVFrame* frame = av_frame_alloc();
    av_image_alloc(frame->data, frame->linesize, m_iWidth, m_iHeight, AV_PIX_FMT_BGR24, 1);

    QUdpSocket * m_udpSocket = new QUdpSocket();
    m_udpSocket->bind(PUSH_FLOW_PORT, QUdpSocket::ShareAddress);
    QHostAddress m_clientIpAddress = QHostAddress(QHostAddress::LocalHost);
    // QHostAddress m_clientIpAddress("192.168.1.30");
    // QHostAddress m_clientIpAddress("192.168.111.1");

    int udpSendCounter = 0;
    int encodeCounter = 0;
    isExitPushFlow = false;
    unsigned long sleepTime = 1 / (float)m_iFramerateVideo * 1000;
    std::cout << "Streaming frame rate: " << m_iFramerateVideo << ", sleepTime:" << sleepTime << "ms." << std::endl;

    while (true) {
        QThread::msleep(sleepTime);
        memcpy(frame->data[0], m_rgbImage->m_matRgbImage->data, m_rgbImage->m_matRgbImage->rows * m_rgbImage->m_matRgbImage->step[0]);
        // memcpy(frame->data[0], m_rgbImage->m_Qphoto.bits(), m_rgbImage->m_Qphoto.byteCount());

        // Use sws_scale for the color-space conversion
        sws_scale(swsContext, frame->data, frame->linesize, 0, m_iHeight, dstFrame->data, dstFrame->linesize);
        dstFrame->pts = encodeCounter;

        // Encode the AVFrame into a video packet
        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.data = nullptr;
        pkt.size = 0;
        if (avcodec_send_frame(codecContext, dstFrame) == 0 && avcodec_receive_packet(codecContext, &pkt) == 0) {
            fwrite(pkt.data, 1, pkt.size, fp);
            m_udpSocket->writeDatagram((const char *)pkt.data, pkt.size, m_clientIpAddress, PUSH_FLOW_PORT);
            // std::cout << "Size of encoded frame " << udpSendCounter << ": " << pkt.size << std::endl;
            // std::cout<< "pkt.pts: " << pkt.pts << std::endl;
            // std::cout<< "pkt.dts: " << pkt.dts << std::endl << std::endl;
            udpSendCounter++;
            // Write the encoded frame into the container file
            // pkt.stream_index = videoStream->index;
            // av_interleaved_write_frame(formatContext, &pkt);
            // av_write_frame(formatContext, &pkt);
            av_packet_unref(&pkt);
        }
        encodeCounter++;

        if (isExitPushFlow) {
            std::cout << "PushFlow::encodePushFlow----------------------- the push-flow thread is about to exit!" << std::endl;
            break;
        }
    }

    /* ... */
    av_freep(&frame->data[0]);
    av_frame_free(&frame);

    // Release resources
    sws_freeContext(swsContext);
    av_freep(&dstFrame->data[0]);
    av_frame_free(&dstFrame);
    // av_packet_free(&pkt);
    avcodec_close(codecContext);
    avcodec_free_context(&codecContext);
    avio_closep(&formatContext->pb);
    avformat_free_context(formatContext);
    std::cout << "PushFlow::encodePushFlow----------------------- the push-flow thread has exited!" << std::endl;
}
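// --- Illustrative sketch (added; not part of the original source) --------------------------------
// encodePushFlow() above requests exactly one packet per submitted frame and never drains the
// encoder, so with max_b_frames = 1 the last delayed packet(s) can be lost at shutdown. A typical
// flush sequence looks like this; flushEncoder() is a hypothetical helper, not project code.
static void flushEncoder(AVCodecContext *codecContext, FILE *fp)
{
    avcodec_send_frame(codecContext, nullptr);      // enter draining mode
    AVPacket *pkt = av_packet_alloc();
    while (avcodec_receive_packet(codecContext, pkt) == 0) {
        fwrite(pkt->data, 1, pkt->size, fp);        // write out the remaining delayed packets
        av_packet_unref(pkt);
    }
    av_packet_free(&pkt);
}
// --------------------------------------------------------------------------------------------------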