6 Commits
40 ... 44

Author SHA1 Message Date
faf83b2545 fix:
1、光谱bin2 hdr文件中的波长信息;
2、第一次运行创建配置文件时,添加推流配置;
2024-12-04 16:13:56 +08:00
e8760dcfe5 优化编码参数解决问题:
1、编码后数据量过大导致大疆图传中断;
2、大疆遥控器解码后,视频出现偶尔后退的情况;
2024-09-25 13:46:55 +08:00
bd4d055129 待解决bug:有的仪器使用avio_open/av_write_frame将视频写入文件会崩溃,而且写入的视频无法播放; 2024-03-01 10:15:27 +08:00
a91f5f5b04 1、添加配置文件控制推流参数;
2、解决遥控器解码时帧序混乱的问题(gop_size = 1);
3、完善代码;
2024-01-29 17:21:07 +08:00
2e4679aaef 实现功能:提取rgb波段并通过h264编码; 2024-01-23 15:33:18 +08:00
061e1f83bd add:记录cpu温度; 2023-10-25 15:37:15 +08:00
8 changed files with 598 additions and 146 deletions

View File

@ -23,6 +23,9 @@ find_package(OpenCV 4.2.0 REQUIRED)
include_directories(/usr/local/include/opencv4/)
link_directories(/usr/local/lib)
include_directories(/home/300tc/library/ffmpeg_build/include)
link_directories(/home/300tc/library/ffmpeg_build/lib)
add_executable(${CMAKE_PROJECT_NAME}
Source_Files/fileoperation.cpp
Header_Files/fileoperation.h
@ -53,4 +56,8 @@ target_link_libraries(${CMAKE_PROJECT_NAME}
irisXimeaImager
libconfig.so
libconfig++.so
${OpenCV_LIBS})
${OpenCV_LIBS}
avformat
avcodec
swscale
avutil)

View File

@ -42,6 +42,8 @@ public:
bool getBufferPolicy(int &bufferPolicy);
bool getAcqBufferSize(int &acqBufferSize);
bool getPushFlowParam(int &flowSwitch, int &rgbHeight, int &framerateVideo);
bool createConfigFile();
bool updateConfigFile();

View File

@ -11,6 +11,18 @@
#include <QObject>
#include <QImage>
#include <opencv2/opencv.hpp>//包含了所有东西,编译很慢
#include "opencv2/imgproc/types_c.h"
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>
#include "libavutil/pixfmt.h"
#include "libswscale/swscale.h"
#include <libavutil/imgutils.h>
#include <libavutil/avutil.h>
#include "libavdevice/avdevice.h"
}
using namespace cv;
class rgbImage :public QObject
@ -21,38 +33,33 @@ public:
rgbImage(QWidget* pParent = NULL);
~rgbImage();
void SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNumber);
void SetRgbImageWidthAndHeight(int BandCount, int Width, int height);
void SetRgbBandNumber(int redBandNumber, int greenBandNumber, int blueBandNumber);
void FillRgbImage(unsigned short *datacube);
void FillFocusGrayImage(unsigned short *datacube);
void FillFocusGrayQImage(unsigned short * datacube);
void FillOnerowofRgbImage(cv::Mat * matRgbImage, int rowNumber, unsigned short *datacube);
QImage Mat2QImage(cv::Mat cvImg);//https://www.cnblogs.com/annt/p/ant003.html
QImage *m_QRgbImage;
cv::Mat *m_matRgbImage;
QImage m_Qphoto;
QImage *m_qimageFocusGrayImage;
cv::Mat *m_matFocusGrayImage;//用于调焦时,显示一帧的灰度图
//cv::Mat m_matFocusGrayImage;//用于调焦时,显示一帧的灰度图
CvVideoWriter *m_frame_writer;
VideoWriter m_VideoWriter;
// VideoWriter m_video("appsrc ! autovideoconvert ! filesink location=/media/nvme/delete/live.avi", CV_FOURCC('M', 'J', 'P', 'G'), 25.0, Size(640, 480));
// VideoWriter video("test.avi", CV_FOURCC('M', 'J', 'P', 'G'), 25.0, Size(640, 480));//
//控制该填充rgb图像第几帧数据
//以下两种情况需要重置为01调用函数SetRgbImageWidthAndHeight2每次开始填充数据前
int m_iFrameCounter;
int m_iFramerate;//
protected:
private:
@ -61,15 +68,17 @@ private:
int m_iFrameNumber;//
void initffmpeg();
int m_iRedBandNumber;
int m_iGreenBandNumber;
int m_iBlueBandNumber;
public slots:
signals :
void sendstr(QString str);
void sendstr1(QString str);
void refreslabelimg(QImage* img1);
};
#endif //XIMEAAIRBORNESYSTEM_RGBIMAGE_H

View File

@ -27,6 +27,8 @@
#include <exception>
#include <fcntl.h>
#include <sys/mman.h>
#include <cmath>
#include <vector>
#include <QObject>
#include <QDateTime>
@ -42,9 +44,12 @@
#include "MemoryPool.h"
#include <queue>
#include <QMutex>
#include <QUdpSocket>
#include "rgbImage.h"
#define PUSH_FLOW_PORT 666
//#ifdef WIN32
@ -122,6 +127,32 @@ public slots:
signals:
};
class PushFlow : public QObject
{
Q_OBJECT
public:
PushFlow();
void setParm(rgbImage * img, int width, int height, int framerateVideo);
void exitPushFlow();
void setVedioFilePath(QString path);
private:
QString m_QVedioFilePath;
bool isExitPushFlow;
rgbImage * m_rgbImage;
int m_iWidth;
int m_iHeight;
int m_iFramerateVideo;
public slots:
void encodePushFlow();
signals:
};
class XimeaImager : public QObject
{
Q_OBJECT
@ -145,6 +176,9 @@ public:
int getWindowEndBand();
double geWavelengthAtBand(int x);
static int findClosestIndex(const std::vector<double>& numbers, double target);
void getRgbBandNumber(int &redBandNumber, int &greenBandNumber, int &blueBandNumber);
void stopRecord();
int getFrameCounter();
void writeXiApiErrorCodes(QString filePath, int xiApiErrorCodes);
@ -169,6 +203,10 @@ private:
queue<int> * m_qFrameCounter;
MemoryPool<DataBuffer> * m_pool;
QThread * m_pushFlowThread;
PushFlow * m_pushFlow;
int m_iFlowSwitch;
QString m_baseFileName;
QString m_ximeaTemperatureCSVPath;
@ -207,6 +245,7 @@ signals:
void recordXimeaTemperatureSignal(QString);
void startWriteDiskSignal();
void startPushFlowSignal();
void autoExposeMaxValueOfOneFrame(int, double);
void frameRateSignal(double);

View File

@ -166,6 +166,64 @@ bool Configfile::getEffectiveWindowRoi(int &width, int &offsetx)
return true;
}
// Reads the push-flow (video streaming) settings from the config file.
// If the "push_flow_param" group is missing, it is created with default
// values, persisted to disk, and the defaults are returned via out-params.
// Out: flowSwitch     - streaming on/off (0/1)
//      rgbHeight      - rows kept in the scrolling RGB preview image
//      framerateVideo - encoded video frame rate (fps)
// Returns true on success, false on a lookup or file-I/O failure.
bool Configfile::getPushFlowParam(int &flowSwitch, int &rgbHeight, int &framerateVideo)
{
    // Single source of truth for the defaults (previously duplicated in the
    // Setting::add(...) lines and in the out-param assignments below).
    const int defaultFlowSwitch = 0;
    const int defaultRgbHeight = 720;
    const int defaultFramerate = 5;

    Setting& root = cfg.getRoot();
    if (!root.exists("push_flow_param"))
    {
        // Config group missing: add it with the defaults.
        Setting & push_flow_param = root.add("push_flow_param", Setting::TypeGroup);
        push_flow_param.add("flow_switch", Setting::TypeInt) = defaultFlowSwitch;
        push_flow_param.add("rgb_height", Setting::TypeInt) = defaultRgbHeight;
        push_flow_param.add("framerate_video", Setting::TypeInt) = defaultFramerate;
        // Persist the modified configuration to file.
        try
        {
            QList<QString> fileInfo = getFileInfo(QString::fromStdString(m_configfilePath));
            // Previously the createDir() result was silently dropped; surface
            // a warning (writeFile below throws if the path is unusable).
            if (!createDir(fileInfo[0]))
            {
                std::cerr << "Warning: could not create config directory." << std::endl;
            }
            cfg.writeFile(m_configfilePath.c_str());
            std::cout << "Config item 'push_flow_param' added." << std::endl;
            flowSwitch = defaultFlowSwitch;
            rgbHeight = defaultRgbHeight;
            framerateVideo = defaultFramerate;
            return true;
        }
        catch (const libconfig::FileIOException &fioex)
        {
            std::cerr << "I/O error while writing file." << std::endl;
            return false;
        }
    }
    else
    {
        try
        {
            const Setting &push_flow_param = root["push_flow_param"];
            // All three keys must be present with the expected int type.
            if(!(push_flow_param.lookupValue("rgb_height", rgbHeight)
                 && push_flow_param.lookupValue("framerate_video", framerateVideo)
                 && push_flow_param.lookupValue("flow_switch", flowSwitch)
                 ))
            {
                return false;
            }
        }
        catch(const SettingNotFoundException &nfex)
        {
            // Group vanished between exists() and lookup — treat as failure.
            return false;
        }
        return true;
    }
}
bool Configfile::getWindowOffsety_HeightOfSpectral(int &offsety, int &height, string spectralBinString)
{
const Setting& root = cfg.getRoot();
@ -378,6 +436,10 @@ bool Configfile::createConfigFile()
ximeadll.add("buffer_policy", Setting::TypeInt) = 0;
ximeadll.add("acq_buffer_size", Setting::TypeInt) = 400;
Setting &push_flow_param = root.add("push_flow_param", Setting::TypeGroup);
push_flow_param.add("flow_switch", Setting::TypeInt) = 1;
push_flow_param.add("rgb_height", Setting::TypeInt) = 720;
push_flow_param.add("framerate_video", Setting::TypeInt) = 5;
// Write out the new configuration.
QString output_file = "/media/nvme/300TC/config/ximea.cfg";

View File

@ -3,7 +3,7 @@
int main(int argc, char *argv[])
{
std::cout<<"ximeaAirborneSystem 版本:"<< "40." <<std::endl;
std::cout<<"ximeaAirborneSystem 版本:"<< "44." <<std::endl;
QCoreApplication a(argc, argv);
//UdpServer* x=new UdpServer();

View File

@ -4,7 +4,6 @@
#include "../Header_Files/rgbImage.h"
rgbImage::rgbImage(QWidget* pParent)
{
m_QRgbImage = nullptr;
@ -12,8 +11,9 @@ rgbImage::rgbImage(QWidget* pParent)
m_matFocusGrayImage = nullptr;
m_qimageFocusGrayImage = nullptr;
m_iRedBandNumber = 0;
m_iGreenBandNumber = 0;
m_iBlueBandNumber = 0;
}
rgbImage::~rgbImage()
@ -21,8 +21,18 @@ rgbImage::~rgbImage()
}
void rgbImage::SetRgbBandNumber(int redBandNumber, int greenBandNumber, int blueBandNumber)
{
m_iRedBandNumber = redBandNumber;
m_iGreenBandNumber = greenBandNumber;
m_iBlueBandNumber = blueBandNumber;
void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNumber)
// std::cout<<"rgbImage::SetRgbBandNumber红波段的波段号"<< redBandNumber <<std::endl;
// std::cout<<"rgbImage::SetRgbBandNumber绿波段的波段号"<< greenBandNumber <<std::endl;
// std::cout<<"rgbImage::SetRgbBandNumber蓝波段的波段号"<< blueBandNumber <<std::endl;
}
void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Width, int height)
{
using namespace cv;
@ -30,19 +40,21 @@ void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNum
{
delete m_QRgbImage;//有问题????????????????????????????????????????????????
}
//m_QRgbImage = new QImage(Sample, FrameNumber, QImage::Format_RGB888);
//m_QRgbImage = new QImage(Width, height, QImage::Format_RGB888);
if (m_matRgbImage != nullptr)
{
delete m_matRgbImage;
}
m_matRgbImage = new Mat(FrameNumber, Sample, CV_8UC3, Scalar(0, 0, 0));
m_matRgbImage = new Mat(height, Width, CV_8UC3, Scalar(0, 0, 0));
int codec = VideoWriter::fourcc('M', 'P', '4', '2'); // select desired codec (must be available at runtime)
double fps = 20.0; // framerate of the created video stream
std::string filename = "appsrc ! autovideoconvert ! filesink location=/media/nvme/delete/live.avi";//https://blog.csdn.net/ancientapesman/article/details/117324638
m_VideoWriter.open(filename, codec, fps, m_matRgbImage->size(), true);
// int codec = VideoWriter::fourcc('H', '2', '6', '4'); // select desired codec (must be available at runtime)
// double fps = 20.0;// framerate of the created video stream
// std::string filename = "appsrc ! autovideoconvert ! filesink location=/media/nvme/live.mp4";//https://blog.csdn.net/ancientapesman/article/details/117324638
//// std::string filename = "/media/nvme/live.mp4";
// auto ddddd=m_matRgbImage->size();
// m_VideoWriter.open(filename, codec, fps, Size(20, 1368), true);
// VideoWriter video("test.avi", CV_FOURCC('M', 'J', 'P', 'G'), 25.0, Size(640, 480));
@ -54,127 +66,169 @@ void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNum
if (m_qimageFocusGrayImage == nullptr)
{
m_qimageFocusGrayImage = new QImage(Sample, BandCount, QImage::Format_RGB32);
m_qimageFocusGrayImage = new QImage(Width, BandCount, QImage::Format_RGB32);
}
if (m_matFocusGrayImage == nullptr)
{
m_matFocusGrayImage = new Mat(BandCount, Sample, CV_16U, Scalar(0));
m_matFocusGrayImage = new Mat(BandCount, Width, CV_16U, Scalar(0));
//cv::Mat matAdjustPreview = Mat::zeros(BandCount, Sample, CV_16U);
//cv::Mat matAdjustPreview = Mat::zeros(BandCount, Width, CV_16U);
}
//cv::Mat matAdjustPreview = Mat::zeros(BandCount, Sample, CV_16U);
//cv::Mat matAdjustPreview = Mat::zeros(BandCount, Width, CV_16U);
//m_matFocusGrayImage = matAdjustPreview;
std::cout << "设置帧数:" << FrameNumber << std::endl;
std::cout << "高光谱rgb图像设置高度" << height << std::endl;
m_iFrameCounter = 0;//每次都重置为0
m_iSampleNumber = Sample;
m_iSampleNumber = Width;
m_iBandNumber = BandCount;
m_iFrameNumber = FrameNumber;
m_iFrameNumber = height;
//std::cout << "rgb影像内存地址为" << m_QRgbImage << std::endl;
}
// Fills row `rowNumber` of `matRgbImage` (CV_8UC3, BGR channel order) from a
// raw spectral frame. `datacube` is one band-sequential camera frame: band b,
// sample j lives at datacube[b * m_iSampleNumber + j]; values appear to be
// 12-bit in 16-bit words, hence the *255/4096 linear stretch to 8-bit.
// NOTE(review): this span looks like a merged diff — the hard-coded band
// picks (121/79/40) are immediately overwritten by the member-based picks,
// and the CV_16UC3 branch is dead for an 8-bit preview mat; confirm against
// version control before cleaning up.
void rgbImage::FillOnerowofRgbImage(cv::Mat * matRgbImage, int rowNumber, unsigned short *datacube)
{
//Approach 1: per-pixel at<>() writes (kept for reference; slower)
// unsigned short r, g, b;
// for (int j = 0; j < m_iSampleNumber; j++)
// {
// //取值一帧影像中从左到右的rgb像元值
// r = *(datacube + m_iRedBandNumber * m_iSampleNumber + j)*255/4096;
// g = *(datacube + m_iGreenBandNumber * m_iSampleNumber + j)*255/4096;
// b = *(datacube + m_iBlueBandNumber * m_iSampleNumber + j)*255/4096;
//
//// r = *(datacube + m_iRedBandNumber * m_iSampleNumber + j);
//// g = *(datacube + m_iGreenBandNumber * m_iSampleNumber + j);
//// b = *(datacube + m_iBlueBandNumber * m_iSampleNumber + j);
//
// //将像元值赋值到cv::Mat中操作像元值https://zhuanlan.zhihu.com/p/51842288
// //int dataType = m_matRgbImage->type();//当数据类型为CV_16UC3时返回18
// //std::cout << "m_matRgbImage数据类型为" << dataType << std::endl;
// if (matRgbImage->type() == CV_8UC3)
// {
//// std::cout << "操作像素值!" << std::endl;
// matRgbImage->at<cv::Vec3b>(rowNumber, j)[2] = r;
// matRgbImage->at<cv::Vec3b>(rowNumber, j)[1] = g;
// matRgbImage->at<cv::Vec3b>(rowNumber, j)[0] = b;
//
//// QString savePath_cv = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_cv.png";
//// cv::imwrite(savePath_cv.toStdString(), *matRgbImage);
// }
//
// int column = 800;
// if(j == column)
// {
// std::cout << "行:" << rowNumber << "提取:第 " << column << " 列的 r g b 分别为 " << r << " " << g << " " << b << std::endl;
// std::cout << "mat第 " << column << " 列的 r g b 分别为 " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[2] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[1] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[0] << std::endl;
// }
// }
//Approach 2: raw pointer walk over the row — faster
unsigned short r, g, b;
const int cols = matRgbImage->cols;
// interleaved BGR: advance by channel count to stay on the same channel
const int step = matRgbImage->channels();
unsigned char *p_row0_b = matRgbImage->ptr(rowNumber);
unsigned char *p_row0_g = matRgbImage->ptr(rowNumber) + 1;
unsigned char *p_row0_r = matRgbImage->ptr(rowNumber) + 2;
for (int j = 0; j < m_iSampleNumber; j++)
{
// Old hard-coded band numbers — superseded by the member-based picks below.
r = *(datacube + 121 * m_iSampleNumber + j)*255/4096;
g = *(datacube + 79 * m_iSampleNumber + j)*255/4096;
b = *(datacube + 40 * m_iSampleNumber + j)*255/4096;
// Pick the r/g/b samples left-to-right with a linear 12-bit -> 8-bit stretch.
r = *(datacube + m_iRedBandNumber * m_iSampleNumber + j)*255/4096;
g = *(datacube + m_iGreenBandNumber * m_iSampleNumber + j)*255/4096;
b = *(datacube + m_iBlueBandNumber * m_iSampleNumber + j)*255/4096;
// Writing pixel values into cv::Mat: https://zhuanlan.zhihu.com/p/51842288
//int dataType = m_matRgbImage->type();//当数据类型为CV_16UC3时返回18
//std::cout << "m_matRgbImage数据类型为" << dataType << std::endl;
// Dead for an 8-bit mat — leftover from the earlier 16-bit implementation.
if (matRgbImage->type() == CV_16UC3)
{
//std::cout << "操作像素值!" << std::endl;
matRgbImage->at<cv::Vec3w>(rowNumber, j)[2] = r;
matRgbImage->at<cv::Vec3w>(rowNumber, j)[1] = g;
matRgbImage->at<cv::Vec3w>(rowNumber, j)[0] = b;
}
*p_row0_b = b;
*p_row0_g = g;
*p_row0_r = r;
// int column = 800;
// if(j == column)
// {
// std::cout << "行:" << rowNumber << "提取:第 " << column << " 列的 r g b 分别为 " << r << " " << g << " " << b << std::endl;
//// std::cout << "修改后" << rowNumber << "提取:第 " << column << " 列的 r g b 分别为 " << (unsigned short)*p_row0_r << " " << (unsigned short)*p_row0_g << " " << (unsigned short)*p_row0_b << std::endl;
//// std::cout << "mat第 " << column << " 列的 r g b 分别为 " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[2] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[1] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[0] << std::endl;
// }
p_row0_b += step;
p_row0_g += step;
p_row0_r += step;
}
//Approach 3: bulk memory copy to extract rgb quickly (unfinished sketch)
// if (matRgbImage->isContinuous())// check mat is continuous or not
// matRgbImage->reshape(1, matRgbImage->rows * matRgbImage->cols).col(0).setTo(Scalar(value));
// else
// {
// for (int i = 0; i < matRgbImage->rows; i++)
// matRgbImage->row(i).reshape(1, matRgbImage->cols).col(0).setTo(Scalar(value));
// }
}
// Converts a cv::Mat to a QImage (deep copy).
// 3-channel mats are treated as BGR and swapped to RGB; 1-channel mats become
// Format_Indexed8; anything else is handed to QImage as tightly-packed RGB888.
// Fix: the original returned a QImage that merely WRAPPED cvImg.data. cvImg
// is a by-value local (and cvtColor reallocates its buffer), so that buffer
// could be freed on return, leaving the caller with a dangling image. We now
// deep-copy the pixel data before returning.
QImage rgbImage::Mat2QImage(cv::Mat cvImg)//https://www.cnblogs.com/annt/p/ant003.html
{
    QImage qImg;
    if (cvImg.channels() == 3)//3 channels color image
    {
        // In-place swap is safe: cvImg is this function's own copy.
        cv::cvtColor(cvImg, cvImg, CV_BGR2RGB);
        qImg = QImage((const unsigned char*)(cvImg.data),
            cvImg.cols, cvImg.rows,
            cvImg.cols*cvImg.channels(),   // bytes per line (no row padding)
            QImage::Format_RGB888);
    }
    else if (cvImg.channels() == 1)//grayscale image
    {
        qImg = QImage((const unsigned char*)(cvImg.data),
            cvImg.cols, cvImg.rows,
            cvImg.cols*cvImg.channels(),
            QImage::Format_Indexed8);
    }
    else
    {
        qImg = QImage((const unsigned char*)(cvImg.data),
            cvImg.cols, cvImg.rows,
            cvImg.cols*cvImg.channels(),
            QImage::Format_RGB888);
    }
    // Detach from cvImg's soon-to-die buffer before it goes out of scope.
    return qImg.copy();
}
// Scrolls the push-flow RGB preview image and writes the newest camera frame
// into it; also feeds m_VideoWriter and dumps a PNG snapshot per call.
// Called once per acquired spectral frame.
// NOTE(review): this body appears to be a merged diff. The brace-less
// `if (m_iFrameCounter < m_iFrameNumber)` governs the first for-loop and the
// `else` attaches to that if; the else-branch shifts COLUMNS
// (col(i) -> col(i-1)) while then filling ROWS, which looks like leftover
// deleted code. Confirm the intended version against version control.
void rgbImage::FillRgbImage(unsigned short *datacube)
{
unsigned short *r_row, *g_row, *b_row; // unused leftovers from an older path
//Shift everything down one row, starting at the second row: https://blog.csdn.net/u014686356/article/details/65937750
// m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1).copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));//经tc验证此行代码工作异常为啥不加.clone()就异常??????
// m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1).clone().copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));//此方式ximea帧率130hz1min左右就出现漏帧
if(m_iFrameCounter<m_iFrameNumber)
// cv::Mat upperPart = m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1).clone();//此方式ximea帧率130hz1min左右就出现漏帧
// upperPart.copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));
for (int i = m_matRgbImage->rows - 2; i >= 0; --i)//at 130 Hz this drops frames after ~4.5 min — fastest variant tried
{
FillOnerowofRgbImage(m_matRgbImage, m_iFrameCounter, datacube);
// std::cout << "小于:" << m_iFrameNumber << std::endl;
m_matRgbImage->row(i).copyTo(m_matRgbImage->row(i+1));
}
else
{
// std::cout << "大于:" << m_iFrameNumber << std::endl;
//Shift the first m_iFrameNumber-1 rows up one row via row assignment: https://blog.csdn.net/u014686356/article/details/65937750
// m_matRgbImage->rowRange(1, m_matRgbImage->rows).copyTo(m_matRgbImage->rowRange(0, m_matRgbImage->rows-1));
// NOTE(review): col() here (not row()) — likely a remnant of deleted code.
for (int i = 1; i < m_matRgbImage->rows; ++i)
{
// std::cout << "大于:" << i << std::endl;
m_matRgbImage->col(i).copyTo(m_matRgbImage->col(i-1));
// std::cout << "--------------" << i << std::endl;
}
FillOnerowofRgbImage(m_matRgbImage, 0, datacube);
// m_Qphoto = Mat2QImage(*m_matRgbImage);
// std::cout << "1111111111111111111111111111"<< std::endl;
//Fill row m_iFrameNumber-1 via FillOnerowofRgbImage
FillOnerowofRgbImage(m_matRgbImage, m_iFrameNumber-1, datacube);
// std::cout << "22222222222222222"<< std::endl;
// //循环给每行像素赋值
// r_row = datacube + 121 * m_iSampleNumber;
// g_row = datacube + 79 * m_iSampleNumber;
// b_row = datacube + 40 * m_iSampleNumber;
// for (int j = 0; j < m_iFrameNumber; j++)
// {
// p = m_matRgbImage.ptr<uchar>(j);
// for ( j = 0; j < nCols; ++j){
// p[j] = table[p[j]];
// }
//Save the rgb image
// if (m_iFrameCounter % m_iFramerate == 0 || m_iFrameCounter == m_iFrameNumber - 1)
// {
//// QString savePath = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_qt.jpg";
//// m_Qphoto.save(savePath);
//
// }
//Save the rgb image (throttled to once per m_iFramerate frames)
if (m_iFrameCounter % m_iFramerate == 0 || m_iFrameCounter == m_iFrameNumber - 1)
{
////保存文件
//FileOperation * fileOperation = new FileOperation();
//string directory = fileOperation->getDirectoryOfExe();
//string rgbFilePathStrech = “/media/nvme/300TC/config/” + "\\tmp_image_strech.png";//没有拉伸图片
// std::string rgbFilePathNoStrech = "/media/nvme/300TC/config/" + std::to_string(m_iFrameCounter) + "ctmp_image_no_strech.png";
//m_QRgbImage->save(QString::fromStdString(rgbFilePathNoStrech), "PNG");
// cv::imwrite(rgbFilePathNoStrech, *m_matRgbImage);
//cv::imwrite(rgbFilePathStrech, CStretch(*m_matRgbImage, 0.01));
}
// NOTE(review): writes video + a PNG EVERY frame here — heavy disk I/O;
// presumably debug-only. Verify before shipping.
m_VideoWriter.write(*m_matRgbImage);
std::string rgbFilePathNoStrech = "/media/nvme/delete/" + std::to_string(m_iFrameCounter) + "ctmp_image_no_strech.png";
cv::imwrite(rgbFilePathNoStrech, *m_matRgbImage);
}
// QString savePath_cv = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_cv.jpg";
// cv::imwrite(savePath_cv.toStdString(), *m_matRgbImage);
// }
// m_VideoWriter.write(*m_matRgbImage);
// std::string rgbFilePathNoStrech = "/media/nvme/delete/" + std::to_string(m_iFrameCounter) + "ctmp_image_no_strech.png";
// cv::imwrite(rgbFilePathNoStrech, *m_matRgbImage);
m_iFrameCounter++;
}

View File

@ -47,6 +47,12 @@ XimeaImager::XimeaImager()
writeData2DiskThread->start(QThread::HighestPriority);
connect(this, SIGNAL(startWriteDiskSignal()), writeData2Disk, SLOT(write2Disk()));
m_pushFlowThread=new QThread();
m_pushFlow = new PushFlow();
m_pushFlow->moveToThread(m_pushFlowThread);
m_pushFlowThread->start();
connect(this, SIGNAL(startPushFlowSignal()), m_pushFlow, SLOT(encodePushFlow()));
m_pool = new MemoryPool<DataBuffer>;
q = new queue<DataBuffer *>;
m_qFrameCounter = new queue<int>;
@ -90,7 +96,7 @@ void XimeaImager::openImger()
}
float gain, offset;//用于生成头文件中的波长信息
ret = m_configfile.getGainOffsetOfSpectralBin1(gain, offset);
ret = m_configfile.getGainOffset(gain, offset);
if (ret)
{
m_imager.setGainOffset(gain, offset);
@ -100,8 +106,21 @@ void XimeaImager::openImger()
ret = m_configfile.getEffectiveWindow(width, offsetx, height, offsety);
if (ret)
{
int rgbHeight;
int framerateVideo;
m_configfile.getPushFlowParam(m_iFlowSwitch, rgbHeight, framerateVideo);
std::cout <<"rgbHeight" << rgbHeight << ", framerateVideo" << framerateVideo << std::endl;
m_imager.setEffectiveWindow(offsetx, width, offsety, height);
m_rgbImage->SetRgbImageWidthAndHeight(height, width, 20);
m_rgbImage->SetRgbImageWidthAndHeight(height, width, rgbHeight);
m_pushFlow->setParm(m_rgbImage,width,rgbHeight,framerateVideo);
int redBandNumber;
int greenBandNumber;
int blueBandNumber;
getRgbBandNumber(redBandNumber, greenBandNumber, blueBandNumber);
m_rgbImage->SetRgbBandNumber(redBandNumber, greenBandNumber, blueBandNumber);
std::cout<<"height"<< height <<std::endl;
std::cout<<"width"<< width <<std::endl;
std::cout<<"每帧字节数:"<< width * height * 2 <<std::endl;
@ -487,6 +506,50 @@ double XimeaImager::geWavelengthAtBand(int x)
}
}
void XimeaImager::getRgbBandNumber(int &redBandNumber, int &greenBandNumber, int &blueBandNumber)
{
vector<double> wavelengths;
for (int i = getWindowStartBand(); i < getWindowEndBand(); i++)
{
wavelengths.push_back(geWavelengthAtBand(i));
}
//envi打开文件时的红绿蓝波长nm
int r_envi = 640;
int g_envi = 550;
int b_envi = 470;
redBandNumber = findClosestIndex(wavelengths, r_envi);
greenBandNumber = findClosestIndex(wavelengths, g_envi);
blueBandNumber = findClosestIndex(wavelengths, b_envi);
// std::cout<<"红波段的波段号:"<< redBandNumber <<std::endl;
// std::cout<<"绿波段的波段号:"<< greenBandNumber <<std::endl;
// std::cout<<"蓝波段的波段号:"<< blueBandNumber <<std::endl;
}
// Returns the index of the element of `numbers` closest (by absolute
// difference) to `target`; ties resolve to the earliest index.
// Returns -1 when `numbers` is empty.
int XimeaImager::findClosestIndex(const std::vector<double>& numbers, double target)
{
    if (numbers.empty()) {
        // No candidates — signal "not found".
        return -1;
    }
    // std::min_element returns the FIRST minimal element, which matches the
    // original strict '<' scan (earliest index wins on ties). This also
    // removes the signed int vs size_t loop-counter mix.
    const auto closest = std::min_element(
        numbers.begin(), numbers.end(),
        [target](double a, double b) {
            return std::abs(a - target) < std::abs(b - target);
        });
    return static_cast<int>(std::distance(numbers.begin(), closest));
}
int XimeaImager::getMaxValueOfOneFrame(unsigned short * data, int numberOfPixel)
{
//排序
@ -592,6 +655,13 @@ void XimeaImager::startRecord(double TimeDifferenceBetweensOSAndSbg,QString base
FILE *hFile=fopen(imageFileName.toStdString().c_str(),"w+b");
double * imageBuffer = new double[number_WriteDisk];
QString vedioFileName=m_baseFileName+".h264";
m_pushFlow->setVedioFilePath(vedioFileName);
if(m_iFlowSwitch == 1)
{
emit startPushFlowSignal();
}
m_imager.start();
struct timeval timeStart, timeEnd;
double runTime=0;
@ -606,6 +676,11 @@ void XimeaImager::startRecord(double TimeDifferenceBetweensOSAndSbg,QString base
timeDifferenceBetweenSbgAndXimea = calculateTimeDifferenceBetweenSbgAndximea(&m_imager.m_image, TimeDifferenceBetweensOSAndSbg);
}
fwrite(m_imager.m_image.bp,1,m_iFrameSizeInByte, hFile);
//构造rgb图像用于推流到m300遥控器
if(m_iFlowSwitch == 1)
{
m_rgbImage->FillRgbImage((unsigned short *)m_imager.m_image.bp);
}
indexofbuff = m_iFrameCounter % number_WriteDisk;
@ -641,6 +716,8 @@ void XimeaImager::startRecord(double TimeDifferenceBetweensOSAndSbg,QString base
writeData2Disk->exitWriteData2Disk();
writeHdr();
m_pushFlow->exitPushFlow();
delete[] sbgTimeBuffer;
double frameInTheory=runTime * getFramerate();
@ -709,41 +786,16 @@ void XimeaImager::writeHdr()
hdrFileHandle << "wavelength = {";
//hdrFileHandle << std::setprecision(5);
if (m_imager.getSpectralBin() == 1)
for (int i = getWindowStartBand(); i < getWindowEndBand(); i++)
{
for (int i = getWindowStartBand(); i < getWindowEndBand(); i++)
hdrFileHandle << geWavelengthAtBand(i);
if (i < getWindowEndBand() - 1)
hdrFileHandle << ", ";
else
{
hdrFileHandle << geWavelengthAtBand(i);
if (i < getWindowEndBand() - 1)
hdrFileHandle << ", ";
else
{
printf("头文件中写入了多少个波段:%d\n",i-getWindowStartBand()+1);//???????????????
}
printf("头文件中写入了多少个波段:%d\n",i-getWindowStartBand()+1);//???????????????
}
}
else if (m_imager.getSpectralBin() == 2)
{
int counter = 0;
for (int i = m_iOffsetyOfSpectralBin2; i < m_iOffsetyOfSpectralBin2 + m_iHeightOfSpectralBin2; i++)
{
if (i*2 + 1 > m_iOffsetyOfSpectralBin1 + m_iHeightOfSpectralBin1)
{
printf("XimeaImager::writeHdr 出现错误:窗口中,光谱 bin1 波段数小于 bin2 的 2 倍。\n");
break;
}
hdrFileHandle << (geWavelengthAtBand(i*2) + geWavelengthAtBand(i*2 + 1)) / 2;
counter++;
if (i < m_iOffsetyOfSpectralBin2 + m_iHeightOfSpectralBin2 - 1)
hdrFileHandle << ", ";
else
{
printf("头文件中写入了多少个波段:%d\n", counter);
}
}
}
hdrFileHandle << "}\n";
hdrFileHandle.close();
@ -940,10 +992,27 @@ void RecordXimeaTemperature::recordTemperature(QString filePath= nullptr)
system("/home/300tc/projects/udpClient/udpClient 127.0.0.1 2");
}
// 获取剩余硬盘空间和剩余采集时间
FILE *fp;
char buffer[128];
fp = popen("cat /sys/devices/virtual/thermal/thermal_zone1/temp", "r");
if (fp == NULL) {
perror("popen");
}
// 读取输出并处理
unsigned long long temper;
if (fgets(buffer, sizeof(buffer), fp) != NULL)
{
temper = strtoull(buffer, NULL, 10);
// printf("CPU温度: %.2f;\n", (float)temper/1000);
}
pclose(fp);
QDateTime curDateTime = QDateTime::currentDateTime();
QString currentTime = curDateTime.toString("yyyy/MM/dd hh:mm:ss");
ximeaTemperatureFile << currentTime.toStdString() << "," << temp << "\n";
ximeaTemperatureFile << currentTime.toStdString() << "," << temp << "," << (float)temper/1000 << "\n";
// std::cout<<"RecordXimeaTemperature::recordTemperature----------------:ximea Temperature is "<< temp <<std::endl;
// std::cout<<"RecordXimeaTemperature::recordTemperature----------------:ximea state is "<< m_ximeaImager->getImagerState() <<std::endl;
@ -1008,7 +1077,7 @@ void WriteData2Disk::write2Disk()
r_qtx.unlock();
//构造rgb图像用于推流到m300遥控器
// m_rgbImage->FillRgbImage(dataBuffer);
m_rgbImage->FillRgbImage(dataBuffer);
// std::cout<<"WriteData2Disk::write2Disk-----------------------正在写磁盘!" << m_pool->max_size() <<std::endl;//
@ -1044,3 +1113,213 @@ void WriteData2Disk::setParm(queue<DataBuffer *> * q, queue<int> * qFrameCounter
m_rgbImage = rgbImage;
}
// Streaming worker: lives in its own QThread (see XimeaImager ctor) and runs
// encodePushFlow() as a slot. Defaults match the config-file defaults.
// Fix: isExitPushFlow and m_rgbImage were previously left uninitialized by
// the constructor; exitPushFlow() / callers could observe indeterminate
// values before the first encodePushFlow() run.
PushFlow::PushFlow()
    : isExitPushFlow(false),
      m_rgbImage(nullptr),
      m_iWidth(1368),
      m_iHeight(720),
      m_iFramerateVideo(5)
{
}
// Supplies the shared RGB preview image and the output geometry/frame rate.
// Must be called before encodePushFlow().
void PushFlow::setParm(rgbImage * img, int width, int height, int framerateVideo)
{
    m_rgbImage = img;
    m_iWidth = width;
    m_iHeight = height;
    m_iFramerateVideo = framerateVideo;
}
// Sets the path of the raw .h264 dump file written by encodePushFlow().
void PushFlow::setVedioFilePath(QString path)
{
    m_QVedioFilePath = path;
}
// Requests a cooperative stop: the encode loop exits after its current
// iteration.
void PushFlow::exitPushFlow()
{
    isExitPushFlow = true;
}
void PushFlow::encodePushFlow()
{
// 创建输出视频的AVFormatContext
const char* outputVideoPath = "/media/nvme/delete/300tc.h264";
FILE *fp = fopen(m_QVedioFilePath.toStdString().c_str(),"w+b");
AVFormatContext* formatContext = nullptr;
avformat_alloc_output_context2(&formatContext, nullptr, "mp4", outputVideoPath);
if (!formatContext)
{
qDebug() << "Error: Failed to allocate output context";
return;
}
// 查找H.264编码器
const AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!codec)
{
qDebug() << "Error: Codec not found";
avformat_free_context(formatContext);
return;
}
// 创建输出视频流
AVStream* videoStream = avformat_new_stream(formatContext, codec);
if (!videoStream)
{
qDebug() << "Error: Failed to create video stream";
avformat_free_context(formatContext);
return;
}
// 设置视频流的参数,例如分辨率、帧率等
videoStream->codecpar->width = m_iWidth;
videoStream->codecpar->height = m_iHeight;
videoStream->codecpar->codec_id = AV_CODEC_ID_H264; // 设置为H.264编解码器
videoStream->codecpar->format = AV_PIX_FMT_YUV420P; // 设置为YUV420P像素格式
// 配置视频流的参数
AVCodecContext* codecContext = avcodec_alloc_context3(codec);
if (!codecContext)
{
qDebug() << "Error: Failed to allocate codec context";
avformat_free_context(formatContext);
return;
}
// 设置视频流的参数,例如分辨率、帧率等
codecContext->width = m_iWidth;
codecContext->height = m_iHeight;
codecContext->time_base = {1, m_iFramerateVideo};
codecContext->pix_fmt = AV_PIX_FMT_YUV420P; // 设置为YUV420P格式
codecContext->gop_size = 50;//多少帧产生一组关键帧
codecContext->max_b_frames = 0;//b帧参考帧设置为1会导致视频回退的现象
// codecContext->bit_rate = 1000000; // 设置比特率为 1000000
// 打开视频编码器
if (avcodec_open2(codecContext, codec, nullptr) < 0)
{
qDebug() << "Error: Failed to open codec";
avcodec_free_context(&codecContext);
avformat_free_context(formatContext);
return;
}
// 打开输出文件
// if (avio_open(&formatContext->pb, outputVideoPath, AVIO_FLAG_WRITE) < 0)
// {
// qDebug() << "Error: Failed to open output file";
// avcodec_close(codecContext);
// avcodec_free_context(&codecContext);
// avformat_free_context(formatContext);
// return;
// }
// 写入文件头
// avformat_write_header(formatContext, nullptr);
// 使用sws_scale进行颜色空间转换
SwsContext* swsContext = sws_getContext(m_iWidth, m_iHeight, AV_PIX_FMT_BGR24,
m_iWidth, m_iHeight, AV_PIX_FMT_YUV420P,
SWS_BICUBIC, nullptr, nullptr, nullptr);
if (!swsContext)
{
qDebug() << "Error: Failed to create sws context";
avio_closep(&formatContext->pb);
avcodec_close(codecContext);
avcodec_free_context(&codecContext);
avformat_free_context(formatContext);
return;
}
// 创建 AVFrame 作为目标图像
AVFrame* dstFrame = av_frame_alloc();
av_image_alloc(dstFrame->data, dstFrame->linesize, m_iWidth, m_iHeight, AV_PIX_FMT_YUV420P, 1);
// 设置目标图像参数
dstFrame->width = m_iWidth;
dstFrame->height = m_iHeight;
dstFrame->format = AV_PIX_FMT_YUV420P;
AVFrame* frame = av_frame_alloc();
av_image_alloc(frame->data, frame->linesize, m_iWidth, m_iHeight, AV_PIX_FMT_BGR24, 1);
QUdpSocket * m_udpSocket = new QUdpSocket();
m_udpSocket->bind(PUSH_FLOW_PORT, QUdpSocket::ShareAddress);
QHostAddress m_clientIpAddress=QHostAddress(QHostAddress::LocalHost);
// QHostAddress m_clientIpAddress("192.168.1.30");
// QHostAddress m_clientIpAddress("192.168.111.1");
int udpSendCounter=0;
int encodeCounter=0;
isExitPushFlow = false;
unsigned long sleepTime = 1/(float)m_iFramerateVideo * 1000;
std::cout<< "推流帧率: " << m_iFramerateVideo << ", sleepTime" << sleepTime << "ms." << std::endl;
while(true)
{
QThread::msleep(sleepTime);
memcpy(frame->data[0], m_rgbImage->m_matRgbImage->data, m_rgbImage->m_matRgbImage->rows * m_rgbImage->m_matRgbImage->step[0]);
// memcpy(frame->data[0], m_rgbImage->m_Qphoto.bits(), m_rgbImage->m_Qphoto.byteCount());
// 使用sws_scale进行颜色空间转换
sws_scale(swsContext, frame->data, frame->linesize, 0, m_iHeight,
dstFrame->data, dstFrame->linesize);
dstFrame->pts = encodeCounter;
// 将AVFrame编码为视频帧
AVPacket pkt;
av_init_packet(&pkt);
pkt.data = nullptr;
pkt.size = 0;
if (avcodec_send_frame(codecContext, dstFrame) == 0 &&
avcodec_receive_packet(codecContext, &pkt) == 0)
{
fwrite(pkt.data, 1, pkt.size, fp);
m_udpSocket->writeDatagram((const char *)pkt.data,pkt.size,m_clientIpAddress, PUSH_FLOW_PORT);
// std::cout << "编码第 " << udpSendCounter << " 帧数据大小: " << pkt.size << std::endl;
// std::cout<< "pkt.pts: " << pkt.pts << std::endl;
// std::cout<< "pkt.dts: " << pkt.dts << std::endl << std::endl;
udpSendCounter++;
// 将编码后的帧写入文件
// pkt.stream_index = videoStream->index;
// av_interleaved_write_frame(formatContext, &pkt);
// av_write_frame(formatContext, &pkt);
av_packet_unref(&pkt);
}
encodeCounter++;
if(isExitPushFlow)
{
std::cout<<"PushFlow::encodePushFlow-----------------------推流线程将退出!"<<std::endl;
break;
}
}
fclose(fp);
// 写入文件尾
// av_write_trailer(formatContext);
// 释放AVFrame和相关资源
av_freep(&frame->data[0]);
av_frame_free(&frame);
// 释放资源
sws_freeContext(swsContext);
av_freep(&dstFrame->data[0]);
av_frame_free(&dstFrame);
// av_packet_free(&pkt);
avcodec_close(codecContext);
avcodec_free_context(&codecContext);
avio_closep(&formatContext->pb);
avformat_free_context(formatContext);
std::cout<<"PushFlow::encodePushFlow-----------------------推流线程已经退出!" << std::endl;
}