
//
// ximeaAirborneSystem/Source_Files/rgbImage.cpp
// Created by tangchao on 2022/12/24.
//
#include "../Header_Files/rgbImage.h"
Encode::Encode()
{
// Output file for the raw H.264 elementary stream; the fopen result is not checked here.
fp = fopen("/media/nvme/delete/av.h264","wb");
index1 = 0;
}
void Encode::initffmpeg(int width, int height)
{
const AVCodec *codec;
std::cout << "initffmpeg: begin" << std::endl;
codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!codec)
{
fprintf(stderr, "Codec not found\n");
exit(1);
}
// Allocate a codec context for the selected encoder
avcodeccontext = avcodec_alloc_context3(codec);
if (!avcodeccontext) {
fprintf(stderr, "Could not allocate video codec context\n");
exit(1);
}
avcodeccontext->bit_rate = 400000;
avcodeccontext->width = width;
avcodeccontext->height = height;
/* frames per second */
// Time base: at 25 fps each tick is 1/25 s (the time base follows the frame rate)
avcodeccontext->time_base = (AVRational){1, 25};
// Frame rate
avcodeccontext->framerate = (AVRational){25, 1};
/* emit one intra frame every ten frames
* check frame pict_type before passing frame
* to encoder, if frame->pict_type is AV_PICTURE_TYPE_I
* then gop_size is ignored and the output of encoder
* will always be I frame irrespective to gop_size
*/
// How many frames per group of pictures (one intra frame every gop_size frames)
avcodeccontext->gop_size = 10;
// Maximum number of consecutive B-frames
avcodeccontext->max_b_frames = 1;
// Pixel format of the raw frames fed to the encoder
avcodeccontext->pix_fmt = AV_PIX_FMT_YUV420P;
// If the encoder is H.264, select a predefined encoder parameter set ("preset").
// "slow" trades encoding speed for quality; a faster preset would lower the quality.
if (codec->id == AV_CODEC_ID_H264)
av_opt_set(avcodeccontext->priv_data, "preset", "slow", 0);
/* open the encoder */
if (avcodec_open2(avcodeccontext, codec, NULL) < 0) {
fprintf(stderr, "Could not open codec\n");
exit(1);
}
// avcodeccontext=c;
std::cout<<"init ok";
inpic = av_frame_alloc();
outpic = av_frame_alloc();
// Only the frame descriptions (size, pixel format, stride) are set here; no pixel
// buffers are allocated and nothing is copied, so the underlying image data must be
// provided and kept alive elsewhere before these frames can be used
// (a buffer-allocation / conversion sketch follows this function).
inpic->width = width;
inpic->height = height;
inpic->format = AV_PIX_FMT_ARGB;
inpic->linesize[0] = width * 4;   // packed ARGB is 4 bytes per pixel
outpic->width = width;
outpic->height = height;
outpic->format = AV_PIX_FMT_YUV420P;
outpic->linesize[0] = width;      // Y plane stride; U/V strides (width/2) are not set here
isinit = true;
}
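// The context above expects AV_PIX_FMT_YUV420P frames, while inpic is described as
// ARGB and neither frame is given a pixel buffer here. A minimal sketch of the
// missing steps (not part of the original code), assuming libswscale is available
// and `argbData` is a hypothetical pointer to the packed ARGB source pixels:
//
// av_frame_get_buffer(outpic, 0); // allocate the Y/U/V planes and set linesizes
// SwsContext *sws = sws_getContext(width, height, AV_PIX_FMT_ARGB,
//                                  width, height, AV_PIX_FMT_YUV420P,
//                                  SWS_BILINEAR, nullptr, nullptr, nullptr);
// const uint8_t *srcSlice[1] = { argbData };
// const int srcStride[1]     = { 4 * width }; // packed ARGB: 4 bytes per pixel
// sws_scale(sws, srcSlice, srcStride, 0, height, outpic->data, outpic->linesize);
// sws_freeContext(sws);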
void Encode::savedata(AVFrame *frame)
{
AVPacket pkt;
av_init_packet(&pkt);
pkt.data = NULL; // packet data will be allocated by the encoder
pkt.size = 0;
frame->pts = index1;
AVCodecInternal *avci = avcodeccontext->internal; // unused; kept only for the commented-out checks below
// if (avci->draining)
// return AVERROR_EOF;
// if (avci->buffer_frame->data[0])
// return AVERROR(EAGAIN);
encode(avcodeccontext,frame,&pkt,fp);
av_packet_unref(&pkt);
index1++;
}
void Encode::encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, FILE *outfile)
{
int ret;
/* send the frame to the encoder; a NULL frame puts the encoder into draining mode */
// if (frame)
//     printf("Send frame %3" PRId64 "\n", frame->pts);
ret = avcodec_send_frame(enc_ctx, frame);
if (ret < 0) {
fprintf(stderr, "Error sending a frame for encoding\n");
exit(1);
}
while (ret >= 0) {
ret = avcodec_receive_packet(enc_ctx, pkt);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
return;
else if (ret < 0) {
fprintf(stderr, "Error during encoding\n");
exit(1);
}
// printf("Write packet %3\"PRId64\" (size=%5d)\n", pkt->pts, pkt->size);
fwrite(pkt->data, 1, pkt->size, outfile);
// Copy the packet into a QByteArray for the (currently disabled) senddata signal
QByteArray buf;
buf.append((char *)pkt->data, pkt->size);
// emit senddata(buf);
av_packet_unref(pkt);
}
}
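// Draining the encoder at end of recording is not shown anywhere in this file. A
// sketch of how a caller might flush, following the FFmpeg encode_video example and
// reusing the fp/avcodeccontext members above (not part of the original code):
//
// AVPacket pkt;
// av_init_packet(&pkt);
// pkt.data = NULL;
// pkt.size = 0;
// encode(avcodeccontext, NULL, &pkt, fp); // NULL frame enters draining mode
// uint8_t endcode[] = { 0, 0, 1, 0xb7 };  // MPEG end-of-sequence code
// fwrite(endcode, 1, sizeof(endcode), fp);
// fclose(fp);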
rgbImage::rgbImage(QWidget* pParent)
{
m_QRgbImage = nullptr;
m_matRgbImage = nullptr;
m_matFocusGrayImage = nullptr;
m_qimageFocusGrayImage = nullptr;
}
rgbImage::~rgbImage()
{
}
void rgbImage::SetRgbImageWidthAndHeight(int BandCount, int Sample, int FrameNumber)
{
using namespace cv;
if (m_QRgbImage != nullptr)
{
delete m_QRgbImage; // FIXME: this delete was marked as problematic in the original code
}
//m_QRgbImage = new QImage(Sample, FrameNumber, QImage::Format_RGB888);
if (m_matRgbImage != nullptr)
{
delete m_matRgbImage;
}
m_matRgbImage = new Mat(FrameNumber, Sample, CV_8UC3, Scalar(0, 0, 0));
int codec = VideoWriter::fourcc('H', '2', '6', '4'); // select desired codec (must be available at runtime)
double fps = 20.0; // frame rate of the created video stream
std::string filename = "appsrc ! autovideoconvert ! filesink location=/media/nvme/live.mp4"; // GStreamer pipeline string, see https://blog.csdn.net/ancientapesman/article/details/117324638
// std::string filename = "/media/nvme/live.mp4";
m_VideoWriter.open(filename, codec, fps, Size(20, 1368), true);
// VideoWriter video("test.avi", CV_FOURCC('M', 'J', 'P', 'G'), 25.0, Size(640, 480));
// m_frame_writer = cvCreateVideoWriter("/media/nvme/delete/live.avi", cv::VideoWriter::fourcc('M', 'J', 'P', 'G'), 20.0, Size(688, 688), false);
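// NOTE (sketch, not from the original code): cv::VideoWriter requires every frame
// passed to write() to match the Size given to open(), so the size would normally
// come from the Mat being written; with a GStreamer pipeline string the backend is
// also usually selected explicitly, e.g.:
// m_VideoWriter.open(filename, cv::CAP_GSTREAMER, 0, fps,
//                    cv::Size(m_matRgbImage->cols, m_matRgbImage->rows), true);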
if (m_qimageFocusGrayImage == nullptr)
{
m_qimageFocusGrayImage = new QImage(Sample, BandCount, QImage::Format_RGB32);
}
if (m_matFocusGrayImage == nullptr)
{
m_matFocusGrayImage = new Mat(BandCount, Sample, CV_16U, Scalar(0));
//cv::Mat matAdjustPreview = Mat::zeros(BandCount, Sample, CV_16U);
}
//cv::Mat matAdjustPreview = Mat::zeros(BandCount, Sample, CV_16U);
//m_matFocusGrayImage = matAdjustPreview;
std::cout << "设置帧数:" << FrameNumber << std::endl;
m_iFrameCounter = 0;//每次都重置为0
m_iSampleNumber = Sample;
m_iBandNumber = BandCount;
m_iFrameNumber = FrameNumber;
//std::cout << "rgb影像内存地址为" << m_QRgbImage << std::endl;
}
void rgbImage::FillOnerowofRgbImage(cv::Mat * matRgbImage, int rowNumber, unsigned short *datacube)
{
// Method 1 (commented out): modify the image pixel by pixel with cv::Mat::at
// unsigned short r, g, b;
// for (int j = 0; j < m_iSampleNumber; j++)
// {
// //Read the R/G/B band values for this sample, left to right across the frame
// r = *(datacube + 121 * m_iSampleNumber + j)*255/4096;
// g = *(datacube + 79 * m_iSampleNumber + j)*255/4096;
// b = *(datacube + 40 * m_iSampleNumber + j)*255/4096;
//
//// r = *(datacube + 121 * m_iSampleNumber + j);
//// g = *(datacube + 79 * m_iSampleNumber + j);
//// b = *(datacube + 40 * m_iSampleNumber + j);
//
// //Write the pixel values into the cv::Mat; see https://zhuanlan.zhih.com/p/51842288
// //int dataType = m_matRgbImage->type();//returns 18 when the data type is CV_16UC3
// //std::cout << "m_matRgbImage type: " << dataType << std::endl;
// if (matRgbImage->type() == CV_8UC3)
// {
//// std::cout << "writing pixel values!" << std::endl;
// matRgbImage->at<cv::Vec3b>(rowNumber, j)[2] = r;
// matRgbImage->at<cv::Vec3b>(rowNumber, j)[1] = g;
// matRgbImage->at<cv::Vec3b>(rowNumber, j)[0] = b;
//
//// QString savePath_cv = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_cv.png";
//// cv::imwrite(savePath_cv.toStdString(), *matRgbImage);
// }
//
// int column = 800;
// if(j == column)
// {
// std::cout << "row: " << rowNumber << " extracted r g b at column " << column << ": " << r << " " << g << " " << b << std::endl;
// std::cout << "mat r g b at column " << column << ": " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[2] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[1] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[0] << std::endl;
// }
// }
// Method 2 (active): walk the row with raw pointers; faster than per-pixel at<>()
unsigned short r, g, b;
const int cols = matRgbImage->cols;
const int step = matRgbImage->channels();
unsigned char *p_row0_b = matRgbImage->ptr(rowNumber);
unsigned char *p_row0_g = matRgbImage->ptr(rowNumber) + 1;
unsigned char *p_row0_r = matRgbImage->ptr(rowNumber) + 2;
for (int j = 0; j < m_iSampleNumber; j++)
{
// Read the R/G/B band values for this sample, left to right across the frame (scaled from 12-bit to 8-bit)
r = *(datacube + 121 * m_iSampleNumber + j)*255/4096;
g = *(datacube + 79 * m_iSampleNumber + j)*255/4096;
b = *(datacube + 40 * m_iSampleNumber + j)*255/4096;
*p_row0_b = b;
*p_row0_g = g;
*p_row0_r = r;
// int column = 800;
// if(j == column)
// {
// std::cout << "行:" << rowNumber << "提取:第 " << column << " 列的 r g b 分别为 " << r << " " << g << " " << b << std::endl;
//// std::cout << "修改后" << rowNumber << "提取:第 " << column << " 列的 r g b 分别为 " << (unsigned short)*p_row0_r << " " << (unsigned short)*p_row0_g << " " << (unsigned short)*p_row0_b << std::endl;
//// std::cout << "mat第 " << column << " 列的 r g b 分别为 " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[2] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[1] << " " << (unsigned short)matRgbImage->at<cv::Vec3b>(rowNumber, j)[0] << std::endl;
// }
p_row0_b += step;
p_row0_g += step;
p_row0_r += step;
}
// Method 3 (idea only): extract the RGB bands with bulk memory operations
// (see the sketch after the commented-out block below)
// if (matRgbImage->isContinuous())// check mat is continuous or not
// matRgbImage->reshape(1, matRgbImage->rows * matRgbImage->cols).col(0).setTo(Scalar(value));
// else
// {
// for (int i = 0; i < matRgbImage->rows; i++)
// matRgbImage->row(i).reshape(1, matRgbImage->cols).col(0).setTo(Scalar(value));
// }
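// One possible shape of "Method 3" (a sketch, not part of the original code),
// assuming the same band layout (121/79/40) and 12-bit value range as Method 2:
//
// cv::Mat r16(1, m_iSampleNumber, CV_16U, datacube + 121 * m_iSampleNumber);
// cv::Mat g16(1, m_iSampleNumber, CV_16U, datacube +  79 * m_iSampleNumber);
// cv::Mat b16(1, m_iSampleNumber, CV_16U, datacube +  40 * m_iSampleNumber);
// cv::Mat bgr16, bgr8;
// cv::merge(std::vector<cv::Mat>{b16, g16, r16}, bgr16); // B, G, R order as in Method 2
// bgr16.convertTo(bgr8, CV_8U, 255.0 / 4096.0);          // scale 12-bit to 8-bit
// bgr8.copyTo(matRgbImage->row(rowNumber));              // write into the target row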
}
QImage rgbImage::Mat2QImage(cv::Mat cvImg)//https://www.cnblogs.com/annt/p/ant003.html
{
QImage qImg;
if (cvImg.channels() == 3)//3 channels color image
{
cv::cvtColor(cvImg, cvImg, cv::COLOR_BGR2RGB);
qImg = QImage((const unsigned char*)(cvImg.data),
cvImg.cols, cvImg.rows,
cvImg.cols*cvImg.channels(),
QImage::Format_RGB888);
}
else if (cvImg.channels() == 1)//grayscale image
{
qImg = QImage((const unsigned char*)(cvImg.data),
cvImg.cols, cvImg.rows,
cvImg.cols*cvImg.channels(),
QImage::Format_Indexed8);
}
else
{
qImg = QImage((const unsigned char*)(cvImg.data),
cvImg.cols, cvImg.rows,
cvImg.cols*cvImg.channels(),
QImage::Format_RGB888);
}
return qImg;
}
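// NOTE (not from the original code): the QImage constructors above wrap cvImg.data
// without copying it, so the pixel buffer has to outlive the returned QImage; cvImg
// is a by-value copy but can still share its buffer with the caller's Mat, so the
// in-place cvtColor may modify the caller's data as well. A caller that keeps the
// image around would typically deep-copy it:
// QImage img = Mat2QImage(*m_matRgbImage).copy();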
void rgbImage::FillRgbImage(unsigned short *datacube)
{
// Scroll the image by one row using row assignment (copy rows 0..n-2 into rows 1..n-1),
// see https://blog.csdn.net/u014686356/article/details/65937750
// Verified by tc: the rowRange copy below does not work correctly (presumably because
// the source and destination row ranges overlap within the same Mat)
// m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1).copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));
//
// QString savePath_cv_3 = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "full0_cv.jpg";
// cv::imwrite(savePath_cv_3.toStdString(), *m_matRgbImage);
//
// cv::Mat upperPart = m_matRgbImage->rowRange(0, m_matRgbImage->rows - 1);
// QString savePath_cv_ = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "upperPart_cv.jpg";
// cv::imwrite(savePath_cv_.toStdString(), upperPart);
//
// // Copy the upper part down by one row
// upperPart.copyTo(m_matRgbImage->rowRange(1, m_matRgbImage->rows));// FIXME: flagged as questionable in the original
// QString savePath_cv_2 = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "full_cv.jpg";
// cv::imwrite(savePath_cv_2.toStdString(), *m_matRgbImage);
// Shift every row down by one, iterating from the bottom up so no row is
// overwritten before it has been copied; row 0 is then free for the newest frame
for (int i = m_matRgbImage->rows - 2; i >= 0; --i)
{
// std::cout << "greater than: " << i << std::endl;
m_matRgbImage->row(i).copyTo(m_matRgbImage->row(i+1));
}
FillOnerowofRgbImage(m_matRgbImage, 0, datacube);
// m_Qphoto = Mat2QImage(*m_matRgbImage);
//Save the RGB image to disk (debug, disabled)
// if (m_iFrameCounter % m_iFramerate == 0 || m_iFrameCounter == m_iFrameNumber - 1)
// {
//// QString savePath = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_qt.jpg";
//// m_Qphoto.save(savePath);
//
// QString savePath_cv = "/media/nvme/delete/" + QString::number(m_iFrameCounter) + "_cv.jpg";
// cv::imwrite(savePath_cv.toStdString(), *m_matRgbImage);
// }
// m_VideoWriter.write(*m_matRgbImage);
// std::string rgbFilePathNoStrech = "/media/nvme/delete/" + std::to_string(m_iFrameCounter) + "ctmp_image_no_strech.png";
// cv::imwrite(rgbFilePathNoStrech, *m_matRgbImage);
m_iFrameCounter++;
}
void rgbImage::FillFocusGrayImage(unsigned short * datacube)
{
// Copy the raw 16-bit frame (BandCount rows x Sample columns) into m_matFocusGrayImage
int rowCount = m_matFocusGrayImage->rows;
int colCount = m_matFocusGrayImage->cols;
for (unsigned short i = 0; i < m_matFocusGrayImage->rows; i++)
{
for (unsigned short j = 0; j < m_matFocusGrayImage->cols; j++)
{
//m_matFocusGrayImage->at<ushort>(i, j) = *(datacube + m_matFocusGrayImage->cols*i + j);
m_matFocusGrayImage->at<ushort>(i, j) = datacube[m_matFocusGrayImage->cols*i + j];
}
}
//int rowCount = m_matFocusGrayImage.rows;
//int colCount = m_matFocusGrayImage.cols;
////memcpy(m_matFocusGrayImage.data, datacube, rowCount*colCount);
//for (unsigned short i = 0; i < m_matFocusGrayImage.rows; i++)
//{
// for (unsigned short j = 0; j < m_matFocusGrayImage.cols; j++)
// {
// m_matFocusGrayImage.at<ushort>(i, j) = *(datacube + m_matFocusGrayImage.cols*i + j);
// //m_matFocusGrayImage.at<ushort>(i, j) = datacube[colCount*i + j];
// }
//}
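// Size-correct variant of the memcpy idea above (a sketch, not part of the original
// code): the Mat is CV_16U, so the byte count has to include sizeof(unsigned short),
// and a single copy is only valid while the Mat is continuous.
// if (m_matFocusGrayImage->isContinuous())
//     memcpy(m_matFocusGrayImage->data, datacube,
//            (size_t)m_matFocusGrayImage->rows * m_matFocusGrayImage->cols * sizeof(unsigned short));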
//Save the mat to a file (debug)
//cv::imwrite("D:/delete/2222222222/test.bmp", m_matFocusGrayImage);
}
void rgbImage::FillFocusGrayQImage(unsigned short * datacube)
{
float two_eight = pow(2.0, 8);
float two_twelve = pow(2.0, 12); // scale 12-bit sensor values down to the 8-bit display range
int width = m_qimageFocusGrayImage->width();
int height = m_qimageFocusGrayImage->height();
for (unsigned short i = 0; i < height; i++)
{
for (unsigned short j = 0; j < width; j++)
{
//uint tmp = (two_eight* *(datacube + width * i + j)) / two_twelve;
uint tmp = (two_eight * datacube[width*i + j]) / two_twelve;
//uint tmp = datacube[width*i + j];
//m_qimageFocusGrayImage->setPixel(j, i, tmp);
m_qimageFocusGrayImage->setPixel(j, i, qRgb((unsigned char)tmp, (unsigned char)tmp, (unsigned char)tmp));
}
}
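// A faster variant of the fill above (a sketch, not part of the original code):
// write whole scanlines instead of calling setPixel per pixel, assuming the QImage
// keeps the Format_RGB32 layout it is allocated with in SetRgbImageWidthAndHeight.
// for (int i = 0; i < height; ++i)
// {
//     QRgb *line = reinterpret_cast<QRgb *>(m_qimageFocusGrayImage->scanLine(i));
//     for (int j = 0; j < width; ++j)
//     {
//         unsigned char v = static_cast<unsigned char>(datacube[width * i + j] >> 4); // 12-bit -> 8-bit
//         line[j] = qRgb(v, v, v);
//     }
// }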
m_qimageFocusGrayImage->save("D:/delete/2222222222/test.bmp"); // debug dump (note: Windows-style path)
/*float two_eight = pow(2.0, 8);
float two_sixteen = pow(2.0, 16);
QImage *qi = new QImage(imwidth, imheight, QImage::Format_RGB32);
for (int i = 0; i < imheight; i++)
{
for (int j = 0; j < imwidth; j++)
{
floatData[i*imwidth + j] = (two_eight* floatData[i*imwidth + j]) / two_sixteen;
qi->setPixel(j, i, qRgb((unsigned char)floatData[i*imwidth + j], (unsigned char)floatData[i*imwidth + j], (unsigned char)floatData[i*imwidth + j]));
}
}*/
}