Streaming: after porting the code it runs successfully, but the remote controller cannot display the camera feed.
samples/sample_c/module_sample/camera_emu/ffmpeg_tc.c (new file, 176 lines)
@@ -0,0 +1,176 @@
//
// Created by tangchao on 2021/11/16.
//

#include <time.h>   // needed for time()/localtime()/strftime()/clock()
#include "ffmpeg_tc.h"

char *getsystemtime()
{
    // get the system time
    time_t timer;                 // time_t is a long int
    struct tm *tblock;
    timer = time(NULL);           // seconds since 1970-01-01 00:00:00; can be printed as an integer or passed to other functions
    tblock = localtime(&timer);
    //printf("Local time is: %s\n", asctime(tblock));

    // format the time as needed
    static char fileNameTmp[256] = { 0 };   // static: a stack buffer must not be returned from this function
    char dirNameTmp[256] = { 0 };
    strftime(fileNameTmp, sizeof(fileNameTmp), "%Y%m%d_%H%M%S", tblock);
    return fileNameTmp;
}

AVFrame *get_video_frame(IntputDev *input)  // tc change
{
    clock_t start, finish;
    start = clock();    // start clock

    int ret;
    AVFrame *ret_frame = NULL;

    if (av_read_frame(input->v_ifmtCtx, input->in_packet) >= 0) {
        if (input->in_packet->stream_index == input->videoindex) {
            ret = avcodec_send_packet(input->pCodecCtx, input->in_packet);
            if (ret < 0) {
                fprintf(stderr, "Error sending a packet for decoding\n");
                exit(1);
            }

            ret = avcodec_receive_frame(input->pCodecCtx, input->pFrame);
            if (ret < 0) {
                printf("Decode Error.\n");
                av_packet_unref(input->in_packet);
                return NULL;
            }
            sws_scale(input->img_convert_ctx, (const unsigned char *const *) input->pFrame->data,
                      input->pFrame->linesize, 0, input->pCodecCtx->height,
                      input->pFrameYUV->data, input->pFrameYUV->linesize);
            input->pFrameYUV->pts = input->next_pts++;
            ret_frame = input->pFrameYUV;
        }
        av_packet_unref(input->in_packet);
    }

    finish = clock();   // stop clock
    double duration = (double) (finish - start) / CLOCKS_PER_SEC;  // convert to seconds
    // printf("time to capture one video frame: %lf seconds\n", duration);

    return ret_frame;
}

//
//static void encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt)
//{
//    int ret;
//
//    /* send the frame to the encoder */
//    if (frame)
//        printf("Send frame %3"PRId64"\n", frame->pts);
//
//    ret = avcodec_send_frame(enc_ctx, frame); // returned -21?
//    if (ret < 0) {
//        fprintf(stderr, "Error sending a frame for encoding\n");
//        exit(1);
//    }
//
//    while (ret >= 0) {
//        ret = avcodec_receive_packet(enc_ctx, pkt);
//        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
//            return;
//        else if (ret < 0) {
//            fprintf(stderr, "Error during encoding\n");
//            exit(1);
//        }
//
////        printf("Write packet %3"PRId64" (size=%5d)\n", pkt->pts, pkt->size);
////        fwrite(pkt->data, 1, pkt->size, outfile);
//        av_packet_unref(pkt);
//    }
//}

void encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, char **data, int *datasize)
{
    int ret;

    /* send the frame to the encoder */
    // if (frame)
    //     printf("Send frame %3"PRId64"\n", frame->pts);

    ret = avcodec_send_frame(enc_ctx, frame); // returned -21?
    if (ret < 0) {
        fprintf(stderr, "Error sending a frame for encoding\n");
        exit(1);
    }

    while (ret >= 0) {
        ret = avcodec_receive_packet(enc_ctx, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            printf("11111111111111111111\n");   // no packet available yet
            return;
        } else if (ret < 0) {
            fprintf(stderr, "Error during encoding\n");
            exit(1);
        }

        // printf("Write packet %3"PRId64" (size=%5d)\n", pkt->pts, pkt->size);
        // fwrite(pkt->data, 1, pkt->size, outfile);

        //tc
        char *dataBuffer = calloc(pkt->size, 1);
        memcpy(dataBuffer, pkt->data, pkt->size);

        *data = dataBuffer;
        *datasize = pkt->size;

        av_packet_unref(pkt);
    }
    printf("22222222222222222\n");
}

char *encode2(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, int *datasize)
{
    clock_t start, finish;
    start = clock();    // start clock

    int ret;
    char *dataBuffer;

    ret = avcodec_send_frame(enc_ctx, frame);
    if (ret < 0) {
        fprintf(stderr, "Error sending a frame for encoding\n");
        exit(1);
    }

    while (ret >= 0) {
        ret = avcodec_receive_packet(enc_ctx, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            printf("11111111111111111111\n");   // no packet available yet
            return NULL;
        } else if (ret < 0) {
            fprintf(stderr, "Error during encoding\n");
            exit(1);
        }

        // printf("Write packet %3"PRId64" (size=%5d)\n", pkt->pts, pkt->size);
        // fwrite(pkt->data, 1, pkt->size, outfile);

        //tc
        dataBuffer = calloc(pkt->size, 1);
        memcpy(dataBuffer, pkt->data, pkt->size);

        *datasize = pkt->size;

        av_packet_unref(pkt);

        finish = clock();   // stop clock
        double duration = (double) (finish - start) / CLOCKS_PER_SEC;  // convert to seconds
        // printf("time to encode one video frame: %lf seconds\n", duration);

        return dataBuffer;
    }

    return NULL;    // not reached in practice, silences -Wreturn-type
}
samples/sample_c/module_sample/camera_emu/ffmpeg_tc.h (new file, 63 lines)
@@ -0,0 +1,63 @@
//
// Created by tangchao on 2021/11/16.
//

#ifndef PSDK_DEMO_FFMPEG_TC_H
#define PSDK_DEMO_FFMPEG_TC_H

//tc begin
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>

#include <libavutil/avassert.h>
#include <libavutil/channel_layout.h>
#include <libavutil/opt.h>
#include <libavutil/mathematics.h>
#include <libavutil/timestamp.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include "libavdevice/avdevice.h"

#include <libavutil/imgutils.h>

#include <unistd.h> //usleep

#define STREAM_DURATION 50.0               /* duration of the recorded video, in seconds */
#define STREAM_FRAME_RATE 15               /* images/s; can be set to match the camera capture rate */
#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P  /* default pix_fmt */
#define SCALE_FLAGS SWS_BICUBIC
//tc end

// video width and height
int video_width;
int video_height;

typedef struct IntputDev
{
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;
    AVFormatContext *v_ifmtCtx;
    int videoindex;
    struct SwsContext *img_convert_ctx;
    AVPacket *in_packet;
    AVFrame *pFrame, *pFrameYUV;

    /* pts of the next frame */
    int64_t next_pts;
} IntputDev;

char *getsystemtime();
AVFrame *get_video_frame(IntputDev *input);
//static void encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt);
void encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, char **data, int *datasize);
char *encode2(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, int *datasize);

#endif //PSDK_DEMO_FFMPEG_TC_H
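The two capture/encode helpers declared above are consumed by the new send-video task added further down in this commit. As orientation only, a minimal sketch of the intended call sequence (grab one YUV frame from the camera, encode it with libx264, hand the bytes to the caller, free them) could look like this; video_input and c stand for the IntputDev instance and encoder context that the task sets up, and error handling is reduced to a skip:

    AVPacket *pkt = av_packet_alloc();
    for (;;) {
        AVFrame *yuv = get_video_frame(&video_input);   // decode one camera packet and convert it to YUV420P
        if (yuv == NULL)
            continue;                                   // nothing decoded this round

        int datasize = 0;
        char *data = encode2(c, yuv, pkt, &datasize);   // one encoded H.264 frame, heap-allocated
        if (data == NULL)
            continue;                                   // encoder still buffering (EAGAIN)

        // ... send datasize bytes from data, e.g. via DjiPayloadCamera_SendVideoStream ...
        free(data);                                     // encode2 allocates the buffer with calloc
    }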
@@ -92,6 +92,8 @@ static T_DjiReturnCode DjiPlayback_GetFrameRateOfVideoFile(const char *path, flo
static T_DjiReturnCode
DjiPlayback_GetFrameNumberByTime(T_TestPayloadCameraVideoFrameInfo *frameInfo, uint32_t frameCount,
                                 uint32_t *frameNumber, uint32_t timeMs);

//callbacks used in s_psdkCameraMedia: download/playback features of the camera-class payload
static T_DjiReturnCode GetMediaFileDir(char *dirPath);
static T_DjiReturnCode GetMediaFileOriginData(const char *filePath, uint32_t offset, uint32_t length,
                                              uint8_t *data);

@@ -120,12 +122,13 @@ static T_DjiReturnCode StartDownloadNotification(void);
static T_DjiReturnCode StopDownloadNotification(void);

_Noreturn static void *UserCameraMedia_SendVideoTask(void *arg);
static void *UserCameraMedia_SendVideoTask_tc(void *arg);

/* Private variables -------------------------------------------------------------*/
static T_DjiCameraMediaDownloadPlaybackHandler s_psdkCameraMedia = {0};
static T_DjiPlaybackInfo s_playbackInfo = {0};
static T_DjiCameraMediaDownloadPlaybackHandler s_psdkCameraMedia = {0};//drives the media file download/playback features of the camera-class payload
static T_DjiPlaybackInfo s_playbackInfo = {0};//very important variable
static T_DjiTaskHandle s_userSendVideoThread;
static T_UtilBuffer s_mediaPlayCommandBufferHandler = {0};
static T_UtilBuffer s_mediaPlayCommandBufferHandler = {0};//very important: user-defined code reads the commands queued in this buffer and acts on them
static T_DjiMutexHandle s_mediaPlayCommandBufferMutex = {0};
static uint8_t s_mediaPlayCommandBuffer[sizeof(T_TestPayloadCameraPlaybackCommand) * 32] = {0};
static const char *s_frameKeyChar = "[PACKET]";
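The comments added to s_mediaPlayCommandBufferHandler describe a producer/consumer hand-off: the playback callbacks queue a T_TestPayloadCameraPlaybackCommand under s_mediaPlayCommandBufferMutex, and the send-video task dequeues and executes it. A condensed sketch of that pattern, using only the calls that appear in this diff:

    /* producer side (e.g. StartMediaPlayback / DjiPlayback_PausePlay): queue one command */
    osalHandler->MutexLock(s_mediaPlayCommandBufferMutex);
    if (UtilBuffer_GetUnusedSize(&s_mediaPlayCommandBufferHandler) >= sizeof(T_TestPayloadCameraPlaybackCommand))
        UtilBuffer_Put(&s_mediaPlayCommandBufferHandler, (const uint8_t *) &playbackCommand,
                       sizeof(T_TestPayloadCameraPlaybackCommand));
    osalHandler->MutexUnlock(s_mediaPlayCommandBufferMutex);

    /* consumer side (send-video task): dequeue one command, if a complete one is present */
    osalHandler->MutexLock(s_mediaPlayCommandBufferMutex);
    bufferReadSize = UtilBuffer_Get(&s_mediaPlayCommandBufferHandler, (uint8_t *) &playbackCommand,
                                    sizeof(T_TestPayloadCameraPlaybackCommand));
    osalHandler->MutexUnlock(s_mediaPlayCommandBufferMutex);
    if (bufferReadSize == sizeof(T_TestPayloadCameraPlaybackCommand)) {
        /* switch (playbackCommand.command) { ... } */
    }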
@@ -152,30 +155,36 @@ T_DjiReturnCode DjiTest_CameraEmuMediaStartService(void)
        return DJI_ERROR_SYSTEM_MODULE_CODE_UNKNOWN;
    }

    //implement getting media file information
    s_psdkCameraMedia.GetMediaFileDir = GetMediaFileDir;
    s_psdkCameraMedia.GetMediaFileOriginInfo = DjiTest_CameraMediaGetFileInfo;
    s_psdkCameraMedia.GetMediaFileOriginData = GetMediaFileOriginData;
    s_psdkCameraMedia.GetMediaFileOriginInfo = DjiTest_CameraMediaGetFileInfo;//very important: returns the video file info (type, frame rate, resolution, ...)
    s_psdkCameraMedia.GetMediaFileOriginData = GetMediaFileOriginData;//returns the video file data

    //implement getting media file thumbnails
    s_psdkCameraMedia.CreateMediaFileThumbNail = CreateMediaFileThumbNail;
    s_psdkCameraMedia.GetMediaFileThumbNailInfo = GetMediaFileThumbNailInfo;
    s_psdkCameraMedia.GetMediaFileThumbNailData = GetMediaFileThumbNailData;
    s_psdkCameraMedia.DestroyMediaFileThumbNail = DestroyMediaFileThumbNail;

    //implement getting media file screennails
    s_psdkCameraMedia.CreateMediaFileScreenNail = CreateMediaFileScreenNail;
    s_psdkCameraMedia.GetMediaFileScreenNailInfo = GetMediaFileScreenNailInfo;
    s_psdkCameraMedia.GetMediaFileScreenNailData = GetMediaFileScreenNailData;
    s_psdkCameraMedia.DestroyMediaFileScreenNail = DestroyMediaFileScreenNail;

    //implement deleting media files
    s_psdkCameraMedia.DeleteMediaFile = DeleteMediaFile;

    //implement media file playback control
    s_psdkCameraMedia.SetMediaPlaybackFile = SetMediaPlaybackFile;

    s_psdkCameraMedia.StartMediaPlayback = StartMediaPlayback;
    s_psdkCameraMedia.StopMediaPlayback = StopMediaPlayback;
    s_psdkCameraMedia.PauseMediaPlayback = PauseMediaPlayback;
    s_psdkCameraMedia.StartMediaPlayback = StartMediaPlayback;//mainly writes the command into s_mediaPlayCommandBufferHandler
    s_psdkCameraMedia.StopMediaPlayback = StopMediaPlayback;//mainly writes the command into s_mediaPlayCommandBufferHandler
    s_psdkCameraMedia.PauseMediaPlayback = PauseMediaPlayback;//mainly writes the command into s_mediaPlayCommandBufferHandler
    s_psdkCameraMedia.SeekMediaPlayback = SeekMediaPlayback;
    s_psdkCameraMedia.GetMediaPlaybackStatus = GetMediaPlaybackStatus;

    //implement downloading media files
    s_psdkCameraMedia.StartDownloadNotification = StartDownloadNotification;
    s_psdkCameraMedia.StopDownloadNotification = StopDownloadNotification;

@@ -186,6 +195,7 @@ T_DjiReturnCode DjiTest_CameraEmuMediaStartService(void)

    UtilBuffer_Init(&s_mediaPlayCommandBufferHandler, s_mediaPlayCommandBuffer, sizeof(s_mediaPlayCommandBuffer));

    //register the download/playback handler of the camera-class payload
    if (aircraftInfoBaseInfo.aircraftType == DJI_AIRCRAFT_TYPE_M300_RTK ||
        aircraftInfoBaseInfo.aircraftType == DJI_AIRCRAFT_TYPE_M350_RTK) {
        returnCode = DjiPayloadCamera_RegMediaDownloadPlaybackHandler(&s_psdkCameraMedia);

@@ -201,14 +211,15 @@ T_DjiReturnCode DjiTest_CameraEmuMediaStartService(void)
        return DJI_ERROR_SYSTEM_MODULE_CODE_UNKNOWN;
    }

    if (DjiPlatform_GetHalNetworkHandler() != NULL || DjiPlatform_GetHalUsbBulkHandler() != NULL) {
        returnCode = osalHandler->TaskCreate("user_camera_media_task", UserCameraMedia_SendVideoTask, 2048,
                                             NULL, &s_userSendVideoThread);
        if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_ERROR("user send video task create error.");
            return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
        }
    }
    //create a thread that runs the user-defined function
//    if (DjiPlatform_GetHalNetworkHandler() != NULL || DjiPlatform_GetHalUsbBulkHandler() != NULL) {
//        returnCode = osalHandler->TaskCreate("user_camera_media_task", UserCameraMedia_SendVideoTask_tc, 2048,
//                                             NULL, &s_userSendVideoThread);
//        if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
//            USER_LOG_ERROR("user send video task create error.");
//            return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
//        }
//    }

    return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
@@ -227,7 +238,7 @@ T_DjiReturnCode DjiTest_CameraMediaGetFileInfo(const char *filePath, T_DjiCamera
    T_DjiReturnCode returnCode;
    T_DjiMediaFileHandle mediaFileHandle;

    returnCode = DjiMediaFile_CreateHandle(filePath, &mediaFileHandle);
    returnCode = DjiMediaFile_CreateHandle(filePath, &mediaFileHandle);//mediaFileHandle is the key: it registers the functions that process the media file
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_ERROR("Media file create handle error stat:0x%08llX", returnCode);
        return returnCode;

@@ -252,7 +263,7 @@ T_DjiReturnCode DjiTest_CameraMediaGetFileInfo(const char *filePath, T_DjiCamera
    }

out:
    returnCode = DjiMediaFile_DestroyHandle(mediaFileHandle);
    returnCode = DjiMediaFile_DestroyHandle(mediaFileHandle);//release the memory
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_ERROR("Media file destroy handle error stat:0x%08llX", returnCode);
        return returnCode;

@@ -291,6 +302,7 @@ static T_DjiReturnCode DjiPlayback_PausePlay(T_DjiPlaybackInfo *playbackInfo)
        return DJI_ERROR_SYSTEM_MODULE_CODE_UNKNOWN;
    }

    //is there room left for at least one command?
    if (UtilBuffer_GetUnusedSize(&s_mediaPlayCommandBufferHandler) >= sizeof(T_TestPayloadCameraPlaybackCommand)) {
        UtilBuffer_Put(&s_mediaPlayCommandBufferHandler, (const uint8_t *) &playbackCommand,
                       sizeof(T_TestPayloadCameraPlaybackCommand));
@@ -336,12 +348,14 @@ static T_DjiReturnCode DjiPlayback_SeekPlay(T_DjiPlaybackInfo *playbackInfo, uin
    T_DjiRunTimeStamps ti;
    T_DjiReturnCode returnCode;

    //pause playback first
    returnCode = DjiPlayback_PausePlay(playbackInfo);
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_ERROR("pause play error \n");
        return returnCode;
    }

    //write the requested playback position into playbackInfo
    playbackInfo->playPosMs = seekPos;
    returnCode = DjiPlayback_StartPlayProcess(playbackInfo->filePath, playbackInfo->playPosMs);
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {

@@ -545,6 +559,7 @@ static T_DjiReturnCode DjiPlayback_VideoFileTranscode(const char *inPath, const
    snprintf(ffmpegCmdStr, FFMPEG_CMD_BUF_SIZE,
             "echo \"y\" | ffmpeg -i \"%s\" -codec copy -f \"%s\" \"%s\" 1>/dev/null 2>&1", inPath,
             outFormat, outPath);
    // printf(ffmpegCmdStr);
    fpCommand = popen(ffmpegCmdStr, "r");
    if (fpCommand == NULL) {
        USER_LOG_ERROR("execute transcode command fail.");

@@ -620,6 +635,7 @@ DjiPlayback_GetFrameInfoOfVideoFile(const char *path, T_TestPayloadCameraVideoFr
        goto out1;
    }

    //%% stands for a literal '%' so it is not parsed as a format specifier
    ret = snprintf(frameParameterFormat, sizeof(frameParameterFormat), "%s=%%f", s_frameDurationTimeKeyChar);
    if (ret < 0) {
        USER_LOG_ERROR("snprintf frameParameterFormat fail.");

@@ -627,6 +643,7 @@ DjiPlayback_GetFrameInfoOfVideoFile(const char *path, T_TestPayloadCameraVideoFr
        goto out1;
    }

    //read the duration time from frameDurationTimeLocation into frameDurationTimeS
    ret = sscanf(frameDurationTimeLocation, frameParameterFormat, &frameDurationTimeS);
    if (ret <= 0) {
        USER_LOG_ERROR("can not find pkt_duration_time.");

@@ -681,7 +698,7 @@ DjiPlayback_GetFrameInfoOfVideoFile(const char *path, T_TestPayloadCameraVideoFr
        }
        frameInfo[frameNumber].size = frameSize;

        frameLocation += strlen(s_frameKeyChar);
        frameLocation += strlen(s_frameKeyChar);//skip past the frame that was just found
        frameNumber++;
        (*frameCount)++;

@@ -1020,6 +1037,7 @@ static T_DjiReturnCode SetMediaPlaybackFile(const char *filePath)
        return returnCode;
    }

    //after the playback file has been set, start playing it back
    returnCode = DjiPlayback_StartPlay(&s_playbackInfo);
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        return returnCode;

@@ -1033,7 +1051,7 @@ static T_DjiReturnCode StartMediaPlayback(void)
    T_DjiReturnCode returnCode;

    USER_LOG_INFO("start media playback");
    returnCode = DjiPlayback_StartPlay(&s_playbackInfo);
    returnCode = DjiPlayback_StartPlay(&s_playbackInfo);//where is s_playbackInfo set?
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_ERROR("start media playback status error, stat:0x%08llX", returnCode);
        return returnCode;

@@ -1149,6 +1167,7 @@ static T_DjiReturnCode StopDownloadNotification(void)
#pragma GCC diagnostic ignored "-Wreturn-type"
#endif

//https://developer.dji.com/doc/payload-sdk-tutorial/cn/function-set/advanced-function/camera-video-stream-transmission.html
static void *UserCameraMedia_SendVideoTask(void *arg)
{
    int ret;
@@ -1159,12 +1178,12 @@ static void *UserCameraMedia_SendVideoTask(void *arg)
    uint16_t lengthOfDataToBeSent = 0;
    int lengthOfDataHaveBeenSent = 0;
    char *dataBuffer = NULL;
    T_TestPayloadCameraPlaybackCommand playbackCommand = {0};
    T_TestPayloadCameraPlaybackCommand playbackCommand = {0};//???
    uint16_t bufferReadSize = 0;
    char *videoFilePath = NULL;
    char *transcodedFilePath = NULL;
    char *transcodedFilePath = NULL;//path of the transcoded video file
    float frameRate = 1.0f;
    T_TestPayloadCameraVideoFrameInfo *frameInfo = NULL;
    T_TestPayloadCameraVideoFrameInfo *frameInfo = NULL;//duration, size and file offset of each frame
    uint32_t frameNumber = 0;
    uint32_t frameCount = 0;
    uint32_t startTimeMs = 0;

@@ -1180,6 +1199,7 @@ static void *UserCameraMedia_SendVideoTask(void *arg)

    USER_UTIL_UNUSED(arg);

    //get video stream file information (1)
    returnCode = DjiUserUtil_GetCurrentFileDirPath(__FILE__, DJI_FILE_PATH_SIZE_MAX, curFileDirPath);
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_ERROR("Get file current path error, stat = 0x%08llX", returnCode);

@@ -1191,6 +1211,7 @@ static void *UserCameraMedia_SendVideoTask(void *arg)
        snprintf(tempPath, DJI_FILE_PATH_SIZE_MAX, "%smedia_file/PSDK_0005.h264", curFileDirPath);
    }

    //after creating the video stream thread, a camera payload developed with PSDK must first initialize the thread state and request memory for buffering the video stream file
    videoFilePath = osalHandler->Malloc(DJI_FILE_PATH_SIZE_MAX);
    if (videoFilePath == NULL) {
        USER_LOG_ERROR("malloc memory for video file path fail.");

@@ -1210,6 +1231,7 @@ static void *UserCameraMedia_SendVideoTask(void *arg)
    }
    memset(frameInfo, 0, VIDEO_FRAME_MAX_COUNT * sizeof(T_TestPayloadCameraVideoFrameInfo));

    //???
    returnCode = DjiPlayback_StopPlayProcess();
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_ERROR("stop playback and start liveview error: 0x%08llX.", returnCode);

@@ -1225,6 +1247,7 @@ static void *UserCameraMedia_SendVideoTask(void *arg)
            continue;
        }

        //copy the command from s_mediaPlayCommandBufferHandler into playbackCommand
        bufferReadSize = UtilBuffer_Get(&s_mediaPlayCommandBufferHandler, (uint8_t *) &playbackCommand,
                                        sizeof(T_TestPayloadCameraPlaybackCommand));

@@ -1233,10 +1256,10 @@ static void *UserCameraMedia_SendVideoTask(void *arg)
            continue;
        }

        if (bufferReadSize != sizeof(T_TestPayloadCameraPlaybackCommand))
        if (bufferReadSize != sizeof(T_TestPayloadCameraPlaybackCommand))//???
            goto send;

        switch (playbackCommand.command) {
        switch (playbackCommand.command) {//test: what command type arrives when nothing is done after power-on?
            case TEST_PAYLOAD_CAMERA_MEDIA_PLAY_COMMAND_STOP:
                snprintf(videoFilePath, DJI_FILE_PATH_SIZE_MAX, "%s", tempPath);
                startTimeMs = 0;

@@ -1259,6 +1282,9 @@ static void *UserCameraMedia_SendVideoTask(void *arg)
        }

        // video send preprocess
        //get video stream file information (2)
        //get the information of the H.264 file
        //???
        returnCode = DjiPlayback_VideoFileTranscode(videoFilePath, "h264", transcodedFilePath,
                                                    DJI_FILE_PATH_SIZE_MAX);
        if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {

@@ -1272,6 +1298,7 @@ static void *UserCameraMedia_SendVideoTask(void *arg)
            continue;
        }

        //???
        returnCode = DjiPlayback_GetFrameInfoOfVideoFile(transcodedFilePath, frameInfo, VIDEO_FRAME_MAX_COUNT,
                                                         &frameCount);
        if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {

@@ -1280,7 +1307,7 @@ static void *UserCameraMedia_SendVideoTask(void *arg)
        }

        returnCode = DjiPlayback_GetFrameNumberByTime(frameInfo, frameCount, &frameNumber,
                                                      startTimeMs);
                                                      startTimeMs);//get the number of the frame that contains the given time
        if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_ERROR("get start frame number error: 0x%08llX.", returnCode);
            continue;

@@ -1289,15 +1316,15 @@ static void *UserCameraMedia_SendVideoTask(void *arg)
        if (fpFile != NULL)
            fclose(fpFile);

        fpFile = fopen(transcodedFilePath, "rb+");
        fpFile = fopen(transcodedFilePath, "rb+");//open the transcoded file
        if (fpFile == NULL) {
            USER_LOG_ERROR("open video file fail.");
            USER_LOG_ERROR("open video file fail.111111111111111111111111");
            continue;
        }

send:
        if (fpFile == NULL) {
            USER_LOG_ERROR("open video file fail.");
            USER_LOG_ERROR("open video file fail.222222222222222222222222222");
            continue;
        }

@@ -1314,14 +1341,19 @@ send:
            continue;
        }

        //???
        if (mode == DJI_CAMERA_MODE_PLAYBACK && s_playbackInfo.isInPlayProcess == false) {
            continue;
        }

        //???
        if (!USER_UTIL_IS_WORK_TURN(sendVideoStep++, frameRate, SEND_VIDEO_TASK_FREQ))
            continue;

        frameBufSize = frameInfo[frameNumber].size;

        //3. parse the video stream file
        //after obtaining the video stream file information, a camera payload developed with PSDK parses the file content and identifies the frame headers
        frameBufSize = frameInfo[frameNumber].size;//each frame has a different size
        if (videoStreamType == DJI_CAMERA_VIDEO_STREAM_TYPE_H264_DJI_FORMAT) {
            frameBufSize = frameBufSize + VIDEO_FRAME_AUD_LEN;
        }

@@ -1345,11 +1377,21 @@ send:
            USER_LOG_DEBUG("read data from video file success, len = %d B\r\n", dataLength);
        }

        //4. send the video stream data
        //after parsing the video stream file and identifying the frame headers, the payload calls the stream sending interface PsdkPayloadCamera_SendVideoStream
        //and sends the data frame by frame; for the DJI H264 format, AUD info must be appended to the end of each frame to mark where the frame ends
        if (videoStreamType == DJI_CAMERA_VIDEO_STREAM_TYPE_H264_DJI_FORMAT) {
            memcpy(&dataBuffer[frameInfo[frameNumber].size], s_frameAudInfo, VIDEO_FRAME_AUD_LEN);
            dataLength = dataLength + VIDEO_FRAME_AUD_LEN;
        }

//        for(int i=0;i<dataLength - VIDEO_FRAME_AUD_LEN;i++)
//        {
////            printf("%c",data[i]);
//            printf("%02X ",dataBuffer[i]);
//        }
//        printf("\n");

        lengthOfDataHaveBeenSent = 0;
        while (dataLength - lengthOfDataHaveBeenSent) {
            lengthOfDataToBeSent = USER_UTIL_MIN(DATA_SEND_FROM_VIDEO_STREAM_MAX_LEN,

@@ -1370,6 +1412,8 @@ send:
            sendVideoFlag = false;
        }

        //5. query the video stream state
        //a camera payload developed with PSDK can query the real-time state of video stream transmission, which helps the user tune the stream bitrate and keep the transmitted picture stable
        returnCode = DjiPayloadCamera_GetVideoStreamState(&videoStreamState);
        if (returnCode == DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_DEBUG(
@@ -1386,6 +1430,457 @@ free:
    }
}

static void *UserCameraMedia_SendVideoTask_tc(void *arg)
{
    int ret;
    T_DjiReturnCode returnCode;
    static uint32_t sendVideoStep = 0;
    FILE *fpFile = NULL;
    unsigned long dataLength = 0;
    uint16_t lengthOfDataToBeSent = 0;
    int lengthOfDataHaveBeenSent = 0;
    char *dataBuffer = NULL;
    T_TestPayloadCameraPlaybackCommand playbackCommand = {0};//???
    uint16_t bufferReadSize = 0;
    char *videoFilePath = NULL;
    char *transcodedFilePath = NULL;//path of the transcoded video file
    float frameRate = 1.0f;
    T_TestPayloadCameraVideoFrameInfo *frameInfo = NULL;//duration, size and file offset of each frame
    uint32_t frameNumber = 0;
    uint32_t frameCount = 0;
    uint32_t startTimeMs = 0;
    bool sendVideoFlag = true;
    bool sendOneTimeFlag = false;
    T_DjiDataChannelState videoStreamState = {0};
    E_DjiCameraMode mode = DJI_CAMERA_MODE_SHOOT_PHOTO;
    T_DjiOsalHandler *osalHandler = DjiPlatform_GetOsalHandler();
    uint32_t frameBufSize = 0;
    E_DjiCameraVideoStreamType videoStreamType;    //note: never assigned in this function before it is tested below
    char curFileDirPath[DJI_FILE_PATH_SIZE_MAX];
    char tempPath[DJI_FILE_PATH_SIZE_MAX];

    USER_UTIL_UNUSED(arg);

    //get video stream file information (1)
    returnCode = DjiUserUtil_GetCurrentFileDirPath(__FILE__, DJI_FILE_PATH_SIZE_MAX, curFileDirPath);
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_ERROR("Get file current path error, stat = 0x%08llX", returnCode);
        exit(1);
    }
    if (s_isMediaFileDirPathConfigured == true) {
        snprintf(tempPath, DJI_FILE_PATH_SIZE_MAX, "%sPSDK_0005.h264", s_mediaFileDirPath);
    } else {
        snprintf(tempPath, DJI_FILE_PATH_SIZE_MAX, "%smedia_file/PSDK_0005.h264", curFileDirPath);
    }

    //after creating the video stream thread, a camera payload developed with PSDK must first initialize the thread state and request memory for buffering the video stream file
    videoFilePath = osalHandler->Malloc(DJI_FILE_PATH_SIZE_MAX);
    if (videoFilePath == NULL) {
        USER_LOG_ERROR("malloc memory for video file path fail.");
        exit(1);
    }

    transcodedFilePath = osalHandler->Malloc(DJI_FILE_PATH_SIZE_MAX);
    if (transcodedFilePath == NULL) {
        USER_LOG_ERROR("malloc memory for transcoded file path fail.");
        exit(1);
    }

    frameInfo = osalHandler->Malloc(VIDEO_FRAME_MAX_COUNT * sizeof(T_TestPayloadCameraVideoFrameInfo));
    if (frameInfo == NULL) {
        USER_LOG_ERROR("malloc memory for frame info fail.");
        exit(1);
    }
    memset(frameInfo, 0, VIDEO_FRAME_MAX_COUNT * sizeof(T_TestPayloadCameraVideoFrameInfo));

    //???
    returnCode = DjiPlayback_StopPlayProcess();
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_ERROR("stop playback and start liveview error: 0x%08llX.", returnCode);
        exit(1);
    }

    /*tc -- begin: FFmpeg camera capture and encoder setup -----------------------------------------------------------*/
    // int ret;
    int have_video = 0, have_audio = 0;
    int encode_video = 0, encode_audio = 0;
    AVDictionary *opt = NULL;
    int i;

    /*open the camera **********************************************************************************************/

    char *dev_name = "/dev/video10";

    //input format context
    AVFormatContext *v_ifmtCtx;
    v_ifmtCtx = avformat_alloc_context();

    avdevice_register_all();    //must run before av_find_input_format("video4linux2"); in the original order ifmt came back NULL

    AVInputFormat *ifmt = av_find_input_format("video4linux2");

    ret = avformat_open_input(&v_ifmtCtx, dev_name, ifmt, NULL);//open the capture stream (v_ifmtCtx) using the device name (dev_name) and input format (ifmt)
    if (ret != 0)
    {
        printf("cannot open the camera input stream: %d\n", ret);
        return NULL;    //the task returns void *, so returning -1 here was not valid
    }
    // printf("container format of the camera input stream: %s\n", ifmt->name);//this line segfaulted while ifmt was NULL (see the registration order above)

    //input video initialize
    if (avformat_find_stream_info(v_ifmtCtx, NULL) < 0)//read the stream (camera) information
    {
        printf("could not find stream information.\n");
        return NULL;
    }

    int videoindex = -1;
    for (i = 0; i < v_ifmtCtx->nb_streams; i++)
    {
        if (v_ifmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videoindex = i;
            break;
        }
    }
    if (videoindex == -1)
    {
        printf("could not find a video stream.\n");
        return NULL;
    }

    //frame rate information
    int aa = v_ifmtCtx->streams[videoindex]->r_frame_rate.num;
    int bb = v_ifmtCtx->streams[videoindex]->r_frame_rate.den;
    double dInputFps = v_ifmtCtx->streams[videoindex]->r_frame_rate.num * 1.0 / v_ifmtCtx->streams[videoindex]->r_frame_rate.den;
    printf("camera frame rate: %d\n", (int) dInputFps);

    //decoder for the video stream
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;

    pCodec = avcodec_find_decoder(v_ifmtCtx->streams[videoindex]->codecpar->codec_id);
    if (pCodec == NULL)
    {
        printf("could not find the decoder.\n");
        return NULL;
    }
    printf("decoder name: %s\n", pCodec->name);    //moved after the NULL check; the original "%%s" printed a literal %s and dereferenced pCodec before checking it

    // pCodecCtx = v_ifmtCtx->streams[videoindex]->codecpar;//old API style, dropped in the new API
    // pCodecCtx = avcodec_alloc_context3(NULL);
    pCodecCtx = avcodec_alloc_context3(pCodec);//tc
    if (pCodecCtx == NULL)
    {
        printf("Could not allocate AVCodecContext\n");
        return NULL;
    }
    avcodec_parameters_to_context(pCodecCtx, v_ifmtCtx->streams[videoindex]->codecpar);

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
    {
        printf("cannot open the decoder.\n");
        return NULL;
    }

    AVFrame *pFrame, *pFrameYUV;
    pFrame = av_frame_alloc();
    pFrameYUV = av_frame_alloc();

    unsigned char *out_buffer = (unsigned char *) av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 16));
    // av_image_fill_arrays((AVPicture *)pFrameYUV->data,(AVPicture *)pFrameYUV->linesize, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height,16);
    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 16);

    video_width = pCodecCtx->width;
    video_height = pCodecCtx->height;
    printf("camera size (WxH): %d x %d \n", pCodecCtx->width, pCodecCtx->height);

    //tc:
    pFrameYUV->format = AV_PIX_FMT_YUV420P;
    pFrameYUV->width = video_width;
    pFrameYUV->height = video_height;

    struct SwsContext *img_convert_ctx;
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);

    IntputDev video_input = {0};

    AVPacket *in_packet = (AVPacket *) av_malloc(sizeof(AVPacket));
    video_input.img_convert_ctx = img_convert_ctx;
    video_input.in_packet = in_packet;
    video_input.pCodecCtx = pCodecCtx;
    video_input.pCodec = pCodec;
    video_input.v_ifmtCtx = v_ifmtCtx;
    video_input.videoindex = videoindex;
    video_input.pFrame = pFrame;
    video_input.pFrameYUV = pFrameYUV;

    /*camera setup done ********************************************************************************************/
    /*output/encoder setup *****************************************************************************************/
    // if (argc <= 2) {
    //     fprintf(stderr, "Usage: %s <output file> <codec name>\n", argv[0]);//libx264
    //     exit(0);
    // }
    const char *filename, *codec_name;
    const AVCodec *codec;
    AVCodecContext *c = NULL;
    // int i, ret, x, y;
    FILE *f;
    AVFrame *frame;
    AVPacket *pkt;

    // filename = argv[1];
    // codec_name = argv[2];

    filename = "delete.h264";
    codec_name = "libx264";

    /* find the encoder (libx264) */
    codec = avcodec_find_encoder_by_name(codec_name);
    if (!codec) {
        fprintf(stderr, "Codec '%s' not found\n", codec_name);
        exit(1);
    }

    c = avcodec_alloc_context3(codec);
    if (!c) {
        fprintf(stderr, "Could not allocate video codec context\n");
        exit(1);
    }

    pkt = av_packet_alloc();
    if (!pkt)
        exit(1);

    c->codec_id = codec->id;
    c->bit_rate = 400000; //average bitrate; 400000 is the default used by the example code
    /* resolution must be a multiple of two */
    c->width = video_width;
    c->height = video_height;

    /* time base: the fundamental unit of time (in seconds) in which frame
     * timestamps are represented; for fixed-fps content the time base should
     * be 1/framerate and the timestamp increment should be exactly 1 */
    c->time_base = (AVRational){1, 15};
    c->framerate = (AVRational){15, 1};
    c->gop_size = 15; /* emit one intra frame every fifteen frames at most */
    c->pix_fmt = AV_PIX_FMT_YUV420P;

    c->max_b_frames = 0;
    av_opt_set(c->priv_data, "tune", "zerolatency", 0);//avoid encoder latency
    av_opt_set(c->priv_data, "preset", "superfast", 0);

    if (c->codec_id == AV_CODEC_ID_MPEG2VIDEO)
    {
        /* just for testing, we also add B-frames */
        c->max_b_frames = 2;
    }
    if (c->codec_id == AV_CODEC_ID_MPEG1VIDEO)
    {
        /* needed to avoid using macroblocks in which some coefficients overflow;
         * this does not happen with normal video, it just happens here because
         * the motion of the chroma plane does not match the luma plane */
        c->mb_decision = 2;
    }

    /* open it */
    ret = avcodec_open2(c, codec, NULL);
    if (ret < 0) {
        fprintf(stderr, "Could not open codec: %s\n", av_err2str(ret));
        exit(1);
    }

    AVFrame *frame_tmp;
    char *data;
    int datasize;
    clock_t start, finish;
    /*tc -- end: FFmpeg camera capture and encoder setup -------------------------------------------------------------*/
    while (1) {
        // response playback command
        if (osalHandler->MutexLock(s_mediaPlayCommandBufferMutex) != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_ERROR("mutex lock error");
            continue;
        }

        //copy the command from s_mediaPlayCommandBufferHandler into playbackCommand
        bufferReadSize = UtilBuffer_Get(&s_mediaPlayCommandBufferHandler, (uint8_t *) &playbackCommand,
                                        sizeof(T_TestPayloadCameraPlaybackCommand));

        if (osalHandler->MutexUnlock(s_mediaPlayCommandBufferMutex) != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_ERROR("mutex unlock error");
            continue;
        }

        if (bufferReadSize != sizeof(T_TestPayloadCameraPlaybackCommand))//???
            goto send;

        //(1) test: what command type arrives when nothing is done after power-on? (2) what is this switch for?
        switch (playbackCommand.command) {
            case TEST_PAYLOAD_CAMERA_MEDIA_PLAY_COMMAND_STOP:
                snprintf(videoFilePath, DJI_FILE_PATH_SIZE_MAX, "%s", tempPath);
                startTimeMs = 0;
                sendVideoFlag = true;
                sendOneTimeFlag = false;
                break;
            case TEST_PAYLOAD_CAMERA_MEDIA_PLAY_COMMAND_PAUSE:
                sendVideoFlag = false;
                goto send;
            case TEST_PAYLOAD_CAMERA_MEDIA_PLAY_COMMAND_START:
                snprintf(videoFilePath, DJI_FILE_PATH_SIZE_MAX, "%s", playbackCommand.path);
                startTimeMs = playbackCommand.timeMs;
                sendVideoFlag = true;
                sendOneTimeFlag = true;
                break;
            default:
                USER_LOG_ERROR("playback command invalid: %d.", playbackCommand.command);
                sendVideoFlag = false;
                goto send;
        }

        // video send preprocess
        //get video stream file information (2)
        //get the information of the H.264 file
        //transcode the input video file (videoFilePath) to h264 and write it to transcodedFilePath
        returnCode = DjiPlayback_VideoFileTranscode(videoFilePath, "h264", transcodedFilePath,
                                                    DJI_FILE_PATH_SIZE_MAX);
        if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_ERROR("transcode video file error: 0x%08llX.", returnCode);
            continue;
        }

        returnCode = DjiPlayback_GetFrameRateOfVideoFile(transcodedFilePath, &frameRate);
        if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_ERROR("get frame rate of video error: 0x%08llX.", returnCode);
            continue;
        }

        //read the info of every frame into frameInfo; frameCount is the total number of frames in the file
        returnCode = DjiPlayback_GetFrameInfoOfVideoFile(transcodedFilePath, frameInfo, VIDEO_FRAME_MAX_COUNT,
                                                         &frameCount);
        if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_ERROR("get frame info of video error: 0x%08llX.", returnCode);
            continue;
        }

        //get the number of the frame that contains the given time
        returnCode = DjiPlayback_GetFrameNumberByTime(frameInfo, frameCount, &frameNumber,
                                                      startTimeMs);
        if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_ERROR("get start frame number error: 0x%08llX.", returnCode);
            continue;
        }

        if (fpFile != NULL)
            fclose(fpFile);

        fpFile = fopen(transcodedFilePath, "rb+");//open the transcoded file
        if (fpFile == NULL) {
            USER_LOG_ERROR("open video file fail.111111111111111111111111");
            continue;
        }

send:
        frame_tmp = get_video_frame(&video_input);    //note: may be NULL if nothing was decoded; a NULL frame puts the encoder into drain mode inside encode2

        // encode(c, frame_tmp, pkt, &data, &datasize);
        data = encode2(c, frame_tmp, pkt, &datasize);

        if (data == NULL)
        {
            // printf("encoded data is empty\n");
            continue;
        }

        //3. parse the video stream file
        //after obtaining the video stream file information, a camera payload developed with PSDK parses the file content and identifies the frame headers
        frameBufSize = datasize;
        if (videoStreamType == DJI_CAMERA_VIDEO_STREAM_TYPE_H264_DJI_FORMAT) {
            frameBufSize = frameBufSize + VIDEO_FRAME_AUD_LEN;
        }

        dataBuffer = calloc(frameBufSize, 1);
        if (dataBuffer == NULL) {
            USER_LOG_ERROR("malloc fail.");
            goto free;
        }

        memcpy(dataBuffer, data, datasize);
        dataLength = datasize;

        //4. send the video stream data
        //after parsing the video stream file and identifying the frame headers, the payload calls the stream sending interface PsdkPayloadCamera_SendVideoStream
        //and sends the data frame by frame; for the DJI H264 format, AUD info must be appended to the end of each frame to mark where the frame ends
        if (videoStreamType == DJI_CAMERA_VIDEO_STREAM_TYPE_H264_DJI_FORMAT) {
            memcpy(&dataBuffer[datasize], s_frameAudInfo, VIDEO_FRAME_AUD_LEN);
            dataLength = dataLength + VIDEO_FRAME_AUD_LEN;
        }

        //tc: dump the video frame for debugging
        // char *deletestr = calloc(dataLength + 5, 1);
        // memcpy(deletestr, data, datasize);
        // printf("frame being sent: %s\n", deletestr);
        // free(deletestr);
        //
        // for (i = 0; i < datasize; i++)
        // {
        //     // printf("%c", data[i]);
        //     printf("%02X ", data[i]);
        // }
        // printf("\n");

        start = clock(); // start clock

        lengthOfDataHaveBeenSent = 0;
        while (dataLength - lengthOfDataHaveBeenSent) {
            lengthOfDataToBeSent = USER_UTIL_MIN(DATA_SEND_FROM_VIDEO_STREAM_MAX_LEN,
                                                 dataLength - lengthOfDataHaveBeenSent);
            returnCode = DjiPayloadCamera_SendVideoStream((const uint8_t *) dataBuffer + lengthOfDataHaveBeenSent,
                                                          lengthOfDataToBeSent);
            if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
                USER_LOG_ERROR("send video stream error: 0x%08llX.", returnCode);
            }
            lengthOfDataHaveBeenSent += lengthOfDataToBeSent;
        }

        finish = clock();// stop clock
        double duration = (double) (finish - start) / CLOCKS_PER_SEC;//convert to seconds
        // printf("time to send one video frame: %lf seconds\n", duration);

        //5. query the video stream state
        //a camera payload developed with PSDK can query the real-time state of video stream transmission, which helps the user tune the stream bitrate and keep the transmitted picture stable
        returnCode = DjiPayloadCamera_GetVideoStreamState(&videoStreamState);
        if (returnCode == DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_DEBUG(
                "video stream state: realtimeBandwidthLimit: %d, realtimeBandwidthBeforeFlowController: %d, realtimeBandwidthAfterFlowController:%d busyState: %d.",
                videoStreamState.realtimeBandwidthLimit, videoStreamState.realtimeBandwidthBeforeFlowController,
                videoStreamState.realtimeBandwidthAfterFlowController,
                videoStreamState.busyState);
        } else {
            USER_LOG_ERROR("get video stream state error.");
        }

free:
        free(dataBuffer);
        free(data);
    }
}

#ifndef __CC_ARM
#pragma GCC diagnostic pop
#endif
@@ -31,6 +31,8 @@
#include "dji_typedef.h"
#include "dji_payload_camera.h"

#include "ffmpeg_tc.h"

#ifdef __cplusplus
extern "C" {
#endif