Receive H.264-encoded data over UDP, then push the stream with DJI's DjiPayloadCamera_SendVideoStream function.
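The gist of the change: a new task listens on a UDP socket, treats each datagram as one H.264 frame, appends the DJI AUD trailer, and pushes the result through DjiPayloadCamera_SendVideoStream in chunks no larger than DATA_SEND_FROM_VIDEO_STREAM_MAX_LEN. Below is a minimal standalone sketch of that flow; send_frame() is only a stand-in for DjiPayloadCamera_SendVideoStream, and the listen port, buffer sizes and AUD byte values are illustrative assumptions, not values taken from the SDK.

/* Minimal sketch of the UDP -> video-stream path (illustrative only). */
#include <arpa/inet.h>
#include <netinet/in.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <unistd.h>

#define UDP_PORT  45678   /* hypothetical listen port */
#define MAX_DGRAM 65507   /* largest possible IPv4 UDP payload */
#define MAX_CHUNK 60000   /* per-call limit, mirrors DATA_SEND_FROM_VIDEO_STREAM_MAX_LEN */

/* Assumed AUD (access unit delimiter) bytes; the sample uses its own s_frameAudInfo. */
static const uint8_t kAud[6] = {0x00, 0x00, 0x00, 0x01, 0x09, 0x10};

/* Stand-in for DjiPayloadCamera_SendVideoStream(). */
static int send_frame(const uint8_t *data, uint16_t len)
{
    printf("would push %u bytes to the video stream\n", (unsigned) len);
    return 0;
}

int main(void)
{
    uint8_t buf[MAX_DGRAM + sizeof(kAud)];
    int sock = socket(AF_INET, SOCK_DGRAM, 0);
    struct sockaddr_in addr = {0};
    addr.sin_family = AF_INET;
    addr.sin_port = htons(UDP_PORT);
    addr.sin_addr.s_addr = htonl(INADDR_ANY);
    if (sock < 0 || bind(sock, (struct sockaddr *) &addr, sizeof(addr)) < 0) {
        perror("socket/bind");
        return 1;
    }

    for (;;) {
        /* Each datagram is assumed to carry exactly one encoded frame. */
        ssize_t n = recvfrom(sock, buf, MAX_DGRAM, 0, NULL, NULL);
        if (n <= 0)
            continue;

        /* DJI H264 format: append the AUD to mark the end of the frame. */
        memcpy(buf + n, kAud, sizeof(kAud));
        size_t total = (size_t) n + sizeof(kAud);

        /* Push the frame in chunks no larger than the per-call limit. */
        for (size_t sent = 0; sent < total; ) {
            uint16_t chunk = (uint16_t) ((total - sent < MAX_CHUNK) ? total - sent : MAX_CHUNK);
            if (send_frame(buf + sent, chunk) != 0)
                break;
            sent += chunk;
        }
    }
    close(sock); /* not reached */
    return 0;
}

The diff below wires these same steps into the camera_emu sample.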
@@ -42,6 +42,7 @@
#define VIDEO_FRAME_MAX_COUNT 18000 // max video duration 10 minutes
#define VIDEO_FRAME_AUD_LEN 6
#define DATA_SEND_FROM_VIDEO_STREAM_MAX_LEN 60000
#define MAXDATASIZE 100000
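// NOTE: larger than the 65507-byte maximum payload of a single IPv4 UDP datagram, so one recvfrom() buffer always holds a whole datagram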

/* Private types -------------------------------------------------------------*/
typedef enum {
@@ -123,6 +124,7 @@ static T_DjiReturnCode StopDownloadNotification(void);

_Noreturn static void *UserCameraMedia_SendVideoTask(void *arg);
static void *UserCameraMedia_SendVideoTask_tc(void *arg);
static void *UserCameraMedia_SendVideoTask_ximea(void *arg);

/* Private variables -------------------------------------------------------------*/
static T_DjiCameraMediaDownloadPlaybackHandler s_psdkCameraMedia = {0}; // lets the camera-class payload handle media-file download and playback
@@ -212,14 +214,24 @@ T_DjiReturnCode DjiTest_CameraEmuMediaStartService(void)
    }

    // Create the thread that runs the user-defined send-video function
//    if (DjiPlatform_GetHalNetworkHandler() != NULL || DjiPlatform_GetHalUsbBulkHandler() != NULL) {
//        returnCode = osalHandler->TaskCreate("user_camera_media_task", UserCameraMedia_SendVideoTask_tc, 2048,
//                                             NULL, &s_userSendVideoThread);
//        if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
//            USER_LOG_ERROR("user send video task create error.");
//            return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
//        }
//    }
    if (DjiPlatform_GetHalNetworkHandler() != NULL || DjiPlatform_GetHalUsbBulkHandler() != NULL) {
        returnCode = osalHandler->TaskCreate("user_camera_media_task", UserCameraMedia_SendVideoTask_ximea, 2048,
                                             NULL, &s_userSendVideoThread);
        if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_ERROR("user send video task create error.");
            return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
        }
    }

    char ipAddr[100];
    uint16_t port;
    returnCode = DjiPayloadCamera_GetVideoStreamRemoteAddress(ipAddr, &port);
    if (returnCode == DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_INFO("Get video stream remote address: %s, port: %d", ipAddr, port);
    } else {
        USER_LOG_INFO("get video stream remote address error.");
        return DJI_ERROR_SYSTEM_MODULE_CODE_UNKNOWN;
    }

    return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
@@ -559,7 +571,10 @@ static T_DjiReturnCode DjiPlayback_VideoFileTranscode(const char *inPath, const
    snprintf(ffmpegCmdStr, FFMPEG_CMD_BUF_SIZE,
             "echo \"y\" | ffmpeg -i \"%s\" -codec copy -f \"%s\" \"%s\" 1>/dev/null 2>&1", inPath,
             outFormat, outPath);

    // echo "y" | ffmpeg -i "/home/300tc/projects_source/Payload-SDK/samples/sample_c/module_sample/camera_emu/media_file/PSDK_0005.h264" -codec copy -f "h264" "/home/300tc/projects_source/Payload-SDK/samples/sample_c/module_sample/camera_emu/media_file/out.h264" 1>/dev/null 2>&1
    // printf(ffmpegCmdStr);

    fpCommand = popen(ffmpegCmdStr, "r");
    if (fpCommand == NULL) {
        USER_LOG_ERROR("execute transcode command fail.");
@@ -1178,7 +1193,7 @@ static void *UserCameraMedia_SendVideoTask(void *arg)
    uint16_t lengthOfDataToBeSent = 0;
    int lengthOfDataHaveBeenSent = 0;
    char *dataBuffer = NULL;
-   T_TestPayloadCameraPlaybackCommand playbackCommand = {0};//??????????????????????
+   T_TestPayloadCameraPlaybackCommand playbackCommand = {0}; // holds the next playback command to be executed
    uint16_t bufferReadSize = 0;
    char *videoFilePath = NULL;
    char *transcodedFilePath = NULL; // path of the file after transcoding
@@ -1238,6 +1253,8 @@ static void *UserCameraMedia_SendVideoTask(void *arg)
        exit(1);
    }

    int frameNumberSendCounter = 0;

    while (1) {
        osalHandler->TaskSleepMs(1000 / SEND_VIDEO_TASK_FREQ);

@@ -1354,6 +1371,8 @@ send:
        // 3. Parse the video stream file
        // After obtaining the video stream file info, the PSDK-based camera payload parses the file content and identifies the frame headers.
        frameBufSize = frameInfo[frameNumber].size; // the size differs from frame to frame
        printf("frame %d size: %d\n", frameNumberSendCounter, frameBufSize);
        frameNumberSendCounter++;
        if (videoStreamType == DJI_CAMERA_VIDEO_STREAM_TYPE_H264_DJI_FORMAT) {
            frameBufSize = frameBufSize + VIDEO_FRAME_AUD_LEN;
        }
@@ -1554,7 +1573,7 @@ static void *UserCameraMedia_SendVideoTask_tc(void *arg)
    int aa = v_ifmtCtx->streams[videoindex]->r_frame_rate.num;
    int bb = v_ifmtCtx->streams[videoindex]->r_frame_rate.den;
    double dInputFps = v_ifmtCtx->streams[videoindex]->r_frame_rate.num * 1.0 / v_ifmtCtx->streams[videoindex]->r_frame_rate.den;
-   printf("camera frame rate%d\n", (int)dInputFps);
+   printf("camera frame rate: %d\n", (int)dInputFps);


    // codec for the video stream
@@ -1562,7 +1581,7 @@ static void *UserCameraMedia_SendVideoTask_tc(void *arg)
    AVCodec *pCodec;

    pCodec = avcodec_find_decoder(v_ifmtCtx->streams[videoindex]->codecpar->codec_id);
-   printf("codec name: %%s\n", pCodec->name);
+   printf("codec name: %s\n", pCodec->name);

    if (pCodec == NULL)
    {
@@ -1808,6 +1827,7 @@ static void *UserCameraMedia_SendVideoTask_tc(void *arg)
        // 3. Parse the video stream file
        // After obtaining the video stream file info, the PSDK-based camera payload parses the file content and identifies the frame headers.
        frameBufSize = datasize;
        videoStreamType = DJI_CAMERA_VIDEO_STREAM_TYPE_H264_DJI_FORMAT;
        if (videoStreamType == DJI_CAMERA_VIDEO_STREAM_TYPE_H264_DJI_FORMAT) {
            frameBufSize = frameBufSize + VIDEO_FRAME_AUD_LEN;
        }
@@ -1852,6 +1872,7 @@ static void *UserCameraMedia_SendVideoTask_tc(void *arg)
                                                 dataLength - lengthOfDataHaveBeenSent);
            returnCode = DjiPayloadCamera_SendVideoStream((const uint8_t *) dataBuffer + lengthOfDataHaveBeenSent,
                                                          lengthOfDataToBeSent);
            printf("sent %d bytes\n", lengthOfDataToBeSent);
            if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
                USER_LOG_ERROR("send video stream error: 0x%08llX.", returnCode);
            }
@@ -1881,6 +1902,186 @@ static void *UserCameraMedia_SendVideoTask_tc(void *arg)
    }
}

static void *UserCameraMedia_SendVideoTask_ximea(void *arg)
{
    int ret;
    T_DjiReturnCode returnCode;
    static uint32_t sendVideoStep = 0;
    FILE *fpFile = NULL;
    unsigned long dataLength = 0;
    uint16_t lengthOfDataToBeSent = 0;
    int lengthOfDataHaveBeenSent = 0;
    char *dataBuffer = NULL;
    T_TestPayloadCameraPlaybackCommand playbackCommand = {0}; // holds the next playback command to be executed
    uint16_t bufferReadSize = 0;
    char *videoFilePath = NULL;
    char *transcodedFilePath = NULL; // path of the file after transcoding
    float frameRate = 1.0f;
    T_TestPayloadCameraVideoFrameInfo *frameInfo = NULL; // duration, size, and position of each frame within the file
    uint32_t frameNumber = 0;
    uint32_t frameCount = 0;
    uint32_t startTimeMs = 0;
    bool sendVideoFlag = true;
    bool sendOneTimeFlag = false;
    T_DjiDataChannelState videoStreamState = {0};
    E_DjiCameraMode mode = DJI_CAMERA_MODE_SHOOT_PHOTO;
    T_DjiOsalHandler *osalHandler = DjiPlatform_GetOsalHandler();
    uint32_t frameBufSize = 0;
    E_DjiCameraVideoStreamType videoStreamType;
    char curFileDirPath[DJI_FILE_PATH_SIZE_MAX];
    char tempPath[DJI_FILE_PATH_SIZE_MAX];

    USER_UTIL_UNUSED(arg);

    // 1. Get the video stream file info
    returnCode = DjiUserUtil_GetCurrentFileDirPath(__FILE__, DJI_FILE_PATH_SIZE_MAX, curFileDirPath);
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_ERROR("Get file current path error, stat = 0x%08llX", returnCode);
        exit(1);
    }
    if (s_isMediaFileDirPathConfigured == true) {
        snprintf(tempPath, DJI_FILE_PATH_SIZE_MAX, "%sPSDK_0005.h264", s_mediaFileDirPath);
    } else {
        snprintf(tempPath, DJI_FILE_PATH_SIZE_MAX, "%smedia_file/PSDK_0005.h264", curFileDirPath);
    }

    // After creating the video-stream thread, a PSDK-based camera payload first initializes the thread state and allocates memory for buffering the video stream file.
    videoFilePath = osalHandler->Malloc(DJI_FILE_PATH_SIZE_MAX);
    if (videoFilePath == NULL) {
        USER_LOG_ERROR("malloc memory for video file path fail.");
        exit(1);
    }

    transcodedFilePath = osalHandler->Malloc(DJI_FILE_PATH_SIZE_MAX);
    if (transcodedFilePath == NULL) {
        USER_LOG_ERROR("malloc memory for transcoded file path fail.");
        exit(1);
    }

    frameInfo = osalHandler->Malloc(VIDEO_FRAME_MAX_COUNT * sizeof(T_TestPayloadCameraVideoFrameInfo));
    if (frameInfo == NULL) {
        USER_LOG_ERROR("malloc memory for frame info fail.");
        exit(1);
    }
    memset(frameInfo, 0, VIDEO_FRAME_MAX_COUNT * sizeof(T_TestPayloadCameraVideoFrameInfo));

    // Stop any ongoing playback before starting the live stream
    returnCode = DjiPlayback_StopPlayProcess();
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_ERROR("stop playback and start liveview error: 0x%08llX.", returnCode);
        exit(1);
    }

    //tc: UDP receive setup ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    int sockfd;
    struct sockaddr_in server;
    struct sockaddr_in client;
    socklen_t addrlen;
    int num;
    char buf[MAXDATASIZE];

    if ((sockfd = socket(AF_INET, SOCK_DGRAM, 0)) == -1) {
        perror("Creating socket failed.");
    }

    bzero(&server, sizeof(server));
    server.sin_family = AF_INET;
    server.sin_port = htons(PORT);
    server.sin_addr.s_addr = htonl(INADDR_ANY);
    if (bind(sockfd, (struct sockaddr *) &server, sizeof(server)) == -1)
    {
        perror("Bind() error.");
    }

    int udpReceivedCounter = 0;
    //tc----------------------------------------------------------------------------------------------------------------

    while (1)
    {
send:
        addrlen = sizeof(client); // must be initialized before each recvfrom() call
        num = recvfrom(sockfd, buf, MAXDATASIZE, 0, (struct sockaddr *) &client, &addrlen);
        if (num < 0) {
            perror("recvfrom() error\n");
            continue;
        }

        // printf("You got a message (%d bytes) from client. \nId is %d\n", num, udpReceivedCounter);
        udpReceivedCounter++;

        // 3. Parse the video stream data
        // After obtaining the video stream data, the PSDK-based camera payload parses it and identifies the frame headers.
        videoStreamType = DJI_CAMERA_VIDEO_STREAM_TYPE_H264_DJI_FORMAT;
        frameBufSize = num; // each received datagram carries one frame, and the size differs from frame to frame
        if (videoStreamType == DJI_CAMERA_VIDEO_STREAM_TYPE_H264_DJI_FORMAT) {
            frameBufSize = frameBufSize + VIDEO_FRAME_AUD_LEN;
        }

        dataBuffer = calloc(frameBufSize, 1);
        if (dataBuffer == NULL) {
            USER_LOG_ERROR("malloc fail.");
            goto free;
        }

        memcpy(dataBuffer, buf, num);
        dataLength = num;

        // 4. Send the video stream data
        // After parsing the stream and identifying the frame headers, the payload calls the send interface PsdkPayloadCamera_SendVideoStream
        // to push the data frame by frame. For the DJI H264 format, AUD (access unit delimiter) info must be appended to the end of each frame to mark where that frame ends.
        if (videoStreamType == DJI_CAMERA_VIDEO_STREAM_TYPE_H264_DJI_FORMAT) {
            memcpy(&dataBuffer[num], s_frameAudInfo, VIDEO_FRAME_AUD_LEN);
            dataLength = dataLength + VIDEO_FRAME_AUD_LEN;
        }

        // for (int i = 0; i < dataLength - VIDEO_FRAME_AUD_LEN; i++)
        // {
        //     // printf("%c", data[i]);
        //     printf("%02X ", dataBuffer[i]);
        // }
        // printf("\n");

        lengthOfDataHaveBeenSent = 0;
        while (dataLength - lengthOfDataHaveBeenSent)
        {
            lengthOfDataToBeSent = USER_UTIL_MIN(DATA_SEND_FROM_VIDEO_STREAM_MAX_LEN,
                                                 dataLength - lengthOfDataHaveBeenSent);
            returnCode = DjiPayloadCamera_SendVideoStream((const uint8_t *) dataBuffer + lengthOfDataHaveBeenSent,
                                                          lengthOfDataToBeSent);
            if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
            {
                USER_LOG_ERROR("send video stream error: 0x%08llX.", returnCode);
            }
            lengthOfDataHaveBeenSent += lengthOfDataToBeSent;
        }

        if ((frameNumber++) >= frameCount) {
            USER_LOG_DEBUG("reach file tail.");
            frameNumber = 0;

            if (sendOneTimeFlag == true)
                sendVideoFlag = false;
        }
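        // NOTE: frameCount is never assigned in this UDP path, so the file-tail bookkeeping above appears to be a leftover from the file-based task and will trigger on every datagram.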

        // 5. Get the video stream state
        // A PSDK-based camera payload can query the real-time state of the video stream transmission, which helps the user adjust the stream bitrate and keep the live view stable.
        returnCode = DjiPayloadCamera_GetVideoStreamState(&videoStreamState);
        if (returnCode == DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_DEBUG(
                "video stream state: realtimeBandwidthLimit: %d, realtimeBandwidthBeforeFlowController: %d, realtimeBandwidthAfterFlowController: %d, busyState: %d.",
                videoStreamState.realtimeBandwidthLimit, videoStreamState.realtimeBandwidthBeforeFlowController,
                videoStreamState.realtimeBandwidthAfterFlowController,
                videoStreamState.busyState);
        } else {
            USER_LOG_ERROR("get video stream state error.");
        }

free:
        free(dataBuffer);
    }
}

#ifndef __CC_ARM
#pragma GCC diagnostic pop
#endif

@@ -33,6 +33,17 @@

#include "ffmpeg_tc.h"

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netdb.h>

#define PORT 666
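// NOTE: ports below 1024 are privileged on Linux, so binding to 666 requires root (or CAP_NET_BIND_SERVICE)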

#ifdef __cplusplus
extern "C" {
#endif