NEW: release DJI Payload-SDK version 3.12.0

This commit is contained in:
DJI
2025-06-27 22:36:34 +08:00
parent 54b9f6c6c1
commit 326b8698dd
381 changed files with 122574 additions and 451 deletions

View File

@ -109,18 +109,10 @@ void DjiUser_RunCameraManagerSample(void)
<< "| Available position: |"
<<
endl;
cout
<< "| [1] Select gimbal mount position at NO.1 payload port |"
<<
endl;
cout
<< "| [2] Select gimbal mount position at NO.2 payload port |"
<<
endl;
cout
<< "| [3] Select gimbal mount position at NO.3 payload port |"
<<
endl;
std::cout
<< "| [1 ~ 4] Select camera mount position at NO.1~NO.4 |"
<<
std::endl;
cout
<< "| [q] Quit |"
<<
@ -134,7 +126,7 @@ void DjiUser_RunCameraManagerSample(void)
posNum = atoi(mountPositionStr.c_str());
if (posNum > 3 || posNum < 1) {
if (posNum > 4 || posNum < 1) {
USER_LOG_ERROR("Input mount position is invalid");
continue;
} else {

View File

@ -84,6 +84,7 @@ static T_DjiFcSubscriptionControlDevice DjiUser_FlightControlGetValueOfControlDe
static T_DjiFcSubscriptionSingleBatteryInfo DjiUser_FlightControlGetValueOfBattery1(void);
static T_DjiFcSubscriptionSingleBatteryInfo DjiUser_FlightControlGetValueOfBattery2(void);
static T_DjiReturnCode DjiUser_FlightControlUpdateConfig(void);
static T_DjiReturnCode DjiUser_ShowCommandFlyingMenu(void);
/* Exported functions definition ---------------------------------------------*/
void DjiUser_RunFlightControllerCommandFlyingSample(void)
@ -107,7 +108,9 @@ void DjiUser_RunFlightControllerCommandFlyingSample(void)
return;
}
osalHandler->TaskSleepMs(1000);
osalHandler->TaskSleepMs(3000);
DjiUser_ShowCommandFlyingMenu();
while (1) {
osalHandler->TaskSleepMs(1);
@ -119,7 +122,6 @@ void DjiUser_RunFlightControllerCommandFlyingSample(void)
s_flyingCommand.z = 0;
s_flyingCommand.yaw = 0;
s_inputFlag = 0;
USER_LOG_INFO(" - Front\r\n");
break;
case 'S':
case 's':
@ -128,7 +130,6 @@ void DjiUser_RunFlightControllerCommandFlyingSample(void)
s_flyingCommand.z = 0;
s_flyingCommand.yaw = 0;
s_inputFlag = 0;
USER_LOG_INFO(" - Near\r\n");
break;
case 'A':
case 'a':
@ -137,7 +138,6 @@ void DjiUser_RunFlightControllerCommandFlyingSample(void)
s_flyingCommand.z = 0;
s_flyingCommand.yaw = 0;
s_inputFlag = 0;
USER_LOG_INFO(" - Left\r\n");
break;
case 'D':
case 'd':
@ -146,7 +146,6 @@ void DjiUser_RunFlightControllerCommandFlyingSample(void)
s_flyingCommand.z = 0;
s_flyingCommand.yaw = 0;
s_inputFlag = 0;
USER_LOG_INFO(" - Right\r\n");
break;
case 'Q':
case 'q':
@ -155,7 +154,6 @@ void DjiUser_RunFlightControllerCommandFlyingSample(void)
s_flyingCommand.z = s_flyingSpeed;
s_flyingCommand.yaw = 0;
s_inputFlag = 0;
USER_LOG_INFO(" - Up\r\n");
break;
case 'E':
case 'e':
@ -164,7 +162,6 @@ void DjiUser_RunFlightControllerCommandFlyingSample(void)
s_flyingCommand.z = -s_flyingSpeed;
s_flyingCommand.yaw = 0;
s_inputFlag = 0;
USER_LOG_INFO(" - Down\r\n");
break;
case 'Z':
case 'z':
@ -173,7 +170,6 @@ void DjiUser_RunFlightControllerCommandFlyingSample(void)
s_flyingCommand.z = 0;
s_flyingCommand.yaw = -30;
s_inputFlag = 0;
USER_LOG_INFO(" - Yaw--\r\n");
break;
case 'C':
case 'c':
@ -182,96 +178,112 @@ void DjiUser_RunFlightControllerCommandFlyingSample(void)
s_flyingCommand.z = 0;
s_flyingCommand.yaw = 30;
s_inputFlag = 0;
USER_LOG_INFO(" - Yaw++\r\n");
break;
case 'R':
case 'r':
DjiFlightController_ObtainJoystickCtrlAuthority();
DjiFlightController_StartTakeoff();
USER_LOG_INFO(" - Take off\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'F':
case 'f':
DjiFlightController_StartForceLanding();
USER_LOG_INFO(" - Force landing\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'H':
case 'h':
DjiFlightController_StartGoHome();
USER_LOG_INFO(" - Start go home\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'Y':
case 'y':
DjiFlightController_CancelGoHome();
USER_LOG_INFO(" - Cancel go home\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'G':
case 'g':
DjiFlightController_StartLanding();
USER_LOG_INFO(" - Start landing\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'T':
case 't':
DjiFlightController_CancelLanding();
USER_LOG_INFO(" - Cancel landing\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'V':
case 'v':
DjiFlightController_StartConfirmLanding();
USER_LOG_INFO(" - Confirm landing\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'X':
case 'x':
s_homeLocation.longitude = (dji_f64_t) s_gpsPosition.x / 10000000;
s_homeLocation.latitude = (dji_f64_t) s_gpsPosition.y / 10000000;
DjiFlightController_SetHomeLocationUsingCurrentAircraftLocation();
USER_LOG_INFO(" - Set home location\r\n");
USER_LOG_INFO(" - Set home location (%.4f, %.4f)\r\n", s_homeLocation.longitude, s_homeLocation.latitude);
DjiUser_ShowCommandFlyingMenu();
break;
case 'P':
case 'p':
DjiFlightController_EmergencyStopMotor(DJI_FLIGHT_CONTROLLER_ENABLE_EMERGENCY_STOP_MOTOR,
(char *) "Test is ok");
USER_LOG_INFO(" - Emergency stop motor\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'B':
case 'b':
DjiFlightController_TurnOnMotors();
USER_LOG_INFO(" - Turn on motors\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'N':
case 'n':
DjiFlightController_TurnOffMotors();
USER_LOG_INFO(" - Turn off motors\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'J':
case 'j':
DjiUser_FlightControlUpdateConfig();
USER_LOG_INFO(" - Update config\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'I':
case 'i':
DjiFlightController_ArrestFlying();
USER_LOG_INFO(" - Enable arrest flying\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'O':
case 'o':
DjiFlightController_CancelArrestFlying();
USER_LOG_INFO(" - Disable arrest flying\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'K':
case 'k':
DjiFlightController_ExecuteEmergencyBrakeAction();
USER_LOG_INFO(" - Brake\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'L':
case 'l':
DjiFlightController_CancelEmergencyBrakeAction();
USER_LOG_INFO(" - Disable Brake\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
case 'M':
case 'm':
DjiFlightController_ObtainJoystickCtrlAuthority();
USER_LOG_INFO(" - Obtain joystick ctrl authority\r\n");
DjiUser_ShowCommandFlyingMenu();
break;
}
}
@ -1086,5 +1098,22 @@ static T_DjiReturnCode DjiUser_FlightControlUpdateConfig(void)
return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
#endif
}
/**
 * @brief Print the command-flying keyboard map to the log so the operator can
 *        see which key triggers which flight action.
 * @note The short sleeps before and after the burst of log lines are
 *       presumably there to let other tasks' log output drain so the menu is
 *       printed as one contiguous block — TODO confirm against logger behavior.
 * @return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS always.
 */
static T_DjiReturnCode DjiUser_ShowCommandFlyingMenu(void)
{
    T_DjiOsalHandler *osalHandler = DjiPlatform_GetOsalHandler();

    osalHandler->TaskSleepMs(2);
    /* Keep every entry in the uniform "[key]-Action" comma-separated form. */
    USER_LOG_INFO("Usage: [W]-Front, [A]-Left, [S]-Rear, [D]-Right");
    USER_LOG_INFO("Usage: [Q]-Up, [E]-Down, [Z]-Yaw--, [C]-Yaw++");
    USER_LOG_INFO("Usage: [R]-Take off, [F]-Force landing, [H]-Start go home, [Y]-Cancel go home");
    USER_LOG_INFO("Usage: [G]-Start landing, [T]-Cancel landing, [V]-Confirm landing, [X]-Set home location");
    USER_LOG_INFO("Usage: [P]-Emergency stop motor, [B]-Turn on motors, [N]-Turn off motors, [J]-Update config");
    USER_LOG_INFO("Usage: [I]-Enable arrest flying, [O]-Disable arrest flying, [K]-Emergency brake, [L]-Disable brake");
    USER_LOG_INFO("Usage: [M]-Obtain joystick ctrl authority");
    osalHandler->TaskSleepMs(2);

    return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/****************** (C) COPYRIGHT DJI Innovations *****END OF FILE****/

View File

@ -63,6 +63,8 @@ start:
<< "| [8] Waypoint 3.0 sample - run airline mission by kmz file (not support on M300 RTK) |\n"
<< "| [9] Interest point sample - run interest point mission by settings (only support on M3E/M3T) |\n"
<< "| [a] EU-C6 FTS trigger sample - receive fts callback to trigger parachute function (only support on M3D/M3DT) |\n"
<< "| [b] Slow rotate blade sample, only support on M400 |\n"
<< "| [c] Select FTS pwm trigger position, only support on M4/M4T/M4D/M4TD |\n"
<< std::endl;
std::cin >> inputSelectSample;
@ -100,6 +102,12 @@ start:
case 'a':
DjiTest_FlightControlRunSample(E_DJI_TEST_FLIGHT_CTRL_SAMPLE_SELECT_FTS_TRIGGER);
break;
case 'b':
DjiTest_FlightControlRunSample(E_DJI_TEST_FLIGHT_CTRL_SAMPLE_SELECT_SLOW_ROTATE_BLADE);
break;
case 'c':
DjiTest_FlightControlFtsPwmTriggerSample();
break;
case 'q':
break;
default:

View File

@ -89,15 +89,7 @@ start:
<<
std::endl;
std::cout
<< "| [1] Select gimbal mount position at NO.1 payload port |"
<<
std::endl;
std::cout
<< "| [2] Select gimbal mount position at NO.2 payload port |"
<<
std::endl;
std::cout
<< "| [3] Select gimbal mount position at NO.3 payload port |"
<< "| [1 ~ 4] Select gimbal mount position at NO.1~NO.4 |"
<<
std::endl;
std::cout
@ -110,7 +102,7 @@ start:
return;
}
if (mountPosition > '3' || mountPosition < '1') {
if (mountPosition > '4' || mountPosition < '1') {
USER_LOG_ERROR("Input mount position is invalid");
goto start;
}
@ -247,6 +239,7 @@ start:
T_DjiGimbalManagerRotation rotation;
T_DjiAircraftInfoBaseInfo baseInfo;
E_DjiAircraftSeries aircraftSeries;
E_DjiFcSubscriptionTopic topicOfPayloadGimablAngle;
returnCode = DjiGimbalManager_Init();
if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
@ -288,12 +281,24 @@ start:
}
else if (aircraftSeries == DJI_AIRCRAFT_SERIES_M30 || aircraftSeries == DJI_AIRCRAFT_SERIES_M3 ||
aircraftSeries == DJI_AIRCRAFT_SERIES_M3D) {
topicOfPayloadGimablAngle = DJI_FC_SUBSCRIPTION_TOPIC_GIMBAL_ANGLES;
returnCode = DjiFcSubscription_SubscribeTopic(DJI_FC_SUBSCRIPTION_TOPIC_GIMBAL_ANGLES, DJI_DATA_SUBSCRIPTION_TOPIC_50_HZ, NULL);
if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
USER_LOG_ERROR("DjiFcSubscription_SubscribeTopic %d return %d", DJI_FC_SUBSCRIPTION_TOPIC_GIMBAL_ANGLES, returnCode);
goto end;
}
USER_LOG_INFO("Subscribe topic DJI_FC_SUBSCRIPTION_TOPIC_GIMBAL_ANGLES succefully.");
} else if (aircraftSeries == DJI_AIRCRAFT_SERIES_M400) {
topicOfPayloadGimablAngle = gimbalMountPosition == 1 ? DJI_FC_SUBSCRIPTION_TOPIC_GIMBAL_ANGLES_ON_POS_NO1 :
gimbalMountPosition == 2 ? DJI_FC_SUBSCRIPTION_TOPIC_GIMBAL_ANGLES_ON_POS_NO2 :
gimbalMountPosition == 3 ? DJI_FC_SUBSCRIPTION_TOPIC_GIMBAL_ANGLES_ON_POS_NO3 :
gimbalMountPosition == 4 ? DJI_FC_SUBSCRIPTION_TOPIC_GIMBAL_ANGLES_ON_POS_NO4 : DJI_FC_SUBSCRIPTION_TOPIC_TOTAL_NUMBER;
returnCode = DjiFcSubscription_SubscribeTopic(topicOfPayloadGimablAngle, DJI_DATA_SUBSCRIPTION_TOPIC_50_HZ, NULL);
if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
USER_LOG_ERROR("DjiFcSubscription_SubscribeTopic %d return %d", DJI_FC_SUBSCRIPTION_TOPIC_GIMBAL_ANGLES, returnCode);
goto end;
}
USER_LOG_INFO("Subscribe topic DJI_FC_SUBSCRIPTION_TOPIC_GIMBAL_ANGLES_ON_POS_NO%d succefully.", gimbalMountPosition);
}
@ -403,8 +408,8 @@ start:
threeGimbalData.anglesData[gimbalMountPosition - 1].yaw);
}
else if (aircraftSeries == DJI_AIRCRAFT_SERIES_M30 || aircraftSeries == DJI_AIRCRAFT_SERIES_M3 ||
aircraftSeries == DJI_AIRCRAFT_SERIES_M3D) {
returnCode = DjiFcSubscription_GetLatestValueOfTopic(DJI_FC_SUBSCRIPTION_TOPIC_GIMBAL_ANGLES,
aircraftSeries == DJI_AIRCRAFT_SERIES_M3D || aircraftSeries == DJI_AIRCRAFT_SERIES_M400) {
returnCode = DjiFcSubscription_GetLatestValueOfTopic(topicOfPayloadGimablAngle,
(uint8_t *) &gimbalAngles,
sizeof(T_DjiFcSubscriptionGimbalAngles),
&timestamp);

View File

@ -28,6 +28,7 @@
#include <iostream>
#include "dji_logger.h"
#include "hms/test_hms.h"
#include "dji_hms_customization.h"
/* Private constants ---------------------------------------------------------*/
@ -77,6 +78,38 @@ start:
}
}
/**
 * @brief Demonstrate the HMS enhanced-alarm feature: shake the motor, play a
 *        sound, then both combined, and finally exit all running alarms.
 * @note Each stage is started and left running for 4 s before the next stage
 *       is issued; the last stage (times = 20) is cut short by the
 *       EXIT_ALL action, demonstrating alarm interruption.
 */
void DjiUser_RunHmsEnhanceSample(void)
{
    T_DjiOsalHandler *osalHandler = DjiPlatform_GetOsalHandler();
    // Zero-initialize so no indeterminate fields are handed to the API:
    // later stages only overwrite a subset of the members.
    T_DjiHmsAlarmEnhancedSetting setting = {};

    USER_LOG_INFO("shake motor times 3, interval 500ms...");
    setting.type = DJI_HMS_ALARM_ENHANCED_TYPE_SHAKE_MOTOR;
    setting.times = 3;
    setting.interval = 500;
    DjiHmsCustomization_AlarmEnhancedCtrl(DJI_HMS_ALARM_ENHANCED_ACTION_START, setting);
    osalHandler->TaskSleepMs(4000);

    USER_LOG_INFO("play sound times 3, interval 500ms...");
    // times/interval intentionally carried over from the previous stage.
    setting.type = DJI_HMS_ALARM_ENHANCED_PLAY_SOUND;
    DjiHmsCustomization_AlarmEnhancedCtrl(DJI_HMS_ALARM_ENHANCED_ACTION_START, setting);
    osalHandler->TaskSleepMs(4000);

    USER_LOG_INFO("shake motor and play sound times 3, interval 500ms...");
    setting.times = 3;
    setting.type = DJI_HMS_ALARM_ENHANCED_PLAY_SOUND_AND_SHAKE_MOTOR;
    DjiHmsCustomization_AlarmEnhancedCtrl(DJI_HMS_ALARM_ENHANCED_ACTION_START, setting);
    osalHandler->TaskSleepMs(4000);

    USER_LOG_INFO("shake motor and play sound times 20, interval 500ms, interrupt 3s exit...");
    setting.times = 20;
    setting.type = DJI_HMS_ALARM_ENHANCED_PLAY_SOUND_AND_SHAKE_MOTOR;
    DjiHmsCustomization_AlarmEnhancedCtrl(DJI_HMS_ALARM_ENHANCED_ACTION_START, setting);
    osalHandler->TaskSleepMs(4000);

    // Stop every alarm still in progress (interrupts the times = 20 stage).
    DjiHmsCustomization_AlarmEnhancedCtrl(DJI_HMS_ALARM_ENHANCED_ACTION_EXIT_ALL, setting);
    USER_LOG_INFO("AlarmEnhanced exit.");
}
/* Private functions definition-----------------------------------------------*/
/****************** (C) COPYRIGHT DJI Innovations *****END OF FILE****/

View File

@ -40,6 +40,7 @@ extern "C" {
/* Exported functions --------------------------------------------------------*/
void DjiUser_RunHmsManagerSample(void);
void DjiUser_RunHmsEnhanceSample(void);
#ifdef __cplusplus
}

View File

@ -0,0 +1,6 @@
classes= 80
train = ~/COCO/train2017.txt
valid = ~/COCO/val2017.txt
names = coco.names
backup = model

View File

@ -0,0 +1,80 @@
person
bicycle
car
motorbike
aeroplane
bus
train
truck
boat
traffic light
fire hydrant
stop sign
parking meter
bench
bird
cat
dog
horse
sheep
cow
elephant
bear
zebra
giraffe
backpack
umbrella
handbag
tie
suitcase
frisbee
skis
snowboard
sports ball
kite
baseball bat
baseball glove
skateboard
surfboard
tennis racket
bottle
wine glass
cup
fork
knife
spoon
bowl
banana
apple
sandwich
orange
broccoli
carrot
hot dog
pizza
donut
cake
chair
sofa
pottedplant
bed
diningtable
toilet
tvmonitor
laptop
mouse
remote
keyboard
cell phone
microwave
oven
toaster
sink
refrigerator
book
clock
vase
scissors
teddy bear
hair drier
toothbrush

View File

@ -0,0 +1,244 @@
CUDA-version: 10010 (10010), cuDNN: 7.6.5, GPU count: 4
OpenCV version: 4.9.1
0,1,2,3
0 : compute_capability = 610, cudnn_half = 0, GPU: GeForce GTX 1080 Ti
net.optimized_memory = 0
mini_batch = 1, batch = 1, time_steps = 1, train = 0
layer filters size/strd(dil) input output
0 Create CUDA-stream - 0
Create cudnn-handle 0
conv 8 3 x 3/ 2 320 x 320 x 3 -> 160 x 160 x 8 0.011 BF
1 conv 8 1 x 1/ 1 160 x 160 x 8 -> 160 x 160 x 8 0.003 BF
2 conv 8/ 8 3 x 3/ 1 160 x 160 x 8 -> 160 x 160 x 8 0.004 BF
3 conv 4 1 x 1/ 1 160 x 160 x 8 -> 160 x 160 x 4 0.002 BF
4 conv 8 1 x 1/ 1 160 x 160 x 4 -> 160 x 160 x 8 0.002 BF
5 conv 8/ 8 3 x 3/ 1 160 x 160 x 8 -> 160 x 160 x 8 0.004 BF
6 conv 4 1 x 1/ 1 160 x 160 x 8 -> 160 x 160 x 4 0.002 BF
7 dropout p = 0.150 102400 -> 102400
8 Shortcut Layer: 3, wt = 0, wn = 0, outputs: 160 x 160 x 4 0.000 BF
9 conv 24 1 x 1/ 1 160 x 160 x 4 -> 160 x 160 x 24 0.005 BF
10 conv 24/ 24 3 x 3/ 2 160 x 160 x 24 -> 80 x 80 x 24 0.003 BF
11 conv 8 1 x 1/ 1 80 x 80 x 24 -> 80 x 80 x 8 0.002 BF
12 conv 32 1 x 1/ 1 80 x 80 x 8 -> 80 x 80 x 32 0.003 BF
13 conv 32/ 32 3 x 3/ 1 80 x 80 x 32 -> 80 x 80 x 32 0.004 BF
14 conv 8 1 x 1/ 1 80 x 80 x 32 -> 80 x 80 x 8 0.003 BF
15 dropout p = 0.150 51200 -> 51200
16 Shortcut Layer: 11, wt = 0, wn = 0, outputs: 80 x 80 x 8 0.000 BF
17 conv 32 1 x 1/ 1 80 x 80 x 8 -> 80 x 80 x 32 0.003 BF
18 conv 32/ 32 3 x 3/ 1 80 x 80 x 32 -> 80 x 80 x 32 0.004 BF
19 conv 8 1 x 1/ 1 80 x 80 x 32 -> 80 x 80 x 8 0.003 BF
20 dropout p = 0.150 51200 -> 51200
21 Shortcut Layer: 16, wt = 0, wn = 0, outputs: 80 x 80 x 8 0.000 BF
22 conv 32 1 x 1/ 1 80 x 80 x 8 -> 80 x 80 x 32 0.003 BF
23 conv 32/ 32 3 x 3/ 2 80 x 80 x 32 -> 40 x 40 x 32 0.001 BF
24 conv 8 1 x 1/ 1 40 x 40 x 32 -> 40 x 40 x 8 0.001 BF
25 conv 48 1 x 1/ 1 40 x 40 x 8 -> 40 x 40 x 48 0.001 BF
26 conv 48/ 48 3 x 3/ 1 40 x 40 x 48 -> 40 x 40 x 48 0.001 BF
27 conv 8 1 x 1/ 1 40 x 40 x 48 -> 40 x 40 x 8 0.001 BF
28 dropout p = 0.150 12800 -> 12800
29 Shortcut Layer: 24, wt = 0, wn = 0, outputs: 40 x 40 x 8 0.000 BF
30 conv 48 1 x 1/ 1 40 x 40 x 8 -> 40 x 40 x 48 0.001 BF
31 conv 48/ 48 3 x 3/ 1 40 x 40 x 48 -> 40 x 40 x 48 0.001 BF
32 conv 8 1 x 1/ 1 40 x 40 x 48 -> 40 x 40 x 8 0.001 BF
33 dropout p = 0.150 12800 -> 12800
34 Shortcut Layer: 29, wt = 0, wn = 0, outputs: 40 x 40 x 8 0.000 BF
35 conv 48 1 x 1/ 1 40 x 40 x 8 -> 40 x 40 x 48 0.001 BF
36 conv 48/ 48 3 x 3/ 1 40 x 40 x 48 -> 40 x 40 x 48 0.001 BF
37 conv 16 1 x 1/ 1 40 x 40 x 48 -> 40 x 40 x 16 0.002 BF
38 conv 96 1 x 1/ 1 40 x 40 x 16 -> 40 x 40 x 96 0.005 BF
39 conv 96/ 96 3 x 3/ 1 40 x 40 x 96 -> 40 x 40 x 96 0.003 BF
40 conv 16 1 x 1/ 1 40 x 40 x 96 -> 40 x 40 x 16 0.005 BF
41 dropout p = 0.150 25600 -> 25600
42 Shortcut Layer: 37, wt = 0, wn = 0, outputs: 40 x 40 x 16 0.000 BF
43 conv 96 1 x 1/ 1 40 x 40 x 16 -> 40 x 40 x 96 0.005 BF
44 conv 96/ 96 3 x 3/ 1 40 x 40 x 96 -> 40 x 40 x 96 0.003 BF
45 conv 16 1 x 1/ 1 40 x 40 x 96 -> 40 x 40 x 16 0.005 BF
46 dropout p = 0.150 25600 -> 25600
47 Shortcut Layer: 42, wt = 0, wn = 0, outputs: 40 x 40 x 16 0.000 BF
48 conv 96 1 x 1/ 1 40 x 40 x 16 -> 40 x 40 x 96 0.005 BF
49 conv 96/ 96 3 x 3/ 1 40 x 40 x 96 -> 40 x 40 x 96 0.003 BF
50 conv 16 1 x 1/ 1 40 x 40 x 96 -> 40 x 40 x 16 0.005 BF
51 dropout p = 0.150 25600 -> 25600
52 Shortcut Layer: 47, wt = 0, wn = 0, outputs: 40 x 40 x 16 0.000 BF
53 conv 96 1 x 1/ 1 40 x 40 x 16 -> 40 x 40 x 96 0.005 BF
54 conv 96/ 96 3 x 3/ 1 40 x 40 x 96 -> 40 x 40 x 96 0.003 BF
55 conv 16 1 x 1/ 1 40 x 40 x 96 -> 40 x 40 x 16 0.005 BF
56 dropout p = 0.150 25600 -> 25600
57 Shortcut Layer: 52, wt = 0, wn = 0, outputs: 40 x 40 x 16 0.000 BF
58 conv 96 1 x 1/ 1 40 x 40 x 16 -> 40 x 40 x 96 0.005 BF
59 conv 96/ 96 3 x 3/ 2 40 x 40 x 96 -> 20 x 20 x 96 0.001 BF
60 conv 24 1 x 1/ 1 20 x 20 x 96 -> 20 x 20 x 24 0.002 BF
61 conv 136 1 x 1/ 1 20 x 20 x 24 -> 20 x 20 x 136 0.003 BF
62 conv 136/ 136 3 x 3/ 1 20 x 20 x 136 -> 20 x 20 x 136 0.001 BF
63 conv 24 1 x 1/ 1 20 x 20 x 136 -> 20 x 20 x 24 0.003 BF
64 dropout p = 0.150 9600 -> 9600
65 Shortcut Layer: 60, wt = 0, wn = 0, outputs: 20 x 20 x 24 0.000 BF
66 conv 136 1 x 1/ 1 20 x 20 x 24 -> 20 x 20 x 136 0.003 BF
67 conv 136/ 136 3 x 3/ 1 20 x 20 x 136 -> 20 x 20 x 136 0.001 BF
68 conv 24 1 x 1/ 1 20 x 20 x 136 -> 20 x 20 x 24 0.003 BF
69 dropout p = 0.150 9600 -> 9600
70 Shortcut Layer: 65, wt = 0, wn = 0, outputs: 20 x 20 x 24 0.000 BF
71 conv 136 1 x 1/ 1 20 x 20 x 24 -> 20 x 20 x 136 0.003 BF
72 conv 136/ 136 3 x 3/ 1 20 x 20 x 136 -> 20 x 20 x 136 0.001 BF
73 conv 24 1 x 1/ 1 20 x 20 x 136 -> 20 x 20 x 24 0.003 BF
74 dropout p = 0.150 9600 -> 9600
75 Shortcut Layer: 70, wt = 0, wn = 0, outputs: 20 x 20 x 24 0.000 BF
76 conv 136 1 x 1/ 1 20 x 20 x 24 -> 20 x 20 x 136 0.003 BF
77 conv 136/ 136 3 x 3/ 1 20 x 20 x 136 -> 20 x 20 x 136 0.001 BF
78 conv 24 1 x 1/ 1 20 x 20 x 136 -> 20 x 20 x 24 0.003 BF
79 dropout p = 0.150 9600 -> 9600
80 Shortcut Layer: 75, wt = 0, wn = 0, outputs: 20 x 20 x 24 0.000 BF
81 conv 136 1 x 1/ 1 20 x 20 x 24 -> 20 x 20 x 136 0.003 BF
82 conv 136/ 136 3 x 3/ 2 20 x 20 x 136 -> 10 x 10 x 136 0.000 BF
83 conv 48 1 x 1/ 1 10 x 10 x 136 -> 10 x 10 x 48 0.001 BF
84 conv 224 1 x 1/ 1 10 x 10 x 48 -> 10 x 10 x 224 0.002 BF
85 conv 224/ 224 3 x 3/ 1 10 x 10 x 224 -> 10 x 10 x 224 0.000 BF
86 conv 48 1 x 1/ 1 10 x 10 x 224 -> 10 x 10 x 48 0.002 BF
87 dropout p = 0.150 4800 -> 4800
88 Shortcut Layer: 83, wt = 0, wn = 0, outputs: 10 x 10 x 48 0.000 BF
89 conv 224 1 x 1/ 1 10 x 10 x 48 -> 10 x 10 x 224 0.002 BF
90 conv 224/ 224 3 x 3/ 1 10 x 10 x 224 -> 10 x 10 x 224 0.000 BF
91 conv 48 1 x 1/ 1 10 x 10 x 224 -> 10 x 10 x 48 0.002 BF
92 dropout p = 0.150 4800 -> 4800
93 Shortcut Layer: 88, wt = 0, wn = 0, outputs: 10 x 10 x 48 0.000 BF
94 conv 224 1 x 1/ 1 10 x 10 x 48 -> 10 x 10 x 224 0.002 BF
95 conv 224/ 224 3 x 3/ 1 10 x 10 x 224 -> 10 x 10 x 224 0.000 BF
96 conv 48 1 x 1/ 1 10 x 10 x 224 -> 10 x 10 x 48 0.002 BF
97 dropout p = 0.150 4800 -> 4800
98 Shortcut Layer: 93, wt = 0, wn = 0, outputs: 10 x 10 x 48 0.000 BF
99 conv 224 1 x 1/ 1 10 x 10 x 48 -> 10 x 10 x 224 0.002 BF
100 conv 224/ 224 3 x 3/ 1 10 x 10 x 224 -> 10 x 10 x 224 0.000 BF
101 conv 48 1 x 1/ 1 10 x 10 x 224 -> 10 x 10 x 48 0.002 BF
102 dropout p = 0.150 4800 -> 4800
103 Shortcut Layer: 98, wt = 0, wn = 0, outputs: 10 x 10 x 48 0.000 BF
104 conv 224 1 x 1/ 1 10 x 10 x 48 -> 10 x 10 x 224 0.002 BF
105 conv 224/ 224 3 x 3/ 1 10 x 10 x 224 -> 10 x 10 x 224 0.000 BF
106 conv 48 1 x 1/ 1 10 x 10 x 224 -> 10 x 10 x 48 0.002 BF
107 dropout p = 0.150 4800 -> 4800
108 Shortcut Layer: 103, wt = 0, wn = 0, outputs: 10 x 10 x 48 0.000 BF
109 max 3x 3/ 1 10 x 10 x 48 -> 10 x 10 x 48 0.000 BF
110 route 108 -> 10 x 10 x 48
111 max 5x 5/ 1 10 x 10 x 48 -> 10 x 10 x 48 0.000 BF
112 route 108 -> 10 x 10 x 48
113 max 9x 9/ 1 10 x 10 x 48 -> 10 x 10 x 48 0.000 BF
114 route 113 111 109 108 -> 10 x 10 x 192
115 conv 96 1 x 1/ 1 10 x 10 x 192 -> 10 x 10 x 96 0.004 BF
116 conv 96/ 96 5 x 5/ 1 10 x 10 x 96 -> 10 x 10 x 96 0.000 BF
117 conv 96 1 x 1/ 1 10 x 10 x 96 -> 10 x 10 x 96 0.002 BF
118 conv 96/ 96 5 x 5/ 1 10 x 10 x 96 -> 10 x 10 x 96 0.000 BF
119 conv 96 1 x 1/ 1 10 x 10 x 96 -> 10 x 10 x 96 0.002 BF
120 conv 255 1 x 1/ 1 10 x 10 x 96 -> 10 x 10 x 255 0.005 BF
121 yolo
[yolo] params: iou loss: ciou (4), iou_norm: 0.07, obj_norm: 1.00, cls_norm: 1.00, delta_norm: 1.00, scale_x_y: 1.00
nms_kind: greedynms (1), beta = 0.600000
122 route 115 -> 10 x 10 x 96
123 upsample 2x 10 x 10 x 96 -> 20 x 20 x 96
124 route 123 80 -> 20 x 20 x 120
125 conv 120/ 120 5 x 5/ 1 20 x 20 x 120 -> 20 x 20 x 120 0.002 BF
126 conv 120 1 x 1/ 1 20 x 20 x 120 -> 20 x 20 x 120 0.012 BF
127 conv 120/ 120 5 x 5/ 1 20 x 20 x 120 -> 20 x 20 x 120 0.002 BF
128 conv 120 1 x 1/ 1 20 x 20 x 120 -> 20 x 20 x 120 0.012 BF
129 conv 255 1 x 1/ 1 20 x 20 x 120 -> 20 x 20 x 255 0.024 BF
130 yolo
[yolo] params: iou loss: ciou (4), iou_norm: 0.07, obj_norm: 1.00, cls_norm: 1.00, delta_norm: 1.00, scale_x_y: 1.00
nms_kind: greedynms (1), beta = 0.600000
Total BFLOPS 0.252
avg_outputs = 62893
Allocate additional workspace_size = 1.23 MB
Loading weights from yolo-fastest-1.1.weights...
seen 64, trained: 14231 K-images (222 Kilo-batches_64)
Done! Loaded 131 layers from weights-file
calculation mAP (mean average precision)...
Detection layer: 121 - type = 28
Detection layer: 130 - type = 28
4952
detections_count = 897029, unique_truth_count = 36335
class_id = 0, name = person, ap = 45.27% (TP = 4021, FP = 6119)
class_id = 1, name = bicycle, ap = 16.88% (TP = 43, FP = 72)
class_id = 2, name = car, ap = 20.98% (TP = 484, FP = 1112)
class_id = 3, name = motorcycle, ap = 36.12% (TP = 129, FP = 160)
class_id = 4, name = airplane, ap = 57.68% (TP = 81, FP = 57)
class_id = 5, name = bus, ap = 52.42% (TP = 125, FP = 80)
class_id = 6, name = train, ap = 63.20% (TP = 110, FP = 60)
class_id = 7, name = truck, ap = 18.15% (TP = 70, FP = 104)
class_id = 8, name = boat, ap = 12.82% (TP = 70, FP = 188)
class_id = 9, name = traffic light, ap = 9.76% (TP = 76, FP = 162)
class_id = 10, name = fire hydrant, ap = 49.26% (TP = 46, FP = 40)
class_id = 11, name = stop sign, ap = 51.04% (TP = 39, FP = 21)
class_id = 12, name = parking meter, ap = 25.85% (TP = 13, FP = 5)
class_id = 13, name = bench, ap = 12.02% (TP = 43, FP = 55)
class_id = 14, name = bird, ap = 14.24% (TP = 64, FP = 137)
class_id = 15, name = cat, ap = 59.32% (TP = 98, FP = 126)
class_id = 16, name = dog, ap = 41.95% (TP = 80, FP = 95)
class_id = 17, name = horse, ap = 43.46% (TP = 120, FP = 151)
class_id = 18, name = sheep, ap = 33.25% (TP = 147, FP = 285)
class_id = 19, name = cow, ap = 35.18% (TP = 146, FP = 205)
class_id = 20, name = elephant, ap = 59.49% (TP = 151, FP = 152)
class_id = 21, name = bear, ap = 58.50% (TP = 46, FP = 44)
class_id = 22, name = zebra, ap = 66.36% (TP = 172, FP = 123)
class_id = 23, name = giraffe, ap = 65.48% (TP = 150, FP = 63)
class_id = 24, name = backpack, ap = 1.91% (TP = 4, FP = 22)
class_id = 25, name = umbrella, ap = 21.44% (TP = 91, FP = 138)
class_id = 26, name = handbag, ap = 0.61% (TP = 1, FP = 23)
class_id = 27, name = tie, ap = 10.44% (TP = 31, FP = 94)
class_id = 28, name = suitcase, ap = 12.93% (TP = 39, FP = 78)
class_id = 29, name = frisbee, ap = 27.25% (TP = 28, FP = 41)
class_id = 30, name = skis, ap = 11.67% (TP = 37, FP = 132)
class_id = 31, name = snowboard, ap = 10.36% (TP = 6, FP = 10)
class_id = 32, name = sports ball, ap = 17.34% (TP = 48, FP = 62)
class_id = 33, name = kite, ap = 25.58% (TP = 117, FP = 232)
class_id = 34, name = baseball bat, ap = 11.47% (TP = 15, FP = 27)
class_id = 35, name = baseball glove, ap = 10.58% (TP = 20, FP = 61)
class_id = 36, name = skateboard, ap = 18.58% (TP = 44, FP = 85)
class_id = 37, name = surfboard, ap = 14.43% (TP = 50, FP = 172)
class_id = 38, name = tennis racket, ap = 22.89% (TP = 67, FP = 116)
class_id = 39, name = bottle, ap = 7.63% (TP = 69, FP = 146)
class_id = 40, name = wine glass, ap = 7.97% (TP = 18, FP = 67)
class_id = 41, name = cup, ap = 13.11% (TP = 116, FP = 243)
class_id = 42, name = fork, ap = 4.41% (TP = 9, FP = 13)
class_id = 43, name = knife, ap = 1.48% (TP = 2, FP = 14)
class_id = 44, name = spoon, ap = 0.77% (TP = 1, FP = 6)
class_id = 45, name = bowl, ap = 23.25% (TP = 134, FP = 241)
class_id = 46, name = banana, ap = 8.99% (TP = 39, FP = 105)
class_id = 47, name = apple, ap = 5.32% (TP = 13, FP = 37)
class_id = 48, name = sandwich, ap = 23.40% (TP = 35, FP = 67)
class_id = 49, name = orange, ap = 16.69% (TP = 52, FP = 91)
class_id = 50, name = broccoli, ap = 16.88% (TP = 65, FP = 164)
class_id = 51, name = carrot, ap = 7.64% (TP = 27, FP = 80)
class_id = 52, name = hot dog, ap = 14.46% (TP = 11, FP = 31)
class_id = 53, name = pizza, ap = 41.55% (TP = 113, FP = 124)
class_id = 54, name = donut, ap = 19.84% (TP = 65, FP = 152)
class_id = 55, name = cake, ap = 18.44% (TP = 45, FP = 72)
class_id = 56, name = chair, ap = 10.04% (TP = 142, FP = 275)
class_id = 57, name = couch, ap = 29.89% (TP = 53, FP = 101)
class_id = 58, name = potted plant, ap = 10.76% (TP = 29, FP = 84)
class_id = 59, name = bed, ap = 43.32% (TP = 57, FP = 71)
class_id = 60, name = dining table, ap = 22.00% (TP = 183, FP = 283)
class_id = 61, name = toilet, ap = 58.93% (TP = 94, FP = 89)
class_id = 62, name = tv, ap = 47.13% (TP = 123, FP = 107)
class_id = 63, name = laptop, ap = 40.93% (TP = 75, FP = 112)
class_id = 64, name = mouse, ap = 32.37% (TP = 29, FP = 26)
class_id = 65, name = remote, ap = 4.22% (TP = 12, FP = 19)
class_id = 66, name = keyboard, ap = 31.90% (TP = 51, FP = 67)
class_id = 67, name = cell phone, ap = 15.28% (TP = 30, FP = 30)
class_id = 68, name = microwave, ap = 39.49% (TP = 20, FP = 14)
class_id = 69, name = oven, ap = 24.75% (TP = 34, FP = 45)
class_id = 70, name = toaster, ap = 2.32% (TP = 0, FP = 0)
class_id = 71, name = sink, ap = 20.24% (TP = 46, FP = 86)
class_id = 72, name = refrigerator, ap = 30.95% (TP = 42, FP = 44)
class_id = 73, name = book, ap = 1.74% (TP = 45, FP = 334)
class_id = 74, name = clock, ap = 32.38% (TP = 103, FP = 127)
class_id = 75, name = vase, ap = 13.89% (TP = 40, FP = 48)
class_id = 76, name = scissors, ap = 6.25% (TP = 1, FP = 3)
class_id = 77, name = teddy bear, ap = 33.81% (TP = 59, FP = 56)
class_id = 78, name = hair drier, ap = 0.00% (TP = 0, FP = 0)
class_id = 79, name = toothbrush, ap = 1.16% (TP = 0, FP = 2)
for conf_thresh = 0.25, precision = 0.39, recall = 0.25, F1-score = 0.31
for conf_thresh = 0.25, TP = 9204, FP = 14585, FN = 27131, average IoU = 27.42 %
IoU threshold = 50 %, used Area-Under-Curve for each unique Recall
mean average precision (mAP@0.50) = 0.243967, or 24.40 %
Total Detection Time: 133 Seconds

View File

@ -0,0 +1,239 @@
mini_batch = 1, batch = 1, time_steps = 1, train = 0
layer filters size/strd(dil) input output
0 Create CUDA-stream - 0
Create cudnn-handle 0
conv 16 3 x 3/ 2 320 x 320 x 3 -> 160 x 160 x 16 0.022 BF
1 conv 16 1 x 1/ 1 160 x 160 x 16 -> 160 x 160 x 16 0.013 BF
2 conv 16/ 16 3 x 3/ 1 160 x 160 x 16 -> 160 x 160 x 16 0.007 BF
3 conv 8 1 x 1/ 1 160 x 160 x 16 -> 160 x 160 x 8 0.007 BF
4 conv 16 1 x 1/ 1 160 x 160 x 8 -> 160 x 160 x 16 0.007 BF
5 conv 16/ 16 3 x 3/ 1 160 x 160 x 16 -> 160 x 160 x 16 0.007 BF
6 conv 8 1 x 1/ 1 160 x 160 x 16 -> 160 x 160 x 8 0.007 BF
7 dropout p = 0.200 204800 -> 204800
8 Shortcut Layer: 3, wt = 0, wn = 0, outputs: 160 x 160 x 8 0.000 BF
9 conv 48 1 x 1/ 1 160 x 160 x 8 -> 160 x 160 x 48 0.020 BF
10 conv 48/ 48 3 x 3/ 2 160 x 160 x 48 -> 80 x 80 x 48 0.006 BF
11 conv 16 1 x 1/ 1 80 x 80 x 48 -> 80 x 80 x 16 0.010 BF
12 conv 64 1 x 1/ 1 80 x 80 x 16 -> 80 x 80 x 64 0.013 BF
13 conv 64/ 64 3 x 3/ 1 80 x 80 x 64 -> 80 x 80 x 64 0.007 BF
14 conv 16 1 x 1/ 1 80 x 80 x 64 -> 80 x 80 x 16 0.013 BF
15 dropout p = 0.200 102400 -> 102400
16 Shortcut Layer: 11, wt = 0, wn = 0, outputs: 80 x 80 x 16 0.000 BF
17 conv 64 1 x 1/ 1 80 x 80 x 16 -> 80 x 80 x 64 0.013 BF
18 conv 64/ 64 3 x 3/ 1 80 x 80 x 64 -> 80 x 80 x 64 0.007 BF
19 conv 16 1 x 1/ 1 80 x 80 x 64 -> 80 x 80 x 16 0.013 BF
20 dropout p = 0.200 102400 -> 102400
21 Shortcut Layer: 16, wt = 0, wn = 0, outputs: 80 x 80 x 16 0.000 BF
22 conv 64 1 x 1/ 1 80 x 80 x 16 -> 80 x 80 x 64 0.013 BF
23 conv 64/ 64 3 x 3/ 2 80 x 80 x 64 -> 40 x 40 x 64 0.002 BF
24 conv 16 1 x 1/ 1 40 x 40 x 64 -> 40 x 40 x 16 0.003 BF
25 conv 96 1 x 1/ 1 40 x 40 x 16 -> 40 x 40 x 96 0.005 BF
26 conv 96/ 96 3 x 3/ 1 40 x 40 x 96 -> 40 x 40 x 96 0.003 BF
27 conv 16 1 x 1/ 1 40 x 40 x 96 -> 40 x 40 x 16 0.005 BF
28 dropout p = 0.200 25600 -> 25600
29 Shortcut Layer: 24, wt = 0, wn = 0, outputs: 40 x 40 x 16 0.000 BF
30 conv 96 1 x 1/ 1 40 x 40 x 16 -> 40 x 40 x 96 0.005 BF
31 conv 96/ 96 3 x 3/ 1 40 x 40 x 96 -> 40 x 40 x 96 0.003 BF
32 conv 16 1 x 1/ 1 40 x 40 x 96 -> 40 x 40 x 16 0.005 BF
33 dropout p = 0.200 25600 -> 25600
34 Shortcut Layer: 29, wt = 0, wn = 0, outputs: 40 x 40 x 16 0.000 BF
35 conv 96 1 x 1/ 1 40 x 40 x 16 -> 40 x 40 x 96 0.005 BF
36 conv 96/ 96 3 x 3/ 1 40 x 40 x 96 -> 40 x 40 x 96 0.003 BF
37 conv 32 1 x 1/ 1 40 x 40 x 96 -> 40 x 40 x 32 0.010 BF
38 conv 192 1 x 1/ 1 40 x 40 x 32 -> 40 x 40 x 192 0.020 BF
39 conv 192/ 192 3 x 3/ 1 40 x 40 x 192 -> 40 x 40 x 192 0.006 BF
40 conv 32 1 x 1/ 1 40 x 40 x 192 -> 40 x 40 x 32 0.020 BF
41 dropout p = 0.200 51200 -> 51200
42 Shortcut Layer: 37, wt = 0, wn = 0, outputs: 40 x 40 x 32 0.000 BF
43 conv 192 1 x 1/ 1 40 x 40 x 32 -> 40 x 40 x 192 0.020 BF
44 conv 192/ 192 3 x 3/ 1 40 x 40 x 192 -> 40 x 40 x 192 0.006 BF
45 conv 32 1 x 1/ 1 40 x 40 x 192 -> 40 x 40 x 32 0.020 BF
46 dropout p = 0.200 51200 -> 51200
47 Shortcut Layer: 42, wt = 0, wn = 0, outputs: 40 x 40 x 32 0.000 BF
48 conv 192 1 x 1/ 1 40 x 40 x 32 -> 40 x 40 x 192 0.020 BF
49 conv 192/ 192 3 x 3/ 1 40 x 40 x 192 -> 40 x 40 x 192 0.006 BF
50 conv 32 1 x 1/ 1 40 x 40 x 192 -> 40 x 40 x 32 0.020 BF
51 dropout p = 0.200 51200 -> 51200
52 Shortcut Layer: 47, wt = 0, wn = 0, outputs: 40 x 40 x 32 0.000 BF
53 conv 192 1 x 1/ 1 40 x 40 x 32 -> 40 x 40 x 192 0.020 BF
54 conv 192/ 192 3 x 3/ 1 40 x 40 x 192 -> 40 x 40 x 192 0.006 BF
55 conv 32 1 x 1/ 1 40 x 40 x 192 -> 40 x 40 x 32 0.020 BF
56 dropout p = 0.200 51200 -> 51200
57 Shortcut Layer: 52, wt = 0, wn = 0, outputs: 40 x 40 x 32 0.000 BF
58 conv 192 1 x 1/ 1 40 x 40 x 32 -> 40 x 40 x 192 0.020 BF
59 conv 192/ 192 3 x 3/ 2 40 x 40 x 192 -> 20 x 20 x 192 0.001 BF
60 conv 48 1 x 1/ 1 20 x 20 x 192 -> 20 x 20 x 48 0.007 BF
61 conv 272 1 x 1/ 1 20 x 20 x 48 -> 20 x 20 x 272 0.010 BF
62 conv 272/ 272 3 x 3/ 1 20 x 20 x 272 -> 20 x 20 x 272 0.002 BF
63 conv 48 1 x 1/ 1 20 x 20 x 272 -> 20 x 20 x 48 0.010 BF
64 dropout p = 0.200 19200 -> 19200
65 Shortcut Layer: 60, wt = 0, wn = 0, outputs: 20 x 20 x 48 0.000 BF
66 conv 272 1 x 1/ 1 20 x 20 x 48 -> 20 x 20 x 272 0.010 BF
67 conv 272/ 272 3 x 3/ 1 20 x 20 x 272 -> 20 x 20 x 272 0.002 BF
68 conv 48 1 x 1/ 1 20 x 20 x 272 -> 20 x 20 x 48 0.010 BF
69 dropout p = 0.200 19200 -> 19200
70 Shortcut Layer: 65, wt = 0, wn = 0, outputs: 20 x 20 x 48 0.000 BF
71 conv 272 1 x 1/ 1 20 x 20 x 48 -> 20 x 20 x 272 0.010 BF
72 conv 272/ 272 3 x 3/ 1 20 x 20 x 272 -> 20 x 20 x 272 0.002 BF
73 conv 48 1 x 1/ 1 20 x 20 x 272 -> 20 x 20 x 48 0.010 BF
74 dropout p = 0.200 19200 -> 19200
75 Shortcut Layer: 70, wt = 0, wn = 0, outputs: 20 x 20 x 48 0.000 BF
76 conv 272 1 x 1/ 1 20 x 20 x 48 -> 20 x 20 x 272 0.010 BF
77 conv 272/ 272 3 x 3/ 1 20 x 20 x 272 -> 20 x 20 x 272 0.002 BF
78 conv 48 1 x 1/ 1 20 x 20 x 272 -> 20 x 20 x 48 0.010 BF
79 dropout p = 0.200 19200 -> 19200
80 Shortcut Layer: 75, wt = 0, wn = 0, outputs: 20 x 20 x 48 0.000 BF
81 conv 272 1 x 1/ 1 20 x 20 x 48 -> 20 x 20 x 272 0.010 BF
82 conv 272/ 272 3 x 3/ 2 20 x 20 x 272 -> 10 x 10 x 272 0.000 BF
83 conv 96 1 x 1/ 1 10 x 10 x 272 -> 10 x 10 x 96 0.005 BF
84 conv 448 1 x 1/ 1 10 x 10 x 96 -> 10 x 10 x 448 0.009 BF
85 conv 448/ 448 3 x 3/ 1 10 x 10 x 448 -> 10 x 10 x 448 0.001 BF
86 conv 96 1 x 1/ 1 10 x 10 x 448 -> 10 x 10 x 96 0.009 BF
87 dropout p = 0.200 9600 -> 9600
88 Shortcut Layer: 83, wt = 0, wn = 0, outputs: 10 x 10 x 96 0.000 BF
89 conv 448 1 x 1/ 1 10 x 10 x 96 -> 10 x 10 x 448 0.009 BF
90 conv 448/ 448 3 x 3/ 1 10 x 10 x 448 -> 10 x 10 x 448 0.001 BF
91 conv 96 1 x 1/ 1 10 x 10 x 448 -> 10 x 10 x 96 0.009 BF
92 dropout p = 0.200 9600 -> 9600
93 Shortcut Layer: 88, wt = 0, wn = 0, outputs: 10 x 10 x 96 0.000 BF
94 conv 448 1 x 1/ 1 10 x 10 x 96 -> 10 x 10 x 448 0.009 BF
95 conv 448/ 448 3 x 3/ 1 10 x 10 x 448 -> 10 x 10 x 448 0.001 BF
96 conv 96 1 x 1/ 1 10 x 10 x 448 -> 10 x 10 x 96 0.009 BF
97 dropout p = 0.200 9600 -> 9600
98 Shortcut Layer: 93, wt = 0, wn = 0, outputs: 10 x 10 x 96 0.000 BF
99 conv 448 1 x 1/ 1 10 x 10 x 96 -> 10 x 10 x 448 0.009 BF
100 conv 448/ 448 3 x 3/ 1 10 x 10 x 448 -> 10 x 10 x 448 0.001 BF
101 conv 96 1 x 1/ 1 10 x 10 x 448 -> 10 x 10 x 96 0.009 BF
102 dropout p = 0.200 9600 -> 9600
103 Shortcut Layer: 98, wt = 0, wn = 0, outputs: 10 x 10 x 96 0.000 BF
104 conv 448 1 x 1/ 1 10 x 10 x 96 -> 10 x 10 x 448 0.009 BF
105 conv 448/ 448 3 x 3/ 1 10 x 10 x 448 -> 10 x 10 x 448 0.001 BF
106 conv 96 1 x 1/ 1 10 x 10 x 448 -> 10 x 10 x 96 0.009 BF
107 dropout p = 0.200 9600 -> 9600
108 Shortcut Layer: 103, wt = 0, wn = 0, outputs: 10 x 10 x 96 0.000 BF
109 max 3x 3/ 1 10 x 10 x 96 -> 10 x 10 x 96 0.000 BF
110 route 108 -> 10 x 10 x 96
111 max 5x 5/ 1 10 x 10 x 96 -> 10 x 10 x 96 0.000 BF
112 route 108 -> 10 x 10 x 96
113 max 9x 9/ 1 10 x 10 x 96 -> 10 x 10 x 96 0.001 BF
114 route 113 111 109 108 -> 10 x 10 x 384
115 conv 96 1 x 1/ 1 10 x 10 x 384 -> 10 x 10 x 96 0.007 BF
116 conv 96/ 96 5 x 5/ 1 10 x 10 x 96 -> 10 x 10 x 96 0.000 BF
117 conv 96 1 x 1/ 1 10 x 10 x 96 -> 10 x 10 x 96 0.002 BF
118 conv 96/ 96 5 x 5/ 1 10 x 10 x 96 -> 10 x 10 x 96 0.000 BF
119 conv 96 1 x 1/ 1 10 x 10 x 96 -> 10 x 10 x 96 0.002 BF
120 conv 255 1 x 1/ 1 10 x 10 x 96 -> 10 x 10 x 255 0.005 BF
121 yolo
[yolo] params: iou loss: ciou (4), iou_norm: 0.07, obj_norm: 1.00, cls_norm: 1.00, delta_norm: 1.00, scale_x_y: 1.00
nms_kind: greedynms (1), beta = 0.600000
122 route 115 -> 10 x 10 x 96
123 upsample 2x 10 x 10 x 96 -> 20 x 20 x 96
124 route 123 80 -> 20 x 20 x 144
125 conv 144/ 144 5 x 5/ 1 20 x 20 x 144 -> 20 x 20 x 144 0.003 BF
126 conv 144 1 x 1/ 1 20 x 20 x 144 -> 20 x 20 x 144 0.017 BF
127 conv 144/ 144 5 x 5/ 1 20 x 20 x 144 -> 20 x 20 x 144 0.003 BF
128 conv 144 1 x 1/ 1 20 x 20 x 144 -> 20 x 20 x 144 0.017 BF
129 conv 255 1 x 1/ 1 20 x 20 x 144 -> 20 x 20 x 255 0.029 BF
130 yolo
[yolo] params: iou loss: ciou (4), iou_norm: 0.07, obj_norm: 1.00, cls_norm: 1.00, delta_norm: 1.00, scale_x_y: 1.00
nms_kind: greedynms (1), beta = 0.600000
Total BFLOPS 0.725
avg_outputs = 120982
Allocate additional workspace_size = 0.31 MB
Loading weights from model/yolo-fastest-1_final.weights...
seen 64, trained: 16000 K-images (250 Kilo-batches_64)
Done! Loaded 131 layers from weights-file
calculation mAP (mean average precision)...
Detection layer: 121 - type = 28
Detection layer: 130 - type = 28
4952
detections_count = 664785, unique_truth_count = 36335
class_id = 0, name = person, ap = 53.92% (TP = 4976, FP = 5767)
class_id = 1, name = bicycle, ap = 25.29% (TP = 81, FP = 105)
class_id = 2, name = car, ap = 30.59% (TP = 666, FP = 1092)
class_id = 3, name = motorcycle, ap = 47.05% (TP = 157, FP = 174)
class_id = 4, name = airplane, ap = 63.87% (TP = 87, FP = 63)
class_id = 5, name = bus, ap = 60.84% (TP = 160, FP = 90)
class_id = 6, name = train, ap = 72.50% (TP = 124, FP = 59)
class_id = 7, name = truck, ap = 30.67% (TP = 126, FP = 177)
class_id = 8, name = boat, ap = 20.35% (TP = 111, FP = 233)
class_id = 9, name = traffic light, ap = 17.36% (TP = 147, FP = 311)
class_id = 10, name = fire hydrant, ap = 63.01% (TP = 54, FP = 22)
class_id = 11, name = stop sign, ap = 54.51% (TP = 38, FP = 25)
class_id = 12, name = parking meter, ap = 39.62% (TP = 24, FP = 12)
class_id = 13, name = bench, ap = 16.95% (TP = 67, FP = 120)
class_id = 14, name = bird, ap = 22.58% (TP = 104, FP = 185)
class_id = 15, name = cat, ap = 73.95% (TP = 129, FP = 112)
class_id = 16, name = dog, ap = 58.90% (TP = 118, FP = 128)
class_id = 17, name = horse, ap = 57.27% (TP = 153, FP = 120)
class_id = 18, name = sheep, ap = 45.20% (TP = 185, FP = 305)
class_id = 19, name = cow, ap = 48.22% (TP = 191, FP = 212)
class_id = 20, name = elephant, ap = 68.17% (TP = 176, FP = 147)
class_id = 21, name = bear, ap = 77.67% (TP = 51, FP = 28)
class_id = 22, name = zebra, ap = 74.43% (TP = 183, FP = 91)
class_id = 23, name = giraffe, ap = 75.02% (TP = 166, FP = 65)
class_id = 24, name = backpack, ap = 5.03% (TP = 21, FP = 86)
class_id = 25, name = umbrella, ap = 36.33% (TP = 151, FP = 161)
class_id = 26, name = handbag, ap = 1.68% (TP = 11, FP = 72)
class_id = 27, name = tie, ap = 20.32% (TP = 60, FP = 120)
class_id = 28, name = suitcase, ap = 21.99% (TP = 73, FP = 137)
class_id = 29, name = frisbee, ap = 46.40% (TP = 57, FP = 60)
class_id = 30, name = skis, ap = 19.74% (TP = 60, FP = 153)
class_id = 31, name = snowboard, ap = 18.86% (TP = 20, FP = 51)
class_id = 32, name = sports ball, ap = 28.16% (TP = 74, FP = 72)
class_id = 33, name = kite, ap = 35.39% (TP = 139, FP = 247)
class_id = 34, name = baseball bat, ap = 20.85% (TP = 33, FP = 63)
class_id = 35, name = baseball glove, ap = 21.76% (TP = 40, FP = 97)
class_id = 36, name = skateboard, ap = 36.03% (TP = 79, FP = 112)
class_id = 37, name = surfboard, ap = 27.98% (TP = 93, FP = 194)
class_id = 38, name = tennis racket, ap = 36.49% (TP = 99, FP = 175)
class_id = 39, name = bottle, ap = 16.24% (TP = 170, FP = 327)
class_id = 40, name = wine glass, ap = 15.37% (TP = 48, FP = 125)
class_id = 41, name = cup, ap = 23.22% (TP = 211, FP = 348)
class_id = 42, name = fork, ap = 14.48% (TP = 29, FP = 60)
class_id = 43, name = knife, ap = 4.63% (TP = 15, FP = 62)
class_id = 44, name = spoon, ap = 3.32% (TP = 9, FP = 27)
class_id = 45, name = bowl, ap = 33.69% (TP = 209, FP = 261)
class_id = 46, name = banana, ap = 23.40% (TP = 86, FP = 136)
class_id = 47, name = apple, ap = 8.21% (TP = 24, FP = 89)
class_id = 48, name = sandwich, ap = 33.67% (TP = 56, FP = 80)
class_id = 49, name = orange, ap = 22.59% (TP = 77, FP = 137)
class_id = 50, name = broccoli, ap = 23.62% (TP = 88, FP = 178)
class_id = 51, name = carrot, ap = 10.15% (TP = 55, FP = 159)
class_id = 52, name = hot dog, ap = 28.57% (TP = 33, FP = 38)
class_id = 53, name = pizza, ap = 51.21% (TP = 129, FP = 148)
class_id = 54, name = donut, ap = 30.97% (TP = 116, FP = 184)
class_id = 55, name = cake, ap = 32.03% (TP = 99, FP = 155)
class_id = 56, name = chair, ap = 18.50% (TP = 304, FP = 568)
class_id = 57, name = couch, ap = 48.84% (TP = 125, FP = 156)
class_id = 58, name = potted plant, ap = 20.71% (TP = 66, FP = 118)
class_id = 59, name = bed, ap = 52.73% (TP = 88, FP = 97)
class_id = 60, name = dining table, ap = 27.14% (TP = 224, FP = 334)
class_id = 61, name = toilet, ap = 66.39% (TP = 112, FP = 77)
class_id = 62, name = tv, ap = 56.32% (TP = 151, FP = 98)
class_id = 63, name = laptop, ap = 54.05% (TP = 100, FP = 157)
class_id = 64, name = mouse, ap = 44.78% (TP = 46, FP = 44)
class_id = 65, name = remote, ap = 7.84% (TP = 28, FP = 102)
class_id = 66, name = keyboard, ap = 44.37% (TP = 71, FP = 83)
class_id = 67, name = cell phone, ap = 24.25% (TP = 62, FP = 74)
class_id = 68, name = microwave, ap = 46.90% (TP = 21, FP = 19)
class_id = 69, name = oven, ap = 37.19% (TP = 54, FP = 52)
class_id = 70, name = toaster, ap = 10.84% (TP = 0, FP = 0)
class_id = 71, name = sink, ap = 34.06% (TP = 81, FP = 98)
class_id = 72, name = refrigerator, ap = 46.76% (TP = 57, FP = 45)
class_id = 73, name = book, ap = 4.20% (TP = 112, FP = 548)
class_id = 74, name = clock, ap = 53.92% (TP = 144, FP = 92)
class_id = 75, name = vase, ap = 25.27% (TP = 67, FP = 70)
class_id = 76, name = scissors, ap = 21.61% (TP = 7, FP = 10)
class_id = 77, name = teddy bear, ap = 47.50% (TP = 90, FP = 56)
class_id = 78, name = hair drier, ap = 0.70% (TP = 0, FP = 0)
class_id = 79, name = toothbrush, ap = 1.50% (TP = 2, FP = 9)
for conf_thresh = 0.25, precision = 0.43, recall = 0.35, F1-score = 0.39
for conf_thresh = 0.25, TP = 12750, FP = 16864, FN = 23585, average IoU = 31.39 %
IoU threshold = 50 %, used Area-Under-Curve for each unique Recall
mean average precision (mAP@0.50) = 0.343340, or 34.33 %
Total Detection Time: 93 Seconds

View File

@ -0,0 +1,947 @@
[net]
batch=32
subdivisions=1
width=320
height=320
channels=3
momentum=0.949
decay=0.0005
angle=0
saturation=1.5
exposure=1.5
hue=.1
learning_rate=0.001
burn_in=4000
max_batches=500000
policy=steps
steps=400000,450000
scales=.1,.1
[convolutional]
filters=16
size=3
pad=1
stride=2
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=16
filters=16
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=8
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=16
filters=16
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=8
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=48
filters=48
size=3
pad=1
stride=2
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=64
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=64
filters=64
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=64
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=64
filters=64
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=64
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=64
filters=64
size=3
pad=1
stride=2
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=96
filters=96
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=96
filters=96
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=96
filters=96
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=32
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=192
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=192
filters=192
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=32
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=192
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=192
filters=192
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=32
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=192
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=192
filters=192
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=32
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=192
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=192
filters=192
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=32
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=192
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=192
filters=192
size=3
pad=1
stride=2
batch_normalize=1
activation=leaky
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=272
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=272
filters=272
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=272
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=272
filters=272
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=272
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=272
filters=272
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=272
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=272
filters=272
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=272
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=272
filters=272
size=3
pad=1
stride=2
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=448
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=448
filters=448
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=448
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=448
filters=448
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=448
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=448
filters=448
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=448
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=448
filters=448
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
[convolutional]
filters=448
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=448
filters=448
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.2
[shortcut]
from=-5
activation=linear
###############
[maxpool]
stride=1
size=3
[route]
layers=-2
[maxpool]
stride=1
size=5
[route]
layers=-4
[maxpool]
stride=1
size=9
[route]
layers=-1,-3,-5,-6
### End SPP ###
###############
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=5
groups=96
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=1
stride=1
pad=1
batch_normalize=1
activation=linear
[convolutional]
filters=96
size=5
groups=96
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=1
stride=1
pad=1
batch_normalize=1
activation=linear
[convolutional]
size=1
stride=1
pad=1
filters=255
activation=linear
[yolo]
mask = 3,4,5
anchors = 12, 18, 37, 49, 52,132, 115, 73, 119,199, 242,238
classes=80
num=6
jitter=.15
ignore_thresh = .5
truth_thresh = 1
random=0
scale_x_y = 1.0
iou_thresh=0.213
cls_normalizer=1.0
iou_normalizer=0.07
iou_loss=ciou
nms_kind=greedynms
beta_nms=0.6
[route]
layers = -7
[upsample]
stride = 2
[route]
layers=-1,80
[convolutional]
filters=144
size=5
groups=144
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=144
size=1
stride=1
pad=1
batch_normalize=1
activation=linear
[convolutional]
filters=144
size=5
groups=144
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=144
size=1
stride=1
pad=1
batch_normalize=1
activation=linear
[convolutional]
size=1
stride=1
pad=1
filters=255
activation=linear
[yolo]
mask = 0,1,2
anchors = 12, 18, 37, 49, 52,132, 115, 73, 119,199, 242,238
classes=80
num=6
jitter=.15
ignore_thresh = .5
truth_thresh = 1
random=0
scale_x_y = 1.00
iou_thresh=0.213
cls_normalizer=1.0
iou_normalizer=0.07
iou_loss=ciou
nms_kind=greedynms
beta_nms=0.6

View File

@ -0,0 +1,946 @@
[net]
batch=32
subdivisions=1
width=320
height=320
channels=3
momentum=0.949
decay=0.0005
angle=0
saturation=1.5
exposure=1.5
hue=.1
learning_rate=0.001
burn_in=4000
max_batches=500000
policy=steps
steps=400000,450000
scales=.1,.1
[convolutional]
filters=8
size=3
pad=1
stride=2
batch_normalize=1
activation=leaky
[convolutional]
filters=8
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=8
filters=8
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=4
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=8
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=8
filters=8
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=4
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=24
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=24
filters=24
size=3
pad=1
stride=2
batch_normalize=1
activation=leaky
[convolutional]
filters=8
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=32
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=32
filters=32
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=8
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=32
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=32
filters=32
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=8
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=32
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=32
filters=32
size=3
pad=1
stride=2
batch_normalize=1
activation=leaky
[convolutional]
filters=8
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=48
filters=48
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=8
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=48
filters=48
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=8
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=48
filters=48
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=96
filters=96
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=96
filters=96
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=96
filters=96
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=96
filters=96
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=96
filters=96
size=3
pad=1
stride=2
batch_normalize=1
activation=leaky
[convolutional]
filters=24
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=136
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=136
filters=136
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=24
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=136
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=136
filters=136
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=24
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=136
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=136
filters=136
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=24
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=136
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=136
filters=136
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=24
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=136
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=136
filters=136
size=3
pad=1
stride=2
batch_normalize=1
activation=leaky
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[convolutional]
filters=224
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=224
filters=224
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=224
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=224
filters=224
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=224
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=224
filters=224
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=224
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=224
filters=224
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
[convolutional]
filters=224
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
groups=224
filters=224
size=3
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=48
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
[dropout]
probability=.15
[shortcut]
from=-5
activation=linear
###############
[maxpool]
stride=1
size=3
[route]
layers=-2
[maxpool]
stride=1
size=5
[route]
layers=-4
[maxpool]
stride=1
size=9
[route]
layers=-1,-3,-5,-6
### End SPP ###
###############
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=5
groups=96
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=1
stride=1
pad=1
batch_normalize=1
activation=linear
[convolutional]
filters=96
size=5
groups=96
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=96
size=1
stride=1
pad=1
batch_normalize=1
activation=linear
[convolutional]
size=1
stride=1
pad=1
filters=255
activation=linear
[yolo]
mask = 3,4,5
anchors = 12, 18, 37, 49, 52,132, 115, 73, 119,199, 242,238
classes=80
num=6
jitter=.15
ignore_thresh = .5
truth_thresh = 1
random=0
scale_x_y = 1.0
iou_thresh=0.213
cls_normalizer=1.0
iou_normalizer=0.07
iou_loss=ciou
nms_kind=greedynms
beta_nms=0.6
[route]
layers = -7
[upsample]
stride = 2
[route]
layers=-1,80
[convolutional]
filters=120
size=5
groups=120
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=120
size=1
stride=1
pad=1
batch_normalize=1
activation=linear
[convolutional]
filters=120
size=5
groups=120
stride=1
pad=1
batch_normalize=1
activation=leaky
[convolutional]
filters=120
size=1
stride=1
pad=1
batch_normalize=1
activation=linear
[convolutional]
size=1
stride=1
pad=1
filters=255
activation=linear
[yolo]
mask = 0,1,2
anchors = 12, 18, 37, 49, 52,132, 115, 73, 119,199, 242,238
classes=80
num=6
jitter=.15
ignore_thresh = .5
truth_thresh = 1
random=0
scale_x_y = 1.00
iou_thresh=0.213
cls_normalizer=1.0
iou_normalizer=0.07
iou_loss=ciou
nms_kind=greedynms
beta_nms=0.6

View File

@ -0,0 +1,648 @@
/**
********************************************************************
*
* @copyright (c) 2023 DJI. All rights reserved.
*
* All information contained herein is, and remains, the property of DJI.
* The intellectual and technical concepts contained herein are proprietary
* to DJI and may be covered by U.S. and foreign patents, patents in process,
* and protected by trade secret or copyright law. Dissemination of this
* information, including but not limited to data and other proprietary
* material(s) incorporated within the information, in any form, is strictly
* prohibited without the express written consent of DJI.
*
* If you receive this source code without DJIs authorization, you may not
* further disseminate the information, and you must immediately remove the
* source code and notify DJI of its removal. DJI reserves the right to pursue
* legal actions against you for any loss(es) or damage(s) caused by your
* failure to do so.
*
*********************************************************************
*/
/* Includes ------------------------------------------------------------------*/
#include <iostream>
#include <dji_logger.h>
#include "test_liveview_entry.hpp"
#include "dji_liveview_object_detection.hpp"
#include "dji_payload_camera.h"
#include "dji_high_speed_data_channel.h"
#include <string>
#include <vector>
#include "dji_typedef.h"
#include <chrono>
#include <thread>
#include <fstream>
#include <ctime>
#include <sstream>
#include "dji_open_ar.h"
#include <queue>
#ifdef OPEN_CV_INSTALLED
#include <opencv2/opencv.hpp>
#include <opencv2/dnn.hpp>
#include <opencv2/core.hpp>
#include "image_processor_yolovfastest.hpp"
#endif
/* Private constants ---------------------------------------------------------*/
// COCO object-detection label set used when OpenCV (and therefore the YOLO
// detector) is available. Must stay in sync with classes=80 in the
// yolo-fastest .cfg: detection class ids index directly into this table.
static const char* s_classLables[] = {
    "person", "bicycle", "car", "motorbike",
    "aeroplane", "bus", "train", "truck",
    "boat", "traffic light", "fire hydrant", "stop sign",
    "parking meter", "bench", "bird", "cat",
    "dog", "horse", "sheep", "cow",
    "elephant", "bear", "zebra", "giraffe",
    "backpack", "umbrella", "handbag", "tie",
    "suitcase", "frisbee", "skis", "snowboard",
    "sports ball", "kite", "baseball bat", "baseball glove",
    "skateboard", "surfboard", "tennis racket", "bottle",
    "wine glass", "cup", "fork", "knife",
    "spoon", "bowl", "banana", "apple",
    "sandwich", "orange", "broccoli", "carrot",
    "hot dog", "pizza", "donut", "cake",
    "chair", "sofa", "pottedplant", "bed",
    "diningtable", "toilet", "tvmonitor", "laptop",
    "mouse", "remote", "keyboard", "cell phone",
    "microwave", "oven", "toaster", "sink",
    "refrigerator", "book", "clock", "vase",
    "scissors", "teddy bear", "hair drier", "toothbrush",
};
// Obviously-bogus placeholder labels for the no-OpenCV demo path, whose fake
// detections report types 0..3.
static const char* s_invalidLables[] = {
    "XXX", "WW", "YYYYYYYYYYY", "ZZZZZZZZ"
};
// Derive the registered label counts from the tables themselves so they can
// never drift out of sync. The previous hard-coded value (76) under-counted
// the 80-entry COCO table, leaving the last four classes without a label.
#define YOLO_LABLES_NUM     (sizeof(s_classLables) / sizeof(s_classLables[0]))
#define INVALID_CLASS_NUM   (sizeof(s_invalidLables) / sizeof(s_invalidLables[0]))
// Raw H.264 / YUV capture files (opened by DjiUser_RunCameraAiDetectionSample).
static std::ofstream outFileH264;
static std::ofstream outFileYUV;
// Builds the local-time stamp (year through minute) used in capture file names.
static std::string getCurrentTimestamp();
// Append encoded / raw frame data to the capture files above.
static void outH264Tofile(const uint8_t *buf, int32_t len);
static void outYUVTofile(const uint8_t *buf, int32_t len);
// Liveview callbacks: decoded image frames in, re-encoded H.264 chunks out.
static void DjiLiveview_RcvImageCallback(E_DjiLiveViewCameraPosition position, const uint8_t *buf, uint32_t len ,T_DjiLiveviewImageInfo imageInfo);
static void DjiLiveview_EncoderUseCallback(const uint8_t *buf, uint32_t len);
#ifdef OPEN_CV_INSTALLED
// YOLO detector plus the producer/consumer state shared between the image
// callback (producer) and the detection worker thread (consumer). Each queue
// is guarded by its own mutex below.
static ImageProcessorYolovFastest processor("YOLOvFastest");
static std::queue<cv::Mat> s_imageQueue;
static std::queue<T_DjiLiveViewStandardMetaData *> s_metaQueue;
static void *DjiLiveview_ObjectDetectionThread(void *arg);
T_DjiTaskHandle s_procThreadHandle;
T_DjiMutexHandle s_metaQueueMutexHandle;
T_DjiMutexHandle s_imageQueueMutexHandle;
#endif
/**
 * Fill @p point with the default AR point used by this sample: style 10,
 * always-in-edge behavior, a fixed Shenzhen coordinate, visible text and a
 * touchable icon.
 *
 * The struct is zeroed first: callers allocate it on the stack without
 * initializing it, so any field not explicitly set below would otherwise be
 * stack garbage sent to the SDK.
 */
void DjiUser_InitOpenAr(T_DjiOpenArPoint* point)
{
    memset(point, 0, sizeof(*point));

    point->uuid = 1;
    point->style_id = 10;
    point->resource_id = 0;
    point->is_always_in_edge = 1;
    point->coordinate = {113.939467, 22.526366, 1.0};
    point->text_attr.is_show = 1;
    memcpy(point->text_attr.text, "测试文本", sizeof("测试文本"));
    point->icon_attr = {0, 1, 33445566};
    point->touch_attr = {1, {0.0, 0.0, 1.0}};
}
void ArRrefleshAll()
{
USER_LOG_INFO("do ar reflesh all");
uint8_t buf[1024];
memset(buf, 0, sizeof(buf));
T_DjiOpenArCircle* circle = (T_DjiOpenArCircle*)buf;
circle->ids = {50, 10};
circle->center = {113.939467, 22.526366, 11.};
circle->radius = 2;
circle->normal_vector = {0, 0, 3};
circle->face = {0, 1, 1, 1};
circle->stroke = {1, 1, 1, 1, 1, 1};
DjiLiveview_ArSetCircle(circle);
}
/**
 * Interactive sample for the liveview AR overlay API.
 *
 * Presents a menu and, per keystroke, exercises one AR operation:
 * set/update/delete/clear of points, lines, polygons and circles. Blocks
 * until the user enters 'q'.
 */
void DjiUser_RunOpenArSample()
{
    // Re-draw our overlay whenever the SDK requests a full refresh.
    DjiLiveview_ArRegRefleshAllCallback(ArRrefleshAll);
    bool exit = false;
    while (!exit)
    {
        std::cout
            << "\n"
            << "| Available commands: \n"
            << "| [0] Set Point - draw tow points\n"
            << "| [1] Update point - append tow points\n"
            << "| [2] Delete point - delete tow point\n"
            << "| [3] Clear point- clear all points\n"
            << "| [4] Set line - draw a line with five points.\n"
            << "| [5] Update line - add a line with tow points.\n"
            << "| [6] Delete line - delete the line drawn in the [5] step.\n"
            << "| [7] Clear line - clear lines\n"
            << "| [8] Set polygon - draw a cuboid\n"
            << "| [9] Update polygon - draw a triangular prism\n"
            << "| [a] Delete polygon - delete the triangular prism\n"
            << "| [b] Clear polygon - clear polygon\n"
            << "| [c] Set circle - draw a circle\n"
            << "| [d] Update circle - add a circle\n"
            << "| [e] Delete circle - delete a circle\n"
            << "| [f] Clear circle - clear circle\n"
            << "| [q] Exit\n"
            << std::endl;
        char inputChar;
        std::cin >> inputChar;
        // One shared 1 KiB scratch buffer; each menu option reinterprets it as
        // the (variable-length) AR request structure it needs, zeroed each loop.
        uint8_t buf[1024];
        memset(buf, 0, sizeof(buf));
        // Template point reused (with per-case tweaks) by most operations.
        T_DjiOpenArPoint point;
        DjiUser_InitOpenAr(&point);
        T_DjiOpenArPointArray* point_array = (T_DjiOpenArPointArray*)buf;
        T_DjiOpenArLine* line = (T_DjiOpenArLine*)buf;
        T_DjiOpenArPolygon* polygon = (T_DjiOpenArPolygon*)buf;
        T_DjiOpenArCircle* circle = (T_DjiOpenArCircle*)buf;
        T_DjiOpenArPivotAxis* pivot = (T_DjiOpenArPivotAxis*)buf;
        T_DjiOpenArDeletePointEntry* delete_point_entry = (T_DjiOpenArDeletePointEntry*)buf;
        T_DjiOpenArDeleteLineEntry* delete_line_entry = (T_DjiOpenArDeleteLineEntry*)buf;
        T_DjiOpenArDeletePolygonEntry* delete_polygon_entry = (T_DjiOpenArDeletePolygonEntry*)buf;
        T_DjiOpenArDeleteCircleEntry* delete_circle_entry = (T_DjiOpenArDeleteCircleEntry*)buf;
        T_DjiOpenArDeletePovixAxisEntry* delete_povix_entry = (T_DjiOpenArDeletePovixAxisEntry*)buf;
        switch(inputChar) {
            case '0':
                // Draw two points (uuid 1 and 2) at different altitudes.
                point_array->len = 2;
                point.uuid = 1;
                memcpy(point.text_attr.text, "测试文本", sizeof("测试文本"));
                memcpy(&point_array->points[0], &point, sizeof(point));
                point.uuid = 2;
                point.coordinate.altitude = 2.0f;
                memcpy(point.text_attr.text, "test text", sizeof("test text"));
                memcpy(&point_array->points[1], &point, sizeof(point));
                DjiLiveview_ArSetPoint(point_array);
                break;
            case '1':
                // Append two more points (uuid 3 and 4).
                point_array->len = 2;
                point.uuid = 3;
                point.coordinate.altitude = 3.0f;
                memcpy(point.text_attr.text, "point update", sizeof("point update"));
                memcpy(&point_array->points[0], &point, sizeof(point));
                point.uuid = 4;
                point.coordinate.altitude = 4.0f;
                memcpy(&point_array->points[1], &point, sizeof(point));
                DjiLiveview_ArUpdatePoint(point_array);
                break;
            case '2':
                // Delete the two points drawn by case '0'.
                delete_point_entry->resource_id = 0;
                delete_point_entry->uuid_len = 2;
                delete_point_entry->uuid_array[0] = 1;
                delete_point_entry->uuid_array[1] = 2;
                DjiLiveview_ArDeletePoint(delete_point_entry);
                break;
            case '3':
                DjiLiveview_ArClearPoint(0);
                break;
            case '4':
                // Draw a five-point polyline (uuid 10), rising 1 m per vertex.
                line->ids.uuid = 10;
                line->ids.style_id = 10;
                line->point_array.len = 5;
                point.uuid = 11;
                point.coordinate.altitude = 4.0f;
                memcpy(&line->point_array.points[0], &point, sizeof(point));
                point.uuid = 12;
                point.coordinate.altitude = 5.0f;
                memcpy(&line->point_array.points[1], &point, sizeof(point));
                point.uuid = 13;
                point.coordinate.altitude = 6.0f;
                memcpy(&line->point_array.points[2], &point, sizeof(point));
                point.uuid = 14;
                point.coordinate.altitude = 7.0f;
                memcpy(&line->point_array.points[3], &point, sizeof(point));
                point.uuid = 15;
                point.coordinate.altitude = 8.0f;
                memcpy(&line->point_array.points[4], &point, sizeof(point));
                DjiLiveview_ArSetLine(line);
                break;
            case '5':
                // Add a second, two-point line (uuid 11).
                line->ids.uuid = 11;
                line->ids.style_id = 10;
                line->point_array.len = 2;
                point.uuid = 16;
                point.coordinate.altitude = 9.0f;
                memcpy(&line->point_array.points[0], &point, sizeof(point));
                point.uuid = 17;
                point.coordinate.altitude = 10.0f;
                memcpy(&line->point_array.points[1], &point, sizeof(point));
                DjiLiveview_ArUpdateLine(line);
                break;
            case '6':
                // NOTE(review): deletes line uuid 10 (drawn in step [4]), not
                // the line from step [5] as the menu text suggests.
                delete_line_entry[0].uuid = 10;
                DjiLiveview_ArDeleteLine(delete_line_entry, 1);
                break;
            case '7':
                DjiLiveview_ArClearLine();
                break;
            case '8':
                // Draw a four-vertex polygon (uuid 40) extruded 6 m along +z.
                polygon->ids = {40, 11};
                polygon->face = {0, 1, 1, 1};
                polygon->stroke = {1, 1, 1, 1, 1, 1};
                polygon->normal_vector = {0, 0, 6};
                polygon->point_array.len = 4;
                point.uuid = 41;
                point.coordinate.altitude = 1;
                point.coordinate.longitude = 113.939438538;
                point.coordinate.latitude = 22.5263937487;
                memcpy(&polygon->point_array.points[0], &point, sizeof(point));
                point.uuid = 42;
                point.coordinate.longitude = 113.939496338;
                point.coordinate.latitude = 22.5263937487;
                memcpy(&polygon->point_array.points[1], &point, sizeof(point));
                point.uuid = 43;
                point.coordinate.longitude = 113.939496338;
                point.coordinate.latitude = 22.5263397487;
                memcpy(&polygon->point_array.points[2], &point, sizeof(point));
                point.uuid = 44;
                point.coordinate.longitude = 113.939438538;
                point.coordinate.latitude = 22.5263397487;
                memcpy(&polygon->point_array.points[3], &point, sizeof(point));
                DjiLiveview_ArSetPolygon(polygon);
                break;
            case '9':
                // Add a three-vertex polygon (uuid 45) extruded 4 m along +z.
                polygon->ids = {45, 11};
                polygon->face = {0, 1, 1, 1};
                polygon->stroke = {1, 1, 1, 1, 1, 1};
                polygon->normal_vector = {0, 0, 4};
                polygon->point_array.len = 3;
                point.uuid = 46;
                point.coordinate.altitude = 12;
                point.coordinate.longitude = 113.939438538;
                point.coordinate.latitude = 22.5263937487;
                memcpy(&polygon->point_array.points[0], &point, sizeof(point));
                point.uuid = 47;
                point.coordinate.longitude = 113.939496338;
                point.coordinate.latitude = 22.5263937487;
                memcpy(&polygon->point_array.points[1], &point, sizeof(point));
                point.uuid = 48;
                point.coordinate.longitude = 113.939496338;
                point.coordinate.latitude = 22.5263397487;
                memcpy(&polygon->point_array.points[2], &point, sizeof(point));
                DjiLiveview_ArUpdatePolygon(polygon);
                break;
            case 'a':
                // NOTE(review): deletes polygon uuid 40 (the cuboid), not the
                // triangular prism (uuid 45) the menu text names.
                delete_polygon_entry->uuid = 40;
                DjiLiveview_ArDeletePolygon(delete_polygon_entry, 1);
                break;
            case 'b':
                DjiLiveview_ArClearPolygon();
                break;
            case 'c':
                circle->ids = {50, 10};
                circle->center = {113.939467, 22.526366, 11.};
                circle->radius = 3;
                circle->normal_vector = {0, 0, 3};
                circle->face = {0, 1, 1, 1};
                circle->stroke = {1, 1, 1, 1, 1, 1};
                DjiLiveview_ArSetCircle(circle);
                break;
            case 'd':
                circle->ids = {51, 11};
                circle->center = {113.939467, 22.526366, 8.};
                circle->radius = 4;
                circle->normal_vector = {0, 0, 5};
                circle->face = {1, 1, 1, 1};
                circle->stroke = {1, 1, 1, 1, 1, 1};
                DjiLiveview_ArUpdateCircle(circle);
                break;
            case 'e':
                delete_circle_entry->uuid = 50;
                DjiLiveview_ArDeleteCircle(delete_circle_entry, 1);
                break;
            case 'f':
                DjiLiveview_ArClearCircle();
                break;
            case 'q':
                exit = true;
                break;
            default:
                break;
        }
    }
}
/**
 * Camera AI-detection sample.
 *
 * Subscribes the liveview RGB image stream, runs YOLO detection on it when
 * OpenCV is available (falling back to fixed demo boxes otherwise), re-encodes
 * the frames to H.264, and forwards both stream and AI metadata to the pilot
 * app. Blocks until the user enters 'q'. Uses a goto ladder so each failure
 * point unwinds exactly the resources acquired before it.
 */
void DjiUser_RunCameraAiDetectionSample()
{
    // Fixed sample configuration: camera at payload port 1, source 0.
    int pos = 1;
    int mediaSource = 0; //support 0(app liveview)/1(1080p)/7(4k) for H30 camera
    char isQuit;
    E_DjiLiveViewCameraPosition CameraPostion;
    E_DjiLiveViewCameraSource MediaResource;
    T_DjiOsalHandler *osalHandler = DjiPlatform_GetOsalHandler();

    // Guard against out-of-range edits of the two constants above.
    if (pos < 1 || pos > 3 || (mediaSource != 0 && mediaSource != 1 && mediaSource != 7))
    {
        USER_LOG_ERROR("invalid param");
        return;
    }

    // Every run appends the encoded output to a timestamped capture file.
    std::string timestamp = getCurrentTimestamp();
    std::string h264FileName = "output_" + timestamp + ".h264";
    outFileH264.open(h264FileName, std::ios::out | std::ios::binary | std::ios::app);
    if (!outFileH264) {
        std::cerr << "cant open " << h264FileName << std::endl;
    }

#ifdef OPEN_CV_INSTALLED
    // Create the detection worker and its queue locks before frames arrive.
    osalHandler->MutexCreate(&s_metaQueueMutexHandle);
    osalHandler->MutexCreate(&s_imageQueueMutexHandle);
    osalHandler->TaskCreate("objectDetectionTask",DjiLiveview_ObjectDetectionThread,1024*1024,NULL, &s_procThreadHandle);
    if (processor.Init() != 0) {
        std::cerr << "Failed to initialize the processor." << std::endl;
        // NOTE(review): returns without destroying the task/mutexes created above.
        return ;
    }
#endif

    CameraPostion = static_cast<E_DjiLiveViewCameraPosition>(pos);
    MediaResource = static_cast<E_DjiLiveViewCameraSource>(mediaSource);

    T_DjiReturnCode returnCode;
    // Reserve high-speed channel bandwidth ({10, 60, 30} proportion split —
    // presumably data/video/download; confirm against the SDK header).
    const T_DjiDataChannelBandwidthProportionOfHighspeedChannel bandwidthProportionOfHighspeedChannel =
        {10, 60, 30};
    returnCode = DjiHighSpeedDataChannel_SetBandwidthProportion(bandwidthProportionOfHighspeedChannel);
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Liveview init failed, HighSpeed channel init error: 0x%08llX", returnCode);
        return;
    }

    USER_LOG_INFO("step 1: init liveview");
    returnCode = DjiLiveview_Init();
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Liveview init faild, ret: 0x%08llX", returnCode);
        goto init_failed;
    }

    // Register the label table matching whichever detector path is compiled in.
#ifdef OPEN_CV_INSTALLED
    returnCode = DjiLiveview_RegUserAiTargetLableList(YOLO_LABLES_NUM, s_classLables);
#else
    returnCode = DjiLiveview_RegUserAiTargetLableList(INVALID_CLASS_NUM, s_invalidLables);
#endif
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Reg ai target lable faild, ret: 0x%08llX", returnCode);
        goto init_failed;
    }

    USER_LOG_INFO("step 2: reg encoder callback");
    returnCode = DjiLiveview_RegEncoderCallback(DjiLiveview_EncoderUseCallback);
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR( "reg Yuv Encoder callback faild, ret: 0x%08llX", returnCode);
        goto reg_encoder_callback_failed;
    }

    USER_LOG_INFO("step 3:start yuv stream");
    returnCode = DjiLiveview_StartImageStream(CameraPostion, MediaResource,
                                              PIXFMT_RGB_PACKED ,DjiLiveview_RcvImageCallback);
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR( "start to subscribe YUV stream failed, ret: 0x%08llX", returnCode);
        goto start_yuv_stream_failed;
    }

    USER_LOG_INFO("codec sample start ...");
    // Frames now flow through the callbacks; block here until the user quits.
    while (true) {
        std::cin >> isQuit;
        if (isQuit == 'q' || isQuit == 'Q') {
            break;
        }
    }
    USER_LOG_INFO("codec sample end");
    DjiLiveview_UnregUserAiTargetLableList();

    // Unwind in reverse order of acquisition; failure paths enter mid-ladder.
start_yuv_stream_failed:
    returnCode = DjiLiveview_StopImageStream(CameraPostion, MediaResource);
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR( "stop subscrib YUV stream failed, ret: 0x%08llX", returnCode);
    }
reg_encoder_callback_failed:
    returnCode = DjiLiveview_UnregEncoderCallback();
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR( "unreg encoder callback failed, ret: 0x%08llX", returnCode);
    }
init_failed:
    returnCode = DjiLiveview_Deinit();
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR( "deinit liveview failed, ret: 0x%08llX", returnCode);
    }
    outFileH264.close();
    outFileYUV.close();
#ifdef OPEN_CV_INSTALLED
    // Tear down the worker and drop any frames/metadata still queued.
    osalHandler->TaskDestroy(s_procThreadHandle);
    osalHandler->MutexDestroy(s_imageQueueMutexHandle);
    osalHandler->MutexDestroy(s_metaQueueMutexHandle);
    while(!s_imageQueue.empty()){
        s_imageQueue.pop();
    }
    while(!s_metaQueue.empty()) {
        s_metaQueue.pop();
    }
#endif
}
/**
 * Build a compact local-time stamp "YYYYMMDDHHMM" used to name capture files.
 *
 * Every field is zero-padded to fixed width; without padding, e.g. Jan 11
 * ("111") and Nov 1 ("111") produce identical, ambiguous file names.
 */
static std::string getCurrentTimestamp() {
    std::time_t now = std::time(nullptr);
    std::tm* now_tm = std::localtime(&now);

    // Left-pad a field to two digits.
    auto pad2 = [](int value) {
        std::string s = std::to_string(value);
        return (s.size() < 2) ? "0" + s : s;
    };

    std::ostringstream oss;
    oss << (now_tm->tm_year + 1900)
        << pad2(now_tm->tm_mon + 1)
        << pad2(now_tm->tm_mday)
        << pad2(now_tm->tm_hour)
        << pad2(now_tm->tm_min);
    return oss.str();
}
// Append one encoded H.264 chunk to the capture file; logs and drops the
// data when the file is not open.
static void outH264Tofile(const uint8_t *buf, int32_t len) {
    if (outFileH264) {
        outFileH264.write(reinterpret_cast<const char *>(buf), len);
    } else {
        USER_LOG_ERROR( "output.h264 is not open");
    }
}
// Append one raw YUV buffer to the capture file; logs and drops the data
// when the file is not open.
static void outYUVTofile(const uint8_t *buf, int32_t len) {
    if (outFileYUV) {
        outFileYUV.write(reinterpret_cast<const char *>(buf), len);
    } else {
        USER_LOG_ERROR( "outyuv.h264 is not open");
    }
}
/**
 * Per-frame liveview image callback.
 *
 * OpenCV build: copies the RGB frame into the detection thread's queue
 * (bounded at ~30 entries, oldest dropped), pairs the frame with the newest
 * AI metadata produced by that thread, encodes, then frees the metadata
 * (ownership of queued metadata transfers here).
 *
 * Non-OpenCV build: fabricates four fixed demo bounding boxes, sends them to
 * the pilot app and encodes them into the stream.
 *
 * @param position  camera position the frame came from (unused here)
 * @param buf       pixel data, only valid for the duration of this call
 * @param len       length of @p buf in bytes
 * @param imageInfo frame geometry / pixel format / frame id
 */
static void DjiLiveview_RcvImageCallback(E_DjiLiveViewCameraPosition position, const uint8_t *buf, uint32_t len, T_DjiLiveviewImageInfo imageInfo)
{
    USER_LOG_INFO("catch image frame data, image type = %d height = %d, width = %d, frameId = %d, bufferLen= %d",
                  imageInfo.pixFmt ,imageInfo.height, imageInfo.width, imageInfo.frameId, len);
    T_DjiLiveViewStandardMetaData * metaData = nullptr;
    T_DjiOsalHandler *osalHandler = DjiPlatform_GetOsalHandler();
#ifdef OPEN_CV_INSTALLED
    // Clone: the SDK's buffer is only borrowed, but the queue entry outlives
    // this callback.
    cv::Mat rgb_image( imageInfo.height, imageInfo.width, CV_8UC3, const_cast<uint8_t*>(buf));
    cv::Mat rgb_image_copy = rgb_image.clone();

    osalHandler->MutexLock(s_imageQueueMutexHandle);
    while (s_imageQueue.size() > 30) {
        USER_LOG_WARN("The image queue is full. Drop this strike.");
        s_imageQueue.pop();
    }
    s_imageQueue.push(rgb_image_copy);
    osalHandler->MutexUnlock(s_imageQueueMutexHandle);

    // Pair the freshest detector output (if any) with this frame.
    osalHandler->MutexLock(s_metaQueueMutexHandle);
    if (!s_metaQueue.empty()) {
        metaData = s_metaQueue.front();
        s_metaQueue.pop();
    }
    osalHandler->MutexUnlock(s_metaQueueMutexHandle);

    DjiLiveview_EncodeAFrameToH264(buf, len, imageInfo, metaData);
    if (metaData != nullptr) free(metaData);  // malloc'd by the detection thread
#else
    (void)position;
    // Demo path: four fixed boxes. Size the allocation for all four records —
    // the previous code reserved room for only 3 while writing 4.
    const int demoBoxCount = 4;
    size_t size = sizeof(T_DjiLiveViewStandardMetaData) + demoBoxCount * sizeof(T_DjiLiveViewBoundingBox);
    metaData = (T_DjiLiveViewStandardMetaData *)malloc(size);
    if (!metaData) {
        fprintf(stderr, "Failed to allocate memory\n");
        return ;
    }
    metaData->boxCount = demoBoxCount;
    // Boxes at (1000,1000)..(4000,4000) in 1/10000-of-frame units, 1000 wide/high.
    for (int i = 0; i < demoBoxCount; i++) {
        metaData->boxData[i].id = i;
        metaData->boxData[i].type = i;
        metaData->boxData[i].state = 1;
        metaData->boxData[i].box.cx = (i+1)*1000;
        metaData->boxData[i].box.cy = (i+1)*1000;
        metaData->boxData[i].box.w = 1000;
        metaData->boxData[i].box.h = 1000;
        metaData->boxData[i].box.distance = 0;
    }
    DjiLiveview_SendAiMetaToPilot(metaData);
    DjiLiveview_EncodeAFrameToH264(buf, len,imageInfo, metaData);
    free(metaData);  // was leaked once per frame before
#endif
}
static void DjiLiveview_EncoderUseCallback(const uint8_t *buf, uint32_t len)
{
T_DjiReturnCode returnCode;
outH264Tofile(buf, len);
returnCode = DjiPayloadCamera_SendVideoStream(buf, len);
if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
USER_LOG_ERROR("failed to send video to pilot, ret: 0x%08llX", returnCode);
}
}
/**
 * Detection worker thread (OpenCV builds only).
 *
 * Pops RGB frames queued by DjiLiveview_RcvImageCallback, runs the YOLO
 * detector, sends the boxes to the pilot app, and queues the metadata for the
 * encoder. Ownership of the malloc'd metadata transfers with the queue entry;
 * DjiLiveview_RcvImageCallback frees it after encoding.
 */
static void* DjiLiveview_ObjectDetectionThread(void *arg) {
    T_DjiOsalHandler *osalHandler = DjiPlatform_GetOsalHandler();
    (void)arg;
    while(1) {
#ifdef OPEN_CV_INSTALLED
        osalHandler->MutexLock(s_imageQueueMutexHandle);
        if (s_imageQueue.empty()) {
            osalHandler->MutexUnlock(s_imageQueueMutexHandle);
            // Yield briefly instead of busy-spinning on the mutex at 100% CPU.
            osalHandler->TaskSleepMs(1);
            continue;
        }
        cv::Mat rgb_image = s_imageQueue.front();
        s_imageQueue.pop();
        osalHandler->MutexUnlock(s_imageQueueMutexHandle);

        // The network expects BGR input.
        cv::Mat bgr_image;
        cv::cvtColor(rgb_image, bgr_image, cv::COLOR_RGB2BGR);
        std::shared_ptr<cv::Mat> image_ptr = std::make_shared<cv::Mat>(bgr_image);

        std::vector<T_DjiLiveViewBoundingBox> bounding_boxes;
        processor.Process(image_ptr, bounding_boxes);

        // Variable-length record: header followed by one box per detection.
        T_DjiLiveViewStandardMetaData *metaData = (T_DjiLiveViewStandardMetaData *)malloc(
            sizeof(T_DjiLiveViewStandardMetaData) + bounding_boxes.size() * sizeof(T_DjiLiveViewBoundingBox));
        if (metaData == nullptr) {
            USER_LOG_ERROR("malloc AI metadata failed, drop frame result");
            continue;
        }
        metaData->boxCount = bounding_boxes.size();
        for (size_t i = 0; i < bounding_boxes.size(); i++) {
            metaData->boxData[i] = bounding_boxes[i];
        }

        DjiLiveview_SendAiMetaToPilot(metaData);

        osalHandler->MutexLock(s_metaQueueMutexHandle);
        s_metaQueue.push(metaData);  // ownership passes to the encoder side
        osalHandler->MutexUnlock(s_metaQueueMutexHandle);
#else
        break;
#endif
    }
    return NULL;
}

View File

@ -0,0 +1,41 @@
/**
********************************************************************
*
* @copyright (c) 2023 DJI. All rights reserved.
*
* All information contained herein is, and remains, the property of DJI.
* The intellectual and technical concepts contained herein are proprietary
* to DJI and may be covered by U.S. and foreign patents, patents in process,
* and protected by trade secret or copyright law. Dissemination of this
* information, including but not limited to data and other proprietary
* material(s) incorporated within the information, in any form, is strictly
* prohibited without the express written consent of DJI.
*
* If you receive this source code without DJIs authorization, you may not
* further disseminate the information, and you must immediately remove the
* source code and notify DJI of its removal. DJI reserves the right to pursue
* legal actions against you for any loss(es) or damage(s) caused by your
* failure to do so.
*
*********************************************************************
*/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef TEST_LIVEVIEW_CODEC_H
#define TEST_LIVEVIEW_CODEC_H
#include "dji_liveview.h"
#ifdef __cplusplus
extern "C" {
#endif
/* Exported constants --------------------------------------------------------*/
// Camera AI-detection sample: subscribes the liveview image stream, runs
// object detection (when OpenCV is compiled in), re-encodes the frames to
// H.264 and forwards stream + AI metadata to the pilot app. Blocks until the
// user enters 'q'.
void DjiUser_RunCameraAiDetectionSample(void);
// Interactive AR overlay sample: draws/updates/deletes points, lines,
// polygons and circles through the liveview AR API. Blocks until 'q'.
void DjiUser_RunOpenArSample(void);
#ifdef __cplusplus
}
#endif
#endif // TEST_LIVEVIEW_CODEC_H
/************************ (C) COPYRIGHT DJI Innovations *******END OF FILE******/

View File

@ -0,0 +1,140 @@
/**
********************************************************************
*
* @copyright (c) 2023 DJI. All rights reserved.
*
* All information contained herein is, and remains, the property of DJI.
* The intellectual and technical concepts contained herein are proprietary
* to DJI and may be covered by U.S. and foreign patents, patents in process,
* and protected by trade secret or copyright law. Dissemination of this
* information, including but not limited to data and other proprietary
* material(s) incorporated within the information, in any form, is strictly
* prohibited without the express written consent of DJI.
*
* If you receive this source code without DJIs authorization, you may not
* further disseminate the information, and you must immediately remove the
* source code and notify DJI of its removal. DJI reserves the right to pursue
* legal actions against you for any loss(es) or damage(s) caused by your
* failure to do so.
*
*********************************************************************
*/
#ifdef OPEN_CV_INSTALLED
#include "image_processor_yolovfastest.hpp"
/* Includes ------------------------------------------------------------------*/
#include <sys/time.h>
#include <dji_logger.h>
#include <fstream>
#include <iostream>
#include <opencv2/dnn.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <sstream>
#include <utils/util_misc.h>
using namespace cv;
using namespace dnn;
using namespace std;
int32_t ImageProcessorYolovFastest::Init() {
memset(cur_file_dir_path_, 0, kCurrentFilePathSizeMax);
memset(prototxt_file_dir_path_, 0, kFilePathSizeMax);
memset(weights_file_dir_path_, 0, kFilePathSizeMax);
if (DjiUserUtil_GetCurrentFileDirPath(__FILE__, sizeof(cur_file_dir_path_),
cur_file_dir_path_) != 0) {
USER_LOG_ERROR("get path failed");
return -1;
}
snprintf(prototxt_file_dir_path_, kFilePathSizeMax,
"%s/data/yolo-fastest-1.1_coco/yolo-fastest-1.1-xl.cfg",
cur_file_dir_path_);
snprintf(weights_file_dir_path_, kFilePathSizeMax,
"%s/data/yolo-fastest-1.1_coco/yolo-fastest-1.1-xl.weights",
cur_file_dir_path_);
USER_LOG_DEBUG("%s, %s", prototxt_file_dir_path_, weights_file_dir_path_);
net_ = readNetFromDarknet(prototxt_file_dir_path_, weights_file_dir_path_);
if (net_.empty()) {
USER_LOG_ERROR("Failed to load network");
return -1;
}
return 0;
}
/**
 * Convert raw YOLO network outputs into DJI bounding boxes.
 *
 * Each output row is [cx, cy, w, h, objectness, class scores...], with the
 * geometry normalized to [0, 1]. Candidates whose best class score exceeds
 * 0.5 are kept, de-duplicated with NMS (score 0.5 / IoU 0.4), drawn onto
 * @p frame for debugging, and appended to @p bounding_boxes in the SDK's
 * 1/10000-of-frame coordinate convention.
 */
void ImageProcessorYolovFastest::post_process(cv::Mat& frame, const std::vector<cv::Mat>& outs, std::vector<T_DjiLiveViewBoundingBox>& bounding_boxes) {
    std::vector<int> class_ids;
    std::vector<float> confidences;
    std::vector<cv::Rect> boxes;
    for (size_t i = 0; i < outs.size(); ++i) {
        float* data = (float*)outs[i].data;
        for (int j = 0; j < outs[i].rows; ++j, data += outs[i].cols) {
            // Columns 5.. hold the per-class scores; keep the best one.
            cv::Mat scores = outs[i].row(j).colRange(5, outs[i].cols);
            cv::Point classid_point;
            double confidence;
            cv::minMaxLoc(scores, 0, &confidence, 0, &classid_point);
            if (confidence > 0.5) {
                int cx = (int)(data[0] * frame.cols);
                int cy = (int)(data[1] * frame.rows);
                int w = (int)(data[2] * frame.cols);
                int h = (int)(data[3] * frame.rows);
                int left = cx - (w >> 1);
                int top = cy - (h >> 1);
                class_ids.push_back(classid_point.x);
                confidences.push_back((float)confidence);
                boxes.push_back(cv::Rect(left, top, w, h));
            }
        }
    }
    std::vector<int> indices;
    cv::dnn::NMSBoxes(boxes, confidences, 0.5, 0.4, indices);
    for (size_t i = 0; i < indices.size(); ++i) {
        int idx = indices[i];
        cv::Rect box = boxes[idx];

        // Clip to the frame before normalizing: boxes near the border can have
        // negative or overshooting pixel coordinates, which would wrap around
        // when cast to uint16_t below.
        int x0 = box.x > 0 ? box.x : 0;
        int y0 = box.y > 0 ? box.y : 0;
        int x1 = (box.x + box.width  < frame.cols) ? box.x + box.width  : frame.cols;
        int y1 = (box.y + box.height < frame.rows) ? box.y + box.height : frame.rows;
        if (x1 <= x0 || y1 <= y0) {
            continue;  // detection lies entirely outside the visible frame
        }

        T_DjiLiveViewBoundingBox bounding_box;
        bounding_box.id = i;
        bounding_box.type = class_ids[idx];
        bounding_box.state = 1;
        // 1/10000-of-frame units, computed from the clipped box.
        bounding_box.box.cx = (uint16_t)((x0 + x1) / 2 * 10000 / frame.cols);
        bounding_box.box.cy = (uint16_t)((y0 + y1) / 2 * 10000 / frame.rows);
        bounding_box.box.w = (uint16_t)((x1 - x0) * 10000 / frame.cols);
        bounding_box.box.h = (uint16_t)((y1 - y0) * 10000 / frame.rows);
        bounding_box.box.distance = 0;
        bounding_boxes.push_back(bounding_box);

        std::cout << "Bounding Box " << i << ": "
                  << "Class ID = " << class_ids[idx] << ", "
                  << "Confidence = " << confidences[idx] << ", "
                  << "Box = [" << box.x << ", " << box.y << ", " << box.width << ", " << box.height << "]"
                  << std::endl;
        cv::rectangle(frame, box, cv::Scalar(0, 255, 0), 2);
        std::string label = cv::format("ID: %d Conf: %.2f", class_ids[idx], confidences[idx]);
        cv::putText(frame, label, cv::Point(box.x, box.y - 10), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 255, 0), 2);
    }
}
/**
 * Run detection on @p image and append one T_DjiLiveViewBoundingBox per
 * detection to @p bounding_boxes (annotating the frame as a side effect).
 */
void ImageProcessorYolovFastest::Process(const std::shared_ptr<Image>& image, std::vector<T_DjiLiveViewBoundingBox>& bounding_boxes) {
    cv::Mat frame = *image;

    // Single forward pass at 320x320, RGB-swapped, scaled to [0, 1].
    cv::Mat blob;
    cv::dnn::blobFromImage(frame, blob, 1 / 255.0, cv::Size(320, 320), cv::Scalar(0, 0, 0), true, false);
    net_.setInput(blob);
    std::vector<cv::Mat> outs;
    net_.forward(outs, net_.getUnconnectedOutLayersNames());

    post_process(frame, outs, bounding_boxes);
}
#endif

View File

@ -0,0 +1,57 @@
/**
********************************************************************
*
* @copyright (c) 2023 DJI. All rights reserved.
*
* All information contained herein is, and remains, the property of DJI.
* The intellectual and technical concepts contained herein are proprietary
* to DJI and may be covered by U.S. and foreign patents, patents in process,
* and protected by trade secret or copyright law. Dissemination of this
* information, including but not limited to data and other proprietary
* material(s) incorporated within the information, in any form, is strictly
* prohibited without the express written consent of DJI.
*
* If you receive this source code without DJIs authorization, you may not
* further disseminate the information, and you must immediately remove the
* source code and notify DJI of its removal. DJI reserves the right to pursue
* legal actions against you for any loss(es) or damage(s) caused by your
* failure to do so.
*
*********************************************************************
*/
// Guard renamed from __IMAGE_PROCESSOR_DIAPLAY_H__: identifiers containing a
// leading double underscore are reserved for the implementation (and the old
// name carried a "DIAPLAY" typo).
#ifndef IMAGE_PROCESSOR_YOLOVFASTEST_HPP
#define IMAGE_PROCESSOR_YOLOVFASTEST_HPP
#ifdef OPEN_CV_INSTALLED
#include <memory>
#include "opencv2/opencv.hpp"
#include <dji_liveview.h>
/**
 * Thin wrapper around an OpenCV-DNN Yolo-Fastest network that converts
 * detections into DJI liveview bounding boxes.
 */
class ImageProcessorYolovFastest {
public:
    explicit ImageProcessorYolovFastest(const std::string& name) : show_name_(name) {}
    ~ImageProcessorYolovFastest() {}
    // Loads the .cfg/.weights shipped next to the sources.
    // Returns 0 on success, -1 on failure.
    int32_t Init();
    using Image = cv::Mat;
    // Runs detection on image and appends one bounding box per detection.
    void Process(const std::shared_ptr<Image>& image, std::vector<T_DjiLiveViewBoundingBox>& bounding_boxes);
    // NOTE(review): declared but never defined in this sample — any caller
    // would fail at link time. Kept only for source compatibility.
    std::vector<T_DjiLiveViewBoundingBox> Process(const std::shared_ptr<Image>& image);
private:
    std::string show_name_;  // display name; currently informational only
    enum {
        kFilePathSizeMax = 256,
        kCurrentFilePathSizeMax = 128,
    };
    cv::dnn::Net net_;
    char cur_file_dir_path_[kCurrentFilePathSizeMax];   // directory of this source file
    char prototxt_file_dir_path_[kFilePathSizeMax];     // resolved .cfg path
    char weights_file_dir_path_[kFilePathSizeMax];      // resolved .weights path
    // Decodes raw network outputs into bounding_boxes and annotates frame.
    void post_process(cv::Mat& frame, const std::vector<cv::Mat>& outs, std::vector<T_DjiLiveViewBoundingBox>& bounding_boxes);
};
#endif  // OPEN_CV_INSTALLED
#endif  // IMAGE_PROCESSOR_YOLOVFASTEST_HPP

View File

View File

@ -0,0 +1,272 @@
/**
********************************************************************
* @file test_lidar_entry.cpp
* @brief
*
* @copyright (c) 2021 DJI. All rights reserved.
*
* All information contained herein is, and remains, the property of DJI.
* The intellectual and technical concepts contained herein are proprietary
* to DJI and may be covered by U.S. and foreign patents, patents in process,
* and protected by trade secret or copyright law. Dissemination of this
* information, including but not limited to data and other proprietary
* material(s) incorporated within the information, in any form, is strictly
* prohibited without the express written consent of DJI.
*
* If you receive this source code without DJIs authorization, you may not
* further disseminate the information, and you must immediately remove the
* source code and notify DJI of its removal. DJI reserves the right to pursue
* legal actions against you for any loss(es) or damage(s) caused by your
* failure to do so.
*
*********************************************************************
*/
/* Includes ------------------------------------------------------------------*/
#include "test_lidar_entry.hpp"
#include <dirent.h>
#include "dji_logger.h"
#include <iostream>
#include <fstream>
#include <string>
#include <ctime>
#include <mutex>
#include <fcntl.h>
#include <unistd.h>
#include <mutex>
#include <thread>
#include <iomanip>
#include <sstream>
#include <sys/stat.h>
#include <sys/types.h>
#include <queue>
/* Private constants ---------------------------------------------------------*/
#define PCD_FILE_DEFAULT_LENGTH (512)       // max size of the PCD ASCII header
#define FRAME_BUFFER_LENGTH (1024 * 1024)
#define SUBSCRIBE_DATA_TIME_MS (1000 * 10)
#define USER_PERCEPTION_LIRDAR_TASK_STACK_SIZE (2042)  // writer-task stack size, bytes ("LIRDAR" sic)
#define PCD_FILE_PATH "./DJI_cloud_data"    // output directory for .pcd files
/* Private types -------------------------------------------------------------*/
/* Private values -------------------------------------------------------------*/
static int lastFrameCnt = 0;
// Producer/consumer hand-off between the lidar reception callback (producer,
// mallocs and pushes frames) and the background writer task (consumer).
static std::queue<T_DjiLidarFrame *> lidarFrameQueue;
static T_DjiMutexHandle queueMutex;      // guards lidarFrameQueue / stopProcessing
static T_DjiSemaHandle dataSemaphore;    // posted once per queued frame, and once at shutdown
static bool stopProcessing = false;      // set by the sample to ask the writer task to exit
static T_DjiSemaHandle taskExitSema;     // waited on by the sample during shutdown;
                                         // presumably posted by the writer task on exit — its body is defined elsewhere
/* Private functions declaration ---------------------------------------------*/
static void DjiTest_PerceptionLidarCallback(uint8_t *recvBuffer, uint32_t bufferLen);
static std::string DjiTest_getCurrentTimestamp();
static void DjiTest_WriteLidarFrameToBinaryPcdFile(const T_DjiLidarFrame *frame);
static void* DjiTest_ProcessLidarDataTask(void* arg);
/* Exported functions definition ---------------------------------------------*/
/**
 * Subscribe lidar point-cloud data for 10 seconds, writing each received
 * frame as a binary PCD file under ./DJI_cloud_data.
 *
 * A dedicated worker task drains the frame queue so the reception callback
 * stays fast; hand-off uses queueMutex + dataSemaphore, and shutdown is
 * coordinated via stopProcessing + taskExitSema.
 */
void DjiUser_RunLidarDataSubscriptionSample(void) {
    int subscriptionDuration = 10;  // seconds
    T_DjiReturnCode returnCode;
    T_DjiTaskHandle processingThread;
    T_DjiOsalHandler *osalHandler = DjiPlatform_GetOsalHandler();

    lastFrameCnt = 0;
    stopProcessing = false;  // reset so the sample can be run more than once

    osalHandler->MutexCreate(&queueMutex);
    osalHandler->SemaphoreCreate(0, &dataSemaphore);
    osalHandler->SemaphoreCreate(0, &taskExitSema);

    std::cout << "Please ensure that there is enough storage space for the PCD files." << std::endl;

    osalHandler->TaskCreate("LidarProcessingThread", DjiTest_ProcessLidarDataTask,
                            USER_PERCEPTION_LIRDAR_TASK_STACK_SIZE, nullptr, &processingThread);

    returnCode = DjiPerception_Init();
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        std::cout << "DjiPerception Init failed" << std::endl;
        // Previously this returned immediately, leaking the worker task, the
        // mutex and both semaphores; fall through to the common cleanup instead.
        goto cleanup;
    }

    std::cout << "start subscribe Lidar data from aircraft" << std::endl;
    returnCode = DjiPerception_SubscribeLidarData(DjiTest_PerceptionLidarCallback);
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        std::cout << "Request to subscribe Lidar data failed" << std::endl;
        goto subscribeFailed;
    }

    // Collect frames for the subscription window.
    osalHandler->TaskSleepMs(subscriptionDuration * 1000);
    std::cout << "unsubscribe Lidar data " << std::endl;

subscribeFailed:
    returnCode = DjiPerception_UnsubscribeLidarData();
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        std::cout << "Request to unsubscribe Lidar data failed" << std::endl;
    }

    returnCode = DjiPerception_Deinit();
    if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        std::cout << "DjiPerception DeInit failed" << std::endl;
    } else {
        std::cout << "unsubscribe Lidar data success" << std::endl;
    }

cleanup:
    // Ask the worker to drain the queue and exit, then release OSAL resources.
    osalHandler->MutexLock(queueMutex);
    stopProcessing = true;
    osalHandler->MutexUnlock(queueMutex);
    osalHandler->SemaphorePost(dataSemaphore);   // wake the worker one last time
    osalHandler->SemaphoreWait(taskExitSema);    // wait for it to finish writing
    osalHandler->TaskDestroy(processingThread);
    osalHandler->MutexDestroy(queueMutex);
    osalHandler->SemaphoreDestroy(dataSemaphore);
    osalHandler->SemaphoreDestroy(taskExitSema);
}
/* Private functions definition-----------------------------------------------*/
/**
 * @brief Receive callback for lidar frames: validate, copy, and queue the
 * frame for the processing task. Runs in the perception receive context, so
 * no file I/O happens here.
 * @param LidarFrame Raw frame bytes from the aircraft.
 * @param bufferLen  Length of @p LidarFrame; must match sizeof(T_DjiLidarFrame).
 */
static void DjiTest_PerceptionLidarCallback(uint8_t *LidarFrame, uint32_t bufferLen) {
    if (bufferLen != sizeof(T_DjiLidarFrame)) {
        std::cout << "usb recv Lidar length wrong, length = " << bufferLen << std::endl;
        return;
    }
    T_DjiOsalHandler *osalHandler = DjiPlatform_GetOsalHandler();
    T_DjiLidarFrame *curFrame = (T_DjiLidarFrame *)osalHandler->Malloc(bufferLen);
    // Fix: the Malloc result was previously passed to memcpy unchecked.
    if (curFrame == nullptr) {
        std::cout << "alloc lidar frame buffer failed, length = " << bufferLen << std::endl;
        return;
    }
    memcpy(curFrame, LidarFrame, bufferLen);
    // Queue ownership of the copy; the processing task frees it.
    osalHandler->MutexLock(queueMutex);
    lidarFrameQueue.push(curFrame);
    osalHandler->MutexUnlock(queueMutex);
    osalHandler->SemaphorePost(dataSemaphore);
}
/**
 * @brief Build a "YYYYMMDD_HHMMSSmmm" stamp from the current local wall-clock
 * time (milliseconds zero-padded to three digits).
 * @return 18-character timestamp string used to name PCD output files.
 */
std::string DjiTest_getCurrentTimestamp() {
    using namespace std::chrono;
    const auto now = system_clock::now();
    const std::time_t seconds = system_clock::to_time_t(now);
    const auto fractionMs = duration_cast<milliseconds>(now.time_since_epoch()) % 1000;
    std::tm localTime = *std::localtime(&seconds);
    std::ostringstream stream;
    stream << std::put_time(&localTime, "%Y%m%d_%H%M%S")
           << std::setfill('0') << std::setw(3) << fractionMs.count();
    return stream.str();
}
/**
 * @brief Serialize one lidar frame into a timestamped binary PCD v0.7 file.
 * @param frame Decoded lidar frame; every packet it carries is written.
 *
 * Output path: PCD_FILE_PATH/DJI_cloud_data_<timestamp>.pcd. Per-point layout
 * matches the PCD header: x, y, z as float32 plus intensity, label as uint8.
 */
static void DjiTest_WriteLidarFrameToBinaryPcdFile(const T_DjiLidarFrame *frame) {
    uint32_t totalPoints = 0;
    size_t headerLen = 0;
    size_t pointDataSize = 0;
    size_t bufferSize = 0;
    size_t bufferPos = 0;
    size_t written = 0;
    char *buffer = NULL;
    int fd = 0;
    std::string directory = PCD_FILE_PATH;
    std::string filename = directory + "/DJI_cloud_data_" + DjiTest_getCurrentTimestamp() + ".pcd";
    char header[PCD_FILE_DEFAULT_LENGTH];

    if (mkdir(directory.c_str(), 0755) != 0 && errno != EEXIST) {
        fprintf(stderr, "Error creating directory: %s\n", strerror(errno));
        return;
    }

    // The PCD header needs the total point count across every packet.
    for (uint16_t i = 0; i < frame->pkgNum; ++i) {
        totalPoints += frame->pkgs[i].header.dotNum;
    }
    snprintf(header, sizeof(header),
             "# .PCD v0.7 - Point Cloud Data file format\n"
             "VERSION 0.7\n"
             "FIELDS x y z intensity label\n"
             "SIZE 4 4 4 1 1\n"
             "TYPE F F F U U\n"
             "COUNT 1 1 1 1 1\n"
             "WIDTH %u\n"
             "HEIGHT 1\n"
             "VIEWPOINT 0 0 0 1 0 0 0\n"
             "POINTS %u\n"
             "DATA binary\n",
             totalPoints, totalPoints);

    // One contiguous buffer: ASCII header followed by packed binary points.
    headerLen = strlen(header);
    pointDataSize = totalPoints * (sizeof(float) * 3 + sizeof(uint8_t) * 2);
    bufferSize = headerLen + pointDataSize;
    buffer = (char *)malloc(bufferSize);
    if (buffer == NULL) {
        fprintf(stderr, "Error allocating memory for buffer\n");
        return;
    }
    memcpy(buffer + bufferPos, header, headerLen);
    bufferPos += headerLen;
    for (uint16_t i = 0; i < frame->pkgNum; ++i) {
        const T_DjiPerceptionLidarDecodePkg *pkg = &frame->pkgs[i];
        for (uint16_t j = 0; j < pkg->header.dotNum; ++j) {
            const T_DJIPerceptionLidarPoint *point = &pkg->points[j];
            memcpy(buffer + bufferPos, &point->x, sizeof(float));
            bufferPos += sizeof(float);
            memcpy(buffer + bufferPos, &point->y, sizeof(float));
            bufferPos += sizeof(float);
            memcpy(buffer + bufferPos, &point->z, sizeof(float));
            bufferPos += sizeof(float);
            memcpy(buffer + bufferPos, &point->intensity, sizeof(uint8_t));
            bufferPos += sizeof(uint8_t);
            memcpy(buffer + bufferPos, &point->label, sizeof(uint8_t));
            bufferPos += sizeof(uint8_t);
        }
    }
    fd = open(filename.c_str(), O_WRONLY | O_APPEND | O_CREAT, 0644);
    if (fd == -1) {
        fprintf(stderr, "Error opening file for writing\n");
        free(buffer);
        return;
    }
    // Fix: write(2) may return a short count without failing; loop until the
    // whole buffer is flushed instead of only checking for -1.
    while (written < bufferSize) {
        ssize_t ret = write(fd, buffer + written, bufferSize - written);
        if (ret == -1) {
            fprintf(stderr, "Error writing buffer to file\n");
            break;
        }
        written += (size_t)ret;
    }
    close(fd);
    free(buffer);
}
/* Worker task: drains lidarFrameQueue, writing each frame to a PCD file.
 * Wakes on dataSemaphore, which is posted once per queued frame by the
 * receive callback and once more at shutdown after stopProcessing is set
 * under queueMutex. Signals taskExitSema when it has fully exited. */
static void* DjiTest_ProcessLidarDataTask(void* arg) {
T_DjiOsalHandler *osalHandler = DjiPlatform_GetOsalHandler();
while(true) {
osalHandler->SemaphoreWait(dataSemaphore);
osalHandler->MutexLock(queueMutex);
// Exit only once a stop was requested AND every queued frame is flushed.
bool shouldStop = stopProcessing && lidarFrameQueue.empty();
if(shouldStop) {
osalHandler->MutexUnlock(queueMutex);
break;
}
T_DjiLidarFrame *lidarFrame = lidarFrameQueue.front();
lidarFrameQueue.pop();
// Release the lock before the (slow) file write so the receive callback
// can keep queueing frames concurrently.
osalHandler->MutexUnlock(queueMutex);
DjiTest_WriteLidarFrameToBinaryPcdFile(lidarFrame);
int curFrameCnt = lidarFrame->frameCnt;
std::cout << "Lidar data : curFrameCnt=" << curFrameCnt << std::endl;
// A gap in the frame counter indicates frames lost in transit.
// NOTE(review): no wraparound handling for frameCnt — confirm counter width.
if(lastFrameCnt != 0 && (curFrameCnt - lastFrameCnt) > 1) {
std::cout << "The number of lost packets during transmission is: " << curFrameCnt - lastFrameCnt - 1 << std::endl;
}
lastFrameCnt = curFrameCnt;
// The frame copy was allocated by the receive callback; free it here.
osalHandler->Free(lidarFrame);
}
// Tell the sample entry point that this task is done so it can destroy it.
osalHandler->SemaphorePost(taskExitSema);
return nullptr;
}
/****************** (C) COPYRIGHT DJI Innovations *****END OF FILE****/

View File

@ -0,0 +1,49 @@
/**
********************************************************************
* @file test_lidar_entry.hpp
* @brief This is the header file for "test_lidar_entry.cpp", defining the structure and
* (exported) function prototypes.
*
* @copyright (c) 2021 DJI. All rights reserved.
*
* All information contained herein is, and remains, the property of DJI.
* The intellectual and technical concepts contained herein are proprietary
* to DJI and may be covered by U.S. and foreign patents, patents in process,
* and protected by trade secret or copyright law. Dissemination of this
* information, including but not limited to data and other proprietary
* material(s) incorporated within the information, in any form, is strictly
* prohibited without the express written consent of DJI.
*
* If you receive this source code without DJIs authorization, you may not
* further disseminate the information, and you must immediately remove the
* source code and notify DJI of its removal. DJI reserves the right to pursue
* legal actions against you for any loss(es) or damage(s) caused by your
* failure to do so.
*
*********************************************************************
*/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef TEST_LIDAR_ENTRY_H
#define TEST_LIDAR_ENTRY_H
/* Includes ------------------------------------------------------------------*/
#include "dji_perception.h"
#ifdef __cplusplus
extern "C" {
#endif
/* Exported constants --------------------------------------------------------*/
/* Exported types ------------------------------------------------------------*/
/* Exported functions --------------------------------------------------------*/
/* Run the lidar sample: subscribes to lidar frames for a fixed duration and
 * writes each received frame to a binary PCD file. Blocks until finished. */
void DjiUser_RunLidarDataSubscriptionSample(void);
#ifdef __cplusplus
}
#endif
#endif // TEST_LIDAR_ENTRY_H
/************************ (C) COPYRIGHT DJI Innovations *******END OF FILE******/

View File

@ -0,0 +1,190 @@
/**
********************************************************************
* @file test_radar_entry.cpp
* @brief
*
* @copyright (c) 2021 DJI. All rights reserved.
*
* All information contained herein is, and remains, the property of DJI.
* The intellectual and technical concepts contained herein are proprietary
* to DJI and may be covered by U.S. and foreign patents, patents in process,
* and protected by trade secret or copyright law. Dissemination of this
* information, including but not limited to data and other proprietary
* material(s) incorporated within the information, in any form, is strictly
* prohibited without the express written consent of DJI.
*
* If you receive this source code without DJIs authorization, you may not
* further disseminate the information, and you must immediately remove the
* source code and notify DJI of its removal. DJI reserves the right to pursue
* legal actions against you for any loss(es) or damage(s) caused by your
* failure to do so.
*
*********************************************************************
*/
/* Includes ------------------------------------------------------------------*/
#include "test_radar_entry.hpp"
#include "dji_logger.h"
#include <iostream>
#include <ctime>
#include <chrono>
/* Private constants ---------------------------------------------------------*/
/* Private types -------------------------------------------------------------*/
/* Private values -------------------------------------------------------------*/
/* Private functions declaration ---------------------------------------------*/
static void DjiTest_PerceptionRadarCallback(E_DjiPerceptionRadarPosition radarPosition,
uint8_t *radarDataBuffer, uint32_t bufferLen);
static float parseVelocity(uint16_t velocity);
static float parseBeamAngle(uint16_t beamAngle);
/* Exported functions definition ---------------------------------------------*/
/* Interactive radar sample: loops a text menu (via the inputAgain label),
 * subscribes to the chosen radar position for subscriptionDuration seconds,
 * unsubscribes, and repeats until the user enters 'q'. */
void DjiUser_RunRadarDataSubscriptionSample(void) {
// Seconds to stay subscribed per menu selection.
int subscriptionDuration = 10;
T_DjiReturnCode returnCode;
char inputChar;
T_DjiOsalHandler *osalHandler = DjiPlatform_GetOsalHandler();
// Sentinel until the user picks a valid position from the menu.
E_DjiPerceptionRadarPosition curPosition = MAX_RADAR_NUM;
returnCode = DjiPerception_Init();
if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
USER_LOG_ERROR("DjiPerception Init failed");
return;
}
// Menu loop entry point; re-entered after each subscription cycle or on
// an unrecognized command.
inputAgain:
std::cout
<< "| Available commands: |"
<<
std::endl;
std::cout
<< "| [d] Subscribe to downward millimeter wave radar data. |"
<<
std::endl;
std::cout
<< "| [u] Subscribe to upward millimeter wave radar data. |"
<<
std::endl;
std::cout
<< "| [f] Subscribe to forward millimeter wave radar data. |"
<<
std::endl;
std::cout
<< "| [b] Subscribe to backward millimeter wave radar data. |"
<<
std::endl;
std::cout
<< "| [l] Subscribe to leftward millimeter wave radar data. |"
<<
std::endl;
std::cout
<< "| [r] Subscribe to rightward millimeter wave radar data. |"
<<
std::endl;
std::cout
<< "| [q] quit |"
<<
std::endl;
std::cin >> inputChar;
switch (inputChar) {
case 'd':
USER_LOG_INFO("Subscribe to downward millimeter wave radar data");
curPosition = RADAR_POSITION_DOWN;
break;
case 'u':
USER_LOG_INFO("Subscribe to upward millimeter wave radar data.");
curPosition = RADAR_POSITION_UP;
break;
case 'f':
USER_LOG_INFO("Subscribe to forward millimeter wave radar data.");
curPosition = RADAR_POSITION_FRONT;
break;
case 'b':
USER_LOG_INFO("Subscribe to backward millimeter wave radar data.");
curPosition = RADAR_POSITION_BACK;
break;
case 'l':
USER_LOG_INFO("Subscribe to leftward millimeter wave radar data.");
curPosition = RADAR_POSITION_LEFT;
break;
case 'r':
USER_LOG_INFO("Subscribe to rightward millimeter wave radar data.");
curPosition = RADAR_POSITION_RIGHT;
break;
case 'q':
goto endOfSample;
default:
goto inputAgain;
}
returnCode = DjiPerception_SubscribeRadarData(curPosition, DjiTest_PerceptionRadarCallback);
if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
USER_LOG_ERROR("Request to subscribe radar data failed");
goto subscribeFailed;
}
// Data flows into DjiTest_PerceptionRadarCallback during this sleep.
osalHandler->TaskSleepMs(subscriptionDuration * 1000);
// Unsubscribe is attempted even when the subscribe request failed
// (best-effort cleanup).
subscribeFailed:
returnCode = DjiPerception_UnsubscribeRadarData(curPosition);
if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
USER_LOG_ERROR("Request to unsubscribe Radar data failed");
}
goto inputAgain;
endOfSample:
returnCode = DjiPerception_Deinit();
if (returnCode != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
USER_LOG_ERROR("DjiPerception DeInit failed");
return;
}
}
/* Private functions definition-----------------------------------------------*/
/**
 * @brief Receive callback for radar frames: log the frame header, then decode
 * and print every cloud unit the frame carries.
 * @param radarPosition   Radar that produced the frame.
 * @param radarDataBuffer Raw frame bytes; interpreted as T_DjiRadarDataFrame.
 * @param bufferLen       Length of @p radarDataBuffer in bytes.
 */
static void DjiTest_PerceptionRadarCallback(E_DjiPerceptionRadarPosition radarPosition,
                                            uint8_t *radarDataBuffer, uint32_t bufferLen) {
    if (radarDataBuffer == nullptr || bufferLen == 0) {
        USER_LOG_ERROR("Invalid radar data: buffer=%p len=%u", radarDataBuffer, bufferLen);
        return;
    }
    const T_DjiRadarDataFrame *frame = (const T_DjiRadarDataFrame *)radarDataBuffer;
    USER_LOG_INFO("RadarData[pos:%d][len:%u][units:%u][pack:%u/%u]",
                  radarPosition,
                  bufferLen,
                  static_cast<unsigned>(frame->headInfo.dataLen),
                  static_cast<unsigned>(frame->headInfo.curPack),
                  static_cast<unsigned>(frame->headInfo.packNum));
    for (uint32_t idx = 0; idx < frame->headInfo.dataLen; ++idx) {
        const T_DjiRadarCloudUnit *unit = &frame->data[idx];
        // NOTE(review): raw angles look like milliradians with a 2*pi offset
        // (approximated as 2*3.14 here) — confirm against the radar protocol.
        float azimuth = unit->azimuth / 1000.0f - 2*3.14;
        float elevation = unit->elevation / 1000.0f - 2*3.14;
        float radius = unit->radius / 100.0f;
        float energy = unit->ene / 100.0f;
        float velocity = parseVelocity(unit->base_info.velocity);
        uint8_t snr = unit->base_info.snr;
        float beamAngle = parseBeamAngle(unit->base_info.beamAngle);
        USER_LOG_INFO(
            "[Unit%d] Azimuth=%.3f(rad), Elevation=%.3f(rad), Radius=%.2f(m), Energy=%.2f, "
            "Velocity=%.2f(m/s), SNR=%u(dB), BeamAngle=%.2f(deg)",
            idx, azimuth, elevation, radius, energy, velocity, snr, beamAngle);
    }
}
/// Decode a raw radar velocity: offset-encoded around 32767 in 0.01 m/s steps.
static float parseVelocity(uint16_t velocity) {
    const int centimetersPerSecond = static_cast<int>(velocity) - 32767;
    return centimetersPerSecond / 100.0f;
}
/// Decode a raw beam angle given in tenths of a degree; raw values above 450
/// are reported relative to -90 degrees.
static float parseBeamAngle(uint16_t beamAngle) {
    const float degrees = beamAngle / 10.0f;
    return (beamAngle <= 450) ? degrees : degrees - 90.0f;
}
/****************** (C) COPYRIGHT DJI Innovations *****END OF FILE****/

View File

@ -0,0 +1,49 @@
/**
********************************************************************
* @file test_radar_entry.hpp
* @brief This is the header file for "test_radar_entry.cpp", defining the structure and
* (exported) function prototypes.
*
* @copyright (c) 2021 DJI. All rights reserved.
*
* All information contained herein is, and remains, the property of DJI.
* The intellectual and technical concepts contained herein are proprietary
* to DJI and may be covered by U.S. and foreign patents, patents in process,
* and protected by trade secret or copyright law. Dissemination of this
* information, including but not limited to data and other proprietary
* material(s) incorporated within the information, in any form, is strictly
* prohibited without the express written consent of DJI.
*
* If you receive this source code without DJIs authorization, you may not
* further disseminate the information, and you must immediately remove the
* source code and notify DJI of its removal. DJI reserves the right to pursue
* legal actions against you for any loss(es) or damage(s) caused by your
* failure to do so.
*
*********************************************************************
*/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef TEST_RADAR_ENTRY_H
#define TEST_RADAR_ENTRY_H
/* Includes ------------------------------------------------------------------*/
#include "dji_perception.h"
#ifdef __cplusplus
extern "C" {
#endif
/* Exported constants --------------------------------------------------------*/
/* Exported types ------------------------------------------------------------*/
/* Exported functions --------------------------------------------------------*/
/* Run the radar sample: interactive menu to subscribe to one radar position
 * at a time and log the decoded cloud units. Blocks until the user quits. */
void DjiUser_RunRadarDataSubscriptionSample(void);
#ifdef __cplusplus
}
#endif
#endif // TEST_RADAR_ENTRY_H
/************************ (C) COPYRIGHT DJI Innovations *******END OF FILE******/

View File

@ -0,0 +1,461 @@
/**
********************************************************************
* @file test_widget_manager.cpp
* @brief
*
* @copyright (c) 2021 DJI. All rights reserved.
*
* All information contained herein is, and remains, the property of DJI.
* The intellectual and technical concepts contained herein are proprietary
* to DJI and may be covered by U.S. and foreign patents, patents in process,
* and protected by trade secret or copyright law. Dissemination of this
* information, including but not limited to data and other proprietary
* material(s) incorporated within the information, in any form, is strictly
* prohibited without the express written consent of DJI.
*
* If you receive this source code without DJIs authorization, you may not
* further disseminate the information, and you must immediately remove the
* source code and notify DJI of its removal. DJI reserves the right to pursue
* legal actions against you for any loss(es) or damage(s) caused by your
* failure to do so.
*
*********************************************************************
*/
/* Includes ------------------------------------------------------------------*/
#include <iostream>
#include "test_widget_manager.hpp"
#include "dji_widget_manager.h"
#include <dji_logger.h>
#include <dji_platform.h>
#include "unistd.h"
#include "utils/util_misc.h"
#include <fcntl.h>
#include <sys/stat.h>
#include <unistd.h>
/* Private constants ---------------------------------------------------------*/
/* Private types -------------------------------------------------------------*/
/* Private values -------------------------------------------------------------*/
T_DjiWidgetManagerFileList s_fileList;
static FILE* s_widgetFileFd;
/* Private functions declaration ---------------------------------------------*/
static void DjiTestWidgetManager_RecvWidgetStatesCallback(E_DjiMountPosition position, T_DjiWidgetStates *statesData, uint8_t widgetNum);
static void DjiTestWidgetManager_RecvSpeakerStatesCallback(E_DjiMountPosition position, T_DjiSpeakerWidgetStates *speakerStates);
static void DjiTestWidgetManager_RunSeachLightManagerSample(E_DjiMountPosition position);
static void DjiTestWidgetManager_RunSpeakerManagerSample(E_DjiMountPosition position);
static T_DjiReturnCode DjiWidgetManager_UsrDownloadCallback(T_DjiDownloadWidgetFileInfo packetInfo,
const uint8_t *data,
uint16_t dataLen);
/* Exported functions definition ---------------------------------------------*/
/**
 * @brief Download the widget file list for @p position, then fetch every file
 * in it. File contents arrive through DjiWidgetManager_UsrDownloadCallback.
 * @return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS or the first failing status.
 */
static T_DjiReturnCode DjiTestWidgetManager_DownloadAllWidgetFiles(E_DjiMountPosition position)
{
    T_DjiReturnCode djiStat;

    djiStat = DjiWidgetManager_WidgetDownloadFileList(position, &s_fileList);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
        USER_LOG_ERROR("Dji test download payload widget file list on position:%d error, stat = 0x%08llX", position, djiStat);
        return djiStat;
    }
    for (int i = 0; i < s_fileList.totalCount; i++) {
        djiStat = DjiWidgetManager_DownloadFileByIndex(position, i);
        if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
            USER_LOG_ERROR("Dji test download payload widget by index on position:%d error, stat = 0x%08llX", position, djiStat);
            return djiStat;
        }
    }
    return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}

/**
 * @brief Interactive widget-manager sample: pick a mount position and payload
 * type, download the widget files, then (for third-party payloads) loop
 * setting widget states until the user declines to continue.
 * @return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS, a DJI status, or -1 on
 *         invalid user input.
 */
T_DjiReturnCode DjiTest_WidgetMannagerStart(void)
{
    T_DjiReturnCode djiStat;
    E_DjiMountPosition position;
    T_DjiWidgetStates state;
    int widget_type_input;
    int inputPosition = 0;
    int inputIndex = 0;
    int continueFlag = 1;  // numeric, matching the "enter 1" prompt (was bool)
    int payloadType;

    std::cout << "Please select the position where the payload needs to be managed." << std::endl;
    std::cin >> inputPosition;
    std::cout << "Please select load type (1: third party payload; 2: DJI searchlight; 3: DJI speaker) ";
    std::cin >> payloadType;
    if (inputPosition < 1 || inputPosition > 8) {
        std::cerr << "Invalid input for position. Must be between 1 and 8." << std::endl;
        return -1;
    }
    if (payloadType < 1 || payloadType > 3) {
        std::cerr << "Invalid input for payload type. Must be between 1 and 3." << std::endl;
        return -1;
    }
    position = static_cast<E_DjiMountPosition>(inputPosition);

    djiStat = DjiWidgetManager_Init();
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test widget manager init error, stat = 0x%08llX", djiStat);
        return djiStat;
    }
    djiStat = DjiWidgetManager_RegDownloadFileDataCallback(position, DjiWidgetManager_UsrDownloadCallback);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test regist download callback on position:%d error, stat = 0x%08llX", position, djiStat);
        goto finish_management;
    }
    switch (payloadType) {
        case 1:
            // Third-party payload: fetch widget files, then watch its states.
            djiStat = DjiTestWidgetManager_DownloadAllWidgetFiles(position);
            if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) goto finish_management;
            djiStat = DjiWidgetManager_SubscribePayloadWidgetStates(position, DjiTestWidgetManager_RecvWidgetStatesCallback);
            if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
            {
                USER_LOG_ERROR("Dji test subscribe payload widget state on position:%d error, stat = 0x%08llX", position, djiStat);
                goto finish_management;
            }
            break;
        case 2:
            djiStat = DjiTestWidgetManager_DownloadAllWidgetFiles(position);
            if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) goto finish_management;
            DjiTestWidgetManager_RunSeachLightManagerSample(position);
            goto finish_management;
        case 3:
            DjiTestWidgetManager_RunSpeakerManagerSample(position);
            goto finish_management;
    }
set_value:
    std::cout << "If you need to change the widget state, enter 1 " << std::endl;
    std::cin >> continueFlag;
    if (continueFlag != 1) goto finish_management;
    std::cout << "Please enter the index and type to be set and the value to be set to :" << std::endl;
    std::cin >> inputIndex >> widget_type_input >> state.widgetValue;
    if (widget_type_input >= 1 && widget_type_input <= 5)
    {
        state.widgetType = static_cast<E_DjiWidgetType>(widget_type_input);
        state.widgetIndex = inputIndex;
    }
    else
    {
        std::cout << "Invalid widget type input." << std::endl;
        goto finish_management;
    }
    djiStat = DjiWidgetManager_SetWidgetState(position, state);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        // Fix: format string was missing the %d for `position` (two varargs,
        // one conversion specifier).
        USER_LOG_ERROR("Dji test set payload widget state on position:%d error, stat = 0x%08llX", position, djiStat);
        goto finish_management;
    }
    goto set_value;
finish_management:
    djiStat = DjiWidgetManager_DeInit();
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test deinit widget manager error, stat = 0x%08llX", djiStat);
        return djiStat;
    }
    // The file list buffer is allocated by the SDK during download; release it.
    if (s_fileList.fileListInfo != NULL) {
        free(s_fileList.fileListInfo);
        s_fileList.fileListInfo = NULL;
    }
    return djiStat;
}
/* Private functions definition-----------------------------------------------*/
/**
 * @brief Subscription callback: print every reported widget state in green,
 * then reset the terminal color.
 * @param position   Mount position that reported the states.
 * @param statesData Array of widget states, @p widgetNum entries long.
 * @param widgetNum  Number of entries in @p statesData.
 */
static void DjiTestWidgetManager_RecvWidgetStatesCallback(E_DjiMountPosition position, T_DjiWidgetStates *statesData, uint8_t widgetNum)
{
    printf("\033[32mrecv widget state form pos: %u\n", position);
    for (uint8_t idx = 0; idx < widgetNum; ++idx) {
        const T_DjiWidgetStates &entry = statesData[idx];
        printf("widget index: %u type: %u value: %u\n",
               entry.widgetIndex,
               static_cast<unsigned int>(entry.widgetType),
               entry.widgetValue);
    }
    printf("\033[0m");
}
/**
 * @brief Subscription callback: dump the full speaker state report in blue,
 * then reset the terminal color.
 * @param position      Mount position that reported the state.
 * @param speakerStates Current speaker widget state snapshot.
 */
static void DjiTestWidgetManager_RecvSpeakerStatesCallback(E_DjiMountPosition position, T_DjiSpeakerWidgetStates *speakerStates) {
    printf("\033[0;34mrecv speaker state form pos: %u\n", position);
    printf("speaker playMode = %d\n", speakerStates->playMode);
    printf("speaker workMode = %d\n", speakerStates->workMode);
    printf("speaker playVloume = %u\n", speakerStates->playVloume);
    printf("speaker systemStates = %d\n", speakerStates->systemStates);
    printf("speaker playFileUuid = %s\n", speakerStates->playFileUuid);
    printf("speaker playFileName = %s\n", speakerStates->playFileName);
    // Fix: this line previously printed playFileName (a string) with %u under
    // the "playQuality" label. NOTE(review): assumes the struct field is named
    // playQuality — confirm against dji_widget_manager.h.
    printf("speaker playQuality = %u\n", speakerStates->playQuality);
    printf("speaker actualVolume = %u\n", speakerStates->actualVolume);
    printf("speaker limitVolumeOnTheGround = %u\n", speakerStates->limitVolumeOnTheGround);
    printf("\033[0m");
}
/**
 * @brief Scripted searchlight demo: subscribe to its widget states, walk
 * through power-on, burst mode, illumination-mode switching, and power-off
 * (5 s pause between steps), then unsubscribe.
 * @param position Mount position of the DJI searchlight.
 *
 * Note: `static` added to match the forward declaration above, which declares
 * this function with internal linkage.
 */
static void DjiTestWidgetManager_RunSeachLightManagerSample(E_DjiMountPosition position) {
    T_DjiReturnCode djiStat;
    T_DjiWidgetStates state;
    djiStat = DjiWidgetManager_SubscribePayloadWidgetStates(position, DjiTestWidgetManager_RecvWidgetStatesCallback);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test subscribe search light widget state on position:%d error, stat = 0x%08llX", position, djiStat);
        // Deliberately continue: state echo is optional for the demo.
    }
    std::cout<<"step 1: Turn on the searchlight."<<std::endl;
    // state = {widgetType, widgetIndex, widgetValue}
    state = {DJI_WIDGET_TYPE_SWITCH, 3, 1};
    djiStat = DjiWidgetManager_SetWidgetState(position, state);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test turn on search light widget on position :%d error, stat = 0x%08llX", position, djiStat);
        return ;
    }
    sleep(5);
    std::cout<<"step 2: Setting the searchlight to burst mode"<<std::endl;
    state = {DJI_WIDGET_TYPE_SWITCH, 0, 1};
    djiStat = DjiWidgetManager_SetWidgetState(position, state);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test setting the searchlight to burst mode on position:%d error, stat = 0x%08llX", position, djiStat);
        return ;
    }
    sleep(5);
    // Leave burst mode again.
    state = {DJI_WIDGET_TYPE_SWITCH, 0, 0};
    djiStat = DjiWidgetManager_SetWidgetState(position, state);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test setting up searchlight to roll out burst mode on position:%d error, stat = 0x%08llX", position, djiStat);
        return ;
    }
    sleep(5);
    std::cout<<"step3: Switching the searchlight illumination mode"<<std::endl;
    state = {DJI_WIDGET_TYPE_LIST, 0, 0};
    djiStat = DjiWidgetManager_SetWidgetState(position, state);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test switching the searchlight illumination mode on position:%d error, stat = 0x%08llX", position, djiStat);
        return ;
    }
    sleep(5);
    // NOTE(review): switch index 2 values 1 and 2 look like two further
    // illumination settings — confirm against the searchlight widget layout.
    state = {DJI_WIDGET_TYPE_SWITCH, 2, 1};
    djiStat = DjiWidgetManager_SetWidgetState(position, state);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test turn off the searchlight on position:%d error, stat = 0x%08llX", position, djiStat);
        return ;
    }
    sleep(5);
    state = {DJI_WIDGET_TYPE_SWITCH, 2, 2};
    djiStat = DjiWidgetManager_SetWidgetState(position, state);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test turn off the searchlight on position:%d error, stat = 0x%08llX", position, djiStat);
        return ;
    }
    sleep(5);
    std::cout<<"step4: Turn off the searchlight."<<std::endl;
    state = {DJI_WIDGET_TYPE_SWITCH, 3, 0};
    djiStat = DjiWidgetManager_SetWidgetState(position, state);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test turn off the searchlight on position:%d error, stat = 0x%08llX", position, djiStat);
        return ;
    }
    djiStat = DjiWidgetManager_UnsubscribePayloadWidgetStates(position);
    if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        USER_LOG_ERROR("Dji test unsubscribe search light widget state on position:%d error, stat = 0x%08llX", position, djiStat);
        // Best-effort cleanup; nothing more to do on failure.
    }
}
/* Scripted speaker demo: subscribe to speaker states, configure volume and
 * play mode, upload a bundled opus file, play it for 10 s, stop, and
 * unsubscribe. The audio file is expected at <this file's dir>/data/3K-5K.opus.
 * Note: "PALY"/"SetSpeakertState" spellings come from the SDK API. */
static void DjiTestWidgetManager_RunSpeakerManagerSample(E_DjiMountPosition position) {
T_DjiReturnCode djiStat;
T_DjiSpeakerWidgetStatesParam param;
T_DjiSpeakerAudioFileInfo audioFileInfo;
char curFileDirPath[WIDGET_MANAGER_MAX_FILE_PATH_LEN];
char filePath[WIDGET_MANAGER_MAX_FILE_PATH_LEN];
djiStat = DjiUserUtil_GetCurrentFileDirPath(__FILE__, WIDGET_MANAGER_MAX_FILE_PATH_LEN, curFileDirPath);
if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
USER_LOG_ERROR("Get file current path error, stat = 0x%08llX", djiStat);
return ;
}
snprintf(filePath, WIDGET_MANAGER_MAX_FILE_PATH_LEN, "%sdata/3K-5K.opus", curFileDirPath);
const char* fileName = "3K-5K.opus";
const char* uuid = "abcdef";
// audioFileInfo is zeroed first, so the memcpy'd strings below stay
// NUL-terminated as long as they are shorter than the destination fields.
memset(&audioFileInfo, 0, sizeof(audioFileInfo));
audioFileInfo.fileType = DJI_SPEAKER_WIDGET_FILE_TYPE_OPUS;
memcpy(audioFileInfo.fileName, fileName, strlen(fileName));
memcpy(audioFileInfo.filePath, filePath, strlen(filePath));
memcpy(audioFileInfo.uuid, uuid, strlen(uuid));
audioFileInfo.fileBitrate = DJI_SPEAKER_WIDGET_AUDIO_DECODE_BITRATE_32000;
std::cout<<"step 1: subscribe widget state of speaker"<<std::endl;
djiStat = DjiWidgetManager_SubscribeSpeakerStates(position, DjiTestWidgetManager_RecvSpeakerStatesCallback);
if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
USER_LOG_ERROR("Dji test subscribe speaker widget state on position:%d error, stat = 0x%08llX", position, djiStat);
// Deliberately continue: state echo is optional for the demo.
}
std::cout<<"step 2: set playing param of speaker"<<std::endl;
// param = {parameter id, value}: volume 20, then play mode 1.
param = {DJI_WIDGET_MGR_SPEAKER_PARAM_VOLUME, 20};
djiStat = DjiWidgetManager_SetSpeakertState(position, &param);
if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
USER_LOG_ERROR("Dji test setting the speaker volume on position:%d error, stat = 0x%08llX", position, djiStat);
return ;
}
param = {DJI_WIDGET_MGR_SPEAKER_PARAM_PLAY_MODE, 1};
djiStat = DjiWidgetManager_SetSpeakertState(position, &param);
if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
USER_LOG_ERROR("Dji test set the speaker play mode on position:%d error, stat = 0x%08llX", position, djiStat);
return ;
}
std::cout<<"step 3: send audio file data to speaker"<<std::endl;
djiStat = DjiWidgetManager_SendSpeakerAudioData(position, &audioFileInfo);
if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
USER_LOG_ERROR("Dji test send audio file to speaker on position:%d error, stat = 0x%08llX", position, djiStat);
return ;
}
std::cout<<"step 4: play the audio file"<<std::endl;
// Play action 0 = start; let it play for 10 s, then action 1 = stop.
param = {DJI_WIDGET_MGR_SPEAKER_PARAM_PALY_ACTION, 0};
djiStat = DjiWidgetManager_SetSpeakertState(position, &param);
if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
USER_LOG_ERROR("Dji test set the speaker to start play on position:%d error, stat = 0x%08llX", position, djiStat);
return ;
}
sleep(10);
param = {DJI_WIDGET_MGR_SPEAKER_PARAM_PALY_ACTION, 1};
djiStat = DjiWidgetManager_SetSpeakertState(position, &param);
if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
USER_LOG_ERROR("Dji test set the speaker to stop play on position:%d error, stat = 0x%08llX", position, djiStat);
return ;
}
std::cout<<"step 5: stop subscribe speaker state"<<std::endl;
djiStat = DjiWidgetManager_UnsubscribeSpeakerStates(position);
if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
USER_LOG_ERROR("Dji test unsubscribe speaker widget state on position:%d error, stat = 0x%08llX", position, djiStat);
// Best-effort cleanup; nothing more to do on failure.
}
}
/**
 * @brief Append one download chunk to the currently open widget file.
 * On a short write, logs the failure and closes/clears s_widgetFileFd so a
 * later event cannot write through a stale FILE*.
 * @pre s_widgetFileFd is non-NULL.
 */
static T_DjiReturnCode DjiWidgetManager_WriteWidgetFileChunk(const uint8_t *data, uint16_t dataLen)
{
    size_t realLen = fwrite(data, 1, dataLen, s_widgetFileFd);
    if (realLen != dataLen) {
        USER_LOG_ERROR("Dji test write widget data failed");
        // Fix: previous code returned with the file still open (handle leak
        // and stale global on every failed transfer).
        fclose(s_widgetFileFd);
        s_widgetFileFd = NULL;
        return DJI_ERROR_SYSTEM_MODULE_CODE_SYSTEM_ERROR;
    }
    return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}

/**
 * @brief Widget file download callback: stream incoming chunks into
 * widget_file_from_position_<pos>/<name from the downloaded file list>,
 * opening the file on START and closing it on END.
 */
static T_DjiReturnCode DjiWidgetManager_UsrDownloadCallback(T_DjiDownloadWidgetFileInfo packetInfo,
                                                            const uint8_t *data,
                                                            uint16_t dataLen) {
    std::string directory = "widget_file_from_position_" + std::to_string(packetInfo.position);
    std::string fileName = s_fileList.fileListInfo[packetInfo.fileIndex].fileName;
    std::string path = directory + '/' + fileName;
    T_DjiReturnCode djiStat;

    if (mkdir(directory.c_str(), 0755) != 0 && errno != EEXIST) {
        USER_LOG_ERROR("Error creating directory: %s\n", strerror(errno));
        return DJI_ERROR_SYSTEM_MODULE_CODE_SYSTEM_ERROR;
    }
    switch (packetInfo.downloadFileEvent) {
        case DJI_DOWNLOAD_FILE_EVENT_START: {
            s_widgetFileFd = fopen(path.c_str(), "wb+");
            if (s_widgetFileFd == NULL) {
                USER_LOG_ERROR("Dji test Open widget file failed");
                return DJI_ERROR_SYSTEM_MODULE_CODE_SYSTEM_ERROR;
            }
            djiStat = DjiWidgetManager_WriteWidgetFileChunk(data, dataLen);
            if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
                return djiStat;
            }
            break;
        }
        case DJI_DOWNLOAD_FILE_EVENT_TRANSFER: {
            if (s_widgetFileFd != NULL) {
                djiStat = DjiWidgetManager_WriteWidgetFileChunk(data, dataLen);
                if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
                    return djiStat;
                }
            }
            break;
        }
        case DJI_DOWNLOAD_FILE_EVENT_END: {
            // Fix: fclose() was previously reached even when the handle was
            // NULL (undefined behavior); now only an open file is written
            // to and closed.
            if (s_widgetFileFd != NULL) {
                djiStat = DjiWidgetManager_WriteWidgetFileChunk(data, dataLen);
                if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
                    return djiStat;
                }
                fclose(s_widgetFileFd);
                s_widgetFileFd = NULL;
            }
            break;
        }
        case DJI_DOWNLOAD_FILE_EVENT_START_TRANSFER_END: {
            // Whole file in a single packet: open, write, close.
            s_widgetFileFd = fopen(path.c_str(), "wb+");
            if (s_widgetFileFd == NULL) {
                USER_LOG_ERROR("Dji test Open widget file failed");
                return DJI_ERROR_SYSTEM_MODULE_CODE_SYSTEM_ERROR;
            }
            djiStat = DjiWidgetManager_WriteWidgetFileChunk(data, dataLen);
            if (djiStat != DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS) {
                return djiStat;
            }
            fclose(s_widgetFileFd);
            s_widgetFileFd = NULL;
            break;
        }
        default: {
            USER_LOG_ERROR("Dji test unknown download event");
            break;
        }
    }
    return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/****************** (C) COPYRIGHT DJI Innovations *****END OF FILE****/

View File

@ -0,0 +1,51 @@
/**
********************************************************************
* @file test_widget_manager.hpp
* @brief This is the header file for "test_widget_manager.cpp", defining the structure and
* (exported) function prototypes.
*
* @copyright (c) 2021 DJI. All rights reserved.
*
* All information contained herein is, and remains, the property of DJI.
* The intellectual and technical concepts contained herein are proprietary
* to DJI and may be covered by U.S. and foreign patents, patents in process,
* and protected by trade secret or copyright law. Dissemination of this
* information, including but not limited to data and other proprietary
* material(s) incorporated within the information, in any form, is strictly
* prohibited without the express written consent of DJI.
*
* If you receive this source code without DJIs authorization, you may not
* further disseminate the information, and you must immediately remove the
* source code and notify DJI of its removal. DJI reserves the right to pursue
* legal actions against you for any loss(es) or damage(s) caused by your
* failure to do so.
*
*********************************************************************
*/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef DJI_WIDGET_MANAGER_INTERNAL_H
#define DJI_WIDGET_MANAGER_INTERNAL_H
/* Includes ------------------------------------------------------------------*/
#include "dji_typedef.h"
// #include "dji_widget_manager.h"
#ifdef __cplusplus
extern "C" {
#endif
/* Exported constants --------------------------------------------------------*/
/* Exported types ------------------------------------------------------------*/
/* Exported functions --------------------------------------------------------*/
/* Interactive widget-manager sample entry point (third-party payload,
 * searchlight, or speaker). Note: the "Mannager" spelling is part of the
 * exported API and must not be changed. */
T_DjiReturnCode DjiTest_WidgetMannagerStart(void);
#ifdef __cplusplus
}
#endif
#endif // DJI_WIDGET_MANAGER_INTERNAL_H
/************************ (C) COPYRIGHT DJI Innovations *******END OF FILE******/