obsdk demo:stream_color

This commit is contained in:
tangchao0503
2026-03-30 17:23:04 +08:00
parent 65b0b307f6
commit fad9423121
10 changed files with 1249 additions and 22 deletions

View File

@ -1,20 +0,0 @@
// ConsoleApplication1.cpp : This file contains the "main" function. Program execution begins and ends there.
//
#include <iostream>
int main()
{
std::cout << "Hello World!\n";
}
// Run program: Ctrl + F5 or Debug > "Start Without Debugging" menu
// Debug program: F5 or Debug > "Start Debugging" menu
// Tips for getting started:
//   1. Use the Solution Explorer window to add/manage files
//   2. Use the Team Explorer window to connect to source control
//   3. Use the Output window to see build output and other messages
//   4. Use the Error List window to view errors
//   5. Go to Project > Add New Item to create new code files, or Project > Add Existing Item to add existing code files to the project
//   6. In the future, to open this project again, go to File > Open > Project and select the .sln file

View File

@ -100,12 +100,15 @@
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<SDLCheck>true</SDLCheck>
<PreprocessorDefinitions>_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<PreprocessorDefinitions>_DEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ConformanceMode>true</ConformanceMode>
<AdditionalIncludeDirectories>C:\Program Files\OrbbecSDK 2.7.6\include;C:\Program Files\OrbbecSDK 2.7.6\examples\src\utils;D:\cpp_library\opencv3.4.11\opencv\build\include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ClCompile>
<Link>
<SubSystem>Console</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<AdditionalDependencies>opencv_world3411.lib;opencv_world3411d.lib;OrbbecSDK.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalLibraryDirectories>D:\cpp_library\opencv3.4.11\opencv\build\x64\vc15\lib;C:\Program Files\OrbbecSDK 2.7.6\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
@ -123,7 +126,16 @@
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="ConsoleApplication1.cpp" />
<ClCompile Include="stream_color.cpp" />
<ClCompile Include="utils.cpp" />
<ClCompile Include="utils_c.c" />
<ClCompile Include="utils_opencv.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="utils.hpp" />
<ClInclude Include="utils_c.h" />
<ClInclude Include="utils_opencv.hpp" />
<ClInclude Include="utils_types.h" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">

View File

@ -0,0 +1,50 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"
int main(void) try {
// Create a pipeline with default device.
ob::Pipeline pipe;
// Configure which streams to enable or disable for the Pipeline by creating a Config.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
// Enable color video stream.
config->enableVideoStream(OB_STREAM_COLOR);
// Start the pipeline with config.
pipe.start(config);
// Create a window for rendering and set the resolution of the window.
ob_smpl::CVWindow win("Color");
while (win.run()) {
// Wait for up to 100ms for a frameset in blocking mode.
auto frameSet = pipe.waitForFrameset();
if (frameSet == nullptr) {
continue;
}
// get color frame from frameset.
auto colorFrame = frameSet->getFrame(OB_FRAME_COLOR);
// Render colorFrame.
win.pushFramesToView(colorFrame);
}
// Stop the Pipeline, no frame data will be generated
pipe.stop();
return 0;
}
catch (ob::Error& e) {
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
std::cout << "\nPress any key to exit.";
ob_smpl::waitForKeyPressed();
exit(EXIT_FAILURE);
}

View File

@ -0,0 +1,54 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include "utils.hpp"
#include "utils_c.h"
#include <chrono>
namespace ob_smpl {

// Block until a key is pressed (or timeout_ms elapses) and return it.
char waitForKeyPressed(uint32_t timeout_ms) {
    return ob_smpl_wait_for_key_press(timeout_ms);
}

// Current wall-clock time in milliseconds since the Unix epoch.
uint64_t getNowTimesMs() {
    const auto sinceEpoch = std::chrono::system_clock::now().time_since_epoch();
    return std::chrono::duration_cast<std::chrono::milliseconds>(sinceEpoch).count();
}

// Read one key; ESC maps to -1, any other key maps to (key - '0').
int getInputOption() {
    const char key = ob_smpl::waitForKeyPressed();
    return (key == ESC_KEY) ? -1 : (key - '0');
}

// True if any sensor reported by the device is a LiDAR sensor.
bool isLiDARDevice(std::shared_ptr<ob::Device> device) {
    auto sensors = device->getSensorList();
    for(uint32_t i = 0; i < sensors->getCount(); i++) {
        if(sensors->getSensorType(i) == OB_SENSOR_LIDAR) {
            return true;
        }
    }
    return false;
}

// True if stdout accepts ANSI escape sequences.
bool supportAnsiEscape() {
    return ob_smpl_support_ansi_escape() != 0;
}

// VID/PID match for the Gemini 305 family.
bool isGemini305Device(int vid, int pid) {
    return ob_smpl_is_gemini305_device(vid, pid);
}

// VID/PID match for the Astra Mini family.
bool isAstraMiniDevice(int vid, int pid) {
    return ob_smpl_is_astra_mini_device(vid, pid);
}

}  // namespace ob_smpl

View File

@ -0,0 +1,56 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#pragma once
#include <stdint.h>
#include "utils_types.h"
#include <sstream>
#include <libobsensor/ObSensor.hpp>
namespace ob_smpl {
/**
 * @brief Wait for a key press on stdin.
 *
 * @param timeout_ms Maximum time to wait in milliseconds; 0 waits indefinitely.
 * @return char The pressed key (0 on timeout — see utils_c.c implementations).
 */
char waitForKeyPressed(uint32_t timeout_ms = 0);
/**
 * @brief Get the current system time in milliseconds since the Unix epoch.
 */
uint64_t getNowTimesMs();
/**
 * @brief Read one key and interpret it as a menu option.
 *
 * @return int -1 if ESC was pressed, otherwise the pressed character minus '0'.
 */
int getInputOption();
/**
 * @brief Format a value as a fixed-point string.
 *
 * @param a_value The value to format.
 * @param n Number of digits after the decimal point (default 6).
 * @return std::string The formatted text.
 */
template <typename T> std::string toString(const T a_value, const int n = 6) {
    std::ostringstream out;
    out.precision(n);
    out << std::fixed << a_value;
    return std::move(out).str();
}
/**
 * @brief Check if the device is a LiDAR device.
 *
 * @param device The device to check.
 * @return true if the device is a LiDAR device.
 * @return false otherwise.
 */
bool isLiDARDevice(std::shared_ptr<ob::Device> device);
/**
 * @brief Check whether stdout supports ANSI escape sequences.
 *
 * @return true if supported, false otherwise.
 */
bool supportAnsiEscape();
/**
 * @brief Check if the device is a Gemini305 device.
 *
 * @param vid The vendor ID of the device.
 * @param pid The product ID of the device.
 * @return true if the device is a Gemini305 device.
 * @return false otherwise.
 */
bool isGemini305Device(int vid, int pid);
/**
 * @brief Check if the device is an Astra Mini device.
 *
 * @param vid The vendor ID of the device.
 * @param pid The product ID of the device.
 * @return true if the device is an Astra Mini device.
 * @return false otherwise.
 */
bool isAstraMiniDevice(int vid, int pid);
}  // namespace ob_smpl

View File

@ -0,0 +1,213 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include "utils_c.h"
#include "utils_types.h"
#include <stdbool.h>
#include <stdio.h>
#ifdef __cplusplus
extern "C" {
#endif
#if defined(__linux__) || defined(__APPLE__)
#ifdef __linux__
#include <termio.h>
#else
#include <termios.h>
#endif
#include <string.h>
#include <unistd.h>
#include <fcntl.h>
#define gets_s gets
// Read a single character from the terminal in raw mode (no echo, no
// line buffering). Returns the character, or -1 on a termios failure.
int getch(void) {
    struct termios raw, saved;
    int fd = 0;
    int ch;
    // Snapshot the current terminal attributes so they can be restored.
    if(tcgetattr(fd, &raw) < 0) {
        return -1;
    }
    saved = raw;
    // Switch to raw mode so input is delivered byte-by-byte.
    cfmakeraw(&raw);
    if(tcsetattr(fd, TCSANOW, &raw) < 0) {
        return -1;
    }
    ch = getchar();
    // Put the original attributes back.
    if(tcsetattr(fd, TCSANOW, &saved) < 0) {
        return -1;
    }
    return ch;
}
// Non-blocking probe for pending keyboard input. Returns 1 if a byte is
// waiting on stdin (the byte is pushed back for the next read), else 0.
int kbhit(void) {
    struct termios savedAttrs, rawAttrs;
    int flags;
    int c;
    // Temporarily disable canonical mode and echo for the probe.
    tcgetattr(STDIN_FILENO, &savedAttrs);
    rawAttrs = savedAttrs;
    rawAttrs.c_lflag &= ~(ICANON | ECHO);
    tcsetattr(STDIN_FILENO, TCSANOW, &rawAttrs);
    // Make stdin non-blocking so getchar() returns immediately.
    flags = fcntl(STDIN_FILENO, F_GETFL, 0);
    fcntl(STDIN_FILENO, F_SETFL, flags | O_NONBLOCK);
    c = getchar();
    // Restore the terminal attributes and file-status flags.
    tcsetattr(STDIN_FILENO, TCSANOW, &savedAttrs);
    fcntl(STDIN_FILENO, F_SETFL, flags);
    if(c == EOF) {
        return 0;
    }
    // A byte was available: push it back so the next read sees it.
    ungetc(c, stdin);
    return 1;
}
#include <sys/time.h>
// Milliseconds since the Unix epoch, derived from gettimeofday().
uint64_t ob_smpl_get_current_timestamp_ms(void) {
    struct timeval now;
    gettimeofday(&now, NULL);
    return (uint64_t)(now.tv_sec * 1000LL + now.tv_usec / 1000);
}
// Poll the keyboard until a key arrives or timeout_ms elapses.
// timeout_ms == 0 means wait forever; returns 0 on timeout.
char ob_smpl_wait_for_key_press(uint32_t timeout_ms) {
    // Record the start time so the timeout can be measured against it.
    struct timeval te;
    long long start_time;
    gettimeofday(&te, NULL);
    start_time = te.tv_sec * 1000LL + te.tv_usec / 1000;
    while(true) {
        long long current_time;
        if(kbhit()) {
            return getch();
        }
        gettimeofday(&te, NULL);
        current_time = te.tv_sec * 1000LL + te.tv_usec / 1000;
        // Give up once the deadline has passed (0 disables the deadline).
        if(timeout_ms > 0 && current_time - start_time > timeout_ms) {
            return 0;
        }
        usleep(100);  // brief sleep to avoid a hot busy-wait loop
    }
}
// Returns 1 when stdout is a terminal (and thus can be expected to handle
// ANSI escape sequences), 0 when redirected to a pipe or file.
int ob_smpl_support_ansi_escape(void) {
    return isatty(fileno(stdout)) != 0;
}
#else // Windows
#include <conio.h>
#include <windows.h>
#include <io.h>
#include <stdio.h>
// Milliseconds since the Unix epoch, to match the POSIX implementation above.
// FILETIME counts 100-ns ticks since 1601-01-01, so the 1601->1970 offset
// (116444736000000000 ticks) must be subtracted before converting; the
// original code skipped this and returned a 1601-based value, inconsistent
// with the POSIX build.
uint64_t ob_smpl_get_current_timestamp_ms() {
    FILETIME ft;
    LARGE_INTEGER li;
    GetSystemTimeAsFileTime(&ft);
    li.LowPart = ft.dwLowDateTime;
    li.HighPart = ft.dwHighDateTime;
    long long milliseconds = (li.QuadPart - 116444736000000000LL) / 10000LL;
    return milliseconds;
}
// Poll the keyboard (echo disabled) until a key arrives or timeout_ms
// elapses; timeout_ms == 0 waits forever. Returns the key, or 0 on
// timeout/console errors.
char ob_smpl_wait_for_key_press(uint32_t timeout_ms) {
    HANDLE hStdin = GetStdHandle(STD_INPUT_HANDLE);
    if(hStdin == INVALID_HANDLE_VALUE) {
        return 0;
    }
    DWORD originalMode = 0;
    if(!GetConsoleMode(hStdin, &originalMode)) {
        return 0;
    }
    // Disable echo only while polling; restore the caller's mode on exit.
    DWORD pollMode = originalMode & ~ENABLE_ECHO_INPUT;
    if(!SetConsoleMode(hStdin, pollMode)) {
        return 0;
    }
    DWORD start_time = GetTickCount();
    while(true) {
        if(_kbhit()) {
            char ch = (char)_getch();
            // Fix: restore the ORIGINAL mode. The previous code re-applied
            // the modified (no-echo) mode here, so echo was never re-enabled.
            SetConsoleMode(hStdin, originalMode);
            return ch;
        }
        // Unsigned subtraction stays correct across GetTickCount()'s
        // ~49.7-day wraparound.
        if(timeout_ms > 0 && GetTickCount() - start_time > timeout_ms) {
            SetConsoleMode(hStdin, originalMode);
            return 0;
        }
        Sleep(1);
    }
}
// Returns 1 when stdout is a console that already has virtual terminal
// processing enabled (i.e. ANSI escapes will be interpreted), else 0.
int ob_smpl_support_ansi_escape(void) {
    // Redirected output (pipe/file) cannot interpret escape sequences.
    if(_isatty(_fileno(stdout)) == 0) {
        return 0;
    }
    HANDLE out = GetStdHandle(STD_OUTPUT_HANDLE);
    if(out == INVALID_HANDLE_VALUE) {
        return 0;
    }
    DWORD consoleMode = 0;
    if(!GetConsoleMode(out, &consoleMode)) {
        return 0;
    }
    return (consoleMode & ENABLE_VIRTUAL_TERMINAL_PROCESSING) != 0 ? 1 : 0;
}
#endif
// Check whether any sensor exposed by `device` is a LiDAR sensor (C API).
// NOTE: CHECK_OB_ERROR_EXIT (utils_c.h) terminates the whole process on any
// SDK error, so no error is ever propagated to the caller.
bool ob_smpl_is_lidar_device(ob_device *device) {
    ob_error *error = NULL;
    ob_sensor_list *sensorList = NULL;
    uint32_t sensorCount = 0;
    if(device == NULL) {
        return false;
    }
    sensorList = ob_device_get_sensor_list(device, &error);
    CHECK_OB_ERROR_EXIT(&error);
    sensorCount = ob_sensor_list_get_count(sensorList, &error);
    CHECK_OB_ERROR_EXIT(&error);
    for(uint32_t index = 0; index < sensorCount; index++) {
        OBSensorType sensorType = ob_sensor_list_get_sensor_type(sensorList, index, &error);
        CHECK_OB_ERROR_EXIT(&error);
        if(sensorType == OB_SENSOR_LIDAR) {
            // Found one: release the sensor list before returning.
            ob_delete_sensor_list(sensorList, &error);
            CHECK_OB_ERROR_EXIT(&error);
            return true;
        }
    }
    ob_delete_sensor_list(sensorList, &error);
    CHECK_OB_ERROR_EXIT(&error);
    return false;
}
bool ob_smpl_is_gemini305_device(int vid, int pid) {
return (vid == OB_DEVICE_VID && (pid == 0x0840 || pid == 0x0841 || pid == 0x0842 || pid == 0x0843));
}
// VID/PID table lookup for the Astra Mini family.
bool ob_smpl_is_astra_mini_device(int vid, int pid) {
    const bool pidMatches = (pid == 0x069d) || (pid == 0x065b) || (pid == 0x065e);
    return (vid == OB_DEVICE_VID) && pidMatches;
}
#ifdef __cplusplus
}
#endif

View File

@ -0,0 +1,78 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#pragma once
#include <stdint.h>
#include <stdlib.h>
#include <libobsensor/ObSensor.h>
#ifdef __cplusplus
extern "C" {
#endif
/**
* @brief Get the current system timestamp in milliseconds.
*
*/
uint64_t ob_smpl_get_current_timestamp_ms(void);
/**
* @brief Wait for key press.
*
* @param[in] timeout_ms The maximum time to wait for a key press in milliseconds. Set to 0 to wait indefinitely.
*
* @return char The key that was pressed.
*/
char ob_smpl_wait_for_key_press(uint32_t timeout_ms);
/**
* @brief Check if the device is a LiDAR device.
*
* @param device The device to check.
* @return true if the device is a LiDAR device.
* @return false otherwise.
*/
bool ob_smpl_is_lidar_device(ob_device *device);
/**
* @brief Check if stdout supports ANSI escape sequences.
*
* @return 1 if supported, 0 not supported.
*/
int ob_smpl_support_ansi_escape(void);
/**
* @brief Check if the device is a Gemini305 device.
*
* @param vid The vendor ID of the device.
* @param pid The product ID of the device.
* @return true if the device is a Gemini305 device.
* @return false otherwise.
*/
bool ob_smpl_is_gemini305_device(int vid, int pid);
/**
* @brief Check if the device is a Astra Mini device.
*
* @param vid The vendor ID of the device.
* @param pid The product ID of the device.
* @return true if the device is a Astra Mini device.
* @return false otherwise.
*/
bool ob_smpl_is_astra_mini_device(int vid, int pid);
// Macro to check for error and exit program if there is one.
#define CHECK_OB_ERROR_EXIT(error) \
if(*error) { \
const char *error_message = ob_error_get_message(*error); \
fprintf(stderr, "Error: %s\n", error_message); \
ob_delete_error(*error); \
*error = NULL; \
exit(-1); \
} \
*error = NULL;
#ifdef __cplusplus
}
#endif

View File

@ -0,0 +1,653 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include "utils_opencv.hpp"
#include "utils.hpp"
#include "utils_types.h"
#include <chrono>
#if defined(__has_include)
#if __has_include(<opencv2/core/utils/logger.hpp>)
#include <opencv2/core/utils/logger.hpp>
#define TO_DISABLE_OPENCV_LOG
#endif
#endif
namespace ob_smpl {
const std::string defaultKeyMapPrompt = "'Esc': Exit Window, '?': Show Key Map";
// Create the named OpenCV window, show a "waiting" placeholder image, and
// start the background frame-processing thread.
CVWindow::CVWindow(std::string name, uint32_t width, uint32_t height, ArrangeMode arrangeMode)
    : name_(std::move(name)),
      arrangeMode_(arrangeMode),
      width_(width),
      height_(height),
      closed_(false),
      showInfo_(true),
      showSyncTimeInfo_(false),
      isWindowDestroyed_(false),
      alpha_(0.6f),
      showPrompt_(false) {
#if defined(TO_DISABLE_OPENCV_LOG)
    // Silence OpenCV's own logging so it does not clutter the sample output.
    cv::utils::logging::setLogLevel(cv::utils::logging::LogLevel::LOG_LEVEL_SILENT);
#endif
    prompt_ = defaultKeyMapPrompt;
    cv::namedWindow(name_, cv::WINDOW_NORMAL);
    cv::resizeWindow(name_, width_, height_);
    // Placeholder image shown until the first real frame arrives.
    renderMat_ = cv::Mat::zeros(height_, width_, CV_8UC3);
    cv::putText(renderMat_, "Waiting for streams...", cv::Point(8, 16), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
    cv::imshow(name_, renderMat_);
    // start processing thread
    processThread_ = std::thread(&CVWindow::processFrames, this);
    winCreatedTime_ = getNowTimesMs();
}
// Stop the processing thread, then destroy the native window.
CVWindow::~CVWindow() noexcept {
    close();
    destroyWindow();
}
// Register a callback invoked with the raw key code for every key pressed
// while the window has focus (called from run(), after built-in handling).
void CVWindow::setKeyPressedCallback(std::function<void(int)> callback) {
    // Move instead of copy: std::function may own heap-allocated state.
    keyPressedCallback_ = std::move(callback);
}
// Display the latest composited image and service one round of keyboard
// input. Returns false once the window has been asked to close.
bool CVWindow::run() {
    {
        // show render mat
        std::lock_guard<std::mutex> lock(renderMatsMtx_);
        cv::imshow(name_, renderMat_);
    }
    const int key = cv::waitKey(1);
    if(key == -1) {
        return !closed_;
    }
    switch(key) {
    case ESC_KEY:
        closed_ = true;
        srcFrameGroupsCv_.notify_all();
        break;
    case '1':
        arrangeMode_ = ARRANGE_SINGLE;
        addLog("Switch to SINGLE arrange mode");
        break;
    case '2':
        arrangeMode_ = ARRANGE_ONE_ROW;
        addLog("Switch to ONE_ROW arrange mode");
        break;
    case '3':
        arrangeMode_ = ARRANGE_ONE_COLUMN;
        addLog("Switch to ONE_COLUMN arrange mode");
        break;
    case '4':
        arrangeMode_ = ARRANGE_GRID;
        addLog("Switch to GRID arrange mode");
        break;
    case '5':
        arrangeMode_ = ARRANGE_OVERLAY;
        addLog("Switch to OVERLAY arrange mode");
        break;
    case '?':
    case '/':
        showPrompt_ = !showPrompt_;
        break;
    case '+':
    case '=':
        alpha_ += 0.1f;
        if(alpha_ > 1) {
            alpha_ = 1;
        }
        addLog("Adjust alpha to " + ob_smpl::toString(alpha_, 1) + " (Only valid in OVERLAY arrange mode)");
        break;
    case '-':
    case '_':
        alpha_ -= 0.1f;
        if(alpha_ < 0) {
            alpha_ = 0;
        }
        addLog("Adjust alpha to " + ob_smpl::toString(alpha_, 1) + " (Only valid in OVERLAY arrange mode)");
        break;
    default:
        break;
    }
    // Forward every pressed key (handled or not) to the user callback.
    if(keyPressedCallback_) {
        keyPressedCallback_(key);
    }
    return !closed_;
}
// Stop the processing thread and drop all cached frames/mats.
// reset() relies on this to shut down cleanly before restarting.
void CVWindow::close() {
    {
        // NOTE(review): closed_ is set under renderMatsMtx_, but the
        // processing thread waits on srcFrameGroupsCv_ guarded by
        // srcFrameGroupsMtx_ — confirm the notify below cannot race with a
        // waiter that has not yet entered wait().
        std::lock_guard<std::mutex> lock(renderMatsMtx_);
        closed_ = true;
        srcFrameGroupsCv_.notify_all();
    }
    if(processThread_.joinable()) {
        processThread_.join();
    }
    // The worker thread has exited, so the caches can be cleared unlocked.
    matGroups_.clear();
    srcFrameGroups_.clear();
}
// Destroy the native OpenCV window exactly once; later calls only log.
void CVWindow::destroyWindow() {
    if(isWindowDestroyed_) {
        std::cout << "CVWindows has been destroyed!" << std::endl;
        return;
    }
    cv::destroyWindow(name_);
    cv::waitKey(1);  // let the GUI event loop process the destruction
    isWindowDestroyed_ = true;
}
// Discard all cached frames/mats and restart the processing thread.
void CVWindow::reset() {
    // close thread and clear cache
    close();
    // restart thread
    closed_ = false;
    processThread_ = std::thread(&CVWindow::processFrames, this);
}
// Resize the on-screen window; subsequent compositing targets the new size.
void CVWindow::resize(int width, int height) {
    width_ = width;
    height_ = height;
    cv::resizeWindow(name_, width_, height_);
}
// Append a sample-specific key description to the default key-map prompt.
void CVWindow::setKeyPrompt(const std::string &prompt) {
    prompt_ = defaultKeyMapPrompt + ", " + prompt;
}
// Show `log` as an on-screen message; arrangeFrames() hides it again after
// 3000 ms based on the timestamp recorded here.
void CVWindow::addLog(const std::string &log) {
    log_ = log;
    logCreatedTime_ = getNowTimesMs();
}
// Queue a batch of frames for display. FrameSets are expanded into their
// child frames; all frames from one call share the same group id.
void CVWindow::pushFramesToView(std::vector<std::shared_ptr<const ob::Frame>> frames, int groupId) {
    if(frames.empty()) {
        return;
    }
    // Flatten the input: FrameSets are expanded, plain frames pass through,
    // null entries are dropped.
    std::vector<std::shared_ptr<const ob::Frame>> flattened;
    for(const auto &frame: frames) {
        if(frame == nullptr) {
            continue;
        }
        if(frame->is<ob::FrameSet>()) {
            auto frameSet = frame->as<ob::FrameSet>();
            for(uint32_t i = 0; i < frameSet->getCount(); i++) {
                flattened.push_back(frameSet->getFrameByIndex(i));
            }
        }
        else {
            flattened.push_back(frame);
        }
    }
    // Publish the batch and wake the processing thread.
    std::lock_guard<std::mutex> lk(srcFrameGroupsMtx_);
    srcFrameGroups_[groupId] = flattened;
    srcFrameGroupsCv_.notify_one();
}
// Convenience overload: queue a single frame (or FrameSet) for display.
void CVWindow::pushFramesToView(std::shared_ptr<const ob::Frame> currentFrame, int groupId) {
    std::vector<std::shared_ptr<const ob::Frame>> wrapped{ currentFrame };
    pushFramesToView(std::move(wrapped), groupId);
}
// Store whether per-frame info should be shown.
void CVWindow::setShowInfo(bool show) {
    showInfo_ = show;
}
// Store whether timestamp overlays should be drawn (read by visualize()).
void CVWindow::setShowSyncTimeInfo(bool show) {
    showSyncTimeInfo_ = show;
}
// Set the blend factor used by the OVERLAY arrange mode, clamped to [0, 1].
void CVWindow::setAlpha(float alpha) {
    if(alpha < 0) {
        alpha = 0;
    }
    else if(alpha > 1) {
        alpha = 1;
    }
    alpha_ = alpha;
}
// Background worker: converts queued frames to displayable mats and
// recomposes renderMat_ whenever new frames arrive.
void CVWindow::processFrames() {
    std::map<int, std::vector<std::shared_ptr<const ob::Frame>>> frameGroups;
    while(!closed_) {
        {
            std::unique_lock<std::mutex> lk(srcFrameGroupsMtx_);
            // Fix: wait with a bounded timeout instead of a bare wait(). A
            // notify issued before this thread reaches wait() (producer pushed
            // frames, or close() signaled shutdown, between loop iterations)
            // would otherwise be lost and block this loop indefinitely.
            srcFrameGroupsCv_.wait_for(lk, std::chrono::milliseconds(100));
            if(closed_) {
                break;
            }
            frameGroups = srcFrameGroups_;
        }
        if(frameGroups.empty()) {
            continue;
        }
        // Convert every queued frame, keyed by a uid that is unique per
        // (group id, frame type) pair.
        for(const auto &framesItem: frameGroups) {
            const int groupId = framesItem.first;
            for(const auto &frame: framesItem.second) {
                auto rstMat = visualize(frame);
                if(!rstMat.empty()) {
                    int uid = groupId * OB_FRAME_TYPE_COUNT + static_cast<int>(frame->getType());
                    matGroups_[uid] = { frame, rstMat };
                }
            }
        }
        if(matGroups_.empty()) {
            continue;
        }
        arrangeFrames();
    }
}
// Compose all cached mats into renderMat_ according to arrangeMode_, then
// draw the key-map prompt and any transient log text on top.
void CVWindow::arrangeFrames() {
    cv::Mat renderMat;
    try {
        if(arrangeMode_ == ARRANGE_SINGLE || matGroups_.size() == 1) {
            // Show only the first cached mat, scaled to the window size.
            auto &mat = matGroups_.begin()->second.second;
            renderMat = resizeMatKeepAspectRatio(mat, width_, height_);
        }
        else if(arrangeMode_ == ARRANGE_ONE_ROW) {
            // Concatenate horizontally; each mat gets an equal width share.
            for(auto &item: matGroups_) {
                auto &mat = item.second.second;
                cv::Mat resizeMat = resizeMatKeepAspectRatio(mat, static_cast<int>(width_ / matGroups_.size()), height_);
                if(renderMat.dims > 0 && renderMat.cols > 0 && renderMat.rows > 0) {
                    cv::hconcat(renderMat, resizeMat, renderMat);
                }
                else {
                    renderMat = resizeMat;
                }
            }
        }
        else if(arrangeMode_ == ARRANGE_ONE_COLUMN) {
            // Concatenate vertically; each mat gets an equal height share.
            for(auto &item: matGroups_) {
                auto &mat = item.second.second;
                cv::Mat resizeMat = resizeMatKeepAspectRatio(mat, width_, static_cast<int>(height_ / matGroups_.size()));
                if(renderMat.dims > 0 && renderMat.cols > 0 && renderMat.rows > 0) {
                    cv::vconcat(renderMat, resizeMat, renderMat);
                }
                else {
                    renderMat = resizeMat;
                }
            }
        }
        else if(arrangeMode_ == ARRANGE_GRID) {
            int count = static_cast<int>(matGroups_.size());
            int idealSide = static_cast<int>(std::sqrt(count));
            int rows = idealSide;
            int cols = idealSide;
            while(rows * cols < count) {  // find the best row and column count
                cols++;
                if(rows * cols < count) {
                    rows++;
                }
            }
            std::vector<cv::Mat> gridImages;  // store all images in grid
            auto it = matGroups_.begin();
            for(int i = 0; i < rows; i++) {
                std::vector<cv::Mat> rowImages;  // store images in the same row
                for(int j = 0; j < cols; j++) {
                    int index = i * cols + j;
                    cv::Mat resizeMat;
                    if(index < count) {
                        auto mat = it->second.second;
                        resizeMat = resizeMatKeepAspectRatio(mat, width_ / cols, height_ / rows);
                        it++;
                    }
                    else {
                        resizeMat = cv::Mat::zeros(height_ / rows, width_ / cols, CV_8UC3);  // fill with black
                    }
                    rowImages.push_back(resizeMat);
                }
                cv::Mat lineMat;
                cv::hconcat(rowImages, lineMat);  // horizontal concat all images in the same row
                gridImages.push_back(lineMat);
            }
            cv::vconcat(gridImages, renderMat);  // vertical concat all images in the grid
        }
        else if(arrangeMode_ == ARRANGE_OVERLAY && matGroups_.size() >= 2) {
            // Alpha-blend the first and last cached mats.
            // NOTE(review): this per-pixel loop could likely be replaced by
            // cv::addWeighted (rounding differs slightly) — candidate cleanup.
            cv::Mat overlayMat;
            const auto &mat1 = matGroups_.begin()->second.second;
            const auto &mat2 = matGroups_.rbegin()->second.second;
            renderMat = resizeMatKeepAspectRatio(mat1, width_, height_);
            overlayMat = resizeMatKeepAspectRatio(mat2, width_, height_);
            float alpha = alpha_;
            for(int i = 0; i < renderMat.rows; i++) {
                for(int j = 0; j < renderMat.cols; j++) {
                    cv::Vec3b &outRgb = renderMat.at<cv::Vec3b>(i, j);
                    cv::Vec3b &overlayRgb = overlayMat.at<cv::Vec3b>(i, j);
                    outRgb[0] = (uint8_t)(outRgb[0] * (1.0f - alpha) + overlayRgb[0] * alpha);
                    outRgb[1] = (uint8_t)(outRgb[1] * (1.0f - alpha) + overlayRgb[1] * alpha);
                    outRgb[2] = (uint8_t)(outRgb[2] * (1.0f - alpha) + overlayRgb[2] * alpha);
                }
            }
        }
    }
    catch(std::exception &e) {
        std::cerr << e.what() << std::endl;
    }
    if(renderMat.empty()) {
        return;
    }
    // Show the key prompt while toggled on, or for the first 5 s of the window.
    if(showPrompt_ || getNowTimesMs() - winCreatedTime_ < 5000) {
        cv::putText(renderMat, prompt_, cv::Point(8, 16), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
    }
    // Show the most recent log line for 3 s after it was added.
    if(!log_.empty() && getNowTimesMs() - logCreatedTime_ < 3000) {
        cv::putText(renderMat, log_, cv::Point(8, height_ - 16), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
    }
    // Publish the composed image for run() to display.
    std::lock_guard<std::mutex> lock(renderMatsMtx_);
    renderMat_ = renderMat;
}
// Convert `frame` into a displayable BGR mat according to its type/format.
// Returns an empty mat when the frame is null or its format is unsupported.
cv::Mat CVWindow::visualize(std::shared_ptr<const ob::Frame> frame) {
    if(frame == nullptr) {
        return cv::Mat();
    }
    cv::Mat rstMat;
    switch(frame->getType()) {
    case OB_FRAME_COLOR:
    case OB_FRAME_COLOR_LEFT:
    case OB_FRAME_COLOR_RIGHT: {
        // Decode/convert the color image according to its pixel format.
        auto videoFrame = frame->as<const ob::VideoFrame>();
        switch(videoFrame->getFormat()) {
        case OB_FORMAT_MJPG: {
            cv::Mat rawMat(1, videoFrame->getDataSize(), CV_8UC1, videoFrame->getData());
            rstMat = cv::imdecode(rawMat, 1);
        } break;
        case OB_FORMAT_NV21: {
            cv::Mat rawMat(videoFrame->getHeight() * 3 / 2, videoFrame->getWidth(), CV_8UC1, videoFrame->getData());
            cv::cvtColor(rawMat, rstMat, cv::COLOR_YUV2BGR_NV21);
        } break;
        case OB_FORMAT_YUYV:
        case OB_FORMAT_YUY2: {
            cv::Mat rawMat(videoFrame->getHeight(), videoFrame->getWidth(), CV_8UC2, videoFrame->getData());
            cv::cvtColor(rawMat, rstMat, cv::COLOR_YUV2BGR_YUY2);
        } break;
        case OB_FORMAT_BGR: {
            // NOTE(review): this converts BGR data to RGB, so the result is
            // channel-swapped relative to the other branches (which produce
            // BGR for imshow) — confirm intended before changing.
            cv::Mat rawMat(videoFrame->getHeight(), videoFrame->getWidth(), CV_8UC3, videoFrame->getData());
            cv::cvtColor(rawMat, rstMat, cv::COLOR_BGR2RGB);
        } break;
        case OB_FORMAT_RGB: {
            cv::Mat rawMat(videoFrame->getHeight(), videoFrame->getWidth(), CV_8UC3, videoFrame->getData());
            cv::cvtColor(rawMat, rstMat, cv::COLOR_RGB2BGR);
        } break;
        case OB_FORMAT_RGBA: {
            cv::Mat rawMat(videoFrame->getHeight(), videoFrame->getWidth(), CV_8UC4, videoFrame->getData());
            cv::cvtColor(rawMat, rstMat, cv::COLOR_RGBA2BGR);
        } break;
        case OB_FORMAT_BGRA: {
            cv::Mat rawMat(videoFrame->getHeight(), videoFrame->getWidth(), CV_8UC4, videoFrame->getData());
            cv::cvtColor(rawMat, rstMat, cv::COLOR_BGRA2RGB);
        } break;
        case OB_FORMAT_UYVY: {
            cv::Mat rawMat(videoFrame->getHeight(), videoFrame->getWidth(), CV_8UC2, videoFrame->getData());
            cv::cvtColor(rawMat, rstMat, cv::COLOR_YUV2BGR_UYVY);
        } break;
        case OB_FORMAT_I420: {
            cv::Mat rawMat(videoFrame->getHeight() * 3 / 2, videoFrame->getWidth(), CV_8UC1, videoFrame->getData());
            cv::cvtColor(rawMat, rstMat, cv::COLOR_YUV2BGR_I420);
        } break;
        case OB_FORMAT_Y8: {
            cv::Mat rawMat(videoFrame->getHeight(), videoFrame->getWidth(), CV_8UC1, videoFrame->getData());
            cv::cvtColor(rawMat, rstMat, cv::COLOR_GRAY2BGR);
        } break;
        case OB_FORMAT_Y16: {
            cv::Mat rawMat(videoFrame->getHeight(), videoFrame->getWidth(), CV_16UC1, videoFrame->getData());
            cv::Mat gray8;
            // Scale 16-bit gray down to 8-bit before expanding to 3 channels.
            rawMat.convertTo(gray8, CV_8UC1, 255.0 / 65535.0);
            cv::cvtColor(gray8, rstMat, cv::COLOR_GRAY2BGR);
        } break;
        default:
            break;
        }
        if(showSyncTimeInfo_ && !rstMat.empty()) {
            drawInfo(rstMat, videoFrame);
        }
    } break;
    case OB_FRAME_DEPTH: {
        auto videoFrame = frame->as<const ob::VideoFrame>();
        if(videoFrame->getFormat() == OB_FORMAT_Y16 || videoFrame->getFormat() == OB_FORMAT_Z16 || videoFrame->getFormat() == OB_FORMAT_Y12C4) {
            cv::Mat rawMat = cv::Mat(videoFrame->getHeight(), videoFrame->getWidth(), CV_16UC1, videoFrame->getData());
            // depth frame pixel value multiply scale to get distance in millimeter
            float scale = videoFrame->as<ob::DepthFrame>()->getValueScale();
            cv::Mat cvtMat;
            // normalization to 0-255. 0.032f is 256/8000, to limit the range of depth to 8000mm
            rawMat.convertTo(cvtMat, CV_32F, scale * 0.032f);
            // apply gamma correction to enhance the contrast for near objects
            cv::pow(cvtMat, 0.6f, cvtMat);
            // convert to 8-bit
            cvtMat.convertTo(cvtMat, CV_8UC1, 10);  // multiplier 10 is to normalize to 0-255 (nearly) after applying gamma correction
            // apply colormap
            cv::applyColorMap(cvtMat, rstMat, cv::COLORMAP_JET);
        }
        if(showSyncTimeInfo_ && !rstMat.empty()) {
            drawInfo(rstMat, videoFrame);
        }
    } break;
    case OB_FRAME_IR:
    case OB_FRAME_IR_LEFT:
    case OB_FRAME_IR_RIGHT: {
        auto videoFrame = frame->as<const ob::VideoFrame>();
        if(videoFrame->getFormat() == OB_FORMAT_Y16) {
            cv::Mat cvtMat;
            cv::Mat rawMat = cv::Mat(videoFrame->getHeight(), videoFrame->getWidth(), CV_16UC1, videoFrame->getData());
            rawMat.convertTo(cvtMat, CV_8UC1, 1.0 / 16.0f);
            cv::cvtColor(cvtMat, rstMat, cv::COLOR_GRAY2RGB);
        }
        else if(videoFrame->getFormat() == OB_FORMAT_Y8) {
            cv::Mat rawMat = cv::Mat(videoFrame->getHeight(), videoFrame->getWidth(), CV_8UC1, videoFrame->getData());
            cv::cvtColor(rawMat, rstMat, cv::COLOR_GRAY2RGB);
        }
        else if(videoFrame->getFormat() == OB_FORMAT_MJPG) {
            cv::Mat rawMat(1, videoFrame->getDataSize(), CV_8UC1, videoFrame->getData());
            // Fix: decode as grayscale (flag 0) before expanding to 3 channels.
            // The previous code decoded with flag 1 (3-channel BGR) and then
            // applied COLOR_GRAY2RGB to that 3-channel image, which is invalid
            // input for cvtColor and throws at runtime.
            cv::Mat gray = cv::imdecode(rawMat, 0);
            if(!gray.empty()) {
                cv::cvtColor(gray, rstMat, cv::COLOR_GRAY2RGB);
            }
        }
        if(showSyncTimeInfo_ && !rstMat.empty()) {
            drawInfo(rstMat, videoFrame);
        }
    } break;
    case OB_FRAME_CONFIDENCE: {
        auto videoFrame = frame->as<const ob::VideoFrame>();
        if(videoFrame->getFormat() == OB_FORMAT_Y16) {
            cv::Mat cvtMat;
            cv::Mat rawMat = cv::Mat(videoFrame->getHeight(), videoFrame->getWidth(), CV_16UC1, videoFrame->getData());
            rawMat.convertTo(cvtMat, CV_8UC1, 1.0 / 16.0f);
            cv::cvtColor(cvtMat, rstMat, cv::COLOR_GRAY2RGB);
        }
        else if(videoFrame->getFormat() == OB_FORMAT_Y8) {
            cv::Mat rawMat = cv::Mat(videoFrame->getHeight(), videoFrame->getWidth(), CV_8UC1, videoFrame->getData());
            cv::cvtColor(rawMat, rstMat, cv::COLOR_GRAY2RGB);
        }
    } break;
    case OB_FRAME_ACCEL: {
        // Accelerometer samples are rendered as text on a black canvas.
        rstMat = cv::Mat::zeros(320, 300, CV_8UC3);
        auto accelFrame = frame->as<ob::AccelFrame>();
        auto value = accelFrame->getValue();
        std::string str = "Accel:";
        cv::putText(rstMat, str.c_str(), cv::Point(8, 60), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
        str = std::string(" timestamp=") + std::to_string(accelFrame->getTimeStampUs()) + "us";
        cv::putText(rstMat, str.c_str(), cv::Point(8, 100), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
        str = std::string(" x=") + std::to_string(value.x) + "m/s^2";
        cv::putText(rstMat, str.c_str(), cv::Point(8, 140), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
        str = std::string(" y=") + std::to_string(value.y) + "m/s^2";
        cv::putText(rstMat, str.c_str(), cv::Point(8, 180), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
        str = std::string(" z=") + std::to_string(value.z) + "m/s^2";
        cv::putText(rstMat, str.c_str(), cv::Point(8, 220), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
    } break;
    case OB_FRAME_GYRO: {
        // Gyroscope samples are rendered as text on a black canvas.
        rstMat = cv::Mat::zeros(320, 300, CV_8UC3);
        auto gyroFrame = frame->as<ob::GyroFrame>();
        auto value = gyroFrame->getValue();
        std::string str = "Gyro:";
        cv::putText(rstMat, str.c_str(), cv::Point(8, 60), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
        str = std::string(" timestamp=") + std::to_string(gyroFrame->getTimeStampUs()) + "us";
        cv::putText(rstMat, str.c_str(), cv::Point(8, 100), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
        str = std::string(" x=") + std::to_string(value.x) + "rad/s";
        cv::putText(rstMat, str.c_str(), cv::Point(8, 140), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
        str = std::string(" y=") + std::to_string(value.y) + "rad/s";
        cv::putText(rstMat, str.c_str(), cv::Point(8, 180), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
        str = std::string(" z=") + std::to_string(value.z) + "rad/s";
        cv::putText(rstMat, str.c_str(), cv::Point(8, 220), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(255, 255, 255), 1, cv::LINE_AA);
    } break;
    default:
        break;
    }
    return rstMat;
}
// Draw frame-type/format and timestamp labels (black text on white boxes)
// onto the top-left corner of `imageMat`.
void CVWindow::drawInfo(cv::Mat &imageMat, std::shared_ptr<const ob::VideoFrame> &frame) {
    int baseline = 0;    // Used to calculate text size and baseline
    cv::Size textSize;   // Size of the text to be drawn
    int padding = 5;     // Padding around the text for the background
    // Helper lambda function to draw text with background
    auto putTextWithBackground = [&](const std::string &text, cv::Point origin) {
        // Getting text size for background
        textSize = cv::getTextSize(text, cv::FONT_HERSHEY_SIMPLEX, 0.6, 1, &baseline);
        // Drawing the white background
        cv::rectangle(imageMat, origin + cv::Point(0, baseline), origin + cv::Point(textSize.width, -textSize.height) - cv::Point(0, padding),
                      cv::Scalar(255, 255, 255), cv::FILLED);
        // Putting black text on the white background
        cv::putText(imageMat, text, origin, cv::FONT_HERSHEY_SIMPLEX, 0.6, cv::Scalar(0, 0, 0), 1);
    };
    // Label is "<FrameType>-<Format>" for color frames; plain type otherwise.
    auto frameType = frame->getType();
    auto frameFormat = frame->getFormat();
    switch(frameFormat) {
    case OB_FORMAT_NV21: {
        switch(frameType) {
        case OB_FRAME_COLOR:
            putTextWithBackground("Color-NV21", cv::Point(8, 16));
            break;
        case OB_FRAME_COLOR_LEFT:
            putTextWithBackground("LeftColor-NV21", cv::Point(8, 16));
            break;
        case OB_FRAME_COLOR_RIGHT:
            putTextWithBackground("RightColor-NV21", cv::Point(8, 16));
            break;
        default:
            break;
        }
    } break;
    case OB_FORMAT_MJPG: {
        switch(frameType) {
        case OB_FRAME_COLOR:
            putTextWithBackground("Color-MJPG", cv::Point(8, 16));
            break;
        case OB_FRAME_COLOR_LEFT:
            putTextWithBackground("LeftColor-MJPG", cv::Point(8, 16));
            break;
        case OB_FRAME_COLOR_RIGHT:
            putTextWithBackground("RightColor-MJPG", cv::Point(8, 16));
            break;
        default:
            break;
        }
    } break;
    case OB_FORMAT_YUYV:
    case OB_FORMAT_YUY2: {
        switch(frameType) {
        case OB_FRAME_COLOR:
            putTextWithBackground("Color-YUYV", cv::Point(8, 16));
            break;
        case OB_FRAME_COLOR_LEFT:
            putTextWithBackground("LeftColor-YUYV", cv::Point(8, 16));
            break;
        case OB_FRAME_COLOR_RIGHT:
            putTextWithBackground("RightColor-YUYV", cv::Point(8, 16));
            break;
        default:
            break;
        }
    } break;
    default: {
        // Non-color frames (any format) are labeled by type only.
        switch(frameType) {
        case OB_FRAME_DEPTH:
            putTextWithBackground("Depth", cv::Point(8, 16));
            break;
        case OB_FRAME_IR:
            putTextWithBackground("IR", cv::Point(8, 16));
            break;
        case OB_FRAME_IR_LEFT:
            putTextWithBackground("LeftIR", cv::Point(8, 16));
            break;
        case OB_FRAME_IR_RIGHT:
            putTextWithBackground("RightIR", cv::Point(8, 16));
            break;
        default:
            break;
        }
    } break;
    }
    // Timestamp information with background
    putTextWithBackground("frame timestamp(us): " + std::to_string(frame->getTimeStampUs()), cv::Point(8, 40));
    putTextWithBackground("system timestamp(us): " + std::to_string(frame->getSystemTimeStampUs()), cv::Point(8, 64));
}
// Resize `mat` to fit within width x height while preserving its aspect
// ratio, then center it on a black background of exactly the target size.
cv::Mat CVWindow::resizeMatKeepAspectRatio(const cv::Mat &mat, int width, int height) {
    auto hScale = static_cast<double>(width) / mat.cols;
    auto vScale = static_cast<double>(height) / mat.rows;
    auto scale = std::min(hScale, vScale);
    auto newWidth = static_cast<int>(mat.cols * scale);
    auto newHeight = static_cast<int>(mat.rows * scale);
    cv::Mat resizeMat;
    cv::resize(mat, resizeMat, cv::Size(newWidth, newHeight));
    if(newWidth == width && newHeight == height) {
        return resizeMat;
    }
    // Fix: pad both axes in a single copyMakeBorder call. The previous code
    // issued two independent calls, each reading from the unpadded mat, so
    // when BOTH dimensions needed padding (possible because the float->int
    // truncation above can undershoot both) the second call discarded the
    // first call's padding and returned a mat narrower than `width`.
    const int padLeft = (width - newWidth) / 2;
    const int padRight = width - newWidth - padLeft;
    const int padTop = (height - newHeight) / 2;
    const int padBottom = height - newHeight - padTop;
    cv::Mat paddedMat;
    cv::copyMakeBorder(resizeMat, paddedMat, padTop, padBottom, padLeft, padRight, cv::BORDER_CONSTANT, cv::Scalar(0, 0, 0));
    return paddedMat;
}
} // namespace ob_smpl

View File

@ -0,0 +1,117 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#pragma once
#include <libobsensor/ObSensor.hpp>
#include <opencv2/opencv.hpp>
#include <string>
#include <thread>
#include <mutex>
#include <condition_variable>
#include <cmath>
#include <map>
#include "utils_types.h"
#include "utils.hpp"
namespace ob_smpl {
// arrange type
// How multiple cached frames are laid out in the render window.
typedef enum {
    ARRANGE_SINGLE,      // Only show the first frame
    ARRANGE_ONE_ROW,     // Arrange the frames in the array as a row
    ARRANGE_ONE_COLUMN,  // Arrange the frames in the array as a column
    ARRANGE_GRID,        // Arrange the frames in the array as a grid
    ARRANGE_OVERLAY      // Overlay the first two frames in the array
} ArrangeMode;
/**
 * @brief OpenCV-based preview window for rendering SDK frames.
 *
 * Frames pushed via pushFramesToView() are converted and composited on a
 * background thread; run() must be called repeatedly from the UI thread to
 * display the result and service keyboard input.
 */
class CVWindow {
public:
    // create a window with the specified name, width and height
    CVWindow(std::string name, uint32_t width = 1280, uint32_t height = 720, ArrangeMode arrangeMode = ARRANGE_SINGLE);
    ~CVWindow() noexcept;
    // run the window loop; returns false once the window has been closed
    bool run();
    // close window and stop the processing thread
    void close();
    // clear cached frames and mats, then restart the processing thread
    void reset();
    // add frames to view (FrameSets are expanded into their child frames)
    void pushFramesToView(std::vector<std::shared_ptr<const ob::Frame>> frames, int groupId = 0);
    void pushFramesToView(std::shared_ptr<const ob::Frame> currentFrame, int groupId = 0);
    // set show frame info
    void setShowInfo(bool show);
    // set show frame syncTime info
    void setShowSyncTimeInfo(bool show);
    // set alpha, only valid when arrangeMode_ is ARRANGE_OVERLAY
    void setAlpha(float alpha);
    // set the window size
    void resize(int width, int height);
    // set the key pressed callback
    void setKeyPressedCallback(std::function<void(int)> callback);
    // set the key prompt
    void setKeyPrompt(const std::string &prompt);
    // set the log message (shown briefly at the bottom of the window)
    void addLog(const std::string &log);
    // destroy the native window (idempotent)
    void destroyWindow();

private:
    // frames processing thread function
    void processFrames();
    // arrange frames in the renderMat_ according to the arrangeMode_
    void arrangeFrames();
    // convert a frame to a displayable mat (empty mat when unsupported)
    cv::Mat visualize(std::shared_ptr<const ob::Frame> frame);
    // draw frame info onto mat
    void drawInfo(cv::Mat &imageMat, std::shared_ptr<const ob::VideoFrame> &frame);
    // resize keeping aspect ratio, padding to the target size with black
    cv::Mat resizeMatKeepAspectRatio(const cv::Mat &mat, int width, int height);

private:
    std::string name_;
    ArrangeMode arrangeMode_;
    uint32_t width_;
    uint32_t height_;
    bool closed_;
    bool showInfo_;
    bool showSyncTimeInfo_;
    bool isWindowDestroyed_;
    float alpha_;  // blend factor for ARRANGE_OVERLAY, clamped to [0, 1]
    std::thread processThread_;
    // pending frames keyed by group id, consumed by processFrames()
    std::map<int, std::vector<std::shared_ptr<const ob::Frame>>> srcFrameGroups_;
    std::mutex srcFrameGroupsMtx_;
    std::condition_variable srcFrameGroupsCv_;
    // uid -> (source frame, rendered mat); uid = groupId * OB_FRAME_TYPE_COUNT + frameType
    using StreamsMatMap = std::map<int, std::pair<std::shared_ptr<const ob::Frame>, cv::Mat>>;
    StreamsMatMap matGroups_;
    std::mutex renderMatsMtx_;
    cv::Mat renderMat_;  // latest composited image shown by run()
    std::string prompt_;
    bool showPrompt_;
    // Use the standard fixed-width typedef instead of OpenCV's non-standard
    // global `uint64` alias (same underlying type, but portable and explicit).
    uint64_t winCreatedTime_;
    std::string log_;
    uint64_t logCreatedTime_;
    std::function<void(int)> keyPressedCallback_;
};
} // namespace ob_smpl

View File

@ -0,0 +1,14 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#pragma once  // Fix: include guard was missing; sibling headers (utils.hpp, utils_c.h) already use #pragma once.
#ifdef __cplusplus
extern "C" {
#endif
// ASCII code of the Escape key, used by the samples to quit/abort input.
#define ESC_KEY 27
// USB vendor ID shared by Orbbec devices (used for VID/PID matching).
#define OB_DEVICE_VID 0x2bc5
#ifdef __cplusplus
}
#endif