一、開發環境準備
1.1 硬件要求
- 奧比中光深度相機(如Astra Pro、Gemini等)
- USB 3.0接口(確保數據傳輸穩定)
- 支持OpenGL的顯卡(可選,用於點雲可視化)
1.2 軟件環境
- SDK安裝:
- 從奧比中光開發者官網下載最新的OrbbecSDK
- 運行安裝程序並配置環境變量(Windows)或執行
./install.sh
(Linux)
- 開發工具鏈:
- Visual Studio (Windows)
- GCC 7.5+/Clang 9+(Linux)
- CMake 3.10+(跨平臺項目配置)
二、SDK架構與核心組件
OrbbecSDK采用模塊化設計,主要組件包括:
- DeviceManager:設備發現與管理
- Device:相機設備抽象
- StreamProfile:數據流配置(分辨率、幀率、格式等)
- Frame:圖像幀數據容器
- Pipeline:數據流處理管道
- FrameListener:幀數據回調接口
三、基本開發流程
3.1 初始化SDK與設備
#include <OrbbecSDK.h>
#include <iostream>
#include <string>

// Entry point: discover a connected Orbbec device and create a
// streaming pipeline bound to it. Stream configuration follows in 3.2.
int main() {
    // Initialize the SDK context.
    ob::Context context;

    // Enumerate every available device and report the count.
    std::shared_ptr<ob::DeviceList> deviceList = context.queryDeviceList();
    std::cout << "設備數量: " << deviceList->deviceCount() << std::endl;

    // Take the first device; abort when nothing is connected.
    std::shared_ptr<ob::Device> device;
    if (deviceList->deviceCount() > 0) {
        device = deviceList->getDevice(0);
        std::cout << "已連接設備: " << device->getDeviceInfo()->name() << std::endl;
    } else {
        std::cerr << "未發現設備!" << std::endl;
        return -1;
    }

    // Create the data-stream processing pipeline for this device.
    std::shared_ptr<ob::Pipeline> pipeline = std::make_shared<ob::Pipeline>(device);

    // Configure and start the streams...
    // ...
}
3.2 配置并啟動數據流
// Configure the depth stream.
ob::Config config;
std::shared_ptr<ob::StreamProfileList> profileList =
    pipeline->getStreamProfileList(OB_SENSOR_DEPTH);

// Select the 848x480 @ 30fps Y16 depth profile (height 0 = match any).
std::shared_ptr<ob::VideoStreamProfile> depthProfile =
    std::dynamic_pointer_cast<ob::VideoStreamProfile>(
        profileList->getVideoStreamProfile(848, 0, OB_FORMAT_Y16, 30));

// Enable the depth stream on the configuration.
config.enableStream(depthProfile);

// Start the pipeline; frames arrive through the callback below.
pipeline->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
    // Per-frameset callback: report and process each depth frame.
    if (frameSet->depthFrame()) {
        auto depthFrame = frameSet->depthFrame();
        std::cout << "獲取到深度幀: " << "寬度=" << depthFrame->width()
                  << ", 高度=" << depthFrame->height()
                  << ", 時間戳=" << depthFrame->timestamp() << std::endl;
        // Hand the frame to the depth-processing routine.
        processDepthFrame(depthFrame);
    }
});
3.3 深度數據處理
// Process one depth frame: print the depth at the image center and
// optionally convert the frame to a point cloud.
//
// @param depthFrame  16-bit depth frame (values in millimetres, per the
//                    Y16 profile selected above); no-op if null.
void processDepthFrame(std::shared_ptr<ob::DepthFrame> depthFrame) {
    // FIX: guard against a null frame before dereferencing — consistent
    // with the processDepthFrame in the complete example (section 4).
    if (!depthFrame) return;

    // Raw depth buffer: one uint16_t per pixel, row-major.
    uint16_t* depthData = (uint16_t*)depthFrame->data();
    int width  = depthFrame->width();
    int height = depthFrame->height();

    // Sample the center pixel (unit: millimetres).
    int centerX = width / 2;
    int centerY = height / 2;
    uint16_t centerDepth = depthData[centerY * width + centerX];
    std::cout << "中心點深度: " << centerDepth << "mm" << std::endl;

    // Optional: convert the frame to a point cloud.
    convertToPointCloud(depthFrame);
}
3.4 點云生成與處理
// Convert a depth frame into an XYZ point cloud and print the mean depth
// over the valid points.
//
// @param depthFrame  depth frame to convert; nothing is printed when the
//                    generator yields no cloud or no valid points exist.
void convertToPointCloud(std::shared_ptr<ob::DepthFrame> depthFrame) {
    // Create the point-cloud generator and request plain XYZ output.
    std::shared_ptr<ob::PointCloudGenerator> pcGenerator =
        std::make_shared<ob::PointCloudGenerator>();
    pcGenerator->setFormat(OB_FORMAT_XYZ);

    // Generate the point cloud from the depth frame.
    std::shared_ptr<ob::Frame> pointCloud = pcGenerator->generate(depthFrame);
    if (pointCloud) {
        float* points = (float*)pointCloud->data();
        // Three floats (X, Y, Z) per point.
        int pointCount = pointCloud->dataSize() / (3 * sizeof(float));

        // FIX: average over valid (z > 0) points only. The original summed
        // only z > 0 yet divided by the total point count (skewing the mean
        // low) and divided by zero on an empty cloud. This now matches the
        // statistics logic in the complete example (section 4).
        float sumDepth   = 0;
        int   validCount = 0;
        for (int i = 0; i < pointCount; i++) {
            float z = points[i * 3 + 2]; // Z component
            if (z > 0) {
                sumDepth += z;
                validCount++;
            }
        }
        if (validCount > 0) {
            std::cout << "平均深度: " << sumDepth / validCount << "mm" << std::endl;
        }
    }
}
3.5 相機參數獲取與使用
// Fetch the depth sensor's intrinsic parameters.
auto depthSensor = device->getSensor(OB_SENSOR_DEPTH);
auto intrinsics  = depthSensor->getIntrinsics(depthProfile);
std::cout << "深度相機內參:" << std::endl
          << "fx=" << intrinsics.fx << ", fy=" << intrinsics.fy << std::endl
          << "cx=" << intrinsics.cx << ", cy=" << intrinsics.cy << std::endl;

// Back-project the center pixel to camera-space coordinates
// using the pinhole model (depth value is the Z coordinate).
float depthValue = depthData[centerY * width + centerX];
float worldX = (centerX - intrinsics.cx) * depthValue / intrinsics.fx;
float worldY = (centerY - intrinsics.cy) * depthValue / intrinsics.fy;
float worldZ = depthValue;
std::cout << "世界坐標: (" << worldX << ", " << worldY << ", " << worldZ << ")mm" << std::endl;
3.6 相機工作模式切換
// Switch the device into dimensioning (size-measurement) mode.
ob::WorkingMode workingMode;
workingMode.type = OB_WORKING_MODE_DIMENSIONING;
device->setWorkingMode(workingMode);
std::cout << "已切換到尺寸測量模式" << std::endl;
四、完整示例代碼
下面是一個完整的奧比中光深度相機C++開發示例,包含設備初始化、數據流獲取、深度處理和點云生成:
#include <OrbbecSDK.h>
#include <iostream>
#include <string>
#include <vector>
#include <atomic>

// Global flag controlling program lifetime.
std::atomic<bool> running(true);

// Process one depth frame: print the center-pixel depth and, when a
// generator is supplied, build an XYZ point cloud and report simple
// statistics (valid-point count and mean depth).
void processDepthFrame(std::shared_ptr<ob::DepthFrame> depthFrame,
                       std::shared_ptr<ob::PointCloudGenerator> pcGenerator) {
    if (!depthFrame) return;

    // Raw 16-bit depth buffer, row-major, millimetres.
    uint16_t* depthData = (uint16_t*)depthFrame->data();
    int width  = depthFrame->width();
    int height = depthFrame->height();

    // Depth at the image center.
    int centerX = width / 2;
    int centerY = height / 2;
    uint16_t centerDepth = depthData[centerY * width + centerX];
    std::cout << "深度幀: 寬度=" << width << ", 高度=" << height
              << ", 中心點深度=" << centerDepth << "mm" << std::endl;

    // Optional point-cloud generation with a mean-depth statistic
    // computed over valid (z > 0) points only.
    if (pcGenerator) {
        auto pointCloud = pcGenerator->generate(depthFrame);
        if (pointCloud) {
            float* points = (float*)pointCloud->data();
            int pointCount = pointCloud->dataSize() / (3 * sizeof(float));

            float sumDepth = 0;
            int validPoints = 0;
            for (int i = 0; i < pointCount; i++) {
                float z = points[i * 3 + 2];
                if (z > 0) {
                    sumDepth += z;
                    validPoints++;
                }
            }
            if (validPoints > 0) {
                std::cout << "點云: 點數=" << validPoints
                          << ", 平均深度=" << sumDepth / validPoints << "mm" << std::endl;
            }
        }
    }
}

int main() {
    try {
        // Initialize the SDK.
        ob::Context context;

        // Enumerate devices; abort when none is present.
        std::shared_ptr<ob::DeviceList> deviceList = context.queryDeviceList();
        if (deviceList->deviceCount() == 0) {
            std::cerr << "未發現設備!" << std::endl;
            return -1;
        }

        // Open the first device.
        std::shared_ptr<ob::Device> device = deviceList->getDevice(0);
        std::cout << "已連接設備: " << device->getDeviceInfo()->name() << std::endl;

        // Create the stream-processing pipeline.
        std::shared_ptr<ob::Pipeline> pipeline = std::make_shared<ob::Pipeline>(device);

        // Prefer the 848x480 @ 30fps Y16 depth profile, falling back to
        // the default profile when that exact mode is unavailable.
        std::shared_ptr<ob::StreamProfileList> depthProfileList =
            pipeline->getStreamProfileList(OB_SENSOR_DEPTH);
        std::shared_ptr<ob::VideoStreamProfile> depthProfile =
            std::dynamic_pointer_cast<ob::VideoStreamProfile>(
                depthProfileList->getVideoStreamProfile(848, 0, OB_FORMAT_Y16, 30));
        if (!depthProfile) {
            depthProfile = std::dynamic_pointer_cast<ob::VideoStreamProfile>(
                depthProfileList->getDefaultVideoStreamProfile());
        }

        // Enable the depth stream.
        ob::Config config;
        config.enableStream(depthProfile);

        // Point-cloud generator producing plain XYZ output.
        std::shared_ptr<ob::PointCloudGenerator> pcGenerator =
            std::make_shared<ob::PointCloudGenerator>();
        pcGenerator->setFormat(OB_FORMAT_XYZ);

        // Print the depth camera intrinsics.
        auto depthSensor = device->getSensor(OB_SENSOR_DEPTH);
        auto intrinsics = depthSensor->getIntrinsics(depthProfile);
        std::cout << "深度相機內參:" << std::endl
                  << "fx=" << intrinsics.fx << ", fy=" << intrinsics.fy << std::endl
                  << "cx=" << intrinsics.cx << ", cy=" << intrinsics.cy << std::endl;

        // Start streaming; frames are delivered to this callback.
        pipeline->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
            if (frameSet && frameSet->depthFrame()) {
                processDepthFrame(frameSet->depthFrame(), pcGenerator);
            }
        });

        // Block until the user presses Enter, then shut down.
        std::cout << "按Enter鍵退出..." << std::endl;
        std::cin.get();
        running = false;

        // Stop the data stream before exiting.
        pipeline->stop();
        std::cout << "程序已退出" << std::endl;
        return 0;
    }
    catch (const ob::Error& e) {
        // SDK-specific error: report name, origin, code and description.
        std::cerr << "SDK錯誤: " << e.getName() << " (" << e.getFunction() << ")" << std::endl
                  << "錯誤碼: " << e.getErrorCode() << std::endl
                  << "錯誤描述: " << e.getDescription() << std::endl;
        return -1;
    }
    catch (const std::exception& e) {
        std::cerr << "異常: " << e.what() << std::endl;
        return -1;
    }
}
五、CMake配置示例
為了方便項目構建,建議使用CMake配置:
cmake_minimum_required(VERSION 3.10)
project(OrbbecDepthCameraDemo)

# C++ standard: require C++11.
# FIX: without CMAKE_CXX_STANDARD_REQUIRED, CMake may silently fall back
# to an older standard when the compiler lacks C++11 support.
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# Locate the installed OrbbecSDK package.
find_package(OrbbecSDK REQUIRED)

# Demo executable.
add_executable(orbbec_demo orbbec_depth_camera_demo.cpp)

# Link against the OrbbecSDK libraries.
target_link_libraries(orbbec_demo
    ${OrbbecSDK_LIBRARIES}
)

# SDK header search paths.
target_include_directories(orbbec_demo PUBLIC
    ${OrbbecSDK_INCLUDE_DIRS}
)

# Place the built executable under <build>/bin.
set_target_properties(orbbec_demo PROPERTIES
    RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin
)
六、常見功能擴展
6.1 RGB-D數據同步獲取
// 啟用RGB流
std::shared_ptr<ob::StreamProfileList> colorProfileList = pipeline->getStreamProfileList(OB_SENSOR_COLOR);
std::shared_ptr<ob::VideoStreamProfile> colorProfile = std::dynamic_pointer_cast<ob::VideoStreamProfile>(colorProfileList->getVideoStreamProfile(1280, 0, OB_FORMAT_RGB, 30));
config.enableStream(colorProfile);// 在幀回調中處理RGB和深度數據
pipeline->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {if (frameSet->depthFrame() && frameSet->colorFrame()) {auto depthFrame = frameSet->depthFrame();auto colorFrame = frameSet->colorFrame();// 處理同步的RGB-D數據processRGBDFrame(depthFrame, colorFrame);}
});
6.2 深度圖與彩色圖對齊
// 創建對齊器
std::shared_ptr<ob::Align> align = std::make_shared<ob::Align>(OB_ALIGN_DEPTH_TO_COLOR);// 在幀回調中應用對齊
pipeline->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {if (frameSet) {// 應用對齊auto alignedFrameSet = align->process(frameSet);if (alignedFrameSet->depthFrame() && alignedFrameSet->colorFrame()) {auto depthFrame = alignedFrameSet->depthFrame();auto colorFrame = alignedFrameSet->colorFrame();// 處理對齊后的RGB-D數據processAlignedRGBDFrame(depthFrame, colorFrame);}}
});
6.3 保存深度圖和點云數據
// 保存深度圖為PNG
void saveDepthImage(std::shared_ptr<ob::DepthFrame> depthFrame, const std::string& filename) {cv::Mat depthMat(depthFrame->height(), depthFrame->width(), CV_16UC1, depthFrame->data());cv::imwrite(filename, depthMat);
}// 保存點云為PLY格式
void savePointCloud(std::shared_ptr<ob::Frame> pointCloud, const std::string& filename) {float* points = (float*)pointCloud->data();int pointCount = pointCloud->dataSize() / (3 * sizeof(float));std::ofstream file(filename);if (file.is_open()) {// PLY文件頭file << "ply\n";file << "format ascii 1.0\n";file << "element vertex " << pointCount << "\n";file << "property float x\n";file << "property float y\n";file << "property float z\n";file << "end_header\n";// 寫入點數據for (int i = 0; i < pointCount; i++) {float x = points[i * 3];float y = points[i * 3 + 1];float z = points[i * 3 + 2];file << x << " " << y << " " << z << "\n";}file.close();}
}
七、注意事項
- 線程安全:OrbbecSDK的多數對象非線程安全,避免在多線程中同時操作同一實例
- 內存管理:Frame對象使用智能指針管理,避免手動釋放
- 錯誤處理:建議使用try-catch捕獲SDK拋出的異常
- 性能優化:
- 減少幀回調中的復雜計算
- 使用硬件加速(如OpenCL)處理點云
- 避免頻繁創建和銷毀SDK對象