一、开发环境准备
1.1 硬件要求
- 奥比中光深度相机(如Astra Pro、Gemini等)
- USB 3.0接口(确保数据传输稳定)
- 支持OpenGL的显卡(可选,用于点云可视化)
1.2 软件环境
- SDK安装:
- 从奥比中光开发者官网下载最新的OrbbecSDK
- 运行安装程序并配置环境变量(Windows),或执行 ./install.sh 安装脚本(Linux)
- 开发工具链:
- Visual Studio (Windows)
- GCC 7.5+/Clang 9+(Linux)
- CMake 3.10+(跨平台项目配置)
二、SDK架构与核心组件
OrbbecSDK采用模块化设计,主要组件包括:
- DeviceManager:设备发现与管理
- Device:相机设备抽象
- StreamProfile:数据流配置(分辨率、帧率、格式等)
- Frame:图像帧数据容器
- Pipeline:数据流处理管道
- FrameListener:帧数据回调接口
三、基本开发流程
3.1 初始化SDK与设备
#include <OrbbecSDK.h>
#include <iostream>
#include <string>

// Minimal initialization example: enumerate attached cameras, pick the
// first one, and build a Pipeline for it.
int main() {
    // SDK entry point; owns device discovery state.
    ob::Context context;

    // Enumerate and report every currently connected device.
    std::shared_ptr<ob::DeviceList> deviceList = context.queryDeviceList();
    std::cout << "设备数量: " << deviceList->deviceCount() << std::endl;

    // Grab the first device, or bail out if none is attached.
    std::shared_ptr<ob::Device> device;
    if (deviceList->deviceCount() > 0) {
        device = deviceList->getDevice(0);
        std::cout << "已连接设备: " << device->getDeviceInfo()->name() << std::endl;
    } else {
        std::cerr << "未发现设备!" << std::endl;
        return -1;
    }

    // Pipeline drives stream configuration and frame delivery for this device.
    std::shared_ptr<ob::Pipeline> pipeline = std::make_shared<ob::Pipeline>(device);

    // Configure and start the data streams here.
    // ...
}
3.2 配置并启动数据流
// Configure the depth stream.
ob::Config config;
std::shared_ptr<ob::StreamProfileList> profileList =
    pipeline->getStreamProfileList(OB_SENSOR_DEPTH);

// Select the 848-wide @ 30 fps Y16 depth profile.
// NOTE(review): the second argument (0) presumably means "any height" —
// confirm against the getVideoStreamProfile signature of your SDK version.
std::shared_ptr<ob::VideoStreamProfile> depthProfile =
    std::dynamic_pointer_cast<ob::VideoStreamProfile>(
        profileList->getVideoStreamProfile(848, 0, OB_FORMAT_Y16, 30));

// Enable the depth stream and start the pipeline with a frame callback.
config.enableStream(depthProfile);
pipeline->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
    // Per-frameset callback: log basic frame info, then hand off for processing.
    if (frameSet->depthFrame()) {
        auto depthFrame = frameSet->depthFrame();
        std::cout << "获取到深度帧: " << "宽度=" << depthFrame->width()
                  << ", 高度=" << depthFrame->height()
                  << ", 时间戳=" << depthFrame->timestamp() << std::endl;
        // Process the depth data.
        processDepthFrame(depthFrame);
    }
});
3.3 深度数据处理
/// Print the depth (in millimetres) at the image centre of one depth frame,
/// then optionally convert the frame to a point cloud.
/// @param depthFrame  Y16 depth frame; each pixel is depth in mm.
void processDepthFrame(std::shared_ptr<ob::DepthFrame> depthFrame) {
    // Guard against a null frame (consistent with the full example below).
    if (!depthFrame) return;

    // Raw pixel buffer: one uint16_t depth value per pixel.
    auto* depthData = static_cast<uint16_t*>(depthFrame->data());
    int width = depthFrame->width();
    int height = depthFrame->height();
    if (!depthData || width <= 0 || height <= 0) return;

    // Depth value of the central pixel, in millimetres.
    int centerX = width / 2;
    int centerY = height / 2;
    uint16_t centerDepth = depthData[centerY * width + centerX];
    std::cout << "中心点深度: " << centerDepth << "mm" << std::endl;

    // Optional: convert the frame to a point cloud.
    convertToPointCloud(depthFrame);
}
3.4 点云生成与处理
/// Convert a depth frame to an XYZ point cloud and report the mean depth
/// of valid (z > 0) points.
/// @param depthFrame  source depth frame (mm units).
void convertToPointCloud(std::shared_ptr<ob::DepthFrame> depthFrame) {
    // NOTE(review): PointCloudGenerator/generate follows this tutorial's API;
    // newer OrbbecSDK releases expose this as ob::PointCloudFilter::process —
    // verify against the SDK version you build with.
    std::shared_ptr<ob::PointCloudGenerator> pcGenerator =
        std::make_shared<ob::PointCloudGenerator>();
    pcGenerator->setFormat(OB_FORMAT_XYZ);  // XYZ triples, no color

    std::shared_ptr<ob::Frame> pointCloud = pcGenerator->generate(depthFrame);
    if (!pointCloud) return;

    auto* points = static_cast<float*>(pointCloud->data());
    int pointCount = pointCloud->dataSize() / (3 * sizeof(float));  // XYZ per point

    // Average depth over valid (z > 0) points only. Dividing by the total
    // count would bias the mean toward zero and divide by zero on an empty
    // cloud; this matches the validPoints logic in the complete example.
    float sumDepth = 0;
    int validPoints = 0;
    for (int i = 0; i < pointCount; i++) {
        float z = points[i * 3 + 2];  // Z coordinate
        if (z > 0) {
            sumDepth += z;
            validPoints++;
        }
    }
    if (validPoints > 0) {
        std::cout << "平均深度: " << sumDepth / validPoints << "mm" << std::endl;
    }
}
3.5 相机参数获取与使用
// Query the depth sensor's intrinsic parameters for the active profile.
auto depthSensor = device->getSensor(OB_SENSOR_DEPTH);
auto intrinsics = depthSensor->getIntrinsics(depthProfile);
std::cout << "深度相机内参:" << std::endl
          << "fx=" << intrinsics.fx << ", fy=" << intrinsics.fy << std::endl
          << "cx=" << intrinsics.cx << ", cy=" << intrinsics.cy << std::endl;

// Back-project the central pixel into camera-space coordinates using the
// pinhole model: X = (u - cx) * Z / fx, Y = (v - cy) * Z / fy.
float depthValue = depthData[centerY * width + centerX];
float worldX = (centerX - intrinsics.cx) * depthValue / intrinsics.fx;
float worldY = (centerY - intrinsics.cy) * depthValue / intrinsics.fy;
float worldZ = depthValue;
std::cout << "世界坐标: (" << worldX << ", " << worldY << ", " << worldZ << ")mm" << std::endl;
3.6 相机工作模式切换
// Switch the camera into dimensioning (size-measurement) mode.
ob::WorkingMode workingMode;
workingMode.type = OB_WORKING_MODE_DIMENSIONING;
device->setWorkingMode(workingMode);
std::cout << "已切换到尺寸测量模式" << std::endl;
四、完整示例代码
下面是一个完整的奥比中光深度相机C++开发示例,包含设备初始化、数据流获取、深度处理和点云生成:
#include <OrbbecSDK.h>
#include <iostream>
#include <string>
#include <vector>
#include <atomic>

// Global run flag controlling the program lifetime.
std::atomic<bool> running(true);

/// Report the centre-pixel depth of one frame, then generate an XYZ point
/// cloud from it and print the count and mean depth of valid (z > 0) points.
/// @param depthFrame   Y16 depth frame (mm units); ignored if null.
/// @param pcGenerator  shared point-cloud generator; skipped if null.
void processDepthFrame(std::shared_ptr<ob::DepthFrame> depthFrame,
                       std::shared_ptr<ob::PointCloudGenerator> pcGenerator) {
    if (!depthFrame) return;

    // Raw depth buffer: one uint16_t millimetre value per pixel.
    auto* depthData = (uint16_t*)depthFrame->data();
    int width = depthFrame->width();
    int height = depthFrame->height();

    int centerX = width / 2;
    int centerY = height / 2;
    uint16_t centerDepth = depthData[centerY * width + centerX];
    std::cout << "深度帧: 宽度=" << width << ", 高度=" << height
              << ", 中心点深度=" << centerDepth << "mm" << std::endl;

    // Generate and summarize the point cloud.
    if (pcGenerator) {
        auto pointCloud = pcGenerator->generate(depthFrame);
        if (pointCloud) {
            auto* points = (float*)pointCloud->data();
            int pointCount = pointCloud->dataSize() / (3 * sizeof(float));

            // Mean depth over valid points only, to avoid zero bias.
            float sumDepth = 0;
            int validPoints = 0;
            for (int i = 0; i < pointCount; i++) {
                float z = points[i * 3 + 2];
                if (z > 0) {
                    sumDepth += z;
                    validPoints++;
                }
            }
            if (validPoints > 0) {
                std::cout << "点云: 点数=" << validPoints
                          << ", 平均深度=" << sumDepth / validPoints << "mm" << std::endl;
            }
        }
    }
}

int main() {
    try {
        // SDK entry point.
        ob::Context context;

        // Enumerate devices; abort if none is connected.
        std::shared_ptr<ob::DeviceList> deviceList = context.queryDeviceList();
        if (deviceList->deviceCount() == 0) {
            std::cerr << "未发现设备!" << std::endl;
            return -1;
        }
        std::shared_ptr<ob::Device> device = deviceList->getDevice(0);
        std::cout << "已连接设备: " << device->getDeviceInfo()->name() << std::endl;

        // Pipeline drives stream configuration and frame delivery.
        std::shared_ptr<ob::Pipeline> pipeline = std::make_shared<ob::Pipeline>(device);

        // Prefer 848-wide @ 30 fps Y16 depth; fall back to the default profile.
        std::shared_ptr<ob::StreamProfileList> depthProfileList =
            pipeline->getStreamProfileList(OB_SENSOR_DEPTH);
        std::shared_ptr<ob::VideoStreamProfile> depthProfile =
            std::dynamic_pointer_cast<ob::VideoStreamProfile>(
                depthProfileList->getVideoStreamProfile(848, 0, OB_FORMAT_Y16, 30));
        if (!depthProfile) {
            depthProfile = std::dynamic_pointer_cast<ob::VideoStreamProfile>(
                depthProfileList->getDefaultVideoStreamProfile());
        }

        // Enable the depth stream.
        ob::Config config;
        config.enableStream(depthProfile);

        // Point-cloud generator shared with the frame callback.
        std::shared_ptr<ob::PointCloudGenerator> pcGenerator =
            std::make_shared<ob::PointCloudGenerator>();
        pcGenerator->setFormat(OB_FORMAT_XYZ);

        // Print the depth camera intrinsics for the chosen profile.
        auto depthSensor = device->getSensor(OB_SENSOR_DEPTH);
        auto intrinsics = depthSensor->getIntrinsics(depthProfile);
        std::cout << "深度相机内参:" << std::endl
                  << "fx=" << intrinsics.fx << ", fy=" << intrinsics.fy << std::endl
                  << "cx=" << intrinsics.cx << ", cy=" << intrinsics.cy << std::endl;

        // Start streaming; frames arrive on the SDK's callback thread,
        // so keep the work here light.
        pipeline->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
            if (frameSet && frameSet->depthFrame()) {
                processDepthFrame(frameSet->depthFrame(), pcGenerator);
            }
        });

        // Block until the user presses Enter, then shut down cleanly.
        std::cout << "按Enter键退出..." << std::endl;
        std::cin.get();
        running = false;
        pipeline->stop();
        std::cout << "程序已退出" << std::endl;
        return 0;
    }
    catch (const ob::Error& e) {
        // SDK-specific error with rich context.
        std::cerr << "SDK错误: " << e.getName() << " (" << e.getFunction() << ")" << std::endl
                  << "错误码: " << e.getErrorCode() << std::endl
                  << "错误描述: " << e.getDescription() << std::endl;
        return -1;
    }
    catch (const std::exception& e) {
        std::cerr << "异常: " << e.what() << std::endl;
        return -1;
    }
}
五、CMake配置示例
为了方便项目构建,建议使用CMake配置:
cmake_minimum_required(VERSION 3.10)
project(OrbbecDepthCameraDemo)

# C++ language standard for all targets.
set(CMAKE_CXX_STANDARD 11)

# Locate the OrbbecSDK package (provides OrbbecSDK_LIBRARIES / _INCLUDE_DIRS).
find_package(OrbbecSDK REQUIRED)

# Demo executable.
add_executable(orbbec_demo orbbec_depth_camera_demo.cpp)

# Link against the SDK libraries.
target_link_libraries(orbbec_demo
    ${OrbbecSDK_LIBRARIES}
)

# SDK header search path.
target_include_directories(orbbec_demo PUBLIC
    ${OrbbecSDK_INCLUDE_DIRS}
)

# Place the built binary under <build>/bin.
set_target_properties(orbbec_demo PROPERTIES
    RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin
)
六、常见功能扩展
6.1 RGB-D数据同步获取
// 启用RGB流
std::shared_ptr<ob::StreamProfileList> colorProfileList = pipeline->getStreamProfileList(OB_SENSOR_COLOR);
std::shared_ptr<ob::VideoStreamProfile> colorProfile = std::dynamic_pointer_cast<ob::VideoStreamProfile>(colorProfileList->getVideoStreamProfile(1280, 0, OB_FORMAT_RGB, 30));
config.enableStream(colorProfile);// 在帧回调中处理RGB和深度数据
pipeline->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {if (frameSet->depthFrame() && frameSet->colorFrame()) {auto depthFrame = frameSet->depthFrame();auto colorFrame = frameSet->colorFrame();// 处理同步的RGB-D数据processRGBDFrame(depthFrame, colorFrame);}
});
6.2 深度图与彩色图对齐
// 创建对齐器
std::shared_ptr<ob::Align> align = std::make_shared<ob::Align>(OB_ALIGN_DEPTH_TO_COLOR);// 在帧回调中应用对齐
pipeline->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {if (frameSet) {// 应用对齐auto alignedFrameSet = align->process(frameSet);if (alignedFrameSet->depthFrame() && alignedFrameSet->colorFrame()) {auto depthFrame = alignedFrameSet->depthFrame();auto colorFrame = alignedFrameSet->colorFrame();// 处理对齐后的RGB-D数据processAlignedRGBDFrame(depthFrame, colorFrame);}}
});
6.3 保存深度图和点云数据
// 保存深度图为PNG
void saveDepthImage(std::shared_ptr<ob::DepthFrame> depthFrame, const std::string& filename) {cv::Mat depthMat(depthFrame->height(), depthFrame->width(), CV_16UC1, depthFrame->data());cv::imwrite(filename, depthMat);
}// 保存点云为PLY格式
void savePointCloud(std::shared_ptr<ob::Frame> pointCloud, const std::string& filename) {float* points = (float*)pointCloud->data();int pointCount = pointCloud->dataSize() / (3 * sizeof(float));std::ofstream file(filename);if (file.is_open()) {// PLY文件头file << "ply\n";file << "format ascii 1.0\n";file << "element vertex " << pointCount << "\n";file << "property float x\n";file << "property float y\n";file << "property float z\n";file << "end_header\n";// 写入点数据for (int i = 0; i < pointCount; i++) {float x = points[i * 3];float y = points[i * 3 + 1];float z = points[i * 3 + 2];file << x << " " << y << " " << z << "\n";}file.close();}
}
七、注意事项
- 线程安全:OrbbecSDK的多数对象非线程安全,避免在多线程中同时操作同一实例
- 内存管理:Frame对象使用智能指针管理,避免手动释放
- 错误处理:建议使用try-catch捕获SDK抛出的异常
- 性能优化:
- 减少帧回调中的复杂计算
- 使用硬件加速(如OpenCL)处理点云
- 避免频繁创建和销毁SDK对象