Intel RealSense D435i Explained

Published: 2023-05-20

1. Introduction

The RealSense D435i is a depth camera developed by Intel. It computes depth using an infrared projector together with a pair of infrared imagers (active stereo). Besides color images, it can also capture depth and infrared streams, which makes it useful in robotics, industrial automation, virtual reality, and many other fields.

2. Hardware Overview

The RealSense D435i is built from several modules: an RGB camera, an infrared projector, a pair of infrared imagers, a vision (depth) processor, and an IMU. The RGB camera captures color images; the projector casts an infrared pattern onto the scene; the two infrared imagers observe it, and the vision processor computes the depth image from this stereo pair; the IMU measures angular velocity and linear acceleration for motion tracking.
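
The IMU is what distinguishes the D435i from the plain D435: its accelerometer and gyroscope readings can be fused with the depth stream for visual-inertial odometry. As a rough illustration (a minimal sketch, not part of the original examples), librealsense exposes the IMU as two motion streams:

#include <iostream>
#include <librealsense2/rs.hpp>
int main()
{
    // Stream only the IMU: accelerometer and gyroscope as MOTION_XYZ32F vectors.
    rs2::pipeline pipe;
    rs2::config cfg;
    cfg.enable_stream(RS2_STREAM_ACCEL, RS2_FORMAT_MOTION_XYZ32F);
    cfg.enable_stream(RS2_STREAM_GYRO, RS2_FORMAT_MOTION_XYZ32F);
    pipe.start(cfg);
    while (true)
    {
        rs2::frameset frames = pipe.wait_for_frames();
        for (const rs2::frame& f : frames)
        {
            // Motion frames carry a 3-vector: m/s^2 for accel, rad/s for gyro.
            if (auto motion = f.as<rs2::motion_frame>())
            {
                rs2_vector v = motion.get_motion_data();
                std::cout << motion.get_profile().stream_name() << ": "
                          << v.x << ", " << v.y << ", " << v.z << std::endl;
            }
        }
    }
    return 0;
}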

3. The RealSense SDK

The Intel RealSense SDK 2.0 (librealsense) is an open-source software development kit from Intel. It makes it easy to interact with the D435i and to acquire and process depth data, and it provides wrappers for several programming languages, including C++, C#, and Python. Below is a simple C++ program that captures depth data from a D435i:

#include <iostream>
#include <librealsense2/rs.hpp>
int main()
{
    // Configure the pipeline to stream 640x480 16-bit depth at 30 FPS.
    rs2::pipeline pipe;
    rs2::config cfg;
    cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
    pipe.start(cfg);
    while (true)
    {
        // Block until a coherent set of frames arrives, then read the depth frame.
        rs2::frameset frames = pipe.wait_for_frames();
        rs2::depth_frame depth = frames.get_depth_frame();
        // get_distance() returns the range at the given pixel in metres.
        std::cout << "Depth at (320, 240): " << depth.get_distance(320, 240) << std::endl;
    }
    return 0;
}
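
The SDK can also back-project a depth frame into a point cloud directly via rs2::pointcloud, which is a useful reference for the manual depth-to-point-cloud conversion done in the ROS example of the next section. A minimal sketch along the same lines (the frame count and printed fields are arbitrary):

#include <iostream>
#include <librealsense2/rs.hpp>
int main()
{
    // Start with the default configuration, which includes a depth stream.
    rs2::pipeline pipe;
    pipe.start();
    rs2::pointcloud pc;
    for (int i = 0; i < 30; ++i)
    {
        rs2::frameset frames = pipe.wait_for_frames();
        rs2::depth_frame depth = frames.get_depth_frame();
        // calculate() back-projects every depth pixel into a 3D vertex (in metres).
        rs2::points points = pc.calculate(depth);
        const rs2::vertex* vertices = points.get_vertices();
        std::cout << "Vertices: " << points.size()
                  << ", first vertex z = " << vertices[0].z << std::endl;
    }
    return 0;
}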

4. Using RealSense with ROS

ROS (Robot Operating System) is an open-source framework for building robot applications, originally developed at Willow Garage. Combined with ROS, the RealSense D435i can be used for robot navigation, mapping, path planning, and many other tasks. Below is an example that uses ROS and the D435i in a point-cloud processing pipeline for volume estimation:

#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud2.h>
#include <cv_bridge/cv_bridge.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/point_types.h>
#include <pcl/filters/passthrough.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/filters/statistical_outlier_removal.h>
#include <pcl/segmentation/progressive_morphological_filter.h>
#include <pcl/surface/mls.h>
#include <pcl/common/transforms.h>
#include <pcl/common/io.h>
#include <pcl/registration/ndt.h>
#include <pcl_ros/point_cloud.h>
#include <iostream>
class VolumeEstimation {
public:
    VolumeEstimation(ros::NodeHandle& nh, ros::NodeHandle& pnh) {
        cloud_.reset(new pcl::PointCloud<pcl::PointXYZ>());
        cloud_filtered_.reset(new pcl::PointCloud<pcl::PointXYZ>());
        cloud_smoothed_.reset(new pcl::PointCloud<pcl::PointXYZ>());
        cloud_rotated_.reset(new pcl::PointCloud<pcl::PointXYZ>());
        cloud_transformed_.reset(new pcl::PointCloud<pcl::PointXYZ>());
        registered_.reset(new pcl::PointCloud<pcl::PointXYZ>());
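        // Set up NDT (Normal Distributions Transform) registration: convergence
        // threshold, optimisation step size, grid resolution (m), and iteration limit.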
        ndt_.reset(new pcl::NormalDistributionsTransform<pcl::PointXYZ, pcl::PointXYZ>());
        ndt_->setTransformationEpsilon(0.01);
        ndt_->setStepSize(0.1);
        ndt_->setResolution(1.0);
        ndt_->setMaximumIterations(35);
        depth_sub_ = nh.subscribe("/depth/image_raw", 1, &VolumeEstimation::depthCallback, this);
        pcl_pub_ = nh.advertise<sensor_msgs::PointCloud2>("/pcl_cloud", 1);
        voxel_pub_ = nh.advertise<sensor_msgs::PointCloud2>("/pcl_cloud_voxel", 1);
        smoothed_pub_ = nh.advertise<sensor_msgs::PointCloud2>("/pcl_cloud_smoothed", 1);
        rotated_pub_ = nh.advertise<sensor_msgs::PointCloud2>("/pcl_cloud_rotated", 1);
        transformed_pub_ = nh.advertise<sensor_msgs::PointCloud2>("/pcl_cloud_transformed", 1);
        registered_pub_ = nh.advertise<sensor_msgs::PointCloud2>("/pcl_cloud_registered", 1);
    }
    ~VolumeEstimation() {}
    void depthCallback(const sensor_msgs::ImageConstPtr& msg) {
        try {
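            // Convert the ROS image (16-bit depth in millimetres) to an OpenCV matrix.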
            cv::Mat depth = cv_bridge::toCvShare(msg, "16UC1")->image;
            cloud_->height = depth.rows;
            cloud_->width = depth.cols;
            cloud_->is_dense = false;
            cloud_->points.resize(cloud_->height * cloud_->width);
            int index = 0;
            for (int i = 0; i < depth.rows; ++i) {
                for (int j = 0; j < depth.cols; ++j) {
                    pcl::PointXYZ& point = cloud_->points[index];
                    // Convert the raw 16-bit depth value (millimetres) to metres
                    // before back-projecting through the pinhole model.
                    float z = depth.at<uint16_t>(i, j) * 0.001f;
                    point.x = (j - cx_) * z / fx_;
                    point.y = (i - cy_) * z / fy_;
                    point.z = z;
                    index++;
                }
            }
            pcl_conversions::toPCL(ros::Time::now(), cloud_->header.stamp);
            cloud_->header.frame_id = "realsense_frame";
            pcl_pub_.publish(*cloud_);
            // Processing pipeline: range filter -> voxel downsample -> ground
            // extraction -> rotate and crop -> NDT registration.
            passthroughFilter(cloud_);
            voxelGridFilter(cloud_filtered_);
            progressiveMorphFilter(cloud_filtered_, cloud_smoothed_);
            transformPointCloud(cloud_smoothed_, cloud_transformed_);
            ndtRegistration(cloud_transformed_, registered_);
            sensor_msgs::PointCloud2 pcl_cloud;
            pcl::toROSMsg(*cloud_filtered_, pcl_cloud);  // the voxel-downsampled cloud
            voxel_pub_.publish(pcl_cloud);
            pcl::toROSMsg(*cloud_smoothed_, pcl_cloud);
            smoothed_pub_.publish(pcl_cloud);
            pcl::toROSMsg(*cloud_rotated_, pcl_cloud);
            rotated_pub_.publish(pcl_cloud);
            pcl::toROSMsg(*cloud_transformed_, pcl_cloud);
            transformed_pub_.publish(pcl_cloud);
            pcl::toROSMsg(*registered_, pcl_cloud);
            registered_pub_.publish(pcl_cloud);
        } catch (cv_bridge::Exception& e) {
            ROS_ERROR("cv_bridge exception: %s", e.what());
        }
    }
    void passthroughFilter(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud_in) {
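        // Keep only points whose z (depth) lies between 0.01 m and 1.0 m.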
        pcl::PassThrough<pcl::PointXYZ> filter;
        filter.setInputCloud(cloud_in);
        filter.setFilterFieldName("z");
        filter.setFilterLimits(0.01, 1.0);
        filter.filter(*cloud_filtered_);
        cloud_filtered_->header.frame_id = "realsense_frame";
    }
    void voxelGridFilter(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud_in) {
        pcl::VoxelGrid<pcl::PointXYZ> filter;
        filter.setInputCloud(cloud_in);
        filter.setLeafSize(0.025f, 0.025f, 0.025f);
        // Filter into a temporary cloud so the input is not overwritten while it is
        // still being read (cloud_in and cloud_filtered_ may be the same object).
        pcl::PointCloud<pcl::PointXYZ> downsampled;
        filter.filter(downsampled);
        *cloud_filtered_ = downsampled;
        cloud_filtered_->header.frame_id = "realsense_frame";
    }
    void progressiveMorphFilter(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud_in, pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud_out) {
        // ProgressiveMorphologicalFilter returns the indices of ground-like points
        // rather than a filtered cloud, so extract them and copy them out.
        pcl::ProgressiveMorphologicalFilter<pcl::PointXYZ> filter;
        filter.setInputCloud(cloud_in);
        filter.setMaxWindowSize(20);
        filter.setSlope(0.5f);
        filter.setInitialDistance(0.5f);
        filter.setMaxDistance(3.0f);
        std::vector<int> ground;
        filter.extract(ground);
        pcl::copyPointCloud(*cloud_in, ground, *cloud_out);
        cloud_out->header.frame_id = "realsense_frame";
    }
    void transformPointCloud(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud_in, pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud_out) {
        // Rotate the cloud by -90 degrees about the z axis, then crop along z.
        Eigen::Matrix4f transform = Eigen::Matrix4f::Identity();
        float theta = -M_PI / 2.0;
        transform(0,0) = std::cos(theta);
        transform(0,1) = -std::sin(theta);
        transform(1,0) = std::sin(theta);
        transform(1,1) = std::cos(theta);
        // Keep the intermediate result in the member cloud so it can be published.
        pcl::transformPointCloud(*cloud_in, *cloud_rotated_, transform);
        cloud_rotated_->header.frame_id = "realsense_frame";
        pcl::PassThrough<pcl::PointXYZ> filter;
        filter.setInputCloud(cloud_rotated_);
        filter.setFilterFieldName("z");
        filter.setFilterLimits(0.01, 1.5);
        filter.filter(*cloud_out);
        cloud_out->header.frame_id = "realsense_frame";
    }
    void ndtRegistration(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud_in, pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud_out) {
        // Downsample the input to keep NDT fast.
        pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_init(new pcl::PointCloud<pcl::PointXYZ>());
        pcl::VoxelGrid<pcl::PointXYZ> voxel;
        voxel.setLeafSize(0.05f, 0.05f, 0.05f);
        voxel.setInputCloud(cloud_in);
        voxel.filter(*cloud_init);
        if (cloud_out->empty()) {
            // First frame: nothing to register against yet, so use it as the target.
            *cloud_out = *cloud_init;
        } else {
            ndt_->setInputSource(cloud_init);
            ndt_->setInputTarget(cloud_out);
            pcl::PointCloud<pcl::PointXYZ>::Ptr output_cloud(new pcl::PointCloud<pcl::PointXYZ>());
            ndt_->align(*output_cloud);
            Eigen::Matrix4f transform = ndt_->getFinalTransformation();
            pcl::transformPointCloud(*cloud_init, *cloud_out, transform);
        }
        cloud_out->header.frame_id = "realsense_frame";
    }
private:
    ros::Subscriber depth_sub_;
    ros::Publisher pcl_pub_;
    ros::Publisher voxel_pub_;
    ros::Publisher smoothed_pub_;
    ros::Publisher rotated_pub_;
    ros::Publisher transformed_pub_;
    ros::Publisher registered_pub_;
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_;
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_filtered_;
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_smoothed_;
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_rotated_;
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_transformed_;
    pcl::PointCloud<pcl::PointXYZ>::Ptr registered_;
    std::unique_ptr<pcl::NormalDistributionsTransform<pcl::PointXYZ, pcl::PointXYZ>> ndt_;
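    // Example depth intrinsics; in practice read these from the camera_info topic.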
    float fx_ = 385.255, fy_ = 385.255, cx_ = 320.5, cy_ = 240.5;
};
int main(int argc, char* argv[]) {
    ros::init(argc, argv, "volume_estimation");
    ros::NodeHandle nh, pnh("~");
    VolumeEstimation estimation(nh, pnh);
    ros::spin();
    return 0;
}
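
To try the node alongside the camera, launch the realsense2_camera driver (for example its rs_camera.launch file) and remap /depth/image_raw to the depth topic it publishes, typically /camera/depth/image_rect_raw; the intrinsics fx_, fy_, cx_, cy_ hard-coded above should likewise be replaced with the values from the corresponding camera_info topic.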

5. Conclusion

The RealSense D435i is a high-quality depth camera that suits robotics, industrial automation, virtual reality, and many other applications. With the RealSense SDK and ROS, it is straightforward to interact with the camera and to acquire and process its depth data.