Detecting a box with an RGBD camera and extracting its pose

Posted by Hill_LAI


Detection is done with OpenCV + PCL, not deep learning.
A detailed walkthrough will follow in a later post.
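
Before the listing, here is a minimal usage sketch of the detector class defined below. It is an assumption based on the definitions that follow, not part of the original post: the camera intrinsics are passed as {cx, cy, f}, a negative return value from detectBoxPosition is treated as failure, and the file names and intrinsic values are placeholders.

#include <include/application_driver/DetectorBox.h>  // header from the listing below; declares detectorBox
#include <opencv2/opencv.hpp>
#include <iostream>
#include <vector>

int main()
{
    detectorBox detector;

    // Camera intrinsics {cx, cy, f} -- placeholder values, replace with your calibration.
    std::vector<float> cameraInfo = {320.0f, 240.0f, 570.0f};
    detector.setCameraInfo(cameraInfo);

    // 16-bit depth image (assumed in millimetres) and the registered BGR image -- hypothetical file names.
    cv::Mat depth = cv::imread("depth.png", cv::IMREAD_UNCHANGED);
    cv::Mat rgb   = cv::imread("rgb.png", cv::IMREAD_COLOR);

    float x = 0, y = 0, z = 0, angle = 0;
    int ret = detector.detectBoxPosition(depth, rgb, x, y, z, angle);
    if (ret < 0)
    {
        std::cout << "detection failed, code " << ret << std::endl;
        return 1;
    }
    std::cout << "box pose: x=" << x << " y=" << y << " z=" << z
              << " angle=" << angle << std::endl;
    return 0;
}

The actual listing follows.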

#include <include/application_driver/DetectorBox.h>


void drawKeyline(cv::Mat& img, cv::Point pStart, cv::Point pEnd, float density_len, int arror_len, int arror_alpha, cv::Scalar& color, int thickness, int lineType)
{
    const double PI = 3.1415926;

    double d_x = (pEnd.x - pStart.x);
    double d_y = (pEnd.y - pStart.y);

    double add_len = 50;
    double d_len = sqrtf(powf(d_x, 2) + powf(d_y, 2));
    double d_len_new = d_len + add_len;

    d_x = d_x*(d_len_new / d_len);
    d_y = d_y*(d_len_new / d_len);

    int number = d_len_new / density_len;
    double u_x = d_x / number;
    double u_y = d_y / number;

    std::vector<Point> inside_points;

    for (int i = 0; i < number; i++)
    {
        int temp_x = pStart.x + i*u_x;
        int temp_y = pStart.y + i*u_y;
        Point temp(temp_x, temp_y);

        inside_points.push_back(temp);
    }

    double us_x = 0.382 * u_x;
    double us_y = 0.382 * u_y;

    int points_count = inside_points.size();

    for (int i = 0; i < points_count - 1;i++)
    {
        Point p_start((inside_points[i].x - us_x), (inside_points[i].y - us_y));
        Point p_end((inside_points[i].x + us_x), (inside_points[i].y + us_y));
        line(img, p_start, p_end, color, 1);
    }

    if (0 == points_count) return;

    Point arrow;
    //Compute the angle θ of the line direction (the key is the atan2 function)
    double angle = atan2((double)(pStart.y - pEnd.y), (double)(pStart.x - pEnd.x));
    //line(img, pStart, pEnd, color, thickness, lineType);
    //Compute the endpoint of the other side of the arrowhead (which side it lands on depends on the arrow direction, i.e. on the positions of pStart and pEnd)
    arrow.x = inside_points[points_count - 1].x + arror_len * cos(angle + PI * arror_alpha / 180);
    arrow.y = inside_points[points_count - 1].y + arror_len * sin(angle + PI * arror_alpha / 180);
    line(img, inside_points[points_count - 1], arrow, color, thickness, lineType);
    arrow.x = inside_points[points_count - 1].x + arror_len * cos(angle - PI * arror_alpha / 180);
    arrow.y = inside_points[points_count - 1].y + arror_len * sin(angle - PI * arror_alpha / 180);
    line(img, inside_points[points_count - 1], arrow, color, thickness, lineType);


}
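
// Hypothetical usage sketch for drawKeyline (not part of the original code): it draws a dashed
// line from pStart toward pEnd, extends it by 50 px, and caps it with an arrowhead whose length
// and half-angle are given by arror_len and arror_alpha. OpenCV headers are assumed to be pulled
// in by DetectorBox.h, as the listing above relies on them.
static void demoDrawKeyline()
{
    cv::Mat canvas = cv::Mat::zeros(480, 640, CV_8UC3);  // blank test image
    cv::Scalar green(0, 255, 0);                          // color is taken by non-const reference
    // dashed arrow from (100,100) towards (300,200): dash spacing 10 px,
    // arrowhead 15 px long, opening 30 degrees on each side
    drawKeyline(canvas, cv::Point(100, 100), cv::Point(300, 200),
                10.0f, 15, 30, green, 2, cv::LINE_AA);
    cv::imwrite("keyline_demo.png", canvas);
}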


detectorBox::detectorBox(){}

detectorBox::~detectorBox(){}

void detectorBox::setCameraInfo(std::vector<float> cameraInfo)
{
    if(cameraInfo.size() != 3) return;

    camera_cx = cameraInfo[0];
    camera_cy = cameraInfo[1];
    camera_f = cameraInfo[2];
}

int detectorBox::detectBoxPosition(cv::Mat& image_depth, cv::Mat& image_rgb, float& x, float& y, float& z, float& angle)
{
    const double camera_factor = 1000;

    //Device capture image fail
    if (image_depth.empty()) return -1;//debug by hill

    Mat depth = image_depth.clone();
    //LOGE("Step 1: Transfer 16-bit image to 8-bit image\\n");
    std::cout << "Step-1 " << std::endl;
    //------------------------Step 1: Transfer 16-bit image to 8-bit image -----------------------------/
    Mat g_pShowImage = Mat::zeros(depth.size(), CV_8UC1);
    int valueRange = 3000;//maxValue - minValue;
    int row_number = depth.rows;
    int col_number = depth.cols;

    for (unsigned int i = 0; i < row_number; i++)
    {
        ushort* value_d = depth.ptr<ushort>(i);
        uchar* value_out = g_pShowImage.ptr<uchar>(i);
        for (unsigned int j = 0; j < col_number; j++)
        {
            // the depth value may be 0 (invalid pixel)
            ushort d = value_d[j] / 10;
            ushort value = d * 255 / valueRange;

            if (d > 3000 || d < 300) //change for astro-p
            {
                value_out[j] = 0; //change for astro-p
                continue; //change for astro-p
            }

            if (value > 255)
            {
                value_out[j] = 255;
            }
            else
            {
                value_out[j] = value;
            }
        }
    }

    //------------------------------get the blue mask-------------------------------------------/
    std::cout << "Step-2 " << std::endl;

    cv::Mat image_hsv = image_rgb.clone();
    cv::Mat image_blue_mask = cv::Mat(image_rgb.size(),CV_8UC1);
    cv::cvtColor(image_rgb, image_hsv, cv::COLOR_BGR2HSV);
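    // H in [78,124] on OpenCV's 0-180 hue scale roughly covers cyan through blue; the S and V lower bounds reject grey and very dark pixels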
    double low_H = 78;
    double low_S = 43;
    double low_V = 46;
    double high_H = 124;
    double high_S = 255;
    double high_V = 255;
    cv::inRange(image_hsv, cv::Scalar(low_H, low_S, low_V), cv::Scalar(high_H, high_S, high_V), image_blue_mask);

    //LOGE("Step 3:Depth image transfer point cloud\\n");
    std::cout << "Step-3 " << std::endl;
    /****************************************Step 3: Depth image transfer point cloud***********************************/
    //median blur
    Mat depth_mblur;
    medianBlur(depth, depth_mblur, 3);
    depth = depth_mblur;

    if (depth.empty()) return -1;//if image is empty,debug by hill

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_obj(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointXYZ p;


    int sample_i = 3;
    int sample_j = 3;
    int row_start = 10; //20
    int col_start = 16; //32
    int row_end = row_start + depth.rows*0.95; //row_start + depth.rows*0.8;
    int col_end = col_start + depth.cols*0.95; //col_start + depth.cols*0.9;

    for (unsigned int i = row_start; i < row_end; i = i + sample_i) //default is 15
    {
        for (unsigned int j = col_start; j < col_end; j = j + sample_j) //default is 18
        {

            uchar m = image_blue_mask.at<uchar>(i, j);
            if (m != 255) continue;

            ushort d = depth.at<ushort>(i, j);

            // transfer to point cloud
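            // pinhole back-projection (raw depth assumed in millimetres, camera_factor converts to metres):
            //   z = d / camera_factor,  x = (j - cx) * z / f,  y = (i - cy) * z / f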
            p.z = double(d) / camera_factor;
            p.x = (j - camera_cx) * p.z / camera_f;
            p.y = (i - camera_cy) * p.z / camera_f;

            cloud->points.push_back(p);
        }
    }

    cloud->width = cloud->points.size();
    cloud->height = 1;
    cloud->is_dense = false;
    cloud->points.resize(cloud->width * cloud->height);

    //LOGE("Step 7: Euclidean Cluster Extraction\\n");
    std::cout << "Step-4 " << std::endl;
    //-----------------------Step 7: Euclidean Cluster Extraction-------------------------/
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>);
    tree->setInputCloud(cloud);

    std::vector<pcl::PointIndices> cluster_indices;
    pcl::EuclideanClusterExtraction<pcl::PointXYZ> ec;
    ec.setClusterTolerance(0.05); // for 8m
    ec.setMinClusterSize(20);
    ec.setMaxClusterSize(200000);
    ec.setSearchMethod(tree);
    ec.setInputCloud(cloud);
    ec.extract(cluster_indices);

    //Return -3 when no cluster is found
    if (0 == cluster_indices.size()) return -3; //if the object is not found, return; debug by hill
    //keep only the first extracted cluster as the biggest point set
    vector<pcl::PointCloud<pcl::PointXYZ>::Ptr> cloud_cluster_set;
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cluster(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cluster_1(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cluster_2(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cluster_3(new pcl::PointCloud<pcl::PointXYZ>);
    cloud_cluster_set.push_back(cloud_cluster_1);
    cloud_cluster_set.push_back(cloud_cluster_2);
    cloud_cluster_set.push_back(cloud_cluster_3);
    double distance_to_origin, min_distance;
    min_distance = 10;
    int index, min_index;
    index = 0;

    for (std::vector<pcl::PointIndices>::const_iterator it = cluster_indices.begin(); it != cluster_indices.end(); ++it)
    {
        for (std::vector<int>::const_iterator pit = it->indices.begin(); pit != it->indices.end(); ++pit)
            cloud_cluster_set[index]->points.push_back(cloud->points[*pit]);
        cloud_cluster_set[index]->width = cloud_cluster_set[index]->points.size();
        cloud_cluster_set[index]->height = 1;
        cloud_cluster_set[index]->is_dense = false;
        cloud_cluster_set[index]->points.resize(cloud_cluster_set[index]->width * cloud_cluster_set[index]->height);
        break;
        //keep the biggest point set
    }

    if(cloud_cluster_set.size() == 0) return -1;

    *cloud_cluster = *cloud_cluster_set[0];

    //pcl::io::savePCDFileASCII("/home/peak/BoxDetect/cloud_cluster.pcd",*cloud_cluster);
    //return 0;

    //LOGE("Step 8 : for object, we need to segment the all plane\\n");
    std::cout << "Step-5 " << std::endl;
    //----------------------------Step 8: for all object, we need to calculate the biggest plane:--------------------/

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_p(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_all_plane(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cluster_cutted(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_a(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_b(new pcl::PointCloud<pcl::PointXYZ>);

    *cloud_all_plane = *cloud_cluster;

    int nr_points = (int)cloud_all_plane->points.size();
    //orient normal of point cloud

    // keep fitting planes while more than 10% of the original cloud remains

    pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients());
    pcl::PointIndices::Ptr inliers(new pcl::PointIndices());
    //create the segmentation object
    pcl::SACSegmentation<pcl::PointXYZ> seg;
    // optional
    seg.setOptimizeCoefficients(true);
    // mandatory
    seg.setModelType(pcl::SACMODEL_PLANE);
    seg.setMethodType(pcl::SAC_RANSAC);
    //seg.setMaxIterations(100);
    seg.setDistanceThreshold(0.01);//astro-p is 0.02 astro-mini is 0.01

    vector<Eigen::Vector4f> centroid_set;
    vector<double> angle_set;
    vector<pcl::PointCloud<pcl::PointXYZ>::Ptr> cloud_set;

    int count = 0;
    while (cloud_all_plane->points.size() > 0.1* nr_points)
    {
        if (cloud_all_plane->points.size() < 50) break;

        //Segment the largest planar component from the remaining cloud
        seg.setDistanceThreshold(0.01);
        seg.setInputCloud(cloud_all_plane);
        seg.segment(*inliers, *coefficients);

        if (inliers->indices.size() == 0)
        {
            //std::cout << "Could not estimate a planar model for the given dataset." << std::endl;
            break;
        }

        vector<double> normal;
        normal.push_back(coefficients->values[0]);
        normal.push_back(coefficients->values[1]);
        normal.push_back(coefficients->values[2]);
        normal.push_back(coefficients->values[3]);

        //angle
        Eigen::Vector3d normal_a(0, 0, 1);
        Eigen::Vector3d normal_b(coefficients->values[0], coefficients->values[1], coefficients->values[2]);
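
        // NOTE: the original listing breaks off at this point in the source post.
        // What follows is a minimal sketch (an assumption, not the original author's code)
        // of how this plane loop is commonly completed, using only the variables already
        // declared above (normal_a, inliers, cloud_a, angle_set, centroid_set, cloud_set).
        // It needs <pcl/filters/extract_indices.h>, <pcl/common/centroid.h> and <cmath>,
        // which DetectorBox.h may already pull in.

        // angle between the fitted plane normal and the camera z-axis, in degrees
        double cos_theta = std::fabs(normal_a.dot(normal_b.normalized()));
        double plane_angle = std::acos(cos_theta) * 180.0 / 3.1415926;

        // collect this plane's inlier points and their centroid
        pcl::PointCloud<pcl::PointXYZ>::Ptr plane_cloud(new pcl::PointCloud<pcl::PointXYZ>); // hypothetical name
        pcl::ExtractIndices<pcl::PointXYZ> extract;
        extract.setInputCloud(cloud_all_plane);
        extract.setIndices(inliers);
        extract.setNegative(false);
        extract.filter(*plane_cloud);

        Eigen::Vector4f centroid;
        pcl::compute3DCentroid(*plane_cloud, centroid);

        angle_set.push_back(plane_angle);
        centroid_set.push_back(centroid);
        cloud_set.push_back(plane_cloud);

        // remove the inliers so the next pass fits the next-largest plane
        extract.setNegative(true);
        extract.filter(*cloud_a);
        *cloud_all_plane = *cloud_a;
        count++;
    }

    // Selecting the box face among the fitted planes and filling x, y, z, angle
    // would follow here in the original code.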
