Merge pull request !1 from jzq/develop
master
jzq 2024-06-11 10:46:23 +00:00 committed by Gitee
commit 81512baac3
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
12 changed files with 2403 additions and 0 deletions

685
fusion.cpp Normal file

@@ -0,0 +1,685 @@
#include <set>
#include "fusion.h"
// Degrees -> radians conversion (3141.59 / 180000 == pi / 180, with pi scaled by 1000)
#define ANGLE_TO_RADIAN(angle) ((angle)*3141.59 / 180000)
// Fusion entry point
char LCFusion::fusion(std::vector<Mat> &camInfo)
{
projectedPoints.clear();
if (camInfo.size() < 3) // the stereo ranging result should contain 3 Mats (image, boxes, dwha)
{
std::cout << "There is no detection box!" << std::endl;
return -1;
}
Mat boxes = camInfo[1], dwha = camInfo[2];
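// Expected layout (produced by Ranging::get_range / getInfo):
//   boxes: one row per detection -> (xmin, ymin, xmax, ymax, conf, cls) in pixels
//   dwha : one row per detection -> (distance[cm], width[cm], height[cm], angle[rad]); -1 marks "no estimate"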
if (this->upScan.pointCloud.empty())
{
std::cout << "There is no lidar data!" << std::endl;
return -2;
}
if (boxes.empty())
{
std::cout << "There is no detection box!" << std::endl;
return -1;
}
else
{
draw_point(camInfo); // project the lidar points into the image (for visualization)
std::cout << "Fusion start!" << std::endl;
// Large objects
vector<int> clusterlable = clusters_bboxs(boxes); // detection-box label (box index) for each lidar cluster
optimize_clusterlable(clusterlable, boxes); // refine the labels for hollow objects
big_fusion(clusterlable, boxes, dwha); // assign semantics to large objects
// Small objects
small_box_without_dis(boxes, dwha); // map detection boxes that have no distance estimate
small_fusion(boxes, dwha); // assign semantics to small objects
}
return 1;
}
void LCFusion::big_fusion(vector<int> &clusterlable, Mat &bboxs, Mat &dwha) // assign semantics to large objects
{
std::cout << "big_fusion" << std::endl;
for (int i = 0; i < clusterlable.size(); i++) // iterate over every lidar cluster
{
int bbox_num = clusterlable[i];
if (bbox_num != -1) // -1 means no detection box matched this cluster
{
int bbox_cls = bboxs.at<float>(bbox_num, 5);
if (class_index[bbox_cls] != 13 && class_index[bbox_cls] != 14) // exclude diffuse objects (fan base, bar stool)
{
//std::cout << "bboxcls:" << bbox_cls << std::endl;
for (int j = 0; j < this->upScan.clustedCloud[i].size(); j++) // label every point of this cluster
{
int laser_index = this->upScan.clustedCloud[i][j].at<float>(0, 3);
this->upScan.intensities[laser_index] = class_intensity[bbox_cls + 1];
}
}
else
{
if (upScan.clustedCloud[i].size() > 0 && upScan.clustedCloud[i].size() < 4) // a diffuse object should produce only a few lidar returns
{
//cout << "******start draw circle******" << endl;
double d = dwha.at<float>(bbox_num, 0);
if (d > 0)
{
draw_circle(i, bbox_num, bboxs, dwha);
} // assign semantics to the diffuse object
}
}
}
}
}
void LCFusion::small_fusion(Mat &bboxs, Mat &dwha) // assign semantics to small objects
{
std::cout << "small_fusion" << std::endl;
for (int i = 0; i < bboxs.rows; i++)
{
int c = bboxs.at<float>(i, 5);
float x_mim = bboxs.at<float>(i, 0);
float y_min = bboxs.at<float>(i, 1);
float x_max = bboxs.at<float>(i, 2);
float y_max = bboxs.at<float>(i, 3);
std::cout << "cls:" << c << " xmin:" << x_mim << " ymin:" << y_min << " xmax:" << x_max << " ymax:" << y_max << std::endl;
if (class_index[c] == 0 || class_index[c] == 2 || (class_index[c] == 4 || class_index[c] == 5)) // small-object classes
{
Mat temp(2, 1, CV_32F);
double d = dwha.at<float>(i, 0);
double a = dwha.at<float>(i, 3); // distance [cm] and angle [rad] of the detection
if (d == -1)
continue;
//std::cout << "d:" << d << " a: " << a << std::endl;
temp.at<float>(0, 0) = float((dwha.at<float>(i, 0) / 100.0) * cos(dwha.at<float>(i, 3)));
temp.at<float>(1, 0) = float((dwha.at<float>(i, 0) / 100.0) * sin(dwha.at<float>(i, 3)));
temp = c2lR * temp + c2lT; // camera frame -> lidar frame
double y = temp.at<float>(0, 0);
double x = temp.at<float>(1, 0);
float dis = sqrt(x * x + y * y); // back to polar coordinates
float angle = 0.0; // scan index of the detection
if (y >= 0)
angle = (acos(x / dis) - this->upScan.angleMin) / this->upScan.angleInc;
else
angle = ((2 * M_PI - acos(x / dis)) - this->upScan.angleMin) / this->upScan.angleInc;
double data_angle = atan((dwha.at<float>(i, 1)) / (200 * dis)); // half angular width: atan((width[cm]/100/2) / dis)
data_angle /= this->upScan.angleInc; // half the number of scan points to overwrite
angle = (int)angle - (int)data_angle; // first scan index
int num_point = 2 * (int)data_angle + 1; // number of scan points to overwrite
int len = this->upScan.distance.size();
for (int i = 0; i < num_point; i++)
{
if ((angle + i) < 0 || (angle + i) > (len - 1)) // bounds check
continue;
this->upScan.distance[angle + i] = dis;
this->upScan.intensities[angle + i] = class_intensity[c + 1];
}
}
else if (class_index[c] == 1 || class_index[c] == 3 ||class_index[c] == 6)
{
int len = this->upScan.distance.size();
Mat z = r->Z.clone();
if (countNonZero(z) < 1)
{
return;
}
if (z.cols >= 2)
{
int begin_index = 0;
for(int j = z.cols-1; j >=0; j-- )
{
int d = z.at<float>(0, j);
if (d != 0)
{
begin_index = j;
break;
}
}
cout << "tizhongcheng " << z.cols << endl;
float d_begin = z.at<float>(0, begin_index)/1000.;
float a_begin = z.at<float>(1, begin_index);
cout << "d_begin: " << d_begin << " a_begin: " << a_begin << endl;
Mat temp(2, 1, CV_32F);
temp.at<float>(0, 0) = float(d_begin * cos(a_begin));
temp.at<float>(1, 0) = float(d_begin * sin(a_begin));
temp = c2lR * temp + c2lT;
double y = temp.at<float>(0, 0);
double x = temp.at<float>(1, 0);
float dis = sqrt(x * x + y * y);
float angle_index = 0.0; // scan index of the point
if (y >= 0)
angle_index = (acos(x / dis) - this->upScan.angleMin) / this->upScan.angleInc;
else
angle_index = ((2 * M_PI - acos(x / dis)) - this->upScan.angleMin) / this->upScan.angleInc;
angle_index = (int)angle_index;
if (angle_index >= 0 && angle_index <len) // bounds check
{
this->upScan.distance[angle_index] = dis;
this->upScan.intensities[angle_index] = class_intensity[c + 1];
}
cout << "dis: " << dis << " angle_index: " << angle_index << endl;
angle_index += 1;
for (int i = begin_index-1; i >= 0; i--)
{
float temp_d = z.at<float>(0, i)/1000.;
float temp_a = z.at<float>(1, i);
temp.at<float>(0, 0) = float(temp_d * cos(temp_a));
temp.at<float>(1, 0) = float(temp_d * sin(temp_a));
temp = c2lR * temp + c2lT;
y = temp.at<float>(0, 0);
x = temp.at<float>(1, 0);
dis = sqrt(x * x + y * y);
float temp_angle = 0.0; // lidar-frame bearing of the point
if (y >= 0)
temp_angle = acos(x / dis);
else
temp_angle =2 * M_PI - acos(x / dis);
float angle_now = this->upScan.angleMin + this->upScan.angleInc*angle_index;
if (temp_angle >= angle_now)
{
if (angle_index >= 0 && angle_index < len) // bounds check
{
this->upScan.distance[angle_index] = dis;
// cout << "dis: " << dis << " angle_index: " << angle_index << endl;
this->upScan.intensities[angle_index] = class_intensity[c + 1];
}
angle_index += 1;
}
}
r->Z = Mat::zeros(2, 1, CV_32FC1);
}
}
}
}
void LCFusion::optimize_clusterlable(vector<int> &clusterlable, Mat &bboxs) // for hollow objects: collect all lidar clusters matched to each detection box and pick the foreground ones
{
for (int i = 0; i < bboxs.rows; i++) // collect all clusters assigned to box i
{
vector<int> temp_cluster_num;
Mat temp_bbox = bboxs.row(i).clone();
for (int j = 0; j < clusterlable.size(); j++)
{
if (clusterlable[j] == i)
{
temp_cluster_num.push_back(j);
}
}
if (temp_cluster_num.size() == 0)
continue;
choose_forceground(temp_cluster_num, temp_bbox, clusterlable); // keep only the foreground clusters of hollow objects
}
}
void LCFusion::choose_forceground(vector<int> &cluster_num, Mat &bbox, vector<int> &clusterlable) // select the foreground clusters of a hollow object
{
int box_class = bbox.at<float>(0, 5);
if ((class_index[box_class] >= 15 && class_index[box_class] <= 18 || class_index[box_class] == 12 || class_index[box_class] == 10) && cluster_num.size() > 1)
{
cout << "class " << box_class << " have " << cluster_num.size() << " clusters" << endl;
int total_point = 0;
for (int i = 0; i < cluster_num.size(); i++) // total number of lidar points inside the box
{
total_point += (this->upScan.clustedCloud[cluster_num[i]].size());
}
for (int j = 0; j < cluster_num.size(); j++) // a cluster holding more than 20% of the points (or more than 4 points) is treated as background
{
float num_cluster = this->upScan.clustedCloud[cluster_num[j]].size();
if ((num_cluster / total_point > 0.2) || num_cluster > 4)
{
clusterlable[cluster_num[j]] = -1;
}
}
}
}
void LCFusion::draw_point(std::vector<Mat> &camInfo) // collect the lidar points that project into the image
{
projectedPoints.clear();
for (int i = 0; i < this->upScan.pointCloud.rows; i++)
{
Mat temp_pointcloud = this->upScan.pointCloud.row(i).clone();
Mat uv = pointcloud_pixel(temp_pointcloud);
int x = uv.at<float>(0, 0);
int y = uv.at<float>(1, 0);
if (x > 0 && x < 640 && y > 0 && y < 480)
{
Point2d pt_uv(x, y);
projectedPoints.push_back(pt_uv);
}
}
}
// void LCFusion::filter_laser(const FrameData &laserData, float angle_min, float angleInc)
// {
// upScan.distance.clear();
// upScan.intensities.clear();
// float pointAngle;
// upScan.angleInc = angleInc;
// for (int i = 0; i < laserData.len; i++)
// {
// pointAngle = angle_min + i * upScan.angleInc;
// if (pointAngle > cameraAnglemin)
// {
// if (pointAngle > cameraAnglemax)
// {
// upScan.angleMax = pointAngle - upScan.angleInc;
// break;
// }
// upScan.distance.push_back(laserData.distance[i] / 1000.f);
// upScan.intensities.push_back(0);
// if (i == (laserData.len - 1))
// upScan.angleMax = pointAngle;
// // upScan.intensities.push_back(laserData.intensities[i]);
// }
// }
// upScan.angleMin = upScan.angleMax - (upScan.distance.size() - 1) * upScan.angleInc;
// this->scan_to_pointcloud();
// this->cluster();
// }
void LCFusion::filter_laser(const FrameData &laserData, float angle_min, float angleInc) // synchronize the scan with the camera field of view
{
upScan.distance.clear();
upScan.intensities.clear();
upScan.angleMin = angle_min;
upScan.angleInc = angleInc;
upScan.angleMax = ANGLE_TO_RADIAN(laserData.angle_max);
float pointAngle;
for (int i = 0; i < laserData.len; i++)
{
upScan.distance.push_back(laserData.distance[i] / 1000.f);
pointAngle = angle_min + i * upScan.angleInc;
if (pointAngle > cameraAnglemin && pointAngle < cameraAnglemax)
upScan.intensities.push_back(1); // intensity 1 inside the camera FOV
else
upScan.intensities.push_back(0); // intensity 0 outside the camera FOV
}
this->scan_to_pointcloud(); // polar scan -> 2D point cloud
this->cluster(); // cluster the scan
}
void LCFusion::scan_to_pointcloud() // polar scan -> 2D point cloud
{
u_int32_t len = upScan.distance.size();
Mat pointcloud(len, 4, CV_32F);
for (int i = 0; i < len; i++)
{
float dis = this->upScan.distance[i];
float x_temp = cos(upScan.angleMin + i * upScan.angleInc) * dis;
float y_temp = sin(upScan.angleMin + i * upScan.angleInc) * dis;
pointcloud.at<float>(i, 0) = x_temp;
pointcloud.at<float>(i, 1) = y_temp;
pointcloud.at<float>(i, 2) = 0;
pointcloud.at<float>(i, 3) = i; // column 3 keeps the original scan index
}
this->upScan.pointCloud = pointcloud.clone();
}
uchar LCFusion::lidar2box(Mat uv, Mat boxes, uchar *boxflag) // unused
{
int x = uv.at<float>(0, 0);
int y = uv.at<float>(1, 0);
// experimental: classes to skip
std::set<uchar> filterCls = {0, 7, 11, 14, 15, 21};
for (uchar i = 0; i < boxes.rows; i++)
{
int clsIdx = boxes.at<float>(i, 5);
if (filterCls.find(clsIdx) != filterCls.end())
continue;
float xmin = boxes.at<float>(i, 0);
float xmax = boxes.at<float>(i, 2);
float ymin = boxes.at<float>(i, 1);
float ymax = boxes.at<float>(i, 3);
// experimental: skip oversized (likely false-positive) boxes
float ratio = (xmax - xmin) * (ymax - ymin) / 308480.;
if (ratio > 0.7)
{
boxflag[i] += 1;
continue;
}
// if the point lies inside a box, return that box's intensity value (overlapping boxes are not handled)
if (x >= xmin && x <= xmax && y >= ymin && y <= ymax)
{
boxflag[i] += 1;
return class_intensity[clsIdx + 1];
}
}
return 0;
}
std::vector<int> LCFusion::clusters_bboxs(Mat &bboxs) // match all lidar clusters against all detection boxes
{
std::vector<int> clusterlable;
for (int i = 0; i < this->upScan.clustedCloud.size(); i++) // iterate over every lidar cluster
{
std::vector<Mat> temp_cluster = this->upScan.clustedCloud[i];
int temp_clusters_bbox = cluster_bboxs(temp_cluster, bboxs); // find the detection box matching this cluster
clusterlable.push_back(temp_clusters_bbox);
}
return clusterlable;
}
int LCFusion::cluster_bboxs(vector<Mat> &one_cluster, Mat &bboxs) // find the detection box matching one lidar cluster
{
vector<int> temp_result;
Mat begin = pointcloud_pixel(one_cluster[one_cluster.size() - 1]);
Mat end = pointcloud_pixel(one_cluster[0]); // pixel coordinates of the cluster's first and last points
for (int i = 0; i < bboxs.rows; i++)
{
Mat bbox = bboxs.row(i).clone();
int cls = bbox.at<float>(0, 5);
float ratio = ratio_in_1box(begin, end, bbox); // fraction of the cluster inside the box
if (ratio > 0.8 && this->class_index[cls] >= 7 && class_index[cls] <= 18) // large-object classes only
temp_result.push_back(i);
}
if (temp_result.size() < 1) // no matching box
return -1;
else if (temp_result.size() < 2)
return temp_result[0];
else // the cluster falls into several boxes: keep the narrowest one
{
int min = temp_result[0];
int width_min = bboxs.at<float>(min, 2) - bboxs.at<float>(min, 0);
for (int i = 0; i < temp_result.size(); i++)
{
int width_new = bboxs.at<float>(temp_result[i], 2) - bboxs.at<float>(temp_result[i], 0);
if (width_new < width_min)
{
min = temp_result[i];
width_min = width_new;
}
}
return min;
}
return -1;
}
Mat LCFusion::pointcloud_pixel(Mat &pointcloud) // project a 2D lidar point into pixel coordinates
{
Mat uv(3, 1, CV_32F), cutPointCloud(pointcloud.colRange(0, 3));
Mat camPoint = rotate * cutPointCloud.t() + translation;
// float ppp = camPoint.at<float>(2, 0);
if (camPoint.at<float>(2, 0) <= 0)
{
uv = (Mat_<float>(3, 1) << -1, -1, -1);
return uv;
}
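// Apply the OpenCV plumb-bob distortion model to the normalized coordinates:
// distCoeff = (k1, k2, p1, p2, k3); radial terms use k1/k2/k3, tangential terms use p1/p2.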
float scaleX = camPoint.at<float>(0, 0) / camPoint.at<float>(2, 0);
float scaleY = camPoint.at<float>(1, 0) / camPoint.at<float>(2, 0);
float scaleD = scaleX * scaleX + scaleY * scaleY;
float tempD = 1 + distCoeff.at<float>(0, 0) * scaleD + distCoeff.at<float>(0, 1) * scaleD * scaleD + distCoeff.at<float>(0, 4) * scaleD * scaleD * scaleD;
camPoint.at<float>(0, 0) = scaleX * tempD + 2 * distCoeff.at<float>(0, 2) * scaleX * scaleY +
distCoeff.at<float>(0, 3) * (scaleD + 2 * scaleX * scaleX);
camPoint.at<float>(1, 0) = scaleY * tempD + distCoeff.at<float>(0, 2) * (scaleD + 2 * scaleY * scaleY) +
2 * distCoeff.at<float>(0, 3) * scaleX * scaleY;
camPoint.at<float>(2, 0) = 1.0;
uv = cameraMat * camPoint; // uv is the pixel coordinate
uv = p2r * uv + t; // empirical correction (rotation + offset)
return uv;
}
float LCFusion::ratio_in_1box(Mat &begin, Mat &end, Mat &box) // fraction of a cluster (treated as the horizontal pixel segment begin..end) that falls inside a detection box
{
int x_begin = begin.at<float>(0, 0), x_end = end.at<float>(0, 0), y_begin = begin.at<float>(1, 0), y_end = end.at<float>(1, 0);
int xmin = box.at<float>(0, 0), ymin = box.at<float>(0, 1), xmax = box.at<float>(0, 2), ymax = box.at<float>(0, 3);
if (y_begin < ymin || y_begin > ymax)
return 0;
if (x_begin < xmin)
{
if (x_end < xmin)
return 0;
else if (x_end < xmax)
return (float)(x_end - xmin) / (x_end - x_begin);
else
return (float)(xmax - xmin) / (x_end - x_begin);
}
else if (x_begin < xmax)
{
if (x_end < xmax)
return 1;
else
return (float)(xmax - x_begin) / (x_end - x_begin);
}
else
return 0;
}
void LCFusion::cluster() // cluster the scan by range continuity
{
this->upScan.clustedCloud.clear();
int i = 0;
for (; i < this->upScan.distance.size();)
{
std::vector<Mat> temp;
int j = i;
temp.push_back(this->upScan.pointCloud.row(i));
for (; j < this->upScan.distance.size() - 1; j++)
{
// neighbouring points within 7% of the current range belong to the same cluster
if (abs(this->upScan.distance[j] - this->upScan.distance[j + 1]) < this->upScan.distance[j] * 0.07 )
{
temp.push_back(this->upScan.pointCloud.row(j + 1));
}
else
{
i = j + 1;
break;
}
}
this->upScan.clustedCloud.push_back(temp);
if (j == this->upScan.distance.size() - 1)
break;
if (i == upScan.distance.size() - 1)
{
temp.push_back(upScan.pointCloud.row(i));
this->upScan.clustedCloud.push_back(temp);
break;
}
}
this->clusterIdx.clear();
int ii = 0, totoalNum = 0;
for (auto c : this->upScan.clustedCloud)
{
for (int jj = 0; jj < c.size(); jj++)
{
this->clusterIdx.push_back(ii);
}
totoalNum += c.size();
ii++;
}
//std::cout << "cluster num: " << this->upScan.clustedCloud.size() << " ii:" << ii << " total:" << totoalNum << " oriNum: " << this->upScan.distance.size() << std::endl;
}
void LCFusion::draw_circle(int cluster_num, int bbox_num, Mat &bboxs, Mat &dwha) // generate arc-shaped pseudo-lidar returns for a diffuse object
{
vector<float> center_point = find_circle_center(cluster_num);
float alfa = center_point[1] * upScan.angleInc + upScan.angleMin, r = dwha.at<float>(bbox_num, 1)/200.f,
d = center_point[0], half_data_num = atan(r/d)/(upScan.angleInc);
int num_laser_circle = half_data_num * 2 + 1, cls = bboxs.at<float>(bbox_num, 5);
float laser_index = center_point[1] - half_data_num;
//cout << "alfa: " << alfa << " r: " << r << " d: " << d << " num_laser_circle: " << num_laser_circle << " start_index: " << laser_index << endl;
for (int i = 0; i < num_laser_circle; i++)
{
float theta = upScan.angleMin + laser_index * upScan.angleInc;
if (laser_index < 0 || laser_index > upScan.distance.size())
{
laser_index++;
continue;
}
float data = pow(r, 2) - pow(d, 2) * pow(sin(theta - alfa), 2);
if (data >= 0)
{
//cout << "draw_one_point" << endl;
upScan.distance[laser_index] = circle(d, alfa ,theta, data);
upScan.intensities[laser_index] = class_intensity[cls + 1];
}
laser_index++;
}
}
vector<float> LCFusion::find_circle_center(int cluster_num) // estimate the arc center of a diffuse object
{
vector<float> center_point(2);
int index = 0;
float total_dis = 0;
int num = upScan.clustedCloud[cluster_num].size();
int no_zero_dis = 0; // number of non-zero range readings
for (int i = 0 ; i < num; i++)
{
index = upScan.clustedCloud[cluster_num][i].at<float>(0, 3);
if (upScan.distance[index] != 0)
no_zero_dis++;
total_dis += upScan.distance[index];
}
center_point[0] = total_dis/ no_zero_dis;
index -= (num/2);
float center_laser_index = index;
center_point[1] = center_laser_index;
return center_point;
}
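// Ray/circle intersection: for a circle of radius r centred at range d and bearing alfa,
// a beam at bearing theta hits it at range rho where rho^2 - 2*rho*d*cos(theta - alfa) + d^2 = r^2,
// i.e. rho = d*cos(theta - alfa) - sqrt(r^2 - d^2*sin^2(theta - alfa)) for the near intersection.
// draw_circle passes data = r^2 - d^2*sin^2(theta - alfa).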
float LCFusion::circle(float d, float alfa ,float theta, float data) // d: center range, alfa: center bearing, theta: scan bearing, data: r^2 - d^2*sin^2(theta - alfa)
{
float ro = d * cos(theta - alfa) - sqrt(data);
return ro > 0 ? ro:0;
}
vector<float> LCFusion::width_ladar(const int* coordinates)
{
int x1 = coordinates[0], y1 = coordinates[1], x2 = coordinates[2], y2 = coordinates[3];
std::vector<float> mid = r-> pic2cam(640 / 2, 480); // image center column in camera coordinates
std::vector<float> loc_tar_start = r-> pic2cam(x1, 480); // left edge of the target box
std::vector<float> loc_tar_end = r-> pic2cam(x2, 480); // right edge of the target box
// convert to bearing angles (radians)
float alfa_start = atan((loc_tar_start[0] - mid[0]) / r-> q.at<double>(2, 3));
float alfa_end = atan((loc_tar_end[0] - mid[0]) / r-> q.at<double>(2, 3));
std::vector<float> ladar_range;
ladar_range.push_back(alfa_start);
ladar_range.push_back(alfa_end);
return ladar_range;
}
void LCFusion::small_box_without_dis(Mat & bboxs, Mat & dwha) // handle detection boxes without a distance estimate; the mapping mirrors small_fusion
{
std::cout << "small_box_without_dis" << std::endl;
double set_dis = 2.0; // assume a distance of 2 m
for (int i = 0; i < bboxs.rows; i++)
{
int c = bboxs.at<float>(i, 5);
int x_mim = bboxs.at<float>(i, 0);
int y_min = bboxs.at<float>(i, 1);
int x_max = bboxs.at<float>(i, 2);
int y_max = bboxs.at<float>(i, 3);
double d = dwha.at<float>(i, 0);
int coordinates[4] = {x_mim, y_min, x_max, y_max};
if ((class_index[c] >= 0 && class_index[c] <= 1) || (class_index[c] >= 3 && class_index[c] <= 6))
{
Mat temp(2, 1, CV_32F);
if (d == -1)
{
vector<float> box_angle = width_ladar(coordinates);
//std::cout << "d:" << d << " a: " << a << std::endl;
temp.at<float>(0, 0) = float(set_dis * cos(box_angle[0]));
temp.at<float>(1, 0) = float(set_dis * sin(box_angle[0]));
temp = c2lR * temp + c2lT;
double y = temp.at<float>(0, 0);
double x = temp.at<float>(1, 0);
float dis = sqrt(x * x + y * y);
float angle_end = 0.0;
if (y >= 0)
angle_end = (acos(x / dis) - this->upScan.angleMin) / this->upScan.angleInc;
else
angle_end = ((2 * M_PI - acos(x / dis)) - this->upScan.angleMin) / this->upScan.angleInc;
angle_end = int(angle_end);
// std:: cout << "angle end: " << angle_end << std::endl;
temp.at<float>(0, 0) = float(set_dis * cos(box_angle[1]));
temp.at<float>(1, 0) = float(set_dis * sin(box_angle[1]));
temp = c2lR * temp + c2lT;
y = temp.at<float>(0, 0);
x = temp.at<float>(1, 0);
dis = sqrt(x * x + y * y);
float angle_start = 0.0;
if (y >= 0)
angle_start = (acos(x / dis) - this->upScan.angleMin) / this->upScan.angleInc;
else
angle_start = ((2 * M_PI - acos(x / dis)) - this->upScan.angleMin) / this->upScan.angleInc;
angle_start = int(angle_start);
// std:: cout << "angle start: " << angle_start << std::endl;
if (angle_end-angle_start <= 0)
continue;
int num_point = angle_end-angle_start+1;
for (int i = 0; i < num_point; i++)
{
if ((angle_start + i) < 0 || (angle_start + i) > (upScan.distance.size() - 1))
continue;
this->upScan.intensities[angle_start + i] = 0;
}
}
}
}
}
void LCFusion::set_laser_data(sensor_msgs::LaserScan &scan, FrameData &laserData) // fill the LaserScan message to publish (raw lidar)
{
float angle_min = ANGLE_TO_RADIAN(laserData.angle_min);
float angle_max = ANGLE_TO_RADIAN(laserData.angle_max);
uint32_t len = laserData.len;
float angle_inc = (angle_max - angle_min) / (len - 1.);
scan.header.frame_id = "down_laser_link";
scan.range_min = 0.15;
scan.range_max = 8.0;
scan.angle_min = angle_min;
scan.angle_max = angle_max;
scan.angle_increment = angle_inc;
scan.ranges.resize(len);
scan.intensities.resize(len);
scan.time_increment = 1. / 2400.;
for (int i = 0; i < len; i++)
{
scan.ranges[i] = laserData.distance[i] / 1000.f;
scan.intensities[i] = 0;
}
}
void LCFusion::set_laser_data(sensor_msgs::LaserScan &scan) // fill the LaserScan message to publish (semantic lidar)
{
scan.header.frame_id = "up_laser_link";
scan.range_min = 0.15;
scan.range_max = 8.0;
scan.angle_increment = upScan.angleInc;
scan.ranges.resize(upScan.distance.size());
scan.intensities.resize(upScan.distance.size());
scan.angle_min = upScan.angleMin;
scan.angle_max = upScan.angleMax;
for (int i = 0; i < upScan.distance.size(); i++)
{
scan.ranges[i] = upScan.distance[i];
scan.intensities[i] = upScan.intensities[i];
}
scan.time_increment = 1. / 2400.;
}

100
fusion.h Normal file

@@ -0,0 +1,100 @@
#pragma once
#include <opencv2/opencv.hpp>
#include <ros/ros.h>
#include <sensor_msgs/LaserScan.h>
#include <stdint.h>
#include "lidar.h"
#include "ranging.h"
using namespace cv;
using namespace std;
struct ImgScan
{
float angleMin; // radians
float angleMax;
float angleInc;
std::vector<float> distance; // meters
std::vector<uint8_t> intensities;
Mat pointCloud;
std::vector<std::vector<Mat>> clustedCloud;
};
class Ranging; // forward declaration
class LCFusion
{
public:
LCFusion(Ranging* ranging)
{
r = ranging;
rotate = cameraExtrinsicMat.rowRange(0, 3).colRange(0, 3).t(); // lidar -> camera rotation
translation = cameraExtrinsicMat.col(3).rowRange(0, 3);
translation = -rotate * translation; // lidar -> camera translation
p2r = r_y * r_c; // pixel-coordinate correction matrix (rotation)
};
char fusion(std::vector<Mat> &camInfo); // fusion entry point
uchar lidar2box(Mat uv, Mat boxes, uchar *boxFlag); // unused
void filter_laser(const FrameData &lidarInfo, float angle_min, float angle_inc); // synchronize the scan with the camera field of view
void scan_to_pointcloud(); // polar scan -> 2D point cloud
std::vector<int> clusters_bboxs(Mat &bboxs); // match all lidar clusters against all detection boxes
int cluster_bboxs(std::vector<Mat> &one_cluster, Mat &bboxs); // find the detection box matching one lidar cluster
void optimize_clusterlable(vector<int> & clusterlable, Mat & bboxs); // for hollow objects: collect the clusters of each box and pick the foreground ones
void choose_forceground(vector<int> & cluster_num, Mat & bbox, vector<int> & clusterlable); // foreground selection for hollow objects
void big_fusion(vector<int> & clusterlable, Mat & bboxs, Mat &dwha); // assign semantics to large objects
void small_fusion(Mat & bboxs, Mat & dwha); // assign semantics to small objects
Mat pointcloud_pixel(Mat &pointcloud); // project a 2D lidar point into pixel coordinates
void cluster(); // cluster the scan by range continuity
float ratio_in_1box(Mat &begin, Mat &end, Mat &box); // fraction of a cluster that falls inside a detection box
void draw_point(std::vector<Mat> &camInfo); // collect the lidar points that project into the image
void draw_circle(int cluster_num, int bbox_num, Mat &bboxs, Mat &dwha); // generate arc-shaped pseudo-lidar returns for a diffuse object
vector<float> find_circle_center(int cluster_num); // estimate the arc center of a diffuse object
float circle(float d, float alfa ,float theta, float data); // range/angle relation along the arc
vector<float> width_ladar(const int* coordinates); // bearing angles of a box's left and right edges
void small_box_without_dis(Mat & bboxs, Mat & dwha); // map a detection box without a distance estimate onto the scan
void set_laser_data(sensor_msgs::LaserScan &scan, FrameData &laserData); // fill the LaserScan message to publish (raw lidar)
void set_laser_data(sensor_msgs::LaserScan& scan); // fill the LaserScan message to publish (semantic lidar)
ImgScan upScan;
// Debug helper: lidar points projected into the image
vector<Point2f> projectedPoints; // lidar -> pixel-coordinate projections
std::vector<uchar> clusterIdx;
private:
Ranging* r;
//float cameraAnglemin = 1.117, cameraAnglemax = 2.0071;
float cameraAnglemin = 1.345, cameraAnglemax = 2.0595; // camera field-of-view limits (rad)
Mat rotate, translation, p2r; // correction terms for the lidar-point projection
Mat cameraExtrinsicMat = (Mat_<float>(4, 4) << 9.9710325100937192e-01, 5.9677185357037893e-02,
-4.7156551765403301e-02, 2.6591864512557874e-02,
4.7136795335845721e-02, 1.7395474156941537e-03,
9.9888692878636431e-01, 7.4034983851991365e-02,
5.9692791457662736e-02, -9.9821621281296091e-01,
-1.0784828889205400e-03, -9.6176066499866750e-02, 0., 0., 0., 1.);
Mat cameraMat = (Mat_<float>(3, 3) << 5.0280958247840368e+02, 0., 3.1925413622163182e+02, 0.,
5.0206199606708202e+02, 2.1999125419411467e+02, 0., 0., 1.);
Mat distCoeff = (Mat_<float>(1, 5) << 1.2903666948900971e-01, -9.2734018203156590e-02,
-7.0528508350750380e-03, 8.3183285124261049e-04,
7.2727934388070986e-02);
// Mat c2lR = (Mat_<float>(2, 2) << 0.99664806, 0.0802792, -0.0802792, 0.99664806);
Mat c2lR = (Mat_<float>(2, 2) << 0.98163, 0.19081, -0.19081, 0.98163); //11
//Mat c2lR = (Mat_<float>(2, 2) << 0.9703, 0.24192, -0.24192, 0.9703); //13
Mat c2lT = (Mat_<float>(2, 1) << 0.15, 0.01);
//Mat c2lT = (Mat_<float>(2, 1) << 0.13693697, 0.05094143);
Mat r_c = (Mat_<float>(3, 3) << 9.977e-1, 6.7744e-02, 0., -6.7744e-02, 9.977e-1, 0., 0., 0., 1.);
Mat t = (Mat_<float>(3, 1) << 50., 0., 0.);
Mat r_y = (Mat_<float>(3, 3) << 1., 0., 0., 0., 1.19444e+00, 0., 0., 0., 1.);
/*int class_intensity[24] = {
0, 120, 130, 130, 130, 130, 130, 130, 146, 146,
146, 162, 162, 178, 178, 196, 196, 212, 212, 228,
228, 0, 0, 0}; */
int class_intensity[25] = {
0, 115, 116, 117, 118, 119, 120, 121, 150, 151,
152, 153, 154, 155, 156, 200, 201, 202, 203, 204,
205, 206, 207, 208, 209}; // class -> published intensity mapping
map<int, int> class_index = {{-1, 23}, {0, 0}, {1, 13}, {2, 14}, {3, 7}, {4, 8}, {5, 9}, {6, 1},
{7, 2}, {8, 19}, {9, 18}, {10, 6}, {11, 3}, {12, 15}, {13, 10}, {14, 4}, {15, 5}, {16, 11}, {17, 16},
{18, 12}, {19, 17}, {20, 20}, {21, 21}, {22, 22}};
}; // class remapping used to tell large objects from small ones

583
lidar.cpp Normal file

@@ -0,0 +1,583 @@
/**
* @file lipkg.cpp
* @author LD Robot
* @version V01
* @brief
* @note
* @attention COPYRIGHT LDROBOT
**/
#include "lidar.h"
#include <math.h>
#include <algorithm>
// #ifdef USE_SLBI
// #include "slbi.h"
// #endif
// #ifdef USE_SLBF
// #include "slbf.h"
// #endif
//#define ANGLE_TO_RADIAN(angle) ((angle)*3141.59/180000)
#define MAX_ACK_BUF_LEN 2304000
#define ANGLE_TO_RADIAN(angle) ((angle)*3141.59 / 180000)
static const uint8_t CrcTable[256] =
{
0x00, 0x4d, 0x9a, 0xd7, 0x79, 0x34, 0xe3,
0xae, 0xf2, 0xbf, 0x68, 0x25, 0x8b, 0xc6, 0x11, 0x5c, 0xa9, 0xe4, 0x33,
0x7e, 0xd0, 0x9d, 0x4a, 0x07, 0x5b, 0x16, 0xc1, 0x8c, 0x22, 0x6f, 0xb8,
0xf5, 0x1f, 0x52, 0x85, 0xc8, 0x66, 0x2b, 0xfc, 0xb1, 0xed, 0xa0, 0x77,
0x3a, 0x94, 0xd9, 0x0e, 0x43, 0xb6, 0xfb, 0x2c, 0x61, 0xcf, 0x82, 0x55,
0x18, 0x44, 0x09, 0xde, 0x93, 0x3d, 0x70, 0xa7, 0xea, 0x3e, 0x73, 0xa4,
0xe9, 0x47, 0x0a, 0xdd, 0x90, 0xcc, 0x81, 0x56, 0x1b, 0xb5, 0xf8, 0x2f,
0x62, 0x97, 0xda, 0x0d, 0x40, 0xee, 0xa3, 0x74, 0x39, 0x65, 0x28, 0xff,
0xb2, 0x1c, 0x51, 0x86, 0xcb, 0x21, 0x6c, 0xbb, 0xf6, 0x58, 0x15, 0xc2,
0x8f, 0xd3, 0x9e, 0x49, 0x04, 0xaa, 0xe7, 0x30, 0x7d, 0x88, 0xc5, 0x12,
0x5f, 0xf1, 0xbc, 0x6b, 0x26, 0x7a, 0x37, 0xe0, 0xad, 0x03, 0x4e, 0x99,
0xd4, 0x7c, 0x31, 0xe6, 0xab, 0x05, 0x48, 0x9f, 0xd2, 0x8e, 0xc3, 0x14,
0x59, 0xf7, 0xba, 0x6d, 0x20, 0xd5, 0x98, 0x4f, 0x02, 0xac, 0xe1, 0x36,
0x7b, 0x27, 0x6a, 0xbd, 0xf0, 0x5e, 0x13, 0xc4, 0x89, 0x63, 0x2e, 0xf9,
0xb4, 0x1a, 0x57, 0x80, 0xcd, 0x91, 0xdc, 0x0b, 0x46, 0xe8, 0xa5, 0x72,
0x3f, 0xca, 0x87, 0x50, 0x1d, 0xb3, 0xfe, 0x29, 0x64, 0x38, 0x75, 0xa2,
0xef, 0x41, 0x0c, 0xdb, 0x96, 0x42, 0x0f, 0xd8, 0x95, 0x3b, 0x76, 0xa1,
0xec, 0xb0, 0xfd, 0x2a, 0x67, 0xc9, 0x84, 0x53, 0x1e, 0xeb, 0xa6, 0x71,
0x3c, 0x92, 0xdf, 0x08, 0x45, 0x19, 0x54, 0x83, 0xce, 0x60, 0x2d, 0xfa,
0xb7, 0x5d, 0x10, 0xc7, 0x8a, 0x24, 0x69, 0xbe, 0xf3, 0xaf, 0xe2, 0x35,
0x78, 0xd6, 0x9b, 0x4c, 0x01, 0xf4, 0xb9, 0x6e, 0x23, 0x8d, 0xc0, 0x17,
0x5a, 0x06, 0x4b, 0x9c, 0xd1, 0x7f, 0x32, 0xe5, 0xa8};
CmdInterfaceLinux::CmdInterfaceLinux(int32_t ver) : mRxThread(nullptr),
mRxCount(0),
mReadCallback(nullptr),
version(ver)
{
mComHandle = -1;
}
CmdInterfaceLinux::~CmdInterfaceLinux()
{
Close();
}
bool CmdInterfaceLinux::Open(std::string &port_name)
{
int flags = (O_RDWR | O_NOCTTY | O_NONBLOCK);
mComHandle = open(port_name.c_str(), flags);
if (-1 == mComHandle)
{
std::cout << "CmdInterfaceLinux::Open " << port_name << " error !" << std::endl;
return false;
}
// get port options
struct termios options;
if (-1 == tcgetattr(mComHandle, &options))
{
Close();
std::cout << "CmdInterfaceLinux::Open tcgetattr error!" << std::endl;
return false;
}
options.c_cflag |= (tcflag_t)(CLOCAL | CREAD | CS8 | CRTSCTS);
options.c_cflag &= (tcflag_t) ~(CSTOPB | PARENB | PARODD);
options.c_lflag &= (tcflag_t) ~(ICANON | ECHO | ECHOE | ECHOK | ECHONL |
ISIG | IEXTEN); //|ECHOPRT
options.c_oflag &= (tcflag_t) ~(OPOST);
options.c_iflag &= (tcflag_t) ~(IXON | IXOFF | INLCR | IGNCR | ICRNL | IGNBRK);
options.c_cc[VMIN] = 0;
options.c_cc[VTIME] = 0;
switch (version)
{
case 0:
cfsetispeed(&options, B115200);
break;
case 3:
cfsetispeed(&options, B115200);
break;
case 6:
cfsetispeed(&options, B230400);
break;
case 8:
cfsetispeed(&options, B115200);
break;
case 14:
cfsetispeed(&options, B115200);
break;
default:
cfsetispeed(&options, B115200);
}
if (tcsetattr(mComHandle, TCSANOW, &options) < 0)
{
std::cout << "CmdInterfaceLinux::Open tcsetattr error!" << std::endl;
Close();
return false;
}
tcflush(mComHandle, TCIFLUSH);
mRxThreadExitFlag = false;
mRxThread = new std::thread(mRxThreadProc, this);
mIsCmdOpened = true;
return true;
}
bool CmdInterfaceLinux::Close()
{
if (mIsCmdOpened == false)
{
return true;
}
mRxThreadExitFlag = true;
if (mComHandle != -1)
{
close(mComHandle);
mComHandle = -1;
}
if ((mRxThread != nullptr) && mRxThread->joinable())
{
mRxThread->join();
delete mRxThread;
mRxThread = NULL;
}
mIsCmdOpened = false;
return true;
}
bool CmdInterfaceLinux::GetCmdDevices(std::vector<std::pair<std::string, std::string>> &device_list)
{
struct udev *udev;
struct udev_enumerate *enumerate;
struct udev_list_entry *devices, *dev_list_entry;
struct udev_device *dev;
udev = udev_new();
if (!udev)
{
return false;
}
enumerate = udev_enumerate_new(udev);
udev_enumerate_add_match_subsystem(enumerate, "tty");
udev_enumerate_scan_devices(enumerate);
devices = udev_enumerate_get_list_entry(enumerate);
udev_list_entry_foreach(dev_list_entry, devices)
{
const char *path;
path = udev_list_entry_get_name(dev_list_entry);
dev = udev_device_new_from_syspath(udev, path);
std::string dev_path = std::string(udev_device_get_devnode(dev));
dev = udev_device_get_parent_with_subsystem_devtype(dev, "usb", "usb_device");
if (dev)
{
std::pair<std::string, std::string> p;
p.first = dev_path;
p.second = udev_device_get_sysattr_value(dev, "product");
device_list.push_back(p);
udev_device_unref(dev);
}
else
{
continue;
}
}
udev_enumerate_unref(enumerate);
udev_unref(udev);
return true;
}
bool CmdInterfaceLinux::ReadFromIO(uint8_t *rx_buf, uint32_t rx_buf_len, uint32_t *rx_len)
{
static timespec timeout = {0, (long)(100 * 1e6)};
int32_t len = -1;
if (IsOpened())
{
fd_set read_fds;
FD_ZERO(&read_fds);
FD_SET(mComHandle, &read_fds);
int r = pselect(mComHandle + 1, &read_fds, NULL, NULL, &timeout, NULL);
if (r < 0)
{
// Select was interrupted
if (errno == EINTR)
{
return false;
}
}
else if (r == 0) // timeout
{
return false;
}
if (FD_ISSET(mComHandle, &read_fds))
{
len = (int32_t)read(mComHandle, rx_buf, rx_buf_len);
if ((len != -1) && rx_len)
{
*rx_len = len;
}
}
}
return len == -1 ? false : true;
}
bool CmdInterfaceLinux::WriteToIo(const uint8_t *tx_buf, uint32_t tx_buf_len, uint32_t *tx_len)
{
int32_t len = -1;
if (IsOpened())
{
len = (int32_t)write(mComHandle, tx_buf, tx_buf_len);
if ((len != -1) && tx_len)
{
*tx_len = len;
}
}
return len == -1 ? false : true;
}
void CmdInterfaceLinux::mRxThreadProc(void *param)
{
CmdInterfaceLinux *cmd_if = (CmdInterfaceLinux *)param;
char *rx_buf = new char[MAX_ACK_BUF_LEN + 1];
uchar failedNum = 0;
while (!cmd_if->mRxThreadExitFlag.load())
{
uint32_t readed = 0;
bool res = cmd_if->ReadFromIO((uint8_t *)rx_buf, MAX_ACK_BUF_LEN, &readed);
if (res && readed)
{
cmd_if->mRxCount += readed;
if (cmd_if->mReadCallback != nullptr)
{
cmd_if->mReadCallback(rx_buf, readed);
}
failedNum = 0;
}
else
{
if(++failedNum == 255)
break;
}
}
cmd_if->mRxThreadExitFlag = true;
delete[] rx_buf;
}
LiPkg::LiPkg() : mTimestamp(0),
mSpeed(0),
mErrorTimes(0),
mIsFrameReady(false),
mIsPkgReady(false)
{
}
double LiPkg::GetSpeed(void)
{
return mSpeed / 360.0;
}
bool LiPkg::Parse(const uint8_t *data, long len)
{
for (int i = 0; i < len; i++)
{
mDataTmp.push_back(*(data + i));
}
if (mDataTmp.size() < sizeof(LiDARFrameTypeDef))
return false;
if (mDataTmp.size() > sizeof(LiDARFrameTypeDef) * 100)
{
mErrorTimes++;
mDataTmp.clear();
return false;
}
uint16_t start = 0;
while (start < mDataTmp.size() - 2)
{
start = 0;
while (start < mDataTmp.size() - 2)
{
if ((mDataTmp[start] == PKG_HEADER) && (mDataTmp[start + 1] == PKG_VER_LEN))
{
break;
}
if ((mDataTmp[start] == PKG_HEADER) && (mDataTmp[start + 1] == (PKG_VER_LEN | (0x07 << 5))))
{
break;
}
start++;
}
if (start != 0)
{
mErrorTimes++;
for (int i = 0; i < start; i++)
{
mDataTmp.erase(mDataTmp.begin());
}
}
if (mDataTmp.size() < sizeof(LiDARFrameTypeDef))
return false;
LiDARFrameTypeDef *pkg = (LiDARFrameTypeDef *)mDataTmp.data();
uint8_t crc = 0;
for (uint32_t i = 0; i < sizeof(LiDARFrameTypeDef) - 1; i++)
{
crc = CrcTable[(crc ^ mDataTmp[i]) & 0xff];
}
if (crc == pkg->crc8)
{
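// Plausibility check: pkg->speed is in deg/s, and at an assumed sample rate of roughly
// 2300 points/s one packet of POINT_PER_PACK points should span about
// speed * POINT_PER_PACK / 2300 degrees; spans beyond 1.5x that are treated as corrupt.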
double diff = (pkg->end_angle / 100 - pkg->start_angle / 100 + 360) % 360;
if (diff > (double)pkg->speed * POINT_PER_PACK / 2300 * 3 / 2)
{
mErrorTimes++;
}
else
{
mSpeed = pkg->speed;
mTimestamp = pkg->timestamp;
uint32_t diff = ((uint32_t)pkg->end_angle + 36000 - (uint32_t)pkg->start_angle) % 36000;
float step = diff / (POINT_PER_PACK - 1) / 100.0;
float start = (double)pkg->start_angle / 100.0;
float end = (double)(pkg->end_angle % 36000) / 100.0;
PointData data;
for (int i = 0; i < POINT_PER_PACK; i++)
{
data.distance = pkg->point[i].distance;
data.angle = start + i * step;
if (data.angle >= 360.0)
{
data.angle -= 360.0;
}
data.confidence = pkg->point[i].confidence;
mOnePkg[i] = data;
mFrameTemp.push_back(PointData(data.angle, data.distance, data.confidence));
}
// prevent angle invert
mOnePkg.back().angle = end;
mIsPkgReady = true;
}
for (uint32_t i = 0; i < sizeof(LiDARFrameTypeDef); i++)
{
mDataTmp.erase(mDataTmp.begin());
}
if (mDataTmp.size() < sizeof(LiDARFrameTypeDef))
{
break;
}
}
else
{
mErrorTimes++;
/* only remove the header, not the whole frame, because the payload may contain the header byte */
for (int i = 0; i < 2; i++)
{
mDataTmp.erase(mDataTmp.begin());
}
}
}
return true;
}
bool LiPkg::AssemblePacket()
{
float last_angle = 0;
std::vector<PointData> tmp;
int count = 0;
for (auto n : mFrameTemp)
{
/* wait until a full revolution of data has been collected */
if (n.angle - last_angle < (-350.f)) /* the angle wrapped around: a full circle is available */
{
mFrameData.len = tmp.size();
Transform(tmp); /* transform raw data to standard data */
if (tmp.size() == 0)
{
mFrameTemp.clear();
mIsFrameReady = false;
return false;
}
#ifdef USE_SLBI
Slbi sb(mSpeed);
sb.FindBarcode(tmp);
#endif
#ifdef USE_SLBF
Slbf sb(mSpeed);
tmp = sb.NearFilter(tmp);
#endif
std::sort(tmp.begin(), tmp.end(), [](PointData a, PointData b)
{ return a.angle < b.angle; });
mFrameData.angle_min = tmp[0].angle;
mFrameData.angle_max = tmp.back().angle;
mFrameData.distance.clear();
mFrameData.intensities.clear();
mFrameData.distance.resize(mFrameData.len);
mFrameData.intensities.resize(mFrameData.len);
float step = (mFrameData.angle_max - mFrameData.angle_min) / mFrameData.len;
float angle_acc = mFrameData.angle_min;
int tmp_count = 0;
/* resample onto a uniform angular grid; bins without a measurement stay at 0 */
for (uint32_t i = 0; i < mFrameData.len; i++)
{
if (angle_acc >= tmp[tmp_count].angle)
{
mFrameData.distance[i] = tmp[tmp_count].distance;
mFrameData.intensities[i] = tmp[tmp_count].confidence;
tmp_count++;
if (tmp_count == tmp.size())
{
break;
}
}
else
{
mFrameData.distance[i] = 0;
mFrameData.intensities[i] = 0;
}
angle_acc += step;
}
std::vector<PointData> tmp2;
for (uint32_t i = count; i < mFrameTemp.size(); i++)
{
tmp2.push_back(mFrameTemp[i]);
}
mFrameTemp.clear();
mFrameTemp = tmp2;
mIsFrameReady = true;
return true;
}
else
{
tmp.push_back(n); /* getting data */
}
count++;
last_angle = n.angle;
}
return false;
}
const std::array<PointData, POINT_PER_PACK> &LiPkg::GetPkgData(void)
{
mIsPkgReady = false;
return mOnePkg;
}
std::vector<cv::Mat> LiPkg::scan_to_pointcloud(FrameData mFrameData)
{
u_int32_t len = mFrameData.len;
float angle_min = ANGLE_TO_RADIAN(mFrameData.angle_min);
float angle_max = ANGLE_TO_RADIAN(mFrameData.angle_max);
cv::Mat pointcloud2(len, 3, CV_32F);
cv::Mat intensities(1, len, CV_32F);
cv::Mat pointdis(1, len, CV_32F);
float a_increment = (angle_max - angle_min) / (float)(len - 1);
for (int i = 0; i < len; i++)
{
float dis = mFrameData.distance[i] / 1000.f;
intensities.at<float>(0, i) = mFrameData.intensities[i];
pointdis.at<float>(0, i) = dis;
float x_temp = cos(angle_min + i * a_increment) * dis;
float y_temp = sin(angle_min + i * a_increment) * dis;
pointcloud2.at<float>(i, 0) = x_temp;
pointcloud2.at<float>(i, 1) = y_temp;
pointcloud2.at<float>(i, 2) = 0.0;
}
return std::vector<cv::Mat>{pointcloud2, intensities, pointdis};
}
/********************* (C) COPYRIGHT LD Robot *******END OF FILE ********/
SlTransform::SlTransform(LDVersion version, bool to_right_hand)
{
switch (version)
{
case LDVersion::LD_ZERO:
case LDVersion::LD_NINE:
offset_x = 8.1;
offset_y = -22.5156;
break;
case LDVersion::LD_THREE:
case LDVersion::LD_EIGHT:
offset_x = 5.9;
offset_y = -20.14;
break;
case LDVersion::LD_FOURTEENTH:
offset_x = 5.9;
offset_y = -20.14;
break;
default:
break;
}
}
Points2D SlTransform::Transform(const Points2D &data)
{
Points2D tmp2;
for (auto n : data)
{
/*Filter out invalid data*/
if (n.distance == 0)
{
continue;
}
/*transfer the origin to the center of lidar circle*/
/*The default direction of radar rotation is clockwise*/
/*transfer to the right-hand coordinate system*/
float right_hand = (360.f - n.angle);
double x = n.distance + offset_x;
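// 0.11923 is an empirical constant from the vendor code (~tan(6.8 deg)), presumably the
// lateral offset of the ranging axis from the rotation centre for this lidar model.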
double y = n.distance * 0.11923 + offset_y;
double d = sqrt(x * x + y * y);
double shift = atan(y / x) * 180.f / 3.14159;
/*Choose whether to use the right-hand system according to the flag*/
double angle;
if (to_right_hand)
angle = right_hand + shift;
else
angle = n.angle - shift;
if (angle > 360)
{
angle -= 360;
}
if (angle < 0)
{
angle += 360;
}
tmp2.push_back(PointData(angle, n.distance, n.confidence, x, y));
}
return tmp2;
}
SlTransform::~SlTransform()
{
}
void LD14_LiPkg::Transform(std::vector<PointData> &tmp)
{
SlTransform trans(LDVersion::LD_FOURTEENTH);
tmp = trans.Transform(tmp);
// std::cout << "Transform LD_FOURTEENTH !!" << std::endl;
}

174
lidar.h Normal file

@@ -0,0 +1,174 @@
/**
* @file lidar.h
* @author LD Robot
* @version V01
* @brief
* @note
* @attention COPYRIGHT LDROBOT
**/
#ifndef __LIPKG_H
#define __LIPKG_H
#include <stdint.h>
#include <vector>
#include <array>
#include <iostream>
#include <functional>
#include <thread>
#include <atomic>
#include <inttypes.h>
#include <mutex>
#include <string>
#include <condition_variable>
#include <sys/file.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include <termios.h>
#include <memory.h>
#include <libudev.h>
#include<opencv2/opencv.hpp>
#include <ros/ros.h>
struct PointData
{
// polar representation
float angle;
uint16_t distance;
uint8_t confidence;
// Cartesian representation
double x;
double y;
PointData(float angle, uint16_t distance, uint8_t confidence, double x = 0, double y = 0)
{
this->angle = angle;
this->distance = distance;
this->confidence = confidence;
this->x = x;
this->y = y;
}
PointData() {}
friend std::ostream& operator<<(std::ostream& os, const PointData& data)
{
os << data.angle << " " << data.distance << " " << (int)data.confidence << " " << data.x << " " << data.y;
return os;
}
};
typedef std::vector<PointData> Points2D;
enum
{
PKG_HEADER = 0x54,
PKG_VER_LEN = 0x2C,
POINT_PER_PACK = 12,
};
typedef struct __attribute__((packed))
{
uint16_t distance;
uint8_t confidence;
}LidarPointStructDef;
typedef struct __attribute__((packed))
{
uint8_t header;
uint8_t ver_len;
uint16_t speed;
uint16_t start_angle;
LidarPointStructDef point[POINT_PER_PACK];
uint16_t end_angle;
uint16_t timestamp;
uint8_t crc8;
}LiDARFrameTypeDef;
struct FrameData
{
ros::Time timestamp;
float angle_min; // degrees
float angle_max; // degrees
uint32_t len;
std::vector<uint16_t> distance; // millimeters
std::vector<uint8_t> intensities; // confidence values
};
class LiPkg
{
public:
LiPkg();
double GetSpeed(void); /*Lidar spin speed (Hz)*/
uint16_t GetTimestamp(void) { return mTimestamp; } /*time stamp of the packet */
bool IsPkgReady(void) { return mIsPkgReady; }/*a packet is ready */
bool IsFrameReady(void) { return mIsFrameReady; }/*Lidar data frame is ready*/
long GetErrorTimes(void) { return mErrorTimes; }/*the number of errors in parser process of lidar data frame*/
const std::array<PointData, POINT_PER_PACK>& GetPkgData(void);/*original data package*/
bool Parse(const uint8_t* data, long len);/*parse single packet*/
virtual void Transform(std::vector<PointData>& tmp) = 0;/*transform raw data to standard data */
bool AssemblePacket();/*combine standard data into data frames and calibrate*/
const FrameData& GetFrameData(void) { mIsFrameReady = false; return mFrameData; }
std::vector<cv::Mat> scan_to_pointcloud(FrameData mFrameData);
private:
uint16_t mTimestamp;
double mSpeed;
std::vector<uint8_t> mDataTmp;
long mErrorTimes;
std::array<PointData, POINT_PER_PACK>mOnePkg;
std::vector<PointData> mFrameTemp;
FrameData mFrameData;
bool mIsPkgReady;
bool mIsFrameReady;
};
class LD14_LiPkg : public LiPkg
{
public:
virtual void Transform(std::vector<PointData>& tmp);
};
enum class LDVersion
{
LD_ZERO, /*Zero generation lidar*/
LD_THREE, /*Third generation lidar*/
LD_EIGHT, /*Eighth generation lidar*/
LD_NINE, /*Ninth generation lidar*/
LD_FOURTEENTH /*Fourteenth generation lidar*/
};
class SlTransform
{
private:
bool to_right_hand = true;
double offset_x;
double offset_y;
public:
SlTransform(LDVersion version, bool to_right_hand = false);
Points2D Transform(const Points2D& data);
~SlTransform();
};
class CmdInterfaceLinux
{
public:
CmdInterfaceLinux(int32_t ver);
~CmdInterfaceLinux();
bool Open(std::string& port_name);
bool Close();
bool ReadFromIO(uint8_t* rx_buf, uint32_t rx_buf_len, uint32_t* rx_len);
bool WriteToIo(const uint8_t* tx_buf, uint32_t tx_buf_len, uint32_t* tx_len);
bool GetCmdDevices(std::vector<std::pair<std::string, std::string> >& device_list);
void SetReadCallback(std::function<void(const char*, size_t length)> callback) { mReadCallback = callback; }
bool IsOpened() { return mIsCmdOpened.load(); };
bool IsExited() { return mRxThreadExitFlag.load(); };
private:
std::thread* mRxThread;
static void mRxThreadProc(void* param);
long long mRxCount;
int32_t version;
int32_t mComHandle;
std::atomic<bool> mIsCmdOpened, mRxThreadExitFlag;
std::function<void(const char*, size_t length)> mReadCallback;
};
#endif
/********************* (C) COPYRIGHT LD Robot *******END OF FILE ********/

BIN
m28.rknn Normal file

Binary file not shown.

167
main.cpp Normal file

@@ -0,0 +1,167 @@
#include <iostream>
#include <stdio.h>
#include "lidar.h"
#include <ros/ros.h>
#include <sensor_msgs/LaserScan.h>
#include <opencv2/opencv.hpp>
#include "ranging.h"
#include "fusion.h"
#include <string>
#include <pthread.h>
#include <mutex>
#include <sys/time.h>
/*#include <cv_bridge/cv_bridge.h>
#include <image_transport/image_transport.h>*/
#include "opencv2/imgcodecs/imgcodecs.hpp"
#include <ctime>
#define ANGLE_TO_RADIAN(angle) ((angle)*3141.59 / 180000)
Ranging r(9, 640, 480);
std::queue<std::vector<Mat>> frameInfo;
std::queue<FrameData> laser_queue;
std::mutex g_mutex;
void *ranging(void *args) // ranging thread
{
while (true)
{
std::cout<<" ************ enter ranging *********** "<<std::endl;
std::vector<Mat> result = r.get_range();
g_mutex.lock();
for (uchar i = 0; i < frameInfo.size(); i++) // drop stale frames, keep only the newest
frameInfo.pop();
frameInfo.push(result);
g_mutex.unlock();
}
}
int main(int argc, char **argv)
{
uint8_t laser_queue_size = 2; // lidar frame queue size
LD14_LiPkg *pkg = new LD14_LiPkg;
CmdInterfaceLinux cmd_port(9);
std::string port_name = "/dev/ttyUSB0";
if (argc > 1)
port_name = argv[1];
// serial read callback: keeps parsing and assembling lidar data
cmd_port.SetReadCallback([&pkg](const char *byte, size_t len) {
if (pkg->Parse((uint8_t*)byte, len))
{
pkg->AssemblePacket();
} });
if (!cmd_port.Open(port_name))
return -1;
int type = VideoWriter::fourcc('M','J','P','G');
char name[256] = {0};
FILE *fp=NULL;
time_t timep;
struct tm *p;
time(&timep);
p = localtime(&timep);
sprintf(name,"%d.%d.%d %d:%02d.avi",1900+p->tm_year,1+p->tm_mon,p->tm_mday,p->tm_hour,p->tm_min);
VideoWriter writer(name, type, 30, Size(640, 480), true);
ros::init(argc, argv, "publish");
ros::NodeHandle nh; /* create a ROS Node */
ros::Publisher uplidar_pub = nh.advertise<sensor_msgs::LaserScan>("up_scan", 1);
//ros::Publisher downlidar_pub = nh.advertise<sensor_msgs::LaserScan>("down_scan", 1);
sensor_msgs::LaserScan upscan;
// sensor_msgs::LaserScan downscan;
/*image_transport::ImageTransport it(nh);
image_transport::Publisher img_pub = it.advertise("camera/image", 1);*/
pthread_t tids[1]; // launch the ranging thread
int ret = pthread_create(&tids[0], NULL, ranging, NULL);
if (ret != 0)
{
std::cout << "Multithreading error !" << std::endl;
return -2;
}
LCFusion lcFusion(&r);
namedWindow("camera");
while (ros::ok() && !cmd_port.IsExited())
{
int64 t = getTickCount();
if (pkg->IsFrameReady()) // proceed once a full lidar frame is ready
{
FrameData laserData_queue = pkg->GetFrameData(); // fetch the lidar frame
laserData_queue.timestamp = ros::Time::now();
laser_queue.push(laserData_queue);
while(laser_queue.size() > laser_queue_size) // keep at most two lidar frames
{
laser_queue.pop();
}
}
else
continue;
if (laser_queue.size() == laser_queue_size) // proceed once the queue holds two frames
{
FrameData laserData = laser_queue.front();
laser_queue.pop();
upscan.header.stamp = laserData.timestamp;
// upscan.header.stamp = ros::Time::now();
// downscan.header.stamp = upscan.header.stamp;
// uint16_t times_stamp = pkg->GetTimestamp();
// FrameData laserData = pkg->GetFrameData();
char state = -1;
float angle_min = ANGLE_TO_RADIAN(laserData.angle_min); // degrees -> radians
float angle_max = ANGLE_TO_RADIAN(laserData.angle_max);
uint32_t len = laserData.len;
float angle_inc = (angle_max - angle_min) / (len - 1.); // angular increment
lcFusion.filter_laser(laserData, angle_min, angle_inc); // synchronize with the camera FOV and preprocess the scan
if (!frameInfo.empty())
{
g_mutex.lock();
std::vector<Mat> camInfo = frameInfo.front(); // fetch the latest image frame
frameInfo.pop();
g_mutex.unlock();
double fusion_old,fusion_now;
fusion_old = ros::Time::now().toSec();
state = lcFusion.fusion(camInfo); // run the fusion
fusion_now = ros::Time::now().toSec();
cout << "data_fusion: " << fusion_now - fusion_old << endl;
if (state != -2)
{
lcFusion.set_laser_data(upscan); // fill the semantic scan message
uplidar_pub.publish(upscan); // publish
}
// debug: draw the projected lidar points on the image
if (!lcFusion.projectedPoints.empty())
{
for (int i = 0; i < lcFusion.projectedPoints.size(); i++)
{
Point2f p = lcFusion.projectedPoints[i];
circle(camInfo[0], p, 3, Scalar(0, 255, 0), 1, 8);
}
}
/* sensor_msgs::ImagePtr msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", camInfo[0]).toImageMsg();
img_pub.publish(msg);*/
if (!camInfo[0].empty())
{
cv::Mat pic;
cv::Size dsize= Size(int(640*0.75),int(480*0.75));
cv::resize(camInfo[0],pic,dsize,0,0,INTER_AREA);
// cv::pyrDown(camInfo[0],pic);
writer.write(camInfo[0]);
imshow("camera", pic);
}
t = getTickCount() - t;
char fps[50];
std::cout << "fps: " << int(1 / (t / getTickFrequency())) << std::endl;
if (waitKey(1) == 81)
break;
}
//lcFusion.set_laser_data(downscan, laserData);
//downlidar_pub.publish(downscan);
}
}
std::cout << "ROS or lidar error !" << std::endl;
delete pkg;
cmd_port.Close();
return 0;
}

BIN
main.out Normal file

Binary file not shown.

444
ranging.cpp Normal file

@@ -0,0 +1,444 @@
#include <opencv2/opencv.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
#include <stdio.h>
#include "ranging.h"
#include <math.h>
#include <stdlib.h>
#include "rknn_sdk.h"
#include <string.h>
#include <sys/time.h>
#include <dlfcn.h>
#include "opencv2/core.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/imgproc.hpp"
#include <sys/stat.h>
#include <sys/types.h>
#include <dirent.h>
#include <vector>
#include <tuple>
#include <string>
#include <string.h>
#include <algorithm>
#include <ros/ros.h>
using namespace cv;
void Ranging::rectifyImage(Mat &oriImgL, Mat &oriImgR) // stereo rectification via remap
{
remap(oriImgL, oriImgL, mapX1, mapX2, INTER_LINEAR);
remap(oriImgR, oriImgR, mapY1, mapY2, INTER_LINEAR);
}
std::vector<float> Ranging::pic2cam(int u, int v) // pixel coordinates -> camera coordinates
{
//(u,v)->(x,y)"(loc[0],loc[1])"
std::vector<float> loc;
loc.push_back((u - cam_matrix_right.at<double>(0, 2)) * q.at<double>(2, 3) / cam_matrix_right.at<double>(0, 0));
loc.push_back((v - cam_matrix_right.at<double>(1, 2)) * q.at<double>(2, 3) / cam_matrix_right.at<double>(1, 1));
return loc;
}
std::vector<int> Ranging::muban(Mat &left_image, Mat &right_image, const int *coordinates) // template matching between the stereo views
{
int x1 = coordinates[0], y1 = coordinates[1], x2 = coordinates[2], y2 = coordinates[3];
// cv::Mat right_image_,left_image_;
// cv::pyrDown(right_image, right_image_);
// cv::pyrDown(left_image, left_image_);
Mat tpl = right_image.rowRange(max(y1 - 2, 0), min(y2 + 2, 479)).colRange(x1, x2); // template: the detection box in the right image
Mat target = left_image.rowRange(max(y1 - 20, 0), min(y2 + 20, 479)).colRange(0, 639); // search image: epipolar constraint, only the same horizontal band is needed
// Mat tpl = right_image_.rowRange(max(int(0.5*y1 - 2), 0), min(int(0.5*y2 + 2), 479)).colRange(int(0.5*x1), int(0.5*x2));
// Mat target = left_image_.rowRange(max(int(0.5*y1 - 10), 0), min(int(0.5*y2 + 10), 239)).colRange(0, 319);
int th = tpl.rows, tw = tpl.cols;
Mat result;
// methods = TM_CCOEFF_NORMED, TM_SQDIFF_NORMED, TM_CCORR_NORMED]
matchTemplate(target, tpl, result, TM_CCOEFF_NORMED); // zero-mean normalized cross-correlation
double minVal, maxVal;
Point minLoc, maxLoc;
minMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc); // location of the best match
Point tl = maxLoc, br;
br.x = min(maxLoc.x + tw, 639); // bottom-right corner, clamped to the image
br.y = min(maxLoc.y + th, 479); // bottom-right corner, clamped to the image
//// visualize the match (debug)
// br.x = min(maxLoc.x + tw, 319);
// br.y = min(maxLoc.y + th, 239);
//rectangle(target, tl, br, (0, 255, 0), 3);
//imshow("match-", target);
//waitKey(2);
std::vector<int> maxloc;
maxloc.push_back(maxLoc.x);
maxloc.push_back(maxLoc.y);
return maxloc;
}
void Ranging::horizon_estimate(Mat& img, Mat& bboxs,int k)
{
// assumes the camera is parallel to the ground
int x1 = bboxs.at<float>(k, 0);
int x2 = bboxs.at<float>(k, 2);
int y1 = bboxs.at<float>(k, 1);
int y2 = bboxs.at<float>(k, 3);
float Conf = bboxs.at<float>(k, 4);
int cls = bboxs.at<float>(k, 5);
float Y_B, Y_H;
Mat edge, grayImage;
vector<cv::Point> idx;
Mat tpl = img.rowRange(y1, min(y2+5,479)).colRange(x1, x2); // region of interest
//Mat target = left_image.rowRange(max(y1 - 20, 0), min(y2 + 20, 479)).colRange(0, 639);
Mat Z = Mat::zeros(2, tpl.cols, CV_32FC1);
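// Z: 2 x (box width) matrix; row 0 = estimated ground distance per column (mm, 0 = invalid),
// row 1 = bearing alfa (rad). Consumed later by LCFusion::small_fusion via r->Z.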
cvtColor(tpl, grayImage, COLOR_BGR2GRAY);
GaussianBlur(grayImage,grayImage,Size(5,5),0);
Canny(grayImage, edge, 120, 180, 3); // edge detection to find the ground-contact points
//cv::imshow("1",edge);
//cv::waitKey(1);
float cluster[650];
for (int i = 0;i<650;i++)
{
cluster[i] = 0;
}
int y_b, y_h;
int j = 0;
for (int i = 0; i < x2-x1; i++)
{
//Mat temp = edge.rowRange(max(y1, 0), min(y2 + 4, 479)).colRange(x1, x2);
Mat temp = edge.col(i); // i-th column
// std::cout << "temp: " <<temp << std::endl;
cv::findNonZero(temp, idx);
std::vector<float> point_b = pic2cam(x1 + i, 240); // to camera coordinates
std::vector<float> point_H = pic2cam(320, 240);
float alfa = atan((point_b[0] - point_H[0]) / q.at<double>(2, 3));
if (idx.size() < 1)
{
Z.at<float>(0, i) = 0;
Z.at<float>(1, i) = alfa;
continue;
}
int y_b = idx[idx.size() - 1].y + y1; //
y_b = int(y_b + y_b*0.03);
int y_h = 240;
point_b = pic2cam(x1 + i, y_b); // to camera coordinates
point_H = pic2cam(320, y_h);
Y_B = point_b[1];
Y_H = point_H[1];
// std::cout << "y_b: " << y_b << std::endl;
float H_c = 60; // camera height above the ground, mm
float theta = 0; // camera tilt relative to the ground, rad
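// Ground-plane range (monocular): with the camera at height H_c and parallel to the ground
// (theta = 0), a ground-contact point whose image ray has vertical camera coordinate Y_B
// (Y_H being the horizon row) lies at d = f * H_c / (Y_B - Y_H), with f = q(2,3);
// the cos/tan terms below generalize this to theta != 0.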
float d = (1/cos(theta)* cos(theta)) * q.at<double>(2, 3) * H_c / (Y_B - Y_H)- H_c*tan(theta);
alfa = atan((point_b[0] - point_H[0]) / q.at<double>(2, 3));
//cout << "d: " << d << endl;
if (d > 700)
{d = 0;}
Z.at<float>(0, i) = d/cos(alfa);
Z.at<float>(1, i) = alfa;
}
/*if (i > 0)
{
if (abs(Z.at<float>(0, i) - Z.at<float>(0, i - 1)) < 40) // cluster adjacent columns
{
cluster[j] = cluster[j] + 1;
}
else
{
j = j + 1;
cluster[j] = 1;
//std::cout<<"j : "<< j<< std::endl;
}
}
}
//int max_loc = distance(cluster,max_element(cluster,cluster + sizeof(cluster)/sizeof(cluster[0]))); // keep only the largest cluster, zero the rest
//std::cout<<" max_loc : "<< max_loc<<std::endl;
int temp = 0;
for (int i = 0; i < j; i++)
{
if (cluster[i] < 3)
{
for (int t = temp; t < temp + cluster[i]; t++)
{
Z.at<float>(0, t) = 0;
}
}
temp = temp + cluster[i];
}*/
// std::cout << "z : " <<Z << std::endl;
this->Z = Z.clone();
}
void Ranging::getInfo(Mat &imgL, Mat &imgR, Mat &detBoxes, Mat &info)
{
// convert both views to grayscale
Mat imgGrayL, imgGrayR;
cvtColor(imgL, imgGrayL, COLOR_BGR2GRAY);
cvtColor(imgR, imgGrayR, COLOR_BGR2GRAY);
Mat imgR_weight = imgR.clone();
Mat infoRow;
for (uchar i = 0; i < detBoxes.rows; i++)
{
int x1 = detBoxes.at<float>(i, 0);
int y1 = detBoxes.at<float>(i, 1);
int x2 = detBoxes.at<float>(i, 2);
int y2 = detBoxes.at<float>(i, 3);
//std::cout<<"x1: "<<x1<<"x2: "<<x2<<"y1: "<<y1<<"y2: "<<y2<<std::endl;
float Conf = detBoxes.at<float>(i, 4);
int cls = detBoxes.at<float>(i, 5);
if (cls == 6 || cls == 10 || cls == 11) // weight scale, carpet and towel use the monocular ground-plane method
{
horizon_estimate(imgR_weight, detBoxes, i);
}
if (x1 > 600 || x2 < 50 || y1 < 5 || y2 > 475 || x1 < 2 || x2 > 590 || abs(x2 - x1) > 550) // skip boxes that are too far left/right or too large
{
//char cm[15];
//sprintf(cm, "cannot match !");
char cm[15];
//sprintf(cm, "cannot match !");
sprintf(cm, "%d , %.2f", cls,Conf);
putText(imgR, cm, Point((x1), (y1)), FONT_HERSHEY_PLAIN, 2.2, Scalar(0, 0, 255), 2);
infoRow = (Mat_<float>(1, 4) << -1, -1, -1, -1);
infoRow.copyTo(info.row(i));
rectangle(imgR, Point(int(x1), int(y1)),
Point(int(x2), int(y2)), Scalar(0, 0, 255));
continue;
}
if (cls!=0 && cls!=1 && cls!=2 && cls!= 6 && cls!= 7 && cls!= 10 && cls!=11 && cls!=14 && cls!=15) // no stereo ranging for large objects
{
if (x1 < 30 || x2 < 80 || x1>580 || x2 > 610) // drop boxes at the image border
{
detBoxes.at<float>(i, 5) = -1;
cls = detBoxes.at<float>(i, 5);
}
//char cm[15];
//sprintf(cm, "cannot match !");
char cm[15];
//sprintf(cm, "cannot match !");
sprintf(cm, "%d , %.2f", cls,Conf);
putText(imgR, cm, Point((x1), (y1)), FONT_HERSHEY_PLAIN, 1, Scalar(0, 0, 255), 2);
infoRow = (Mat_<float>(1, 4) << -1, -1, -1, -1);
infoRow.copyTo(info.row(i));
rectangle(imgR, Point(int(x1), int(y1)),
Point(int(x2), int(y2)), Scalar(0, 0, 255));
continue;
}
rectangle(imgR, Point(int(x1), int(y1)),
Point(int(x2), int(y2)), Scalar(0, 0, 255)); // draw the detection box
int coordinates[4] = {x1, y1, x2, y2};
std::vector<int> disp_pixel = muban(imgGrayL, imgGrayR, coordinates); // template matching
float disp_pixel_x = disp_pixel[0] - x1; // horizontal disparity
float disp_pixel_y = disp_pixel[1] - y1; // vertical offset of the match
disp_pixel_x = (int)(disp_pixel_x + disp_pixel_x * 0.12); // 0.12 compensates the template-matching bias; empirical value obtained by fitting
//Mat disp_matrix = Mat(1, 1, CV_32F, Scalar(disp_pixel_x)), disp_pixel_xyz;
Mat disp_matrix = Mat(imgGrayL.rows, imgGrayL.cols, CV_32F, Scalar(disp_pixel_x)); // disparity image filled with the single matched disparity, so reprojection yields the object's depth at every pixel
Mat threed_pixel_xyz, threedImage;
reprojectImageTo3D(disp_matrix, threedImage, q, false); // 2D -> 3D
threed_pixel_xyz = threedImage.mul(threedImage); // square each coordinate
std::vector<Mat> channels;
split(threed_pixel_xyz.clone(), channels);
threed_pixel_xyz = channels[0] + channels[1] + channels[2]; // per-pixel sum of squares
threed_pixel_xyz.forEach<float>([](float &value, const int *position) { value = sqrt(value); }); // Euclidean distance d
int mid_pixel = int((x1 + x2) / 2);
std::vector<float> mid = pic2cam(imgGrayR.cols / 2, imgGrayR.rows); // bearing angle: pixel -> camera coordinates
std::vector<float> loc_tar = pic2cam(mid_pixel, imgGrayR.rows);
float alfa = atan((loc_tar[0] - mid[0]) / q.at<double>(2, 3));
if (disp_pixel_x > 240) // too close: disparity too large
{
char cm[15];
//sprintf(cm, "cannot match !");
sprintf(cm, "%d , %.2f", cls,Conf);
putText(imgR, cm, Point((x1), (y1)), FONT_HERSHEY_PLAIN, 2.2, Scalar(0, 0, 255), 2);
infoRow = (Mat_<float>(1, 4) << -1, -1, -1, -1);
infoRow.copyTo(info.row(i));
continue;
}
else
{
float median = threed_pixel_xyz.at<float>((int)(y1 + y2) / 2, (int)(x1 + x2) / 2);
std::vector<float> ltPoint = pic2cam(x1, y1);
std::vector<float> rbPoint = pic2cam(x2, y2);
float xx1 = ltPoint[0], yy1 = ltPoint[1], xx2 = rbPoint[0], yy2 = rbPoint[1]; // compute width and height
float f = q.at<double>(2, 3);
float f1 = sqrt(xx1 * xx1 + yy1 * yy1 + f * f); // derived geometrically
//float w1 = median * sqrt((xx1 - xx2) * (xx1 - xx2) / 4) / f1;
float h1 = median * sqrt((yy1 - yy2) * (yy1 - yy2) / 4) / f1;
float f2 = sqrt(xx2 * xx2 + yy2 * yy2 + f * f);
//float w2 = median * sqrt((xx2 - xx1) * (xx2 - xx1) / 4) / f2;
float h2 = median * sqrt((yy2 - yy1) * (yy2 - yy1) / 4) / f2;
float w1 = sqrt(pow((threedImage.at<cv::Vec3f>(y2, x1)[0] - threedImage.at<cv::Vec3f>(y2, x2)[0]), 2) +
pow((threedImage.at<cv::Vec3f>(y2, x1)[1] - threedImage.at<cv::Vec3f>(y2, x2)[1]), 2) +
pow((threedImage.at<cv::Vec3f>(y2, x1)[2] - threedImage.at<cv::Vec3f>(y2, x2)[2]), 2));
w1 = w1 / 10;
h1 = (h1 + h2) / 10;
median /= 10;
if (median > 120) // too far: the ranging error becomes large
{
//char tf[9];
//sprintf(tf, "Too far!");
char cm[15];
//sprintf(cm, "cannot match !");
sprintf(cm, "%d , %.2f", cls,Conf);
putText(imgR, cm, Point((x1), (y1)), FONT_HERSHEY_PLAIN, 2.2, Scalar(0, 0, 255), 2);
infoRow = (Mat_<float>(1, 4) << -1, -1, -1, -1);
infoRow.copyTo(info.row(i));
continue;
}
// draw the distance / size info on the image
char dc[50], wh[50];
std::string cname = className[cls + 1];
sprintf(dc, "dis:%.2fcm %d", median, cls);
sprintf(wh, "W: %.2fcm H: %.2fcm alfa: %.2f", w1, h1, alfa);
putText(imgR, dc, Point(x1, y2), FONT_HERSHEY_PLAIN, 1.5, Scalar(0, 0, 255), 2);
putText(imgR, wh, Point(x1, y1), FONT_HERSHEY_PLAIN, 1.5, Scalar(0, 0, 255), 1);
// write the result back
infoRow = (Mat_<float>(1, 4) << median, w1, h1, alfa);
infoRow.copyTo(info.row(i));
}
}
}
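// --- Explanatory sketch (not called by the original code) ---------------------
// The depth recovered above via reprojectImageTo3D essentially follows the
// rectified-stereo relation Z = f * B / d. The helper below is a minimal,
// hypothetical illustration of that relation; `focalPx` and `baselineMm` are
// assumed to come from the Q matrix produced by stereoRectify (the focal length
// q.at<double>(2, 3) and the baseline magnitude derived from q.at<double>(3, 2)).
static inline double depthFromDisparity(double focalPx, double baselineMm, double disparityPx)
{
if (disparityPx <= 0.0)
return -1.0; // invalid / unmatched disparity
return focalPx * baselineMm / disparityPx; // depth in the same unit as the baseline
}
// ------------------------------------------------------------------------------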
Ranging::Ranging(int index, int imgw, int imgh) : // constructor
mapX1(imgh, imgw, CV_64F), // allocate the undistortion/rectification maps
mapX2(imgh, imgw, CV_64F),
mapY1(imgh, imgw, CV_64F),
mapY2(imgh, imgw, CV_64F),
q(4, 4, CV_64F),
imgw(imgw),
imgh(imgh)
{
// Z = Mat::zeros(2, 1, CV_32FC1);
if (!vcapture.open(index))
{
std::cout << "Open camera failed !" << std::endl;
exit(EXIT_FAILURE);
}
else
{
//vcapture.set(CAP_PROP_FOURCC, CAP_OPENCV_MJPEG);
vcapture.set(CAP_PROP_FPS, 30);
vcapture.set(CAP_PROP_FRAME_WIDTH, imgw * 2);
vcapture.set(CAP_PROP_FRAME_HEIGHT, imgh);
vcapture.set(CAP_PROP_BUFFERSIZE, 1);
auto imgSize = Size(imgw, imgh);
Mat r1(3, 3, CV_64F), r2(3, 3, CV_64F), p1(3, 4, CV_64F), p2(3, 4, CV_64F);
stereoRectify(cam_matrix_left.t(), distortion_l, cam_matrix_right.t(), distortion_r,
imgSize, rotate.t(), trans, r1, r2, p1, p2, q); // stereo rectification
initUndistortRectifyMap(cam_matrix_left.t(), distortion_l, r1, p1, imgSize, CV_32F, mapX1, mapX2); // undistortion/rectification maps, left camera
initUndistortRectifyMap(cam_matrix_right.t(), distortion_r, r2, p2, imgSize, CV_32F, mapY1, mapY2); // undistortion/rectification maps, right camera
RKNN_Create(&hdx, modelPath); // load the detection model
std::cout<< " ******************* CAMERA initialization ********************" << std::endl;
}
}
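// rectifyImage() is declared in ranging.h and used in get_range(), but its
// definition is not shown in this part of the diff. A minimal sketch, assuming
// it simply remaps both views with the maps computed in the constructor above,
// is given here (kept commented out so it does not clash with the real
// definition elsewhere in this commit):
//
// void Ranging::rectifyImage(Mat &oriImgL, Mat &oriImgR)
// {
//     Mat rectL, rectR;                                   // remap cannot run in place
//     remap(oriImgL, rectL, mapX1, mapX2, INTER_LINEAR);  // left view
//     remap(oriImgR, rectR, mapY1, mapY2, INTER_LINEAR);  // right view
//     oriImgL = rectL;
//     oriImgR = rectR;
// }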
// std::vector<float> Ranging::pic2cam(int u, int v) // pixel coordinates -> camera coordinates
// {
// std::vector<float> loc;
// loc.push_back((u - cam_matrix_right.at<double>(0, 2)) * q.at<double>(2, 3) / cam_matrix_right.at<double>(0, 0));
// loc.push_back((v - cam_matrix_right.at<double>(1, 2)) * q.at<double>(2, 3) / cam_matrix_right.at<double>(1, 1));
// return loc;
// }
std::vector<Mat> Ranging::get_range()
{
double rang_old, rang_now;
rang_old = ros::Time::now().toSec(); // timestamp for timing this call
Mat frame, lframe, rframe;
vcapture.read(frame); // grab one side-by-side stereo frame
if (!frame.empty())
{
int64 t = getTickCount();
lframe = frame.colRange(0, imgw).clone(); // left view
rframe = frame.colRange(imgw, imgw * 2).clone(); // right view
//imshow("lframe",lframe);
//waitKey(1);
rectifyImage(lframe, rframe); // undistort and rectify both views
DetRet *det_ret = NULL; // detection results, allocated by the SDK
InputData input_data; // input blob for the detector
int shape[4] = {1, 3, rframe.rows, rframe.cols}, det_num = 0;
memcpy(input_data.shape, shape, sizeof(shape));
Mat Rframe = rframe.clone();
double detect_old, detect_now;
detect_old = ros::Time::now().toSec();
input_data.data = Rframe.data;
//std::cout<<"Rframe.data.shape"<<Rframe.rows<<std::endl;
RKNN_ObjDet(hdx, &input_data, &det_ret, &det_num);
detect_now = ros::Time::now().toSec();
//cout << "data_detect_time:" << detect_now-detect_old << endl;
//std::cout<<"det_num "<<det_num<<std::endl;
if (!det_num)
{
//std::cout<<"return NULL"<<std::endl;
return std::vector<Mat>{rframe}; // no detection box: return only the frame
}
std::cout << "det_num: " << det_num << std::endl;
// convert the detection boxes to our format
Mat detBoxes(det_num, 6, CV_32F); // one row per box: (x_min, y_min, x_max, y_max, conf, cls)
for (int i = 0; i < det_num; i++)
{
DetRet det_result = det_ret[i];
// xyxy conf cls
float xmin = rframe.cols * det_result.fLeft;
float ymin = rframe.rows * det_result.fTop;
float xmax = rframe.cols * det_result.fRight;
float ymax = rframe.rows * det_result.fBottom;
detBoxes.at<float>(i, 0) = xmin;
detBoxes.at<float>(i, 1) = ymin;
detBoxes.at<float>(i, 2) = xmax;
detBoxes.at<float>(i, 3) = ymax;
detBoxes.at<float>(i, 4) = det_result.fConf;
// empirically filter out oversized false detections
float ratio = (xmax - xmin) * (ymax - ymin) / 308480.; // box area relative to (roughly) the full frame area
if (ratio > 0.7)
{
detBoxes.at<float>(i, 5) = -1;
continue;
}
detBoxes.at<float>(i, 5) = det_result.nLabel;
if (det_result.nLabel == 1 || det_result.nLabel == 2) // bar-stool base or fan base: raise the top of the box so the lidar points on the pole fall inside it
{
detBoxes.at<float>(i, 1) = 100;
}
}
Mat info(det_num, 4, CV_32F); // ranging info per box: distance d, width w, height h, angle alpha
if (det_num)
{
// double rang_old, rang_now;
// rang_old = ros::Time::now().toSec();
getInfo(lframe, rframe, detBoxes, info);
// rang_now = ros::Time::now().toSec();
// cout << "data_dis_time: " << rang_now-rang_old << endl;
}
t = getTickCount() - t;
char fps[50];
sprintf(fps, "fps: %d", int(1 / (t / getTickFrequency())));
putText(rframe, fps, Point(20, 20), FONT_HERSHEY_PLAIN, 1, Scalar(0, 0, 255), 1);
// rang_now = ros::Time::now().toSec();
// cout << "data_rang_time" << rang_now - rang_old << endl;
return std::vector<Mat>{rframe, detBoxes, info};
}
return std::vector<Mat>{rframe};
}
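// Hypothetical usage sketch (illustration only, not part of this commit):
// get_range() returns {frame, detBoxes, info} when there are detections, which
// is the camInfo layout consumed by LCFusion::fusion(). The camera index,
// resolution, and the `lcf` object name below are assumptions.
//
// Ranging ranging(0, 640, 480);
// LCFusion lcf;                        // LCFusion's constructor is not shown in this diff
// while (true)
// {
//     std::vector<Mat> camInfo = ranging.get_range();
//     if (camInfo.size() >= 3)         // frame + detBoxes + info
//         lcf.fusion(camInfo);
// }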

134
ranging.h Normal file
View File

@ -0,0 +1,134 @@
#pragma once
#include <opencv2/opencv.hpp>
#include <opencv2/calib3d.hpp>
#include <cstdlib>
#include <vector>
#include "rknn_sdk.h"
#include "fusion.h"
using namespace cv;
class LCFusion;
class Ranging
{
private:
friend class LCFusion;
VideoCapture vcapture;
rknn_handle hdx;
const char *modelPath = "m28.rknn";
std::vector<std::string> className = {
"Unknown", //未知
"MaoGouWan", //猫碗/狗碗
"BTYDiZuo", //吧台怅底座
"FSDiZuo", //风扇底座
"XiYiJi", //掗衣机
"BingXiang", //冰箱
"MaTong", //马桶
"TiZhongCheng", //䜓重秀
"DianXian", //电线
"DianShiJi", //电视机
"CanZhuo", //逐桌
"DiTan", //地毯
"MaBu", //抹垃
"ChaJi", //茶几
"DianShiGu", //电视柜
"XieZi", //拖鞋/鞋子
"WaZi", //袜子
"YiGui", //衣柜
"Chuang", //床
"ShaFa", //沙发
"YiZi", //怅子
"Men", //闹
"DLS", //倧理石
"DiBan", //地板
};
// stereo camera parameters
/*
Mat cam_matrix_left = (Mat_<double>(3, 3) <<
4.700950170847520e+02, 0, 0,
0, 4.709244004725060e+02, 0,
3.220628601465352e+02, 2.265265667516499e+02, 1);
Mat cam_matrix_right = (Mat_<double>(3, 3) <<
4.696016526986375e+02, 0, 0,
0, 4.702371464881610e+02, 0,
3.216994731643572e+02, 2.267449827655198e+02, 1);
Mat distortion_l = (Mat_<double>(1, 5) << 0.088023418049400, -0.047968406803285, 0,
0, 0);
Mat distortion_r = (Mat_<double>(1, 5) << 0.097018738959281, -0.081707209740704, 0,
0, 0);
Mat rotate = (Mat_<double>(3, 3) <<
0.999895538560624, 1.514179660491009e-04, 0.014452994124399,
-1.935031776135467e-04, 0.999995745723263, 0.002910514026079,
-0.014452491933249, -0.002913006689884, 0.999891314028152);
Mat trans = (Mat_<double>(3, 1) <<
-59.978995009996730, 0.050886852679934, -0.088565378305175);*/
Mat cam_matrix_left = (Mat_<double>(3, 3) <<
4.860907245880164e+02, 0, 0,
0, 4.899923700242200e+02, 0,
2.986108795628009e+02, 2.423211232345708e+02, 1);
Mat cam_matrix_right = (Mat_<double>(3, 3) <<
4.833477325585679e+02, 0, 0,
0, 4.868370803438677e+02, 0,
2.976599964943864e+02, 2.310278327659757e+02, 1);
Mat distortion_l = (Mat_<double>(1, 5) << 0.126100629219048, -0.152357884696085, 0, 0, 0);
Mat distortion_r = (Mat_<double>(1, 5) << 0.117495161060377, -0.131936017409618, 0, 0, 0);
Mat rotate = (Mat_<double>(3, 3) <<
0.999964066385317, 0.001108038258875, 0.008404652839816,
-0.001043883137806, 0.999970316987207, -0.007633836027844,
-0.008412861946779, 0.007624788241143, 0.999935541101596);
Mat trans = (Mat_<double>(3, 1) <<
-60.419864984062440, -0.059256838559736, -0.609155889994525);
/*
Mat cam_matrix_left = (Mat_<double>(3, 3) <<
3.627346593660044e+02, 0, 0,
0, 3.629168542317134e+02, 0,
3.093231127433517e+02, 2.409353074183058e+02, 1);
Mat cam_matrix_right = (Mat_<double>(3, 3) <<
3.615909129180773e+02, 0, 0,
0, 3.617772267770084e+02, 0,
3.062363746645480e+02, 2.298948842159400e+02, 1);
Mat distortion_l = (Mat_<double>(1, 5) << 0.117219440226075, -0.148594691012994, 0,
0, 0);
Mat distortion_r = (Mat_<double>(1, 5) << 0.116412295415583, -0.146817143572705, 0,
0, 0);
Mat rotate = (Mat_<double>(3, 3) <<
0.999962353120719, 4.648106081008926e-04, 0.008664657660520,
-4.493098874759400e-04, 0.999998295540464, -0.001790820145135,
-0.008665475284162, 0.001786859609987, 0.999960857569352);
Mat trans = (Mat_<double>(3, 1) <<
-60.097178758944190, -0.033859553542635, -0.423965574285253);
*/
/*
Mat cam_matrix_left = (Mat_<double>(3, 3) << 3.629995979572968e+02, 0, 0, 0, 3.605811984929196e+02, 0, 3.056618479641253e+02, 2.425978482096805e+02, 1);
Mat cam_matrix_right = (Mat_<double>(3, 3) << 3.642383320232306e+02, 0, 0, 0, 3.619176322068948e+02, 0, 3.048699917391443e+02, 2.355042537698953e+02, 1);
Mat distortion_l = (Mat_<double>(1, 5) << 0.114519742751324, -0.147768779338520, 0.001196745401736, -0.00274469994995, 0.008589264227682);
Mat distortion_r = (Mat_<double>(1, 5) << 0.107862414325132, -0.112272107836791, 0.002046119288761, -8.703752362970271e-04, -0.050706452190122);
Mat rotate = (Mat_<double>(3, 3) << 0.999984364022271, 6.994668898371339e-04, 0.005548194034452, -6.988446369697146e-04, 0.999999749299562, -1.140920137281147e-04, -0.005548272447104, 1.102129041420978e-04, 0.999984602144437);
Mat trans = (Mat_<double>(3, 1) << -60.134851044411256, 0.007279986569326, -0.065092184396760);
*/
Mat mapX1, mapX2, mapY1, mapY2, q, Z;
int imgw, imgh;
public:
Ranging(int index, int imgw, int imgh);
void rectifyImage(Mat &oriImgL, Mat &oriImgR);
void getInfo(Mat &imgl, Mat &imgr, Mat &detBoxes, Mat &info);
std::vector<float> pic2cam(int u, int v);
std::vector<int> muban(Mat &left_image, Mat &right_image, const int *coordinates);
std::vector<Mat> get_range();
void horizon_estimate(Mat& img, Mat& bboxs,int k);
};

110
rknn_sdk.h Normal file
View File

@ -0,0 +1,110 @@
#ifndef __RKNN_SDK_H__
#define __RKNN_SDK_H__
#include <stdint.h>
// error codes
enum XErrorCode {
RKNN_SUCCEED = 0,
RKNN_NOT_INIT = -1, // SDK not initialized
RKNN_INVAILD_IMGSIZE = -2, // input image size does not match the model input
RKNN_INVAILD_PARAM = -3, // invalid parameter
};
enum ObjCls {
OBJ_Unknown = -1, // unknown
OBJ_MaoGouWan = 0, // cat/dog bowl
OBJ_BTYDiZuo = 1, // bar stool base
OBJ_FSDiZuo = 2, // fan base
OBJ_XiYiJi = 3, // washing machine
OBJ_BingXiang = 4, // refrigerator
OBJ_MaTong = 5, // toilet
OBJ_TiZhongCheng = 6, // weight scale
OBJ_DianXian = 7, // electric wire
OBJ_DianShiJi = 8, // television
OBJ_CanZhuo = 9, // dining table
OBJ_DiTan = 10, // carpet
OBJ_MaBu = 11, // rag
OBJ_ChaJi = 12, // coffee table
OBJ_DianShiGui = 13, // TV cabinet
OBJ_XieZi = 14, // slippers/shoes
OBJ_WaZi = 15, // socks
OBJ_YiGui = 16, // wardrobe
OBJ_Chuang = 17, // bed
OBJ_ShaFa = 18, // sofa
OBJ_YiZi = 19, // chair
OBJ_Men = 20, // door
OBJ_DLS = 21, // marble
OBJ_DiBan = 22 // floor
//OBJ_Xiang = 21 // pet feces
};
// image info and data buffer
typedef struct _InputData {
int shape[4]; // Num, Channel, Height, Width
void* data; // img.data
} InputData;
// detection result
typedef struct _DetRet
{
int nID; // image ID
int nLabel; // object class (see ObjCls)
float fConf; // confidence
float fLeft; // normalized x_min of the box; int(fLeft * width) gives the pixel coordinate
float fTop; // normalized y_min of the box; int(fTop * height) gives the pixel coordinate
float fRight; // normalized x_max of the box; int(fRight * width) gives the pixel coordinate
float fBottom; // normalized y_max of the box; int(fBottom * height) gives the pixel coordinate
} DetRet;
#define RKNNSDK_API(retType) retType
///
#ifdef __cplusplus
extern "C" {
#endif
typedef uint64_t rknn_handle;
/* RKNN_Create
Initialize the SDK and load the model;
input:
rknn_handle* hd
const char* model_path
return:
int
*/
RKNNSDK_API(int) RKNN_Create(rknn_handle* hd, const char* model_path);
/* RKNN_ObjDet
Run object detection on one input image;
input:
rknn_handle hd
const InputData* input_data
DetRet** det_ret
int* det_num
return:
int
*/
RKNNSDK_API(int) RKNN_ObjDet(rknn_handle hd, const InputData* input_data, DetRet** det_ret, int* det_num);
/* RKNN_Release
Shut down and release the SDK;
input:
rknn_handle hd
return:
int
*/
RKNNSDK_API(int) RKNN_Release(rknn_handle hd);
#ifdef __cplusplus
}
#endif
#endif //__RKNN_SDK_H__
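// Hypothetical call-sequence sketch (illustration only, not part of the SDK
// header): it shows the intended order Create -> ObjDet -> Release using only
// the declarations above. The image dimensions and the `bgrImage` buffer are
// assumptions; the model path matches the one used in ranging.h.
//
//   rknn_handle hdx = 0;
//   RKNN_Create(&hdx, "m28.rknn");
//
//   InputData in;
//   int shape[4] = {1, 3, 480, 640};          // N, C, H, W
//   memcpy(in.shape, shape, sizeof(shape));
//   in.data = bgrImage.data;                  // packed image buffer
//
//   DetRet *ret = NULL;
//   int num = 0;
//   RKNN_ObjDet(hdx, &in, &ret, &num);        // results in ret[0 .. num-1]
//
//   RKNN_Release(hdx);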

1
zhixing-1 Normal file
View File

@ -0,0 +1 @@
g++ -std=c++11 ./*.cpp -o main.out -I /usr/local/include -I /usr/local/include/opencv4 -I /usr/include -I /opt/ros/melodic/include -I /home/firefly/Desktop/object_detect_sdk_1804/install/include -L /usr/local/lib -l opencv_core -l opencv_imgproc -l opencv_imgcodecs -l opencv_highgui -l opencv_dnn -l opencv_videoio -l opencv_calib3d -L /usr/lib/obj_sdk -l rknn_sdk -l rknnrt -l roscpp -l roslib -l rosconsole -l roscpp_serialization -L /opt/ros/melodic/lib -l rostime -l boost_system -l pthread -l udev

5
说明.txt Normal file
View File

@ -0,0 +1,5 @@
V 2.0
1. Added a horizon-based ranging method; its stability is still to be tested.
2. Fixed the disconnection bug, caused by the fusion module's empty check being inconsistent with the stereo module's empty check.
3.