(一)解析图像与点云数据
由于SDK提供的C#接口不支持同时显示深度图与RGB图,所以对RGB图与深度图分别进行解析,通过WinForm的PictureBox控件进行显示
解析RGB图并显示
// ---- Parse the RGB frame and show it in pictureBox2 ----
// Copy the raw RGB frame out of the SDK buffer into managed memory.
// The buffer is planar: full R plane, then G plane, then B plane
// (inferred from the channel offsets below — TODO confirm against the SDK docs).
Marshal.Copy(stFrameData.stImageData[1].pData, m_pcDataBuf, 0, (int)stFrameData.stImageData[1].nDataLen);
int m = (int)m_stImageInfo.nHeight;  // image height in pixels
int n = (int)m_stImageInfo.nWidth;   // image width in pixels
Mat image = new Mat(new OpenCvSharp.Size(n, m), MatType.CV_8UC3);
// Interleave the planar buffer into the Mat. Vec3b is (B, G, R): blue comes
// from the third plane (offset 2*m*n), green from the second, red from the first.
for (int y = 0; y < m; y++)
{
    for (int x = 0; x < n; x++)
    {
        image.Set(y, x, new Vec3b(m_pcDataBuf[x + y * n + 2 * m * n], m_pcDataBuf[x + y * n + m * n], m_pcDataBuf[x + y * n]));
    }
}
// BUG FIX: the original called image.CvtColor(RGB2BGR) on the still-empty Mat
// *before* the pixel loop and never used the result, and it converted the Mat
// to a Bitmap twice (the second copy, "bitmaps", was unused). Both dead
// statements are removed.
System.Drawing.Bitmap bitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(image);
pictureBox2.Image = bitmap;
解析深度图并显示
// ---- Parse the depth frame and show it in pictureBox3 ----
// Wrap the SDK's 16-bit depth buffer in a Mat without copying.
Mat depthData = new Mat((int)stFrameData.stImageData[0].nHeight, (int)stFrameData.stImageData[0].nWidth, MatType.CV_16UC1, stFrameData.stImageData[0].pData);
// Map raw depth values 0..1000 (presumably millimetres — TODO confirm)
// onto the displayable 0..255 range.
double scaleFactor = 255.0 / 1000.0;
Mat normalizedDepth = new Mat();
Cv2.ConvertScaleAbs(depthData, normalizedDepth, scaleFactor);
// NOTE(review): ConvertScaleAbs already produces CV_8U, so this ConvertTo is
// a no-op copy; kept in case later (unseen) code references normalizedDepth.
Mat depthImage8Bit = new Mat();
normalizedDepth.ConvertTo(depthImage8Bit, MatType.CV_8U);
// Binarize: scaled depth above 128 becomes white, the rest black.
double thresholdValue = 128;
double maxValue = 255;
Cv2.Threshold(depthImage8Bit, depthImage8Bit, thresholdValue, maxValue, ThresholdTypes.Binary);
Bitmap bmp1 = new Bitmap(depthImage8Bit.Cols, depthImage8Bit.Rows, (int)depthImage8Bit.Step(), System.Drawing.Imaging.PixelFormat.Format8bppIndexed, depthImage8Bit.Data);
// BUG FIX: an 8bpp indexed Bitmap starts with an arbitrary default palette,
// so gray values were displayed with wrong colors. Install a grayscale ramp
// so pixel value i renders as gray level i.
System.Drawing.Imaging.ColorPalette palette = bmp1.Palette;
for (int i = 0; i < 256; i++)
{
    palette.Entries[i] = System.Drawing.Color.FromArgb(i, i, i);
}
bmp1.Palette = palette;
// NOTE(review): bmp1 shares memory with depthImage8Bit (and transitively with
// the SDK frame buffer) — keep those alive while the Bitmap is displayed.
pictureBox3.Image = bmp1;
pictureBox3.SizeMode = PictureBoxSizeMode.StretchImage;
解析点云数据,并将其转换为halcon的HTuple数据类型,使用hWindowControl控件对点云进行显示
// ---- Convert the depth frame to a point cloud and hand it to HALCON ----
// Let the SDK project the depth image into an XYZ cloud, then copy the raw
// buffer (interleaved x,y,z float triplets) into managed memory.
Mv3dRgbdSDK.MV3D_RGBD_MapDepthToPointCloud(m_handle, stFrameData.stImageData[0], pointcloud);
Marshal.Copy(pointcloud.pData, pointData, 0, (int)pointcloud.nDataLen);
// Reinterpret the byte buffer as floats in one block copy instead of the
// original per-element BitConverter loop.
float[] floatArray = new float[pointData.Length / sizeof(float)];
Buffer.BlockCopy(pointData, 0, floatArray, 0, floatArray.Length * sizeof(float));
int nPointNum = (int)pointcloud.nDataLen / (sizeof(float) * 3);
// Split the interleaved triplets into three coordinate arrays.
// (The original declared these as non-generic "List", which does not compile,
// and then filled three HTuples one element at a time via the indexer — an
// O(n^2) operation that is extremely slow for large clouds.)
double[] vecX = new double[nPointNum];
double[] vecY = new double[nPointNum];
double[] vecZ = new double[nPointNum];
for (int nPntIndex = 0; nPntIndex < nPointNum; ++nPntIndex)
{
    vecX[nPntIndex] = floatArray[nPntIndex * 3 + 0];
    vecY[nPntIndex] = floatArray[nPntIndex * 3 + 1];
    vecZ[nPntIndex] = floatArray[nPntIndex * 3 + 2];
}
// Build each HTuple from a whole array in one call (float -> double is exact).
HTuple a = new HTuple(vecX);
HTuple b = new HTuple(vecY);
HTuple c = new HTuple(vecZ);
HOperatorSet.GenObjectModel3dFromPoints(a, b, c, out hv_selectObjectModel3D);
HOperatorSet.WriteObjectModel3d(hv_selectObjectModel3D, "ply", "oud_zzss.ply", new HTuple(), "false");

(二)点云预处理
* ---- Point-cloud preprocessing ----
* Downsample the raw cloud ('fast' mode, 1.5 minimum point distance) to cut
* the point count before the heavier operators below.
sample_object_model_3d (cloud_a, 'fast', 1.5, [], [], SampledObjectModel3D)
* Point counts before/after sampling (diagnostic only; not used later here).
get_object_model_3d_params (cloud_a, 'num_points', GenParamValuess)
get_object_model_3d_params (SampledObjectModel3D, 'num_points', GenParamValue1aa)
* Crop to the working volume with three axis-aligned pass-through filters
* (z in [500,760], x in [-500,325], y in [-350,200]; units follow the camera
* model — presumably millimetres, TODO confirm).
select_points_object_model_3d (SampledObjectModel3D, 'point_coord_z',500, 760, ObjectModel3D)
select_points_object_model_3d (ObjectModel3D, 'point_coord_x', -500, 325, ObjectModel3D1)
select_points_object_model_3d (ObjectModel3D1, 'point_coord_y', -350, 200, ObjectModel3D2)
* Euclidean clustering: points closer than 2.2 belong to the same segment.
connection_object_model_3d (ObjectModel3D2, 'distance_3d',2.2, ObjectModel3DConnected)
get_object_model_3d_params (ObjectModel3DConnected, 'num_points', GenParamValue1)
* Keep only clusters with 2000..15000 points (rejects noise and oversized blobs).
select_object_model_3d (ObjectModel3DConnected, 'num_points', 'and',2000,15000, ObjectModel3DSelected)
* NOTE(review): the cropped-but-unclustered cloud (ObjectModel3D2) is written
* to disk here, not the size-filtered ObjectModel3DSelected — confirm intended.
write_object_model_3d (ObjectModel3D2, 'ply', 'D:/项目程序/项目程序/海康3D/jingsai/BasicDemo_Callback/bin/win64/debug/point/g3.ply', [], 'false')
(三)读取鞋底点云模板
由于鞋底分为左右脚和正反面,所以建立4个模板,分别为左脚正面、左脚反面、右脚正面、右脚反面
* ---- Load the four shoe-sole template clouds ----
* A sole can be left/right and face-up/face-down, so four templates are kept:
* left-front (zuo_zheng), left-back (zuo_fan), right-front (you_zheng),
* right-back (you_fan).
read_object_model_3d ('C:/Users/93733/Desktop/分拣鞋/新建文件夹/hkmodel/zuozheng.ply', 'm', [], [], zuo_zheng, Status)
read_object_model_3d ('C:/Users/93733/Desktop/分拣鞋/新建文件夹/hkmodel/zuo_fan.ply', 'm', [], [], zuo_fan, Status)
read_object_model_3d ('C:/Users/93733/Desktop/分拣鞋/新建文件夹/hkmodel/youzheng.ply', 'm', [], [], you_zheng, Status)
read_object_model_3d ('C:/Users/93733/Desktop/分拣鞋/新建文件夹/hkmodel/you_fan.ply', 'm', [], [], you_fan, Status)
* Move every template into the working coordinate frame via the two
* calibration poses applied in sequence (PoseA then PoseB — presumably the
* camera extrinsics and hand-eye transform; TODO confirm).
* ObjectModel3DStandard2 is a scratch variable reused between the two
* transform steps of each template.
rigid_trans_object_model_3d (zuo_zheng, PoseA, ObjectModel3DStandard2)
rigid_trans_object_model_3d (ObjectModel3DStandard2, PoseB, ObjectModel3D_zuozheng)
rigid_trans_object_model_3d (zuo_fan, PoseA, ObjectModel3DStandard2)
rigid_trans_object_model_3d (ObjectModel3DStandard2, PoseB, ObjectModel3D_zuo_fan)
rigid_trans_object_model_3d (you_zheng, PoseA, ObjectModel3DStandard2)
rigid_trans_object_model_3d (ObjectModel3DStandard2, PoseB, ObjectModel3D_you_zheng)
rigid_trans_object_model_3d (you_fan, PoseA, ObjectModel3DStandard2)
rigid_trans_object_model_3d (ObjectModel3DStandard2, PoseB, ObjectModel3D_you_fan)
* Pre-taught per-class poses used later with pose_compose; order must match
* the template list (zuozheng, zuofan, youzheng, youfan).
create_pose (743, 158.23,-131.77, 179, 0, 0, 'Rp+T', 'gba', 'point', Pose_youzheng)
create_pose (751, 218.35,-131.66, 179, 0, 0, 'Rp+T', 'gba', 'point', Pose_youfan)
create_pose (726.22, 239.44,-131.15, 179, 0, 0, 'Rp+T', 'gba', 'point', Pose_zuozheng)
create_pose (689.30,216.93,-138.92, 179, 0, 0, 'Rp+T', 'gba', 'point', Pose_zuofan)
(四)3D表面匹配
根据得到的相机与机械臂转换关系,通过刚体变换将目标点云变换到机器坐标系下,将四个模板点云分别与目标点云进行3D表面匹配,提取Scores最大的目标作为抓取目标,得到姿态Pose,利用Pose将模板变换到目标点云上,计算质心以及抓取姿态
* ---- 3D surface matching: pick the best-scoring shoe template ----
* jx/jy/jz: taught robot coordinates of the reference part; the subtracted
* constants are its model-frame centroid, giving a constant centroid-to-grasp
* offset (units presumably mm — TODO confirm).
jx:=813.54
jy:=33.9
jz:=-48.53
offset_x:=jx-(109.878)
offset_y:=jy-(5.18)
offset_z:=jz-(1.9068)
* Per-template result tuples; exactly one entry (7 pose values / 1 score /
* 1 model handle) is appended per template so the Indices_Max*7 indexing
* below stays aligned.
grap_pose:=[]
grap_score:=[]
grap_obk:=[]
grap_point:=[]
intput_object:=[ObjectModel3D_zuozheng,ObjectModel3D_zuo_fan,ObjectModel3D_you_zheng,ObjectModel3D_you_fan]
pp_pose:=[Pose_zuozheng,Pose_zuofan,Pose_youzheng,Pose_youfan]
* PERF FIX: the scene cloud does not change per template, so the union is
* hoisted out of the loop (the original recomputed it every iteration).
union_object_model_3d (ObjectModel3DSelected, 'points_surface', UnionObjectModel3D)
for IndexX := 0 to |intput_object|-1 by 1
* Build a surface model from the template and search for it in the scene.
surface_normals_object_model_3d (intput_object[IndexX], 'mls', [], [], ObjectModel3DNormals)
create_surface_model (ObjectModel3DNormals, 0.03, [], [], SurfaceModelID)
find_surface_model (SurfaceModelID, UnionObjectModel3D,0.005, 0.005, 0.5, 'true', 'num_matches', 4, Pose_find, Score, SurfaceMatchingResultID)
* No match at all -> treat as score 0 so tuple_max below stays valid.
tuple_length (Score, A)
if(A==0)
Score:=0
endif
tuple_max (Score, Max)
tuple_find (Score, Max, Indices)
* NOTE(review): on a tied best score, Indices is a tuple and the *7 slice
* below would misbehave — confirm ties cannot occur or take Indices[0].
selectNUM:=Indices
if(Max>0.7)
* Accept the best of the (up to 4) matches; each pose is 7 values.
selectPose:=Pose_find[selectNUM*7:selectNUM*7+6]
pose_compose (selectPose,pp_pose[IndexX*7:IndexX*7+6], PoseCompose)
* pose_compose (selectPose, Pose_zuofan, PoseCompose)
convert_pose_type (PoseCompose, 'Rp+T', 'abg', 'point', PoseOut1)
pose_invert (PoseCompose, PoseResult)
rigid_trans_object_model_3d (intput_object[IndexX], selectPose, ObjectScene3DRigidTrans)
grap_pose:=[grap_pose,PoseOut1]
grap_score:=[grap_score,Max]
grap_obk:=[grap_obk,ObjectScene3DRigidTrans]
grap_point:=[grap_point,Pose_find]
* disp_object_model_3d (WindowHandle,[ObjectScene3DRigidTrans,UnionObjectModel3D], [], [],['color_0','color_1','alpha_1','disp_pose'], ['green','gray',0.5,'true'])
else
* BUG FIX: the original used elseif(Max<0.7), so a score of exactly 0.7
* appended nothing and desynchronized the result tuples. A plain else
* guarantees every template contributes exactly one entry.
grap_pose:=[grap_pose,zero_pose]
grap_point:=[grap_point,zero_pose]
grap_score:=[grap_score,0]
grap_obk:=[grap_obk,0]
endif
endfor
* Pick the template with the highest score and derive the grasp pose.
tuple_max (grap_score, grap_score_Max)
tuple_find (grap_score, grap_score_Max, Indices_Max)
if(grap_score_Max!=0)
final_Pose:=grap_pose[Indices_Max*7:Indices_Max*7+6]
get_object_models_center (grap_obk[Indices_Max], CenterS)
* Matched-model centroid plus the constant offset gives the robot grasp point.
x:=CenterS[0]+offset_x
y:=CenterS[1]+offset_y
z:=CenterS[2]+offset_z
rx:=final_Pose[3]
ry:=final_Pose[4]
rz:=final_Pose[5]
* Index of the winning template encodes left/right and face-up/face-down.
shose_flag:=Indices_Max
StartPose:=[x,y,z,0,0,0,0]
* Sphere marker (radius 20) at the grasp point for visualization.
gen_sphere_object_model_3d (StartPose,20, StartPoint)
* visualize_object_model_3d (WindowHandle,[grap_obk[Indices_Max],UnionObjectModel3D,StartPoint],[],[],['color_0','color_1','color_2','alpha_1','disp_pose'], ['green','gray','red',0.5,'true'], [], [], [], PoseOut)
disp_object_model_3d (WindowHandle,[grap_obk[Indices_Max],UnionObjectModel3D,StartPoint],[],[],['color_0','color_1','color_2','alpha_1','disp_pose'], ['green','gray','red',0.5,'true'])
endif
(五)水果分拣
* ---- Fruit sorting: calibration, crop, cluster, classify by size ----
* PoseA/PoseB: the camera-to-robot calibration chain applied in sequence
* (confirm the meaning of each step against the calibration procedure).
create_pose (-43.9651,19.352,1031.71,359.001,359.198,185.332,'Rp+T', 'gba', 'point', PoseA)
create_pose (-70.6744,-70.6666,2061.22,181.31,1.74528,169.493,'Rp+T', 'gba', 'point', PoseB)
pose_to_hom_mat3d (PoseA, HomMat3DA)
pose_to_hom_mat3d (PoseB, HomMat3DB)
* Transform the raw cloud into the working frame (two chained rigid moves).
rigid_trans_object_model_3d (cloud_a, PoseA, A)
rigid_trans_object_model_3d (A, PoseB, B)
* Downsample, then crop to the fruit tray volume.
sample_object_model_3d (B, 'fast', 1.5, [], [], SampledObjectModel3D)
get_object_model_3d_params (cloud_a, 'num_points', GenParamValuess)
get_object_model_3d_params (SampledObjectModel3D, 'num_points', GenParamValue1aa)
*visualize_object_model_3d (WindowHandle,cloud_a, [], [], ['red_channel_attrib','green_channel_attrib','blue_channel_attrib'], ['red','green','blue'], [], [], [], PoseOut)
select_points_object_model_3d (SampledObjectModel3D, 'point_coord_z',0, 100, ObjectModel3D)
select_points_object_model_3d (ObjectModel3D, 'point_coord_x', -130, 240, ObjectModel3D1)
select_points_object_model_3d (ObjectModel3D1, 'point_coord_y', -120, 120, ObjectModel3D2)
* Euclidean clustering; keep clusters of 1000..5000 points (one fruit each).
connection_object_model_3d (ObjectModel3D2, 'distance_3d',2.3, ObjectModel3DConnected)
get_object_model_3d_params (ObjectModel3DConnected, 'num_points', GenParamValue1)
select_object_model_3d (ObjectModel3DConnected, 'num_points', 'and',1000,5000, ObjectModel3DSelected)
union_object_model_3d (ObjectModel3DSelected, 'points_surface', UnionObjectModel3D1)
*visualize_object_model_3d (WindowHandle, ObjectModel3DSelected, [], [], ['red_channel_attrib','green_channel_attrib','blue_channel_attrib'], ['red','green','blue'], [], [], [], PoseOut)
* Taught robot point (jx/jy/jz) and its camera-frame counterpart (cx/cy/cz)
* define the constant centroid-to-grasp offset.
jx:=744.50
jy:=304.77
jz:=-38.90
cx:=105.976
cy:=6.337556
cz:=4.744299
* NOTE(review): offset_x:=cx-cx is always 0; by analogy with the other two
* axes (and the shoe section) this probably should be jx-cx — confirm.
offset_x:=cx-cx
offset_y:=jy-cy
offset_z:=jz-cz
grap_flag:=[]
x:=[]
y:=[]
z:=[]
pp:=[]
if(|ObjectModel3DSelected|>0)
* Inspect each cluster; the first sufficiently large one decides the result.
for Index := 0 to |ObjectModel3DSelected|-1 by 1
get_object_model_3d_params (ObjectModel3DSelected[Index], 'num_points', va)
if(va>500)
* Oriented bounding box: Length1/Length2/Length3 are the box extents.
smallest_bounding_box_object_model_3d (ObjectModel3DSelected[Index], 'oriented', PoseBox,Length1, Length2, Length3)
get_object_models_center (ObjectModel3DSelected[Index], Center)
* Sphere marker at the cluster centroid for visualization.
point:=[Center[0],Center[1],Center[2], 0,0,0,0]
gen_sphere_object_model_3d (point, 4, pPoint)
pp:=[pp,pPoint]
* Robot-frame grasp coordinates (overwrite the empty tuples initialized above).
x:=Center[0]+offset_x
y:=Center[1]+offset_y
z:=Center[2]+offset_z
disp_object_model_3d (WindowHandle,[UnionObjectModel3D1,ObjectModel3DSelected[Index]], [], [],['color_0','color_1','alpha_1','disp_pose'], ['green','red',0.5,'true'])
* Size classification: >=70 in both main extents -> class 0, else class 1;
* only the first qualifying cluster is used (break in both branches).
if(Length1>=70 and Length2>=70)
grap_flag:=0
break
else
grap_flag:=1
break
endif
endif
endfor
endif
将上述halcon代码封装为外部函数,通过halcon引擎对其进行调用;

(六)上位机程序
机械臂作为客户端,上位机作为服务器,当机械臂离开规定区域时,客户端会发送拍照信号,在上位机开启一个线程来监听客户端是否发送拍照信号,当监听到客户端发送的拍照信号时,相机进行拍照,并将点云数据传送到halcon引擎中,进行点云处理获取抓取坐标和姿态,当机械臂复位时,发送下一次的抓取坐标和姿态
示教器端程序如图所示:

上位机与机器人通讯程序
// Creates the TCP server socket (the PC is the server; the robot controller
// connects as a client), binds it to 192.168.1.200:6000 and starts the
// background accept loop (listenA).
void Robot_Init()
{
    robot_socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
    try
    {
        IPAddress ip = IPAddress.Parse("192.168.1.200");
        IPEndPoint point = new IPEndPoint(ip, 6000);
        robot_socket.Bind(point);
        robot_socket.Listen(1);
        listBox1.Items.Add("服务器创建成功");
        // BUG FIX: start the accept thread only after Bind/Listen succeeded.
        // The original started it unconditionally, so a failed bind still
        // spawned a thread that immediately faulted inside listenA.
        Thread listen_th = new Thread(listenA);
        listen_th.IsBackground = true;
        listen_th.Start(robot_socket);
    }
    catch
    {
        // Bind/Listen failed (address in use, wrong NIC, ...): report and
        // leave the server stopped.
        listBox1.Items.Add("请检查robot的IP和端口");
    }
}
// Accept loop, run on a background thread started by Robot_Init.
// Parameter o: the listening Socket passed via Thread.Start.
// Each accepted client gets its own receive thread.
private void listenA(object o)
{
// listBox1.Items.Clear();
Socket socketwatch = o as Socket;
try
{
while (true)
{
// Blocks until a client connects. The accepted socket is stored in the
// class-level field "mysocket" (shared state — a second client overwrites it).
mysocket = socketwatch.Accept();
// MessageBox.Show("有客户端接入");
// NOTE(review): listBox1 is touched from a non-UI thread without Invoke;
// WinForms only tolerates this when cross-thread checks are disabled —
// confirm, or marshal via listBox1.Invoke.
listBox1.Items.Add("有客户端接入");
Thread receiveer = new Thread(receive);
receiveer.IsBackground = true;
receiveer.Start(mysocket);
}
}
catch
{
// Reached when the listening socket is closed or Accept fails.
listBox1.Items.Add("错误");
}
}
// Per-client receive loop, run on a background thread per accepted socket.
// Protocol (as visible here): the robot sends "6" to request the next
// photo/processing cycle (sets robot_flag), and "success" as an acknowledgement.
// Parameter o: the accepted Socket passed via Thread.Start.
private void receive(object o)
{
    string str = "";
    byte[] buffer = new byte[1024];
    int len = 0;
    Socket mysocket = o as Socket;
    while (true)
    {
        len = mysocket.Receive(buffer, 0, buffer.Length, SocketFlags.None);
        // BUG FIX: Receive returns 0 when the peer closes the connection;
        // the original ignored this and spun/threw on the dead socket.
        if (len <= 0)
        {
            break;
        }
        str = Encoding.UTF8.GetString(buffer, 0, len);
        if (str == "6")
        {
            // Robot left the photo area: trigger the next capture cycle.
            robot_flag = true;
        }
        if (str == "success")
        {
            // BUG FIX: marshal the UI update to the UI thread; the original
            // assigned label3.Text directly from this background thread.
            label3.Invoke((MethodInvoker)(() => label3.Text = "success"));
        }
    }
}
由于在halcon中得到的姿态表示方式为欧拉角,而我们使用的协作机械臂姿态表达方式为轴角,所以需要将欧拉角转换为轴角,转换过程如下所示:
/// <summary>
/// Computes the matrix product a * b using the classic triple loop.
/// </summary>
/// <param name="a">Left operand, n x q.</param>
/// <param name="b">Right operand, q x m.</param>
/// <returns>The n x m product matrix.</returns>
/// <exception cref="ArgumentException">
/// Thrown when the column count of <paramref name="a"/> does not equal the
/// row count of <paramref name="b"/>.
/// </exception>
public static double[,] matrix_multiply(double[,] a, double[,] b)
{
    // Validate before allocating the result (the original allocated first and
    // threw the non-specific Exception type; ArgumentException is more precise
    // and still caught by any existing catch(Exception) callers).
    if (a.GetLength(1) != b.GetLength(0))
    {
        throw new ArgumentException("matrix1 的列数与 matrix2 的行数不相等");
    }
    int n = a.GetLength(0);
    int m = b.GetLength(1);
    int q = a.GetLength(1);
    double[,] result = new double[n, m];
    for (int i = 0; i < n; i++)
    {
        for (int j = 0; j < m; j++)
        {
            for (int k = 0; k < q; k++)
            {
                result[i, j] += a[i, k] * b[k, j];
            }
        }
    }
    return result;
}
/// <summary>
/// Builds the 3x3 rotation matrix for a rotation of <paramref name="r"/>
/// degrees about the X axis.
/// </summary>
/// <param name="r">Rotation angle in degrees.</param>
/// <returns>The 3x3 rotation matrix.</returns>
public static double[,] Rx(double r)
{
    double rad = r / 180 * Math.PI;
    double cos = Math.Cos(rad);
    double sin = Math.Sin(rad);
    // Row-major layout of the standard X-axis rotation.
    return new double[,]
    {
        { 1, 0, 0 },
        { 0, cos, -sin },
        { 0, sin, cos },
    };
}
/// <summary>
/// Builds the 3x3 rotation matrix for a rotation of <paramref name="r"/>
/// degrees about the Y axis.
/// </summary>
/// <param name="r">Rotation angle in degrees.</param>
/// <returns>The 3x3 rotation matrix.</returns>
public static double[,] Ry(double r)
{
    double rad = r / 180 * Math.PI;
    double cos = Math.Cos(rad);
    double sin = Math.Sin(rad);
    // Row-major layout of the standard Y-axis rotation.
    return new double[,]
    {
        { cos, 0, sin },
        { 0, 1, 0 },
        { -sin, 0, cos },
    };
}
/// <summary>
/// Builds the 3x3 rotation matrix for a rotation of <paramref name="r"/>
/// degrees about the Z axis.
/// </summary>
/// <param name="r">Rotation angle in degrees.</param>
/// <returns>The 3x3 rotation matrix.</returns>
public static double[,] Rz(double r)
{
    double rad = r / 180 * Math.PI;
    double cos = Math.Cos(rad);
    double sin = Math.Sin(rad);
    // Row-major layout of the standard Z-axis rotation.
    return new double[,]
    {
        { cos, -sin, 0 },
        { sin, cos, 0 },
        { 0, 0, 1 },
    };
}
// Converts a 3x3 rotation matrix to the axis-angle ("rotation vector")
// representation used by the cobot: the returned 3-vector points along the
// rotation axis and its length is the rotation angle in radians.
// The two singular cases are handled explicitly: theta near 0 (any axis is
// valid, (0,0,1) chosen) and theta near pi (axis recovered from the largest
// diagonal element, since the skew-symmetric part vanishes there).
public static double[] RotationMatrixTo_Vec(double[,] R)
{
Double[] vec = new Double[3];
double x, y, z, theta; // x, y, z: unit rotation axis; theta: angle in radians
double epsilon = 1E-12;
// cos(theta) = (trace(R) - 1) / 2.
double v = (R[0, 0] + R[1, 1] + R[2, 2] - 1.0f) / 2.0f;
if (Math.Abs(v) < 1 - epsilon)
{
// Regular case: axis from the skew-symmetric part, normalized by 2*sin(theta).
theta = Math.Acos(v);
x = 1 / (2 * Math.Sin(theta)) * (R[2, 1] - R[1, 2]);
y = 1 / (2 * Math.Sin(theta)) * (R[0, 2] - R[2, 0]);
z = 1 / (2 * Math.Sin(theta)) * (R[1, 0] - R[0, 1]);
}
else
{
if (v > 0.0f)
{
// \theta = 0, diagonal elements approaching 1
theta = 0;
x = 0;
y = 0;
z = 1;
}
else
{
// theta = pi: sin(theta) = 0, so the formula above is unusable; recover
// the axis from the column with the largest diagonal entry (best numeric
// conditioning for the square root).
theta = Math.PI;
if (R[0, 0] >= R[1, 1] && R[0, 0] >= R[2, 2])
{
x = Math.Sqrt((R[0, 0] + 1) / 2);
y = R[0, 1] / (2 * x);
z = R[0, 2] / (2 * x);
}
else if (R[1, 1] >= R[0, 0] && R[1, 1] >= R[2, 2])
{
y = Math.Sqrt((R[1, 1] + 1) / 2);
x = R[1, 0] / (2 * y);
z = R[1, 2] / (2 * y);
}
else
{
z = Math.Sqrt((R[2, 2] + 1) / 2);
x = R[2, 0] / (2 * z);
y = R[2, 1] / (2 * z);
}
}
}
// Scale the unit axis by the angle to get the rotation vector.
vec[0] = x * theta;
vec[1] = y * theta;
vec[2] = z * theta;
return vec;
}
通过点击RadioButton按键选择分拣鞋底还是分拣水果

// ---- Task selection ----
// radioButton1: shoe-sole sorting; radioButton3: fruit sorting. Both feed the
// current point cloud (hv_selectObjectModel3D) into the corresponding
// HALCON-engine wrapper procedure.
if (radioButton1.Checked)
{
point_cloud_shose_select(hv_selectObjectModel3D);
}
else if (radioButton3.Checked)
{
point_cloud_fruit(hv_selectObjectModel3D);
}
// Starts the automatic capture/processing loop (z_d_camera) on its own
// thread so the UI stays responsive.
// NOTE(review): the thread field is overwritten on every click, so clicking
// twice runs two z_d_camera loops in parallel — confirm this is intended or
// guard with IsAlive. The thread is not marked IsBackground here, unlike the
// other worker threads in this file.
private void button5_Click(object sender, EventArgs e)
{
_zidong_img = new Thread(z_d_camera);
_zidong_img.Start();
}
由于需要计算每个积木点云的质心坐标用于抓取,而通过颜色阈值分割的点云中可能存在多个相同颜色的色块,所以这里通过点的数量来判断每种颜色的点云中积木的数量是否大于一个,如果数量大于一个,通过聚类分割对该颜色的点云进行继续分割,分割出所有积木点云,并计算其质心坐标,通过vecflag来标记积木点云颜色种类;
采用机械臂作为客户端,上位机作为服务器,当用户点击自动运行按钮后,相机会进行拍照并通过PCL进行点云处理,并将得到的抓取坐标发送给机械臂,第一次抓拍之后的所有拍照操作由客户端控制,当机械臂第一次获取的所有坐标全部抓取完毕后,机械臂运动到最后一个物料的放料区域的同时会向上位机发送一个数字6,上位机接收到数字6会触发信号serverStatusChanged,并去执行对应的槽函数 ,抓拍点云数据并进行点云处理,将得到的抓取坐标发送给客户端,客户端会解析数据,引导机械臂进行抓取。