刚开始入手 Kinect,对于平台的搭建不是很熟悉,对于体感控制智能小车这个项目的整体思路也不太了解。我通过 Kinect Studio 和 Gesture Builder 创建了一个手势库,这一步我也不知道做对没有;下面这一步完全不知道怎么从 Kinect 入手:"计算出人体骨骼节点的坐标,最终完成对人体骨骼节点的跟踪。骨骼信息的识别。根据骨骼节点的坐标和人体的姿态共同处理得到与之对应的指令"。希望哪位可以详细地给我讲一下思路,我卡在这里很久了,谢谢。
体感智能车的原理非常简单:利用 Kinect 采集人体的肢体信息,然后通过蓝牙串口向 Arduino 发送字符命令;Arduino 根据相应的字符命令控制双 H 桥电机驱动模块,实现小车的前进、后退等动作。项目主要用到小车底盘套件、蓝牙模块、控制器等。
需要安装 OpenNI_NITE_Installer-win32-0.27,以及 Kinect 的 Processing 链接库 SimpleOpenNI-0.27。
// --- Globals for the Kinect gesture -> serial car-control sketch ---
// Joints are read each frame in draw(); inter-joint distances classify
// the pose and a single command character is written to the serial port.
import SimpleOpenNI.*;
SimpleOpenNI kinect;   // Kinect interface (SimpleOpenNI wrapper)
import processing.serial.*;
Serial port;           // serial link to the Arduino (Bluetooth COM port)
PFont myFont;          // font for on-screen (Chinese) text
String myStr;          // banner text (assigned in setup)
String Str_w;          // on-screen label for the "forward" command
String Str_s;          // on-screen label for the "backward" command
String Str_a;          // on-screen label for the "turn left" command
String Str_d;          // on-screen label for the "turn right" command
String Str_x;          // on-screen label for the "stop" command
String Str_temp_NO;    // status text: no user recognized
String Str_temp_Yes;   // status text: tracking established
PVector right_hand;    // tracked joint positions; converted to
PVector right_hip;     // projective (screen) coordinates in draw()
PVector head;
PVector left_hand;
PVector left_hip;
PVector difference;    // scratch vector for joint-to-joint differences
float shouju;          // distance between the two hands ("shouju" = hand distance)
float youtou;          // distance right hand <-> head
float zuotou;          // distance left hand <-> head
float youhip;          // distance right hand <-> right hip
float zuohip;          // distance left hand <-> hip (see note in draw(): uses right_hip)
char ch_w= 'w';        // command bytes sent to the Arduino over serial:
char ch_s= 's';        // w=forward, s=backward, a=left, d=right, x=stop
char ch_a= 'a';
char ch_d= 'd';
char ch_x= 'x';
// One-time initialization: window, Kinect streams (depth + RGB, mirrored,
// full skeleton profile), fonts and UI strings, joint vectors, and the
// serial port used to send command characters to the Arduino.
void setup()
{
size(640, 600);
kinect = new SimpleOpenNI(this);
kinect.enableDepth();   // enable the depth stream
kinect.enableRGB();     // enable the RGB camera stream
kinect.setMirror(true); // mirror the image so on-screen motion matches the user
kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL); // track the full skeleton
myFont = createFont("黑体", 32);
myStr = "重庆文理学院机器人研究所";
Str_w = "前进";
Str_s = "后退";
Str_a = "左转";
Str_d = "右转";
Str_x = "停止";
Str_temp_NO = "未识别";
Str_temp_Yes = "连接成功";
right_hand = new PVector();
right_hip = new PVector();
head = new PVector();
left_hand = new PVector();
left_hip = new PVector();
println(Serial.list());// list the available serial ports
// NOTE(review): assumes the Bluetooth adapter is the FIRST serial port;
// this throws if no port exists — confirm the index on the target machine.
String portName = Serial.list()[0];
port = new Serial(this, portName, 115200);// baud rate must match the Arduino sketch
}
// Per-frame loop: fetch a new Kinect frame, draw the RGB image plus banner
// text, then — if the first user's skeleton is tracked — read five joints,
// derive inter-joint distances, classify the pose into one of five commands
// (w/s/a/d/x) and write that character to the serial port.
void draw()
{
background(80,100,140);// blue-grey background
kinect.update();// pull the latest depth/RGB/skeleton data
//PImage depthImage = kinect.depthImage();
//image(kinect.depthImage(),0, 0);
// NOTE(review): rgbImage() may be null before the first camera frame — verify.
PImage rgbImage = kinect.rgbImage();
image(rgbImage, 0, 0);
fill(0,0,255);
textFont(myFont);
text(myStr, 100, 530);
text("www.cqwu.net" , 195, 565);
IntVector userList = new IntVector();
kinect.getUsers(userList);
if (userList.size() > 0)
{
// Only the first detected user is used for control.
int userId = userList.get(0);
// NOTE(review): calling the calibration callback by hand every frame forces
// startTrackingSkeleton(userId) repeatedly; normally SimpleOpenNI fires
// onEndCalibration itself — confirm this workaround is intentional.
onEndCalibration(userId, true);
if ( kinect.isTrackingSkeleton(userId))
{
// Redraw the banner over the video, then show the "connected" status.
fill(0,0,255);
textFont(myFont);
text(myStr, 100, 530);
text("www.cqwu.net " , 195, 565);
fill(0, 255, 0);
textSize(25);
text(Str_temp_Yes, 270,30);
// Read each joint in real-world coordinates, then convert in place to
// projective (screen) coordinates so distances are in pixel space.
kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_HAND,right_hand);
kinect.convertRealWorldToProjective(right_hand, right_hand);
kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_HIP,right_hip);
kinect.convertRealWorldToProjective(right_hip, right_hip);
kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_HEAD,head);
kinect.convertRealWorldToProjective(head, head);
kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_LEFT_HAND,left_hand);
kinect.convertRealWorldToProjective(left_hand, left_hand);
kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_LEFT_HIP,left_hip);
kinect.convertRealWorldToProjective(left_hip, left_hip);
//textSize(20);
// Hand-to-hand distance.
difference = PVector.sub(right_hand, left_hand);
shouju = difference.mag();
// text("shouju: " + int(shouju), 20, 20);
// Right hand to head.
difference = PVector.sub(right_hand, head);
youtou = difference.mag();
// text("youtou: " + int(youtou), 20, 45);
// Left hand to head.
difference = PVector.sub(left_hand, head);
zuotou = difference.mag();
// text("zuotou: " + int(zuotou), 20, 70);
// Right hand to right hip.
difference = PVector.sub(right_hand, right_hip);
youhip = difference.mag();
// text("youhip: " + int(youhip), 20, 95);
// NOTE(review): left hand is measured against the RIGHT hip here, although
// left_hip is fetched above and never used in a distance — possibly meant
// to be left_hip; confirm against the thresholds below before changing.
difference = PVector.sub(left_hand, right_hip);
zuohip = difference.mag();
// text("zuohip: " + int(zuohip), 20, 120);
fill(255, 255, 0);
textSize(20);
// Threshold-based pose classifier (pixel distances, hand-tuned):
// both arms spread wide -> forward
if(shouju>400 && youtou>200 && zuotou>200 && youhip>200 && zuohip>200)
{
text(Str_w, 20, 40);
port.write(ch_w);
}
// both hands together near the head -> backward
else if(shouju<200 && youtou<200 && zuotou<200 && youhip>200 && zuohip>200)
{
text(Str_s, 20, 40);
port.write(ch_s);
}
// right hand dropped to the hip, left hand out -> turn left
else if(shouju>300 && youtou>180 && zuotou>180 && youhip<150 && zuohip>250)
{
text(Str_a, 20, 40);
port.write(ch_a);
}
// left hand dropped, right hand out -> turn right
else if(shouju>300 && youtou>180 && zuotou>180 && youhip>200 && zuohip<150)
{
text(Str_d, 20, 40);
port.write(ch_d);
}
// both hands resting near the hips -> stop
else if(shouju<250 && youtou>180 && zuotou>180 && youhip<180 && zuohip<180)
{
text(Str_x, 20, 40);
port.write(ch_x);
}
// Debug drawing (disabled): joint markers.
/***************************************************************
fill(255, 0, 0);
ellipse(left_hip.x, left_hip.y, 15, 15);
ellipse(left_hand.x, left_hand.y, 15, 15);
ellipse(head.x, head.y, 15, 15);
ellipse(right_hip.x, right_hip.y, 15, 15);
ellipse(right_hand.x, right_hand.y, 15, 15);
****************************************************************/
// Debug drawing (disabled): limb lines.
/****************************************************************
stroke(0, 255, 0);
strokeWeight(4);
line(right_hand.x, right_hand.y, head.x, head.y);
line(right_hand.x, right_hand.y, right_hip.x, right_hip.y);
line(left_hand.x, left_hand.y, head.x, head.y);
line(left_hand.x, left_hand.y, left_hip.x, left_hip.y);
****************************************************************/
}
}
else
{
// No user visible: show the red "not recognized" status.
fill(255, 0, 0);
textSize(25);
text(Str_temp_NO, 280, 30);
}
}
// user-tracking callbacks!
// SimpleOpenNI callback: a new user entered the scene — start watching for
// the "Psi" calibration pose (both arms raised) for that user.
void onNewUser(int userId) {
println("start pose detection");
kinect.startPoseDetection("Psi", userId);
}
// SimpleOpenNI callback: skeleton calibration finished. On success, begin
// skeleton tracking for this user; on failure, fall back to waiting for the
// "Psi" pose again. (Also invoked manually each frame from draw().)
void onEndCalibration(int userId, boolean successful) {
  if (!successful) {
    println(" Failed to calibrate user !!!");
    kinect.startPoseDetection("Psi", userId);
    return;
  }
  println(" User calibrated !!!");
  kinect.startTrackingSkeleton(userId);
}
// SimpleOpenNI callback: the "Psi" pose was detected — stop pose detection
// and request skeleton calibration for this user (true = force).
void onStartPose(String pose, int userId) {
println("Started pose for user");
kinect.stopPoseDetection(userId);
kinect.requestCalibrationSkeleton(userId, true);
}
感谢邀请
下边的博客是别人写的,和您的需求比较接近,可以参考一下:
https://blog.csdn.net/an_loser/article/details/110426755