
Microsoft Kinect SDK: http://www.microsoft.com/en-us/download/details.aspx?id=40278

Microsoft Kinect Toolkit: http://www.microsoft.com/en-us/download/details.aspx?id=40276

The book I'm following: http://www.arduinoandkinectprojects.com/

Ebook link: http://it-ebooks.info/book/761/

The Processing library I'm using: https://code.google.com/p/simple-openni/

[[Servo.serial.py]]

GroG

11 years ago

Try something like this in Processing (make sure the WebGui service is running in MRL):

 

import processing.net.*;

Client c;
String host = "127.0.0.1";

void setup() {
  // connect to the MRL WebGui and send a raw HTTP GET to move the servo
  c = new Client(this, host, 7777);
  c.write("GET /services/servoX/moveTo/20 HTTP/1.1\r\n");
  c.write("\r\n");
}

Thanks GroG, this worked great :) Now the problem is getting the int value X into that path (X instead of 20)...

How can I insert a variable into the string at that point?

Just figured out how to insert the X coordinate into that string...
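It's just Java string concatenation with +, so you build the path from the variable before writing it. Something like this (same servoX path and port as in GroG's snippet above):

int handX = 20; // computed elsewhere, e.g. from the Kinect hand position
Client c = new Client(this, "127.0.0.1", 7777);
c.write("GET /services/servoX/moveTo/" + handX + " HTTP/1.0\r\n");
c.write("\r\n");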

This is the Processing code I'm using. GroG suggested I get rid of the serial communication and use a REST API call instead to send the servos the Servo.write(X) command, and it seems to work quite well...

/* --------------------------------------------------------------------------
 * SimpleOpenNI Hands3d Test
 * --------------------------------------------------------------------------
 * Processing Wrapper for the OpenNI/Kinect 2 library
 * --------------------------------------------------------------------------
 * prog:  Max Rheiner / Interaction Design / Zhdk / http://iad.zhdk.ch/
 * date:  12/12/2012 (m/d/y)
 * ----------------------------------------------------------------------------
 * This demo shows how to use the gesture/hand generator.
 * It's not the most reliable yet; a two-hands example will follow.
 * ----------------------------------------------------------------------------
 */
import processing.net.*;
import java.util.Map;
import java.util.Iterator;
import SimpleOpenNI.*;
String X = "";
String Y = "";
String host = "127.0.0.1";
 
SimpleOpenNI context;
int handVecListSize = 20;
Map<Integer,ArrayList<PVector>>  handPathList = new HashMap<Integer,ArrayList<PVector>>();
color[]       userClr = new color[]{ color(255,0,0),
                                     color(0,255,0),
                                     color(0,0,255),
                                     color(255,255,0),
                                     color(255,0,255),
                                     color(0,255,255)
                                   };
void setup()
{
//  frameRate(200);
  size(640,480);
 
  context = new SimpleOpenNI(this);
  if(context.isInit() == false)
  {
     println("Can't init SimpleOpenNI, maybe the camera is not connected!"); 
     exit();
     return;  
  }   
 
  // enable depthMap generation 
  context.enableDepth();
  
  // disable mirror
  context.setMirror(true);
 
  // enable hands + gesture generation
  //context.enableGesture();
  context.enableHand();
  context.startGesture(SimpleOpenNI.GESTURE_WAVE);
  
  // set how smooth the hand capturing should be
  //context.setSmoothingHands(.5);
 }
 
void draw()
{
  // update the cam
  context.update();
 
  image(context.depthImage(),0,0);
    
  // draw the tracked hands
  if(handPathList.size() > 0)  
  {    
    Iterator itr = handPathList.entrySet().iterator();     
    while(itr.hasNext())
    {
      Map.Entry mapEntry = (Map.Entry)itr.next(); 
      int handId =  (Integer)mapEntry.getKey();
      ArrayList<PVector> vecList = (ArrayList<PVector>)mapEntry.getValue();
      PVector p;
      PVector p2d = new PVector();
      
        stroke(userClr[ (handId - 1) % userClr.length ]);
        noFill(); 
        strokeWeight(1);        
        Iterator itrVec = vecList.iterator(); 
        beginShape();
          while( itrVec.hasNext() ) 
          { 
            p = (PVector) itrVec.next(); 
            
            context.convertRealWorldToProjective(p,p2d);
            vertex(p2d.x,p2d.y);
          }
        endShape();   
  
        stroke(userClr[ (handId - 1) % userClr.length ]);
        strokeWeight(4);
        p = vecList.get(0);
        context.convertRealWorldToProjective(p,p2d);
        point(p2d.x,p2d.y);
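        // scale the projected hand position (640x480 depth image) to the 0-127 range sent to the servos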
        float handX = map(p2d.x, 0, 640, 0, 127);
        float handY = map(p2d.y, 0, 480, 0, 127);
        X = "GET /services/servoX/moveTo/" + int(handX) + " HTTP/1.0\r\n";  
        Y = "GET /services/servoY/moveTo/" + int(handY) + " HTTP/1.0\r\n";
        Client cx = new Client(this, host, 7777);
        cx.write(X); 
        cx.write("\r\n");
        Client cy = new Client(this, host, 7777);
        cy.write(Y); 
        cy.write("\r\n");
    }        
  }
}
 
 
// -----------------------------------------------------------------
// hand events
 
void onNewHand(SimpleOpenNI curContext,int handId,PVector pos)
{
  println("onNewHand - handId: " + handId + ", pos: " + pos);
 
  ArrayList<PVector> vecList = new ArrayList<PVector>();
  vecList.add(pos);
  
  handPathList.put(handId,vecList);
}
 
void onTrackedHand(SimpleOpenNI curContext,int handId,PVector pos)
{
  //println("onTrackedHand - handId: " + handId + ", pos: " + pos );
  
  ArrayList<PVector> vecList = handPathList.get(handId);
  if(vecList != null)
  {
    vecList.add(0,pos);
    if(vecList.size() >= handVecListSize)
      // remove the last point 
      vecList.remove(vecList.size()-1); 
  }  
}
 
void onLostHand(SimpleOpenNI curContext,int handId)
{
  println("onLostHand - handId: " + handId);
  handPathList.remove(handId);
}
 
// -----------------------------------------------------------------
// gesture events
 
void onCompletedGesture(SimpleOpenNI curContext,int gestureType, PVector pos)
{
  println("onCompletedGesture - gestureType: " + gestureType + ", pos: " + pos);
  
  int handId = context.startTrackingHand(pos);
  println("hand stracked: " + handId);
}
 
// -----------------------------------------------------------------
// Keyboard event
void keyPressed()
{
 
  switch(key)
  {
  case ' ':
    context.setMirror(!context.mirror());
    break;
  case '1':
    context.setMirror(true);
    break;
  case '2':
    context.setMirror(false);
    break;
  }
}

 

GroG

11 years ago

Nice video, Ale!

I was really surprised at the speed; I thought the communication over the REST API would be slow.

When the system pauses, is it because the Kinect has lost track?

Excited to see you grab and move a block without touching anything. You'll need a gripper, though...
Great work!