Mouse Control with Kinect

Demo video
Video info

  • 
    // Control Mouse with Kinect
    // processing.org, openkinect.org
    // DekWilde simple implementation with Robot() class
    // www.dekwilde.tk
    // Daniel Shiffman
    // Tracking the average location beyond a given depth threshold
    // Thanks to Dan O'Sullivan
    // http://www.shiffman.net
    // https://github.com/shiffman/libfreenect/tree/master/wrappers/java/processing
    
    import java.awt.AWTException;
    import java.awt.Robot;
    
    import org.openkinect.*;
    import org.openkinect.processing.*;
    
    // Showing how we can farm all the kinect stuff out to a separate class
    KinectTracker tracker;
    // Kinect Library object
    Kinect kinect;
    
    float deg = 15; // Start at 15 degrees
    
    // AWT Robot used to move the OS mouse cursor; initialized in setup()
    Robot robby;
    // Target screen coordinates for the mouse cursor, updated each frame in draw()
    int xx = 0, yy = 0;
    // Physical screen resolution the sketch maps tracker coordinates onto
    int stageWidth = 1280;
    int stageHeight = 800;
    // Pixel ratio screen/window (stageWidth/width, stageHeight/height); set in setup()
    int prW, prH;
    // NOTE(review): 5/3 is integer division and evaluates to 1, not ~1.67.
    // If a fractional scale was intended this should be a float (5f/3);
    // changing it would also require casting in draw() — confirm intent.
    int stageScale = 5/3;
    
    // Sketch initialization: window, screen-mapping ratios, mouse Robot,
    // and the Kinect + tracker objects.
    void setup() {
      size(640,400);
    
      // Ratio between the physical screen and this window, used to map
      // tracked positions to OS mouse coordinates.
      prW = stageWidth/width;
      prH = stageHeight/height;
    
      // The Robot drives the OS mouse pointer; bail out if the platform
      // does not support it.
      try {
        robby = new Robot();
      } catch (AWTException e) {
        println("Robot class not supported by your system!");
        exit();
      }
    
      // Kinect must exist before the tracker, which starts it in its constructor.
      kinect = new Kinect(this);
      tracker = new KinectTracker();
      kinect.tilt(deg);
    }
    
    // Per-frame loop: run depth tracking, draw the raw and smoothed blob
    // locations, move the OS mouse to the mapped screen position, and show
    // the current threshold/framerate info.
    void draw() {
      background(255);
    
      // Run the tracking analysis
      tracker.track();
      // Show the image
      tracker.display();
    
      // Let's draw the raw location
      PVector v1 = tracker.getPos();
      fill(50,100,250,200);
      noStroke();
      ellipse(v1.x,v1.y,20,20);
    
      // Let's draw the "lerped" location
      PVector v2 = tracker.getLerpedPos();
      fill(100,250,50,200);
      noStroke();
      ellipse(v2.x,v2.y,20,20);  
    
      // Map the smoothed tracker position to physical screen coordinates
      // and move the mouse there. (stageScale is currently 1 — see its
      // declaration note about integer division.)
      xx = ((int(v2.x)*stageScale) * prW);
      yy = ((int(v2.y)*stageScale) * prH);    
      robby.mouseMove(xx, yy);
    
      // Display some info
      // Fix: the text was drawn at y=500, below the 400-pixel-tall window
      // (size(640,400)), so it was never visible. Draw just above the
      // bottom edge instead.
      int t = tracker.getThreshold();
      fill(0);
      text("threshold: " + t + "    " +  "framerate: " + (int)frameRate + "    " + "UP increase threshold, DOWN decrease threshold",10,height-10);
    }
    
    // Keyboard controls: UP/DOWN adjust the depth threshold, LEFT/RIGHT
    // adjust the Kinect tilt motor (clamped to the hardware's 0-30 degrees).
    void keyPressed() {
      int t = tracker.getThreshold();
      if (key == CODED) {
        if (keyCode == UP) {
          tracker.setThreshold(t + 5);
        }
        else if (keyCode == DOWN) {
          tracker.setThreshold(t - 5);
        }
        // Fix: tilt handling was a separate `if` and the constrain/tilt call
        // ran unconditionally, so every coded key (including UP/DOWN) sent a
        // redundant tilt command to the motor. Only command the motor when
        // the tilt actually changes.
        else if (keyCode == LEFT) {
          deg = constrain(deg + 1, 0, 30);
          kinect.tilt(deg);
        }
        else if (keyCode == RIGHT) {
          deg = constrain(deg - 1, 0, 30);
          kinect.tilt(deg);
        }
      }
    }
    
    // Sketch shutdown hook: stop the Kinect before the applet stops.
    void stop() {
      tracker.quit();
      super.stop();
    }
    
    
    
    // Wraps all Kinect depth-tracking logic: finds the average (x, y) of all
    // pixels nearer than a depth threshold, smooths it over time, and renders
    // a depth image with the in-threshold pixels highlighted in red.
    class KinectTracker {
    
      // Size of kinect depth image (fixed by the hardware)
      int kw = 640;
      int kh = 480;
      // Raw-depth cutoff: pixels with rawDepth below this count as "tracked"
      int threshold = 745;
    
      // Raw location (average of all in-threshold pixels, mirrored)
      PVector loc;
    
      // Interpolated (smoothed) location
      PVector lerpedLoc;
    
      // Depth data (raw integer depth per pixel, row-major kw*kh)
      int[] depth;
    
      // Image rebuilt each frame: depth image with tracked pixels in red
      PImage display;
    
      KinectTracker() {
        // Starts the device; the outer sketch must have created `kinect` first.
        kinect.start();
        kinect.enableDepth(true);
    
        // We could skip processing the grayscale image for efficiency
        // but this example is just demonstrating everything
        kinect.processDepthImage(true);
    
        display = createImage(kw,kh,PConstants.RGB);
    
        loc = new PVector(0,0);
        lerpedLoc = new PVector(0,0);
      }
    
      // Recompute the tracked location from the latest raw depth frame.
      void track() {
    
        // Get the raw depth as array of integers
        depth = kinect.getRawDepth();
    
        // Being overly cautious here
        if (depth == null) return;
    
        float sumX = 0;
        float sumY = 0;
        float count = 0;
    
        for(int x = 0; x < kw; x++) {
          for(int y = 0; y < kh; y++) {
            // Mirroring the image
            int offset = kw-x-1+y*kw;
            // Grabbing the raw depth
            int rawDepth = depth[offset];
    
            // Testing against threshold
            if (rawDepth < threshold) {
              sumX += x;
              sumY += y;
              count++;
            }
          }
        }
        // As long as we found something; otherwise keep the previous location
        if (count != 0) {
          loc = new PVector(sumX/count,sumY/count);
        }
    
        // Interpolating the location, doing it arbitrarily for now
        lerpedLoc.x = PApplet.lerp(lerpedLoc.x, loc.x, 0.3f);
        lerpedLoc.y = PApplet.lerp(lerpedLoc.y, loc.y, 0.3f);
      }
    
      // Smoothed position; suitable for driving the mouse without jitter.
      PVector getLerpedPos() {
        return lerpedLoc;
      }
    
      // Raw (unsmoothed) position from the most recent frame.
      PVector getPos() {
        return loc;
      }
    
      // Rebuild and draw the visualization image: in-threshold pixels red,
      // everything else copied from the grayscale depth image.
      void display() {
        PImage img = kinect.getDepthImage();
    
        // Being overly cautious here
        if (depth == null || img == null) return;
    
        // Fix: Processing requires loadPixels() before reading a PImage's
        // pixels[] array; img.pixels was read below without it.
        img.loadPixels();
    
        // Going to rewrite the depth image to show which pixels are in threshold
        // A lot of this is redundant, but this is just for demonstration purposes
        display.loadPixels();
        for(int x = 0; x < kw; x++) {
          for(int y = 0; y < kh; y++) {
            // mirroring image (same offset as track(), so red pixels line up)
            int offset = kw-x-1+y*kw;
            // Raw depth
            int rawDepth = depth[offset];
    
            int pix = x+y*display.width;
            if (rawDepth < threshold) {
              // A red color instead
              display.pixels[pix] = color(150,50,50);
            } 
            else {
              display.pixels[pix] = img.pixels[offset];
            }
          }
        }
        display.updatePixels();
    
        // Draw the image
        image(display,0,0);
      }
    
      // Shut down the Kinect device.
      void quit() {
        kinect.quit();
      }
    
      int getThreshold() {
        return threshold;
      }
    
      void setThreshold(int t) {
        threshold =  t;
      }
    }