HandDetection project in Java.

 

After I try running it in NetBeans, this is the output:
 
run:
Starting OpenCV...
No devices found
BUILD SUCCESSFUL (total time: 1 second)
 
 
What should I do in order for this to work?
Please help me, GroG.
 
 
 

 

 

 FingerName.java

package Handdetector;

 
// FingerName.java
// Andrew Davison, ad@fivedots.coe.psu.ac.th, November 2012
 
/* Names of the fingers, ordered counter-clockwise for the left hand
 
*/
 
 
public enum FingerName {
  LITTLE, RING, MIDDLE, INDEX, THUMB,
  UNKNOWN;


  /* Return the finger that follows this one, wrapping around from the
     last constant back to the first. */
  public FingerName getNext()
  {
    FingerName[] all = values();
    return all[(ordinal() + 1) % all.length];
  }  // end of getNext()


  /* Return the finger that precedes this one, wrapping around from the
     first constant back to the last. */
  public FingerName getPrev()
  {
    FingerName[] all = values();
    return all[(ordinal() + all.length - 1) % all.length];
  }  // end of getPrev()

}  // end of FingerName enum
 
 
 
HandDetector.java (note: the code pasted below is actually a duplicate of FingerName.java above — the real HandDetector source appears to be missing from this post)
 
package Handdetector;
 
// FingerName.java
// Andrew Davison, ad@fivedots.coe.psu.ac.th, November 2012
 
/* Names of the fingers, ordered counter-clockwise for the left hand
 
*/
 
 
// NOTE(review): this block is a byte-for-byte duplicate of the FingerName
// enum pasted earlier in the post, even though the heading above labels it
// "HandDetector" -- the actual HandDetector.java source seems to be missing
// from the paste and should be re-posted.
public enum FingerName {
   LITTLE, RING, MIDDLE, INDEX, THUMB, 
   UNKNOWN;
 
 
  // Return the next finger counter-clockwise, wrapping from the last
  // constant (UNKNOWN) back to the first (LITTLE).
  public FingerName getNext() 
  {  
    int nextIdx = ordinal()+1;
    if (nextIdx == (values().length))
      nextIdx = 0;
    return values()[nextIdx];  
  }  // end of getNext()
 
 
  // Return the previous finger, wrapping from the first constant
  // (LITTLE) back to the last (UNKNOWN).
  public FingerName getPrev() 
  {  
    int prevIdx = ordinal()-1;
    if (prevIdx < 0)
      prevIdx = values().length-1;
    return values()[prevIdx];  
  }  // end of getPrev()
 
}  // end of FingerName enum
 
HandPanel.java
 
package Handdetector;
 
 
// HandPanel.java
// Andrew Davison, November 2012, ad@fivedots.psu.ac.th
 
/* This panel repeatedly uses JMFCapture to snap a picture and draw it onto
   the panel. OpenCV is used, via the HandDetector class, to detect
   the user's gloved hand and label the fingers.
 
*/
 
import java.awt.*;
import javax.swing.*;
import java.awt.image.*;
import java.text.DecimalFormat;
import java.io.*;
 
import com.googlecode.javacv.cpp.*;
import com.googlecode.javacpp.Loader;
import static com.googlecode.javacv.cpp.opencv_objdetect.*;
 
 
 
public class HandPanel extends JPanel implements Runnable
{
  private static final Dimension PANEL_SIZE = new Dimension(200, 50);  
          // dimensions of panel initially;  later set to video's frame size
 
  private static final int DELAY = 200;  // time (ms) between redraws of the panel
 
 
 
 
  private JFrame top;
  private BufferedImage image = null;  // current webcam snap
  private JMFCapture camera; 
  private volatile boolean isRunning;
  
  // used for the average ms snap time information
  private int imageCount = 0;
  private long totalTime = 0;
  private DecimalFormat df;
  private Font msgFont;
 
  private HandDetector detector = null;   // for detecting hand and fingers
 
 
  public HandPanel(JFrame top)
  {
    this.top = top;
    setBackground(Color.white);
    setPreferredSize(PANEL_SIZE);
 
    df = new DecimalFormat("0.#");  // 1 dp
    msgFont = new Font("SansSerif", Font.BOLD, 18);
 
    startOpenCV();
 
    new Thread(this).start();   // start updating the panel's image
  } // end of HandPanel()
 
 
  private void startOpenCV()
  {
    System.out.println("Starting OpenCV...");
    try {
      // preload the opencv_objdetect module to work around a known bug
      Loader.load(opencv_objdetect.class);
    }
    catch (Exception e)
    {  System.out.println(e);
       System.exit(0);
    }
  }  // end of startOpenCV()
 
 
 
  public void run()
  /* display the current webcam image every DELAY ms.
     Find the coloured rectangles in the image using HandDetector
     objects.
     The time statistics gathered here include the time taken to
     detect movement.
  */
  { initDisplay();
 
    BufferedImage im;
    long duration;
    isRunning = true;
    while (isRunning) {
   long startTime = System.currentTimeMillis();
 
      im = camera.getImage();  // take a snap
      if (im == null) {
        System.out.println("Problem loading image " + (imageCount+1));
        duration = System.currentTimeMillis() - startTime;
      }
      else {
        image = im;   // only update image if it contains something
        imageCount++;
 
        detector.update(im);
 
        duration = System.currentTimeMillis() - startTime;
        totalTime += duration;
        repaint();
      }
 
      if (duration < DELAY) {
        try {
          Thread.sleep(DELAY-duration);  // wait until DELAY time has passed
        } 
        catch (Exception ex) {}
      }
    }
 
    camera.close();    // close down the camera
  }  // end of run()
 
 
  private void initDisplay()
  /* initialize the camera, and use its first picture to initialize the
     panel size and the hand detector. */
  {
    camera = new JMFCapture();
 
    BufferedImage im = camera.getImage();
    if (im == null) {
      System.out.println("Could not grab webcam image");
      System.exit(1);
    }
 
    // update panel and window sizes to fit webcam's frame size
    int imWidth = im.getWidth();
    int imHeight = im.getHeight();
 
    setPreferredSize(new Dimension(imWidth, imHeight));
    top.pack();   // resize and center JFrame
    top.setLocationRelativeTo(null);
 
    detector = new HandDetector("gloveHSV.txt", imWidth, imHeight);
              // include the HSV color info about the user's gloved hand
  }  // end of initDisplay()
 
 
 
 
  public void paintComponent(Graphics g)
  /* Draw the image, the detected hand and finger info, and the 
     average ms snap time at the bottom left of the panel. 
  */
  { 
    super.paintComponent(g);
    Graphics2D g2d = (Graphics2D) g;
 
    if (image != null)
      g2d.drawImage(image, 0, 0, this);
 
    if (detector != null)
      detector.draw(g2d);    // draws detected hand and finger info
 
    writeStats(g2d);
  } // end of paintComponent()
 
 
 
 
 
  private void writeStats(Graphics2D g2d)
  /* write statistics in bottom-left corner, or
     "Loading" at start time */
  {
g2d.setColor(Color.BLUE);
    g2d.setFont(msgFont);
    int panelHeight = getHeight();
    if (imageCount > 0) {
      double avgGrabTime = (double) totalTime / imageCount;
   g2d.drawString("Pic " + imageCount + "  " +
                   df.format(avgGrabTime) + " ms", 
                   5, panelHeight-10);  // bottom left
    }
    else  // no image yet
   g2d.drawString("Loading...", 5, panelHeight-10);
  }  // end of writeStats()
 
 
 
  public void closeDown()
  /* Terminate run() and wait for the camera to be closed.
     This stops the application from exiting until everything
     has finished. */
  { 
    isRunning = false;
    while (!camera.isClosed()) {
      try {
        Thread.sleep(DELAY);
      } 
      catch (Exception ex) {}
    }
  } // end of closeDown()
 
 
} // end of HandPanel class
 
Handy.java
 
package Handdetector;
 
 
// Handy.java
// Andrew Davison, November 2012, ad@fivedots.psu.ac.th
 
/* Detect the user's gloved hand and fingers, drawing information
   on top of a webcam image.
 
   Usage:
   > run Handy
*/
 
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import java.io.*;
 
 
public class Handy extends JFrame 
{
  // GUI components
  private HandPanel handPanel;
 
 
  public Handy()
  {
    super("Hand Detector");
 
    Container c = getContentPane();
    c.setLayout( new BorderLayout() );   
 
    handPanel = new HandPanel(this); // the webcam pictures and drums appear here
    c.add( handPanel, BorderLayout.CENTER);
 
    addWindowListener( new WindowAdapter() {
      public void windowClosing(WindowEvent e)
      { handPanel.closeDown();    // stop snapping pics, and any drum playing
        System.exit(0);
      }
    });
 
    setResizable(false);
    pack();  
    setVisible(true);
  } // end of Handy()
 
 
  // -------------------------------------------------------
 
  public static void main( String args[] )
  {  new Handy();  }
 
} // end of Handy class
 
 
JMFCapture.java
 
package Handdetector;
 
import java.awt.*;
import java.awt.image.*;
import java.io.*;
import java.util.*;
import java.awt.event.*;
 
import javax.media.*;
import javax.media.control.*;
import javax.media.protocol.*;
import javax.media.format.*;
import javax.media.util.*;
 
 
 
/* Grabs webcam frames through the Java Media Framework (JMF).
   The constructor creates a realized Player for the capture device,
   switches it to its largest RGB video format, starts it, and waits
   until frames can be converted to images; getImage() then returns
   one BufferedImage per call until close() is invoked.
   getImage() and close() are synchronized so the player cannot be
   closed while a frame is being snapped. */
public class JMFCapture implements ControllerListener
{
  // I obtained this information from JMF Registry, under its capture devices tab
  private static final String CAP_DEVICE = "vfw:Microsoft WDM Image Capture (Win32):0";
       // common name in WinXP
  // private static final String CAP_DEVICE = "vfw:Logitech USB Video Camera:0";

  private static final String CAP_LOCATOR = "vfw://0";


  // used while waiting for the BufferToImage object to be initialized
  private static final int MAX_TRIES = 7;
  private static final int TRY_PERIOD = 1000;   // ms

  private VideoFormat largestVf = null;    // video format with largest frame size
  private Dimension frameSize = null;

  private Player p = null;
  private FrameGrabbingControl fg;          // used to snap individual frames
  private BufferToImage bufferToImage = null;   // converts a grabbed Buffer to an Image
  private boolean closedDevice;


  // used for waiting until the player has started
  private Object waitSync = new Object();
  private boolean stateTransitionOK = true;


  public JMFCapture()
  /* Locate the capture device, create and start a player for it, and
     wait until grabbed frames can be converted to images. Exits the
     application on any failure. */
  {
    closedDevice = true;   // since device is not available yet

    // link player to capture device
    try {
      MediaLocator ml = findMedia(CAP_DEVICE);
   p = Manager.createRealizedPlayer(ml);
      System.out.println("Created player");
    }
    catch (Exception e) {
      System.out.println("Failed to create player");
      System.exit(0);
    }

    setToLargestVideoFrame(p);
    p.addControllerListener(this);

    // create the frame grabber
    fg =  (FrameGrabbingControl) p.getControl("javax.media.control.FrameGrabbingControl");
    if (fg == null) {
      System.out.println("Frame grabber could not be created");
      System.exit(0);
    }

    // wait until the player has started
    System.out.println("Starting the player...");
    p.start();
    if (!waitForStart()) {
      System.err.println("Failed to start the player.");
      System.exit(0);
    }

    waitForBufferToImage();
  }  // end of JMFCapture()



  private MediaLocator findMedia(String requireDeviceName)
  // return a media locator for the specified capture device
  {
    // null argument: list every capture device JMF knows about
    Vector devices = CaptureDeviceManager.getDeviceList(null);
    if (devices == null) {
      System.out.println("Devices list is null");
      System.exit(0);
    }
    if (devices.size() == 0) {
      System.out.println("No devices found");
      System.exit(0);
    }

    // linear scan for a device whose name matches exactly
    CaptureDeviceInfo devInfo = null;
    int idx;
    for (idx = 0; idx < devices.size(); idx++) {
      devInfo = (CaptureDeviceInfo) devices.elementAt(idx);
      // System.out.println("  " + idx + ". " + devInfo );
      String devName = devInfo.getName();
      if (devName.equals(requireDeviceName))   // found device
        break;
    }

    MediaLocator ml = null;
    if (idx == devices.size()) {   // no device found with that name
      System.out.println("Device " + requireDeviceName + " not found");
      System.out.println("Using default media locator: " + CAP_LOCATOR);
      ml = new MediaLocator(CAP_LOCATOR);
    }
    else {   // found a suitable device
      System.out.println("Found device: " + requireDeviceName);
      storeLargestVf(devInfo);
      ml = devInfo.getLocator();   // this method may not work
    }
    return ml;
  }  // end of findMedia()


  private void storeLargestVf(CaptureDeviceInfo devInfo)
  // store largest frame size video format for this device in largestVf
  // and must be RGB (Nov 2011 change)
  {
    Format[] forms = devInfo.getFormats();

    largestVf = null;
    double maxSize = -1;
    for (int i=0; i < forms.length; i++) {
      // System.out.println("  " + i + ". " + forms[i]);
      if (forms[i] instanceof VideoFormat) {
        VideoFormat vf = (VideoFormat) forms[i];
        Dimension dim = vf.getSize();
        // System.out.println("    frame size: " + dim + "\n");
        String encoding = vf.getEncoding();
        // System.out.println("    encoding: " + encoding + "\n");
        // rank formats by pixel area; only RGB encodings are considered
        double size = dim.getWidth() * dim.getHeight();
        if ((size > maxSize) && (encoding.compareToIgnoreCase("RGB") == 0)) {
          largestVf = vf;
          maxSize = size;
        }
      }
    }

    if (largestVf == null)
      System.out.println("No RGB video format found");
    else
      System.out.println("Largest RGB format: " + largestVf);
  }  // end of storeLargestVf()


  public Dimension getFrameSize()
  // return the player's video frame size, or null if it cannot be queried
  {
    if (p == null)
      return null;

    FormatControl formatControl = 
         (FormatControl) p.getControl("javax.media.control.FormatControl"); 
    if (formatControl == null)
      return null;

    VideoFormat vf = (VideoFormat) formatControl.getFormat(); 
    if (vf == null)
      return null;

    return vf.getSize();
  }  // end of getFrameSize()



  private void setToLargestVideoFrame(Player player)
  // change the player's video format to the one with the largest frame size
  // (largestVf, chosen earlier by storeLargestVf(); may be null)
  {
    FormatControl formatControl =
        (FormatControl) player.getControl("javax.media.control.FormatControl"); 
    if (formatControl == null) {
      System.out.println("No format controller found");
      return;
    }

    Format format = formatControl.setFormat(largestVf);
    if (format == null) {
      System.out.println("Could not change video format");
      return;
    }
    System.out.println("Video format changed to largest frame size");
  }  // end of setToLargestVideoFrame()



  private boolean waitForStart()
  // wait for the player to enter its Started state;
  // woken by controllerUpdate() via waitSync; returns false if a
  // ResourceUnavailableEvent arrived instead of a StartEvent
  { synchronized (waitSync) {
      try {
        while (p.getState() != Controller.Started && stateTransitionOK)
          waitSync.wait();
      }
      catch (Exception e) {}
    }
    return stateTransitionOK;
  } // end of waitForStart()


  public void controllerUpdate(ControllerEvent evt)
  // respond to events; ControllerListener callback that wakes up
  // waitForStart() when the player starts or fails to get a resource
  {
    if (evt instanceof StartEvent) {   // the player has started
      synchronized (waitSync) {
        stateTransitionOK = true;
        waitSync.notifyAll();
      }
    }
    else if (evt instanceof ResourceUnavailableEvent) {  
      synchronized (waitSync) {  // there was a problem getting a player resource
        stateTransitionOK = false;
        waitSync.notifyAll();
      }
    }
  } // end of controllerUpdate()



  private void waitForBufferToImage()
  /* Wait for the BufferToImage object to be initialized.
     May take several seconds to initialize this object, 
     so this method makes up to MAX_TRIES attempts.
     Exits the application if all attempts fail.
  */
  {
    int tryCount = MAX_TRIES;
    System.out.println("Initializing BufferToImage...");
    while (tryCount > 0) {
      if (hasBufferToImage())   // initialization succeeded
        break;
      try {   // initialization failed so wait a while and try again
        System.out.println("Waiting...");
   Thread.sleep(TRY_PERIOD);
      }
      catch (InterruptedException e)
      {  System.out.println(e);  }
      tryCount--;
    }

    if (tryCount == 0) {
      System.out.println("Giving Up");
      System.exit(0);
    }

    closedDevice = false;   // device now available
  }  // end of waitForBufferToImage()


  private boolean hasBufferToImage()
  /*  The BufferToImage object is initialized here, so that when 
      getImage() is called later, the snap can be quickly changed to 
      an image.

      The object is initialized by taking a snap, which
      may be an actual picture or be 'empty'.

      An 'empty' snap is a Buffer object with no video information,
      as detected by examining its component VideoFormat data. 

      An 'empty' snap is caused by the delay in the player, which 
      although in its started state may still take several seconds to 
      start capturing.

      The dimensions of the snap are used to calculate the scale
      factor from the original image size to size*size.
  */
  {
    Buffer buf = fg.grabFrame();     // take a snap
    if (buf == null) {
      System.out.println("No grabbed frame");
      return false;
    }
    
    // there is a buffer, but check if it's empty or not
    VideoFormat vf = (VideoFormat) buf.getFormat();
    if (vf == null) {
      System.out.println("No video format");
      return false;
    }

    System.out.println("Video format: " + vf);
    // initialize bufferToImage with the video format info.
    bufferToImage = new BufferToImage(vf);
    return true;
  }  // end of hasBufferToImage()


  public int getFrameRate()
  // NOTE(review): hard-coded nominal rate; the device is never queried
  {  return 30; }


  synchronized public BufferedImage getImage()
  /* Capture an image/frame.
     The frame is converted from Buffer object to Image,
     and finally to BufferedImage. 
     Returns null if the device is closed or the grab/convert fails.
  */
  {
    if (closedDevice)
      return null;

    // grab the current frame as a buffer object
    Buffer buf = fg.grabFrame();
    if (buf == null) {
      System.out.println("No grabbed buffer");
      return null;
    }
    
    // convert buffer to image
    Image im = bufferToImage.createImage(buf);
    if (im == null) {
      System.out.println("No grabbed image");
      return null;
    }

    // NOTE(review): assumes BufferToImage.createImage() always yields a
    // BufferedImage here -- would throw ClassCastException otherwise
    return (BufferedImage) im;
  }  // end of getImage()


  synchronized public void close()
  /* close() and getImage() are synchronized so that it's not
     possible to close down the player while a frame is being
     snapped. */
  {  p.close();  
     closedDevice = true;
  } 

  public boolean isClosed()
  // true until the constructor finishes, and again after close()
  {  return closedDevice;  }



} // end of JMFCapture class
 
 
 
 
 
 
GroG's picture

Start debug in netbeans - I

Start debug in netbeans - I can see that this uses JavaCV.
I assume you have JavaCV as a referenced library — otherwise it would not compile. Additionally, you need the correct version of OpenCV installed on your system and available through the PATH variable, or you need to supply the JVM argument -Djava.library.path=<location of the OpenCV DLLs>.

GroG's picture

Is your camera plugged in?   

Is your camera plugged in?   

Dedy_hidayat's picture

yes my camera always

Yes, my camera is always on,
but no device is found when I run it in NetBeans.