@dingledow
Created April 25, 2013 10:07
Tracks body movements with the Kinect and maps them to MIDI. View demo here: https://vimeo.com/64740273
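// -----------------------------------------------------------------
// Serial: forward the tracked hand depth to an external device over the serial port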
Serial port; // The serial port
void portSetup(){
println(Serial.list()); // List COM-ports
//select second com-port from the list
port = new Serial(this, Serial.list()[1], 19200);
}
void portUpdate(float Pos){
int posx = (int)constrain(map(Pos,500,800,0,100),0,100); // map the hand depth (~500-800) to 0-100 and clamp before writing a single byte
port.write(posx);
}
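// -----------------------------------------------------------------
// MIDI (themidibus): send the tracked hand position as controller-change messages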
MidiBus myBus; // The MidiBus
void MidiSetup(){
MidiBus.list(); // list the available MIDI input and output devices
myBus = new MidiBus(this, -1, 0); // no MIDI input (-1), first available MIDI output (0)
}
void SendMidi(){
if(MidiHand.value()==1){
int mhandX = (int)map(handX.value(),-360,360,0,127);
myBus.sendControllerChange(1,(int) hx.value(),mhandX); // Send a controllerChange
int mhandY = (int)map(handY.value(),-300,300,0,127);
myBus.sendControllerChange(1, (int)hy.value(),mhandY); // Send a controllerChange
int mhandZ = (int)map(handZ.value(),498,1500,0,127);
myBus.sendControllerChange(1, (int)hz.value(),mhandZ); // Send a controllerChange
}
}
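// -----------------------------------------------------------------
// OpenCV (hypermedia.video): face detection on the Kinect RGB image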
OpenCV opencv;
// contrast/brightness values
int contrast_value = 0;
int brightness_value = 0;
void OpenCVsetup(){
opencv = new OpenCV( this );
}
void OpenCVdraw() {
opencv.allocate(rgbImage.width,rgbImage.height); // note: allocating and loading the cascade every frame is wasteful, but keeps the demo simple
opencv.cascade( OpenCV.CASCADE_FRONTALFACE_ALT ); // load the frontal-face Haar cascade
opencv.copy(rgbImage);
opencv.convert( GRAY );
opencv.contrast( contrast_value );
opencv.brightness( brightness_value );
// run the face detection
Rectangle[] faces = opencv.detect( 1.2, 2, OpenCV.HAAR_DO_CANNY_PRUNING, 40, 40 );
image(opencv.image(),50,420,rgbImage.width/3,rgbImage.height/3);
pushStyle();
noFill();
strokeWeight(3);
stroke(255,0,0);
for( int i=0; i<faces.length; i++ ) {
float X = map(faces[i].x,0,rgbImage.width,50,50+rgbImage.width/3);
float Y = map(faces[i].y,0,rgbImage.height,420,420+rgbImage.height/3);
rect( X,Y, faces[i].width/3, faces[i].height/3);
}
popStyle();
}
void OpenCVstop(){
opencv.stop();
super.stop();
}
/* --------------------------------------------------------------------------
* SimpleOpenNI UserScene3d Test
* --------------------------------------------------------------------------
* Processing Wrapper for the OpenNI/Kinect library
* http://code.google.com/p/simple-openni
* --------------------------------------------------------------------------
* prog: Max Rheiner / Interaction Design / zhdk / http://iad.zhdk.ch/
* date: 02/16/2011 (m/d/y)
* ----------------------------------------------------------------------------
* this demo currently handles only one user; multi-user support will be added later
* ----------------------------------------------------------------------------
*/
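// SimpleOpenNI globals: camera context, scene transform and per-user center-of-mass arrays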
SimpleOpenNI context;
float zoomF =0.5f;
float rotX = radians(180); // by default rotate the whole scene 180deg around the x-axis,
// the data from openni comes upside down
float rotY = radians(0);
color[] userColors = { color(255,0,0), color(0,255,0), color(0,0,255), color(255,255,0), color(255,0,255), color(0,255,255) };
color[] userCoMColors = { color(255,100,100), color(100,255,100), color(100,100,255), color(255,255,100), color(255,100,255), color(100,255,255) };
UserManager userManager;
float testZ[]; // revised!
float testX[]; // revised!
float testY[]; // revised!
int transX;
int transY;
boolean Mir = true;
int CompareDistance=5000;
PImage rgbImage;
void OpenNIsetup(){
context = new SimpleOpenNI(this);
// set mirroring (Mir defaults to true)
context.setMirror(Mir);
// enable depthMap generation
if(context.enableDepth() == false)
{
println("Can't open the depthMap, maybe the camera is not connected!");
exit();
return;
}
if(context.enableRGB() == false)
{
println("Can't open the rgbMap, maybe the camera is not connected or there is no rgbSensor!");
exit();
return;
}
userManager = new UserManager(context);
// enable skeleton generation for all joints
context.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL,userManager);
context.alternativeViewPointDepthToImage();
// enable the scene, to get the floor
context.enableScene();
// revised for enabling RGB color (enableRGB() is already called with error checking above, the repeat is harmless)
context.enableRGB();
// enable hands + gesture generation
context.enableGesture();
context.enableHands();
// add focus gestures / on the Mac only RaiseHand seems to be recognized reliably - possibly a CPU performance issue
context.addGesture("Wave");
context.addGesture("Click");
context.addGesture("RaiseHand");
stroke(255,255,255);
smooth();
//perspective(radians(45), float(width)/float(height), 10,150000);
transX = width/2;
transY = height/2;
}
void OpenNIdraw(){
// update the cam
context.update();
context.setMirror(Mir);
pushMatrix();
// set the scene pos
translate(transX, transY, 0);
rotateX(rotX);
rotateY(rotY);
scale(zoomF);
rgbImage = context.rgbImage(); // revised for colored pointcloud
color pixelColor; // revised for colored pointcloud
int[] depthMap = context.depthMap();
int steps = 3; // to speed up the drawing, draw every third point
int index;
PVector realWorldPoint=new PVector();
PVector realWorldPoint2=new PVector();
translate(0,0,-1000); // set the rotation center of the scene 1000 in front of the camera
int userCount = context.getNumberOfUsers();
int[] userMap = null;
if(userCount > 0)
{
userMap = context.getUsersPixels(SimpleOpenNI.USERS_ALL);
testZ = new float[userCount]; // revised!
testX = new float[userCount]; // revised!
testY = new float[userCount]; // revised!
}
for(int y=0;y < context.depthHeight();y+=steps)
{
for(int x=0;x < context.depthWidth();x+=steps)
{
index = x + y * context.depthWidth();
if(depthMap[index] > 0)
{
if(context.depthMapRealWorld()[index].z<CompareDistance){
realWorldPoint= context.depthMapRealWorld()[index];
// guard the neighbour lookup so it never reads past the end of the depth map
if(index+steps < depthMap.length) realWorldPoint2= context.depthMapRealWorld()[index+steps];
}
// get the realworld points
// check if there is a user
if(userMap != null && userMap[index] != 0)
{ // calc the user color
if(drawing.value()==0)
{
int colorIndex = userMap[index] % userColors.length;
stroke(userColors[colorIndex]);
}
else{
pixelColor = rgbImage.pixels[index];
pushStyle();
noStroke();
float grd = map(index,0,context.depthWidth()+context.depthWidth()*context.depthHeight(),255,0);
fill(pixelColor,grd);
pushMatrix();
translate(realWorldPoint.x,realWorldPoint.y,realWorldPoint.z);
ellipse(0,0,10,10);
popMatrix();
popStyle();
}
}
else{
// default color
stroke(50);
}
if(defaultDrawing.value()==0)
{
point(realWorldPoint.x,realWorldPoint.y,realWorldPoint.z);
}
else{
if(dist(realWorldPoint2.x,realWorldPoint2.y,realWorldPoint2.z,
realWorldPoint.x,realWorldPoint.y,realWorldPoint.z)<300){
// a two-vertex TRIANGLE_STRIP renders nothing, so connect neighbouring points with a line instead
line(realWorldPoint.x,realWorldPoint.y,realWorldPoint.z,
realWorldPoint2.x,realWorldPoint2.y,realWorldPoint2.z);
}
}
}
}
}
if(TrackCenter.value()==1)
{
// draw the center of mass
PVector pos = new PVector();
pushStyle();
strokeWeight(15);
for(int userId=1;userId <= userCount;userId++)
{
context.getCoM(userId,pos);
stroke(userCoMColors[userId % userCoMColors.length]);
point(pos.x,pos.y,pos.z);
testX[userId-1] = pos.x; // revised!
testY[userId-1] = pos.y; // revised!
testZ[userId-1] = pos.z; // revised!
}
popStyle();
}
if(TrackSkel.value()==1)
{
for(int userId=1;userId <= userCount;userId++)
{
if(context.isTrackingSkeleton(userId)) drawSkeleton(userId);
}
}
//if(userCount>0) println(testZ[0]); // revised!
if(CamFrustum.value()==1) context.drawCamFrustum();
// hand tracking toggle
if(TrackHand.getState()) HandTrack();
popMatrix();
}
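// -----------------------------------------------------------------
// NITE hand tracking: keep a short trail of hand positions and feed them to the GUI sliders and the serial output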
boolean handsTrackFlag = false;
PVector handVec = new PVector();
ArrayList handVecList = new ArrayList();
int handVecListSize = 30;
String lastGesture = "";
void HandTrack(){
// draw the tracked hand
if(handsTrackFlag)
{
pushStyle();
stroke(255,0,0,200);
noFill();
Iterator itr = handVecList.iterator();
beginShape();
while( itr.hasNext() )
{
PVector p = (PVector) itr.next();
vertex(p.x,p.y,p.z);
}
endShape();
stroke(255,0,0);
strokeWeight(14);
point(handVec.x,handVec.y,handVec.z);
handX.setValue(handVec.x);
handY.setValue(handVec.y);
handZ.setValue(handVec.z);
println("handPoint"+":"+ handVec.z);
portUpdate(handVec.z);
popStyle();
}else{
portUpdate(500); // no hand tracked: reset the serial output to its minimum
}
}
// -----------------------------------------------------------------
// hand events
void onCreateHands(int handId,PVector pos,float time)
{
println("onCreateHands - handId: " + handId + ", pos: " + pos + ", time:" + time);
handsTrackFlag = true;
handVec = pos;
handVecList.clear();
handVecList.add(pos);
}
void onUpdateHands(int handId,PVector pos,float time)
{
//println("onUpdateHandsCb - handId: " + handId + ", pos: " + pos + ", time:" + time);
handVec = pos;
handVecList.add(0,pos);
if(handVecList.size() >= handVecListSize)
{ // remove the last point
handVecList.remove(handVecList.size()-1);
}
}
void onDestroyHands(int handId,float time)
{
println("onDestroyHandsCb - handId: " + handId + ", time:" + time);
handsTrackFlag = false;
context.addGesture(lastGesture);
}
// -----------------------------------------------------------------
// gesture events
void onRecognizeGesture(String strGesture, PVector idPosition, PVector endPosition)
{
println("onRecognizeGesture - strGesture: " + strGesture + ", idPosition: " + idPosition + ", endPosition:" + endPosition);
lastGesture = strGesture;
context.removeGesture(strGesture);
context.startTrackingHands(endPosition);
}
// -----------------------------------------------------------------
// SimpleOpenNI user events
void onNewUser(int userId)
{
println("onNewUser - userId: " + userId);
}
void onLostUser(int userId)
{
println("onLostUser - userId: " + userId);
}
// -------------------------------------------------------------------
// draw the skeleton with the selected joints
void drawSkeleton(int userId)
{
strokeWeight(3);
// to get the 3d joint data
drawLimb(userId, SimpleOpenNI.SKEL_HEAD, SimpleOpenNI.SKEL_NECK);
drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_LEFT_SHOULDER);
drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_LEFT_ELBOW);
drawLimb(userId, SimpleOpenNI.SKEL_LEFT_ELBOW, SimpleOpenNI.SKEL_LEFT_HAND);
drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_RIGHT_SHOULDER);
drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_RIGHT_ELBOW);
drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_ELBOW, SimpleOpenNI.SKEL_RIGHT_HAND);
drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_LEFT_HIP);
drawLimb(userId, SimpleOpenNI.SKEL_LEFT_HIP, SimpleOpenNI.SKEL_LEFT_KNEE);
drawLimb(userId, SimpleOpenNI.SKEL_LEFT_KNEE, SimpleOpenNI.SKEL_LEFT_FOOT);
drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_RIGHT_HIP);
drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HIP, SimpleOpenNI.SKEL_RIGHT_KNEE);
drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_KNEE, SimpleOpenNI.SKEL_RIGHT_FOOT);
strokeWeight(1);
}
void drawLimb(int userId, int jointType1, int jointType2)
{
PVector jointPos1 = new PVector();
PVector jointPos2 = new PVector();
float confidence;
// draw the joint position
confidence = context.getJointPositionSkeleton(userId, jointType1, jointPos1);
confidence = context.getJointPositionSkeleton(userId, jointType2, jointPos2);
stroke(255, 0, 0, confidence * 200 + 55);
line(jointPos1.x, jointPos1.y, jointPos1.z,
jointPos2.x, jointPos2.y, jointPos2.z);
drawJointOrientation(userId, jointType1, jointPos1, 50);
}
void drawJointOrientation(int userId, int jointType, PVector pos, float length)
{
// draw the joint orientation
PMatrix3D orientation = new PMatrix3D();
float confidence = context.getJointOrientationSkeleton(userId, jointType, orientation);
if (confidence < 0.001f)
// nothing to draw, orientation data is useless
return;
pushMatrix();
translate(pos.x, pos.y, pos.z);
// set the local coordsys
applyMatrix(orientation);
// coordsys lines are 100mm long
// x - r
stroke(255, 0, 0, confidence * 200 + 55);
line(0, 0, 0,
length, 0, 0);
// y - g
stroke(0, 255, 0, confidence * 200 + 55);
line(0, 0, 0,
0, length, 0);
// z - b
stroke(0, 0, 255, confidence * 200 + 55);
line(0, 0, 0,
0, 0, length);
popMatrix();
}
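// -----------------------------------------------------------------
// controlP5 GUI: toggles, sliders and number boxes that drive the scene, face detection and the MIDI mapping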
ControlP5 controlP5;
PMatrix3D currCameraMatrix;
PGraphics3D g3;
Toggle GUI;
Toggle Mirror;
Toggle CamFrustum;
Toggle TrackSkel;
Toggle TrackHand;
Toggle TrackCenter;
RadioButton drawing;
RadioButton defaultDrawing;
Slider Zoom;
Slider RotateX;
Slider RotateY;
Slider TransX;
Slider TransY;
Slider CDistance;
Button resetPosition;
Toggle FaceDetection;
Slider handX,handY,handZ;
Toggle MidiHand;
Numberbox hx,hy,hz;
void UIsetup(){
g3 = (PGraphics3D)g;
controlP5 = new ControlP5(this);
controlP5.setColorForeground(color(255,0,0,170));
controlP5.setColorActive(color(255,0,0,170));
controlP5.setColorBackground(color(100,170));
controlP5.setColorLabel(color(255,0,0,170));
controlP5.setColorValue(color(255,0,0,170));
GUI = controlP5.addToggle("GUI",true,0,0,15,15);
Mirror = controlP5.addToggle("Mirror",true,50,30,15,15);
CamFrustum = controlP5.addToggle("CamFrustum",false,50,70,15,15);
TrackSkel = controlP5.addToggle("Track Skeleton",false,50,110,15,15);
TrackCenter = controlP5.addToggle("Track Center",true,50,150,15,15);
TrackHand = controlP5.addToggle("Track Hand",false,120,150,15,15);
drawing = controlP5.addRadioButton("drawing",50,190);
drawing.setColorForeground(color(120));
drawing.setColorLabel(color(255,0,0,170));
drawing.setItemsPerRow(5);
drawing.setSpacingColumn(70);
addToRadioButton(drawing,"User color",0);
addToRadioButton(drawing,"RGB color",1);
drawing.activate(0);
// drawing.setColorActive(color(255,0,0,170));
defaultDrawing = controlP5.addRadioButton("defaultDrawing",50,210);
defaultDrawing.setColorForeground(color(120));
defaultDrawing.setColorLabel(color(255,0,0,170));
defaultDrawing.setItemsPerRow(5);
defaultDrawing.setSpacingColumn(70);
addToRadioButton(defaultDrawing,"point cloud",0);
addToRadioButton(defaultDrawing,"line",1);
defaultDrawing.activate(0);
Zoom = controlP5.addSlider("Zoom",0.f,5.f,0.5f,50,240,10,100);
RotateX = controlP5.addSlider("Rotate X",-360,360,180,100,240,10,100);
RotateY = controlP5.addSlider("Rotate Y",-360,360,0,150,240,10,100);
TransX = controlP5.addSlider("Translate X",0,width,width/2,200,240,10,100);
TransY = controlP5.addSlider("Translate Y",0,height,height/2,260,240,10,100);
CDistance = controlP5.addSlider("Kinect Distance",500,5000,2000,320,240,10,100);
resetPosition = controlP5.addButton("reset",1,50,360,80,15);
resetPosition.setLabel("reset position");
FaceDetection = controlP5.addToggle("Face Detection",false,50,380,15,15);
handX = controlP5.addSlider("hand X",-360,360,0,450,40,10,100);
handY = controlP5.addSlider("habd Y",-300,300,0,520,40,10,100);
handZ = controlP5.addSlider("hand Z",498,1500,0,590,40,10,100);
MidiHand = controlP5.addToggle("Midi Hand Data",false,450,155,15,15);
hx = controlP5.addNumberbox("MidiNumber(X)",10,450,190,30,14);
hy = controlP5.addNumberbox("MidiNumber(y)",12,520,190,30,14);
hz = controlP5.addNumberbox("MidiNumber(z)",13,590,190,30,14);
hx.setMax(127); hy.setMax(127); hz.setMax(127);
hx.setMin(1); hy.setMin(1); hz.setMin(1);
controlP5.setAutoDraw(false);
}
void addToRadioButton(RadioButton theRadioButton, String theName, int theValue ) {
Toggle t = theRadioButton.addItem(theName,theValue); // addItem returns the Toggle that represents the new entry
t.captionLabel().style().movePadding(2,0,-1,2);
t.captionLabel().style().moveMargin(-2,0,0,-3);
t.captionLabel().style().backgroundWidth = 56;
}
void UIdraw(){
currCameraMatrix = new PMatrix3D(g3.camera);
camera();
pushStyle();
popStyle();
controlP5.draw();
if(GUI.value()==1){
pushStyle();
fill(100,70);
rect(30,20,400,600);
popStyle();
pushStyle();
fill(100,70);
rect(440,20,400,220);
fill(20);
text("Hand Data",440,30);
popStyle();
Mirror.show();
CamFrustum.show();
TrackSkel.show();
TrackHand.show();
TrackCenter.show();
drawing.show();
defaultDrawing.show();
Zoom.show();
RotateX.show();
RotateY.show();
TransX.show();
TransY.show();
CDistance.show();
resetPosition.show();
FaceDetection.show();
if(FaceDetection.value()==1) OpenCVdraw();
handX.show(); handY.show();handZ.show();
MidiHand.show();
if(MidiHand.value()==1) { hx.show(); hy.show(); hz.show(); }
else{hx.hide(); hy.hide(); hz.hide();}
}else{
Mirror.hide();
CamFrustum.hide();
TrackSkel.hide();
TrackHand.hide();
TrackCenter.hide();
drawing.hide();
defaultDrawing.hide();
Zoom.hide();
RotateX.hide();
RotateY.hide();
TransX.hide();
TransY.hide();
CDistance.hide();
resetPosition.hide();
FaceDetection.hide();
handX.hide(); handY.hide();handZ.hide();
MidiHand.hide();
hx.hide(); hy.hide(); hz.hide();
}
g3.camera = currCameraMatrix;
Mir = Mirror.getState();
zoomF = Zoom.value();
rotX = radians(RotateX.value());
rotY = radians(RotateY.value());
transX = (int)TransX.value();
transY = (int)TransY.value();
CompareDistance = (int)CDistance.value();
}
public void reset(int theValue) {
Zoom.setValue(0.5f);
RotateX.setValue(180);
RotateY.setValue(0);
TransX.setValue(width/2);
TransY.setValue(height/2);
CDistance.setValue(5000);
}
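// -----------------------------------------------------------------
// UserManager: SimpleOpenNI user callbacks (pose detection -> calibration -> skeleton tracking)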
public class UserManager
{
protected SimpleOpenNI _context;
public UserManager(SimpleOpenNI context)
{
_context = context;
}
public void onNewUser(int userId)
{
println("onNewUser - userId: " + userId);
println(" start pose detection");
_context.startPoseDetection("Psi", userId);
}
public void onLostUser(int userId)
{
println("onLostUser - userId: " + userId);
}
public void onStartCalibration(int userId)
{
println("onStartCalibration - userId: " + userId);
}
public void onEndCalibration(int userId, boolean successful)
{
println("onEndCalibration - userId: " + userId + ", successful: " + successful);
if (successful)
{
println(" User calibrated !!!");
_context.startTrackingSkeleton(userId);
}
else
{
println(" Failed to calibrate user !!!");
println(" Start pose detection");
_context.startPoseDetection("Psi", userId);
}
}
public void onStartPose(String pose, int userId)
{
println("onStartdPose - userId: " + userId + ", pose: " + pose);
println(" stop pose detection");
_context.stopPoseDetection(userId);
_context.requestCalibrationSkeleton(userId, true);
}
public void onEndPose(String pose, int userId)
{
println("onEndPose - userId: " + userId + ", pose: " + pose);
}
}
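// -----------------------------------------------------------------
// Main sketch: imports, setup() and draw()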
import processing.opengl.*;
import processing.serial.*;
import themidibus.*; //Import the library
import hypermedia.video.*;
import java.awt.Rectangle;
import SimpleOpenNI.*;
import controlP5.*;
void setup()
{
size(1024,768,OPENGL); // strange: the cameraFrustum draws incorrectly with P3D, while OPENGL has no problem
OpenNIsetup();
OpenCVsetup();
UIsetup();
portSetup();
MidiSetup();
}
void draw()
{
background(255);
OpenNIdraw();
UIdraw();
SendMidi();
}
public void stop() {
OpenCVstop();
}
// Keyboard events
void keyPressed()
{
switch(key)
{
case ' ':
context.setMirror(!context.mirror());
break;
}
switch(keyCode)
{
case LEFT:
rotY += 0.1f;
break;
case RIGHT:
rotY -= 0.1f;
break;
break;
case UP:
if(keyEvent.isShiftDown())
zoomF += 0.01f;
else
rotX += 0.1f;
break;
case DOWN:
if(keyEvent.isShiftDown())
{
zoomF -= 0.01f;
if(zoomF < 0.01)
zoomF = 0.01;
}
else
rotX -= 0.1f;
break;
}
}