Final Project Documentation (yay!)

For my final, I made a 3D illusion that rotates a camera based on the position of your head, tracked with face detection. I used the OpenCV library for Processing (https://github.com/atduskgreg/opencv-processing) and sent the position and size of the closest detected face to Unity.

Here’s how it came out:

Here’s the Processing code (which I commented for once):

import oscP5.*;
import netP5.*;
import processing.serial.*;
import gab.opencv.*;
import processing.video.*;
import java.awt.*;

Capture video;
OpenCV opencv;

OscP5 oscP5;
NetAddress myBroadcastLocation;

//my variables
int Index;

float xpos;
float ypos;
float zpos;

float xprev;
float yprev;
float zprev;

float ease = 0.5;

float xaxis = 0;
float yaxis = 0;
float zaxis = 0;

float[] axisArr = new float[3];

float countdown = 255;

//use 30 or 60
int myfps = 60;

//use 1 for 640px by 480px
//use 2 for 320px by 240px
int res = 2;

void setup() {
  ////
  size(640/res, 480/res);
  frameRate(myfps);
  video = new Capture(this, 640/(2*res), 480/(2*res), myfps);
  opencv = new OpenCV(this, 640/(2*res), 480/(2*res));
  opencv.loadCascade(OpenCV.CASCADE_FRONTALFACE);

  video.start();

  //init variables
  xpos = width/4;
  ypos = height/4;
  zpos = 0;
  xprev = xpos;
  yprev = ypos;
  zprev = zpos;

  //start oscP5, listening for incoming messages at port 12000
  oscP5 = new OscP5(this, 12000);
  //broadcasting on port 8000
  myBroadcastLocation = new NetAddress("127.0.0.1", 8000);
}

//change the fps and/or res vars, then call resetup() in draw()
//lets the user trade tracking quality for lower CPU usage
void resetup() {
  video.stop();

  //resize window
  frame.setResizable(true);
  frame.setSize(640/res, 480/res);
  frame.setResizable(false);
  frameRate(myfps);
  video = new Capture(this, 640/(2*res), 480/(2*res), myfps);
  opencv = new OpenCV(this, 640/(2*res), 480/(2*res));
  opencv.loadCascade(OpenCV.CASCADE_FRONTALFACE);

  video.start();

  //re-init variables
  xpos = width/4;
  ypos = height/4;
  zpos = 0;
  xprev = xpos;
  yprev = ypos;
  zprev = zpos;
}

void draw() {
  ////
  background(200);
  scale(2);
  opencv.loadImage(video);

  //draw camera feed
  image(video, 0, 0);

  //for testing – green outline
  /*
  noFill();
  stroke(0, 255, 0);
  strokeWeight(3);
  */

  Rectangle[] faces = opencv.detect();
  println(faces.length);

  //clears saved closest face index
  Index = 0;

  //loop through array faces[]
  for (int i = 0; i < faces.length; i++) {

    //for testing – draws rectangles around faces
    /*
    //println(faces[i].x + "," + faces[i].y);
    //rect(faces[i].x, faces[i].y, faces[i].width, faces[i].height);
    */

    //save the index of the closest face to the screen in array faces[]
    if (faces[i].width*faces[i].height > faces[Index].width*faces[Index].height) {
      Index = i;
    }
  }

  //save the closest face's position in screen space
  if (faces.length >= 1) {
    xpos = faces[Index].x + (faces[Index].width/2);
    ypos = faces[Index].y + (faces[Index].height/2);
    zpos = faces[Index].height;
  }

  //convert the positions to ratios
  xaxis = ((xpos/(width/2))-0.5)*2;
  yaxis = ((ypos/(height/2))-0.5)*2;
  zaxis = ((zpos/(height/2))-0.5)*2;

  //save position ratios in an array to send to Unity
  axisArr[0] = xaxis;
  axisArr[1] = yaxis;
  axisArr[2] = zaxis;

  //ease the raw positions
  xpos = xprev+((xpos-xprev)*ease);
  ypos = yprev+((ypos-yprev)*ease);
  zpos = zprev+((zpos-zprev)*(ease/2));
  //*depth is eased twice as much because it's extra jittery

  //draw marker overlay
  noStroke();
  fill(255, 100);
  rect(0, 0, width, height);
  ellipse(xpos, ypos, zpos, zpos);
  ellipse(xpos, ypos, zpos*1.35, zpos*1.35);

  println(axisArr);

  //save face position of last frame
  xprev = xpos;
  yprev = ypos;
  zprev = zpos;

  //fade in screen from white
  fill(255, countdown);
  if (countdown > 0) {
    countdown -= 5;
  }
  rect(0, 0, width, height);
  ////

  /*Sends axisArr to Unity
  UDP host address 127.0.0.1, port 8000*/
  sendAxis();
}

void sendAxis() {
  OscMessage myOscMessage = new OscMessage("/axisTest");
  myOscMessage.add(axisArr);
  oscP5.send(myOscMessage, myBroadcastLocation);
}

void captureEvent(Capture c) {
  c.read();
}

 

void oscEvent(OscMessage theOscMessage) {
  /* get and print the address pattern and the typetag of the received OscMessage */
  println("### received an osc message with addrpattern " + theOscMessage.addrPattern() + " and typetag " + theOscMessage.typetag());
  theOscMessage.print();
}

The code in Unity just places an object in 3D space relative to the camera at the head's x and y positions, using the face's size as the z axis. The two programs communicate over an OSC socket connection. The camera is then set to always face away from the object representing the player's head (which is positioned behind the camera).
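I haven't included the Unity C# script here, but to give an idea of what the receiving end does, here's a rough sketch of the same logic written as a Processing (P3D) sketch instead of Unity code. It listens on port 8000 for the /axisTest message, reads the three ratios, and offsets the camera opposite the head. The scaling constants (200 and 400) are placeholders, and the signs may need flipping depending on whether your webcam image is mirrored.

import oscP5.*;
import netP5.*;

OscP5 oscP5;
float hx = 0, hy = 0, hz = 0;  //head position ratios, roughly -1..1

void setup() {
  size(640, 480, P3D);
  //listen on the port the tracking sketch broadcasts to (8000)
  oscP5 = new OscP5(this, 8000);
}

void draw() {
  background(30);
  //move the camera opposite the head, like the Unity camera rig described above
  camera(-hx * 200, -hy * 200, 400 + hz * 200,  //eye position
         0, 0, 0,                               //look at the scene center
         0, 1, 0);                              //up vector
  //simple scene to show the parallax
  noFill();
  stroke(255);
  box(200);
}

void oscEvent(OscMessage m) {
  //read the three floats sent by sendAxis()
  if (m.checkAddrPattern("/axisTest")) {
    hx = m.get(0).floatValue();
    hy = m.get(1).floatValue();
    hz = m.get(2).floatValue();
  }
}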
