MacRuby video Santa-fy (with support for multiple Santas)
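A MacRuby script that captures live video from the default camera with AVFoundation, runs Core Image face detection on every frame, and dresses each detected face with a Santa hat, glasses and moustache rendered as CALayers. To try it (assuming MacRuby is installed and the gist is saved as santa.rb; the filename is up to you):

macruby santa.rb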
framework 'Cocoa'
framework 'AVFoundation'
class Santa
  # The overlay images are downloaded once, when the class is first loaded,
  # and shared by every Santa instance via class variables.
  t_url = NSURL.URLWithString("http://dl.dropbox.com/u/349788/mustache.png")
  t_source = CGImageSourceCreateWithURL t_url, nil
  @@tache = CGImageSourceCreateImageAtIndex t_source, 0, nil

  g_url = NSURL.URLWithString("http://dl.dropbox.com/u/349788/glasses.png")
  g_source = CGImageSourceCreateWithURL g_url, nil
  @@glasses = CGImageSourceCreateImageAtIndex g_source, 0, nil

  h_url = NSURL.URLWithString("http://dl.dropbox.com/u/349788/hat.png")
  h_source = CGImageSourceCreateWithURL h_url, nil
  @@hat = CGImageSourceCreateImageAtIndex h_source, 0, nil
  def initialize(layer)
    @bounds = CGRectZero
    # All CALayer manipulation happens on the main queue.
    Dispatch::Queue.main.async do
      @face_layer = layer
      add_feature_layer(:@tache, @@tache)
      add_feature_layer(:@glasses, @@glasses)
      add_feature_layer(:@hat, @@hat)
    end
  end
  def add_feature_layer(instance_var_name, image)
    layer = instance_variable_set instance_var_name, CALayer.layer
    layer.contents = image
    layer.contentsGravity = KCAGravityResize
    @face_layer.addSublayer layer
  end
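  # Area of overlap between a detected face and this Santa's last-known
  # bounds. Used by the delegate to pair each face with the nearest Santa
  # from frame to frame.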
  def feature_intersection_size(feature)
    r = CGRectIntersection(feature.bounds, @bounds)
    r.size.width * r.size.height
  end
  def hide_features
    Dispatch::Queue.main.async do
      [@tache, @hat, @glasses].each { |e| e.opacity = 0 }
    end
  end
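  # Lay out the moustache, glasses and hat relative to the detected eye and
  # mouth positions. The roll angle for each layer comes from the slope of
  # the line between the two eyes: atan2(dy, dx).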
  def rearrange_features(feature)
    Dispatch::Queue.main.async do
      @bounds = feature.bounds
      if feature.hasRightEyePosition && feature.hasLeftEyePosition && feature.hasMouthPosition
        rotation = Math.atan2(feature.rightEyePosition.y - feature.leftEyePosition.y,
                              feature.rightEyePosition.x - feature.leftEyePosition.x)

        # Moustache: a fifth of the face height, midway between mouth and eye line.
        w = feature.bounds.size.width
        h = feature.bounds.size.height / 5
        @tache.opacity = 0.9
        @tache.bounds = [0, 0, w, h]
        @tache.position = [(feature.mouthPosition.x + (feature.leftEyePosition.x + feature.rightEyePosition.x) / 2) / 2,
                           feature.mouthPosition.y + h / 2]
        @tache.setValue rotation, forKeyPath: "transform.rotation"

        # Glasses: 2/5 of the face height, centred between the eyes.
        w = feature.bounds.size.width
        h = feature.bounds.size.height / 2.5
        @glasses.opacity = 1.0
        @glasses.bounds = [0, 0, w, h]
        @glasses.position = [(feature.leftEyePosition.x + feature.rightEyePosition.x) / 2,
                             (feature.rightEyePosition.y + feature.leftEyePosition.y) / 2]
        @glasses.setValue rotation, forKeyPath: "transform.rotation"

        # Hat: slightly larger than the face, raised above the eye line and
        # tilted an extra 25 degrees for a jaunty look.
        w = feature.bounds.size.width * 5 / 4
        h = feature.bounds.size.height * 5 / 4
        @hat.opacity = 1.0
        @hat.bounds = [0, 0, w, h]
        @hat.position = [(feature.rightEyePosition.x + feature.leftEyePosition.x + feature.mouthPosition.x) / 3,
                         (feature.leftEyePosition.y + feature.rightEyePosition.y) / 2 - h / 7 + h / 2]
        @hat.setValue 25 * Math::PI / 180 + rotation, forKeyPath: "transform.rotation"
      end
    end
  end
end
class FaceDetectionDelegate
  attr_accessor :window

  def applicationDidFinishLaunching(aNotification)
    @santas_array = []
    # Low accuracy is fine (and fast) for live video.
    @detector = CIDetector.detectorOfType "CIDetectorTypeFace", context: nil, options: {CIDetectorAccuracy: CIDetectorAccuracyLow}

    # Capture video from the default camera, at 720p if the device supports it.
    session = AVCaptureSession.new
    device = AVCaptureDevice.defaultDeviceWithMediaType AVMediaTypeVideo
    if device.supportsAVCaptureSessionPreset AVCaptureSessionPreset1280x720
      session.sessionPreset = AVCaptureSessionPreset1280x720
      width, height = 1280, 720
    else
      session.sessionPreset = AVCaptureSessionPreset640x480
      width, height = 640, 480
    end

    input = AVCaptureDeviceInput.deviceInputWithDevice device, error: nil
    output = AVCaptureVideoDataOutput.new
    output.alwaysDiscardsLateVideoFrames = true
    # Frames are delivered to the captureOutput callback on this serial queue.
    queue = Dispatch::Queue.new('cameraQueue')
    output.setSampleBufferDelegate self, queue: queue.dispatch_object
    output.setVideoSettings KCVPixelBufferPixelFormatTypeKey => KCVPixelFormatType_32BGRA,
                            KCVPixelBufferWidthKey => width,
                            KCVPixelBufferHeightKey => height
    session.addInput input
    session.addOutput output

    # Flip the preview horizontally so it behaves like a mirror.
    @preview_layer = AVCaptureVideoPreviewLayer.layerWithSession session
    @preview_layer.frame = [0.0, 0.0, width, height]
    @preview_layer.videoGravity = AVLayerVideoGravityResizeAspectFill
    @preview_layer.affineTransform = CGAffineTransformMakeScale(-1, 1)
    session.startRunning

    window.setFrame [0.0, 0.0, width, height], display: true, animate: true
    window.center
    window.delegate = self
    window.contentView.wantsLayer = true
    window.contentView.layer.addSublayer @preview_layer
    window.orderFrontRegardless
  end
  def add_face
    @santas_array << Santa.new(@preview_layer)
  end
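  # Sample buffer delegate callback: runs on the camera queue for every
  # captured frame.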
  def captureOutput(captureOutput, didOutputSampleBuffer: sampleBuffer, fromConnection: connection)
    imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    image = CIImage.imageWithCVImageBuffer(imageBuffer)
    features = @detector.featuresInImage(image)
    add_face while features.size > @santas_array.size
    # Each new face from the feature detector needs to be matched to the right set
    # of Santa CALayers; otherwise the features jump from one person to another - complete bedlam.
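    # Matching is greedy: for each face, sort the unclaimed santas by how much
    # their last-known bounds overlap the face (ascending) and pop the best
    # match off the end, so no santa is claimed twice. E.g. with faces A and B,
    # the santa last seen where A is now goes to A, and B takes what's left.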
    santas = @santas_array.dup
    features.each do |feature|
      matched_face = santas.sort_by! { |s| s.feature_intersection_size(feature) }.pop
      matched_face.rearrange_features feature
    end
    santas.each { |e| e.hide_features } # any remaining santas have exited the frame and should be hidden
    nil
  end

  # Quit the app when the window closes.
  def windowWillClose(sender); exit(1); end
end
# Create the application
application = NSApplication.sharedApplication
application.activationPolicy = NSApplicationActivationPolicyRegular
application.delegate = FaceDetectionDelegate.new

# Create the application window
frame = [0.0, 0.0, 330, 250]
window = NSWindow.alloc.initWithContentRect frame,
  styleMask: NSTitledWindowMask | NSClosableWindowMask,
  backing: NSBackingStoreBuffered,
  defer: false
application.delegate.window = window
window.orderOut(nil)
window.display

puts "Starting the app..."
application.run
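The three overlay PNGs are fetched from Dropbox when the Santa class is loaded, so the script depends on those links staying alive. If they stop working, the class variables could be pointed at local files instead. A minimal sketch, assuming mustache.png, glasses.png and hat.png sit next to the script (the load_feature helper is made up for illustration):

class Santa
  # Hypothetical helper: build a CGImage from a PNG next to this script.
  def self.load_feature(filename)
    path = File.expand_path(filename, File.dirname(__FILE__))
    source = CGImageSourceCreateWithURL(NSURL.fileURLWithPath(path), nil)
    CGImageSourceCreateImageAtIndex(source, 0, nil)
  end

  # Reopening the class and reassigning the class variables replaces the
  # Dropbox-hosted images with the local ones.
  @@tache   = load_feature('mustache.png')
  @@glasses = load_feature('glasses.png')
  @@hat     = load_feature('hat.png')
end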