Last active
August 30, 2015 10:54
-
-
Save darkwave/bfcbb314e4d087c7d406 to your computer and use it in GitHub Desktop.
ARsenico new version
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<!--
  ARsenico Android manifest (minSdk/targetSdk 15).
  NOTE(review): package="" and android:label="" are empty; they must be filled
  in before this manifest can build/install - presumably the export tool
  stripped them.
  NOTE(review): android:debuggable="true" must be removed or set to false for
  any release build.
  CAMERA + FLASHLIGHT permissions support the preview and the torch toggle.
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android" android:versionCode="2" android:versionName="1.0" package=""><uses-sdk android:minSdkVersion="15" android:targetSdkVersion="15"/><application android:debuggable="true" android:icon="@drawable/icon" android:label=""><activity android:name="MainActivity" android:screenOrientation="landscape"><intent-filter><action android:name="android.intent.action.MAIN"/><category android:name="android.intent.category.LAUNCHER"/></intent-filter></activity></application><uses-permission android:name="android.permission.CAMERA"/><uses-permission android:name="android.permission.FLASHLIGHT"/></manifest>
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/*
 ARsenico
 Released under GPL v3
*/
// Live camera feed (Ketai library); source frames for marker detection.
KetaiCamera cam;
// Set on first tap; draw() is suspended (splash stays up) until then.
boolean touched;
// Minimal on-screen UI (single settings button, wired in setup()).
UI ui;
// millis() timestamp of the last detection pass; throttles refreshInput().
int lastAR = 0;
// Called by KetaiCamera each time a new preview frame is available.
void onCameraPreviewEvent() {
  cam.read();
  // Throttle marker detection so it does not run on every frame.
  // NOTE(review): this compares elapsed milliseconds against frameRate
  // (frames per second), giving a pause of ~frameRate ms rather than one
  // frame period (1000/frameRate ms) - confirm which was intended.
  if (ar != null && millis() - lastAR > frameRate) {
    ar.refreshInput(cam);
    lastAR = millis();
  }
}
// Marker-detection / rendering engine (see AugmentedReality class).
AugmentedReality ar;
// Requested camera preview settings; the small frame keeps detection fast.
int cameraFrameRate = 30;
int cameraWidth = 352;
int cameraHeight = 288;
// NOTE(review): 9 marker pattern files vs 10 model files below -
// displayScene() indexes models with markerID % length, so the final
// model entry is effectively unused.
String[] markersFilenames = new String[] {
  "4x4_1.patt", "4x4_2.patt", "4x4_3.patt", "4x4_4.patt", "4x4_5.patt", "4x4_6.patt", "4x4_7.patt", "4x4_8.patt", "4x4_9.patt",
};
// One OBJ model per marker (all the same file in this version).
String[] modelsFilenames = new String[] {
  "Olla01.obj", "Olla01.obj", "Olla01.obj", "Olla01.obj", "Olla01.obj", "Olla01.obj", "Olla01.obj", "Olla01.obj", "Olla01.obj", "Olla01.obj",
};
int counter = 0;
// Group shape built in loadAsset(); individual models are drawn from modelsArray.
PShape models;
boolean record = false;
// Ambient-light presets toggled by changeSettings() (torch on/off).
static final int LIGHT_LOW = 0;
static final int LIGHT_MID = 1;
static final int LIGHT_HIGH = 2; // NOTE(review): never reachable from changeSettings()
int currentWeather = LIGHT_LOW;
/**
 * Sketch entry point: shows a splash logo, starts the camera and the AR
 * engine, builds the UI, and kicks off asset loading on a worker thread.
 */
void setup() {
  fullScreen(P3D);
  background(255, 255);
  // Splash logo; stays visible until draw() starts rendering (ready && touched).
  PImage logo = loadImage("logo.png");
  imageMode(CENTER);
  image(logo, width / 2, height / 2);
  cam = new KetaiCamera(this, cameraWidth, cameraHeight, cameraFrameRate);
  cam.start();
  // The AR detection buffer matches the camera preview size.
  ar = new AugmentedReality(this, cam.width, cam.height);
  ar.loadMarkers(markersFilenames);
  registerMethod("exit", this); // ensure exit() runs so the camera is released
  noStroke();
  imageMode(CORNERS);
  textureWrap(REPEAT);
  println("DISPLAY WIDTH " + displayWidth);
  ui = new UI(this);
  ui.add("changeSettings", "button2.png", 40, 40, displayWidth/10, displayWidth/10);
  // NOTE(review): font name "Robota" - presumably "Roboto"; confirm against
  // the font shipped with the sketch data.
  textFont(createFont("Robota", 100), 100);
  textAlign(LEFT, TOP);
  thread("loadAsset"); // load OBJ models off the animation thread
}
// True once loadAsset() (running on a background thread) has finished.
boolean ready = false;
// One PShape per model file, indexed by marker id in displayScene().
PShape[] modelsArray;
void loadAsset() { | |
ready = false; | |
models = createShape(GROUP); | |
modelsArray = new PShape[modelsFilenames.length]; | |
for (int i = 0; i < modelsFilenames.length; i++) { | |
PShape cube = loadShape(modelsFilenames[i]); | |
//cube.setStroke(false); | |
modelsArray[i] = cube; | |
cube.rotateX(radians(90)); | |
cube.scale(80); | |
println("added " + modelsFilenames[i]); | |
//models.rotateX(radians(90)); | |
//models.scale(80); | |
} | |
ready = true; | |
} | |
void exit() { | |
println("disposed"); | |
if (cam != null) { | |
cam.stop(); | |
cam.dispose(); | |
println("disposed webcam"); | |
} | |
super.exit(); | |
} | |
/**
 * UI callback for the settings button (bound by name in setup()).
 * On action 2 (presumably button release - confirm against the UI class)
 * toggles between the LIGHT_LOW and LIGHT_MID presets.
 * NOTE(review): the pairing looks inverted - entering LIGHT_MID disables
 * the flash yet re-sets the same "button2.png" icon used at startup, while
 * returning to LIGHT_LOW enables the flash with "button1.png". LIGHT_HIGH
 * is never reachable from here. Verify the intended mapping.
 */
void changeSettings(UI.Button button, int action, int x, int y) {
  println(button + ", " + action + " at " + x + "," + y);
  if (action==2){
    if (currentWeather == LIGHT_LOW){
      currentWeather = LIGHT_MID;
      cam.disableFlash();
      button.setImage("button2.png");
    } else if (currentWeather == LIGHT_MID){ //night
      currentWeather = LIGHT_LOW;
      cam.enableFlash();
      button.setImage("button1.png");
    }
  }
}
/**
 * Main render loop: waits for assets and a first tap, then draws the camera
 * frame, the AR overlay, and the UI/FPS readout on top.
 */
void draw() {
  // Until assets are loaded and the user taps, keep the splash from setup().
  if (!ready || !touched)
    return;
  image(cam, 0, 0, width, height); // camera frame as background
  noLights();
  ar.display();                    // 3D models over detected markers
  hint(DISABLE_DEPTH_TEST);        // keep UI and text on top of the 3D scene
  ui.display();
  fill(255, 0, 0);
  text(frameRate, 200, 10);        // FPS readout
}
/** First tap dismisses the splash screen and lets draw() start rendering. */
void mousePressed() {
  touched = true;
}
void displayScene(int markerID) { | |
//translate(0, 0, 40); | |
shape(modelsArray[markerID % modelsArray.length]); | |
} |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
//package com.mondonerd.ARsenico; | |
/* | |
This file is part of ARsenico copyright by Massimo Avvisati ([email protected]) | |
It is released under Free Software license GPL v3 | |
Please visit http://mondonerd.com/arsenico for credits and informations. | |
*/ | |
import processing.core.*; | |
import jp.nyatla.nyar4psg.MultiMarker; | |
import jp.nyatla.nyar4psg.NyAR4PsgConfig; | |
import java.lang.reflect.*; | |
public class AugmentedReality { | |
PApplet parent; | |
boolean updating = false; | |
MultiMarker nya; | |
int totalCount = 0; | |
PMatrix3D globalMatrix; | |
PMatrix3D[] localMatrix; | |
public boolean debug = true; | |
static final int DEFAULT_WIDTH = 640; | |
static final int DEFAULT_HEIGHT = 480; | |
Method displayMethod; | |
private boolean mirroredInput = false; | |
boolean[] activeMarker; | |
/** | |
* Create an Augmented Reality environment. | |
* | |
* @param _parent | |
* Your sketch. For example inside {@code setup()} you can use | |
* {@code AugmentedReality(this);} | |
*/ | |
public AugmentedReality(PApplet _parent) { | |
this(_parent, DEFAULT_WIDTH, DEFAULT_HEIGHT); | |
} | |
/** | |
* Create an Augmented Reality environment. | |
* | |
* width and height are used to create the offscreen buffer used to store | |
* input for markers detection. They shouldn't be bigger than the input | |
* image width and height. You may specify a renderer (P3D, OPENGL...) | |
* | |
* @param _parent | |
* @param width | |
* input width | |
* @param height | |
* input width | |
* @param renderer | |
* {@code P2D}, {@code P3D}, {@code OPENGL} or any valid renderer | |
* | |
* @see PGraphics | |
*/ | |
public AugmentedReality(PApplet _parent, int w, int h) { | |
parent = _parent; | |
nya = new MultiMarker(parent, w, h, "camera_para.dat", | |
NyAR4PsgConfig.CONFIG_PSG); | |
try { | |
displayMethod = parent.getClass().getMethod("displayScene", | |
new Class[] { | |
int.class | |
} | |
); | |
} | |
catch (Exception e) { | |
// no such method, or an error.. which is fine, just ignore | |
debug(e.getMessage()); | |
} | |
reality = parent.createImage(w, h, PApplet.RGB); | |
} | |
/** | |
* Load markers from .patt and .png files. | |
* | |
* @param markersFilenames | |
* One or more filenames with markers. | |
*/ | |
public void loadMarkers(String... markersFilenames) { | |
for (int i = 0; i < markersFilenames.length; i++) { | |
if (markersFilenames[i].indexOf(".png") >= 0) { | |
PImage markerTemp = parent.loadImage(markersFilenames[i]); | |
if (markerTemp != null) | |
addMarker(markerTemp); | |
} else { | |
addMarker(markersFilenames[i]); | |
} | |
} | |
} | |
/* | |
* private void initOffscreen(int w, int h) { offscreen = | |
* parent.createGraphics(w, h, PConstants.P3D); debug("initOffscreen(" + w + | |
* "," + h + ") = " + offscreen.width + "x" + offscreen.height); } | |
*/ | |
protected void addMarker(String markerFilename) { | |
if (nya == null) { | |
debug("You are trying to addMarker(" + markerFilename | |
+ ") before to call initAR()"); | |
return; | |
} | |
try { | |
nya.addARMarker(markerFilename, 80); | |
} | |
catch (Exception ex) { | |
debug("You are trying to addMarker(" + markerFilename | |
+ ") but it doesn't seem to be a valid filename"); | |
} | |
finally { | |
totalCount++; | |
} | |
} | |
protected void addMarker(PImage markerImage) { | |
if (nya == null) { | |
debug("You are trying to addMarker() before to call initAR()"); | |
return; | |
} | |
try { | |
nya.addARMarker(markerImage, 16, 25, 80); | |
} | |
catch (Exception ex) { | |
debug("You are trying to addMarker() but it doesn't seem to be a valid file"); | |
} | |
finally { | |
totalCount++; | |
} | |
} | |
/** | |
* If your users face a webcam like a mirror you may want to "flip" the | |
* input image. | |
* | |
* @return | |
* @see #getReality() | |
*/ | |
public boolean toggleMirror() { | |
mirroredInput = !mirroredInput; | |
return mirroredInput; | |
} | |
/** | |
* A PImage of your reality. We will use it to detect markers | |
* | |
* @param inputImg | |
* It can be a PImage or Capture or Movie or whatever derives | |
* from PImage | |
* | |
* @see getReality(); | |
*/ | |
PImage reality; | |
PImage getReality() { | |
return reality.get(); | |
} | |
public void refreshInput(PImage inputImg) { | |
if (nya != null && inputImg != null) { | |
try { | |
//nya.detect(inputImg); | |
reality.set(0, 0, inputImg.get()); | |
nya.detectWithoutLoadPixels(reality); | |
} | |
catch (Exception ex) { | |
debug("Error while detecting"); | |
return; | |
} | |
} else { | |
debug("Error while detecting"); | |
return; | |
} | |
PMatrix3D projectMatrix = nya.getProjectionMatrix(); | |
// if (projectMatrix != null) | |
globalMatrix = projectMatrix.get(); | |
// TODO | |
// setPerspective(m, pg); | |
localMatrix = new PMatrix3D[totalCount]; | |
activeMarker = new boolean[totalCount]; | |
for (int i = 0; i < totalCount; i++) { | |
if (nya.isExistMarker(i)) { | |
localMatrix[i] = nya.getMarkerMatrix(i); | |
activeMarker[i] = true; | |
} else { | |
activeMarker[i] = false; | |
localMatrix[i] = new PMatrix3D(); | |
} | |
} | |
} | |
private void displayEvent(PGraphics pg, int i) { | |
if (displayMethod != null) { | |
try { | |
displayMethod.invoke(parent, i, pg); | |
} | |
catch (Exception e) { | |
debug("Disabling displayMethod() because of an error."); | |
// e.printStackTrace(); | |
displayMethod = null; | |
} | |
} | |
} | |
private void displayEvent(int i) { | |
if (displayMethod != null) { | |
try { | |
displayMethod.invoke(parent, i); | |
} | |
catch (Exception e) { | |
debug("Disabling displayMethod() because of an error."); | |
// e.printStackTrace(); | |
displayMethod = null; | |
} | |
} | |
} | |
/** | |
* Specify how strict is the markers detection. | |
* | |
* @param threshold | |
* A value between 0 and 256 or a -1 for automatic value ( | |
* @param confidenceThreshold | |
* Between 0.0 and 1.0 | |
* @param lostDelay | |
* 1 or more | |
*/ | |
public void setParameters(int threshold, double confidenceThreshold, | |
int lostDelay) { | |
nya.setThreshold(threshold); | |
nya.setConfidenceThreshold(confidenceThreshold); | |
nya.setLostDelay(lostDelay); | |
} | |
public int getThreshold() { | |
return nya.getCurrentThreshold(); | |
} | |
/** | |
* Get current "confidence" for a specific marker | |
* | |
* @param id | |
* @return | |
*/ | |
public double getConfidence(int id) { | |
return nya.getConfidence(id); | |
} | |
/** | |
* In order to display your augmented reality you must implement a | |
* displayScene(int markerID) method inside your sketch | |
* | |
* void displayScene(int markerID) { //here you are using the marker | |
* //projection matrix } | |
* | |
*/ | |
public void display() { | |
display(parent.g); | |
} | |
/** | |
* Display your augmented reality on a different PGraphics object (an | |
* offscreen buffer for example) | |
* | |
* @param offscreen | |
* buffer or other PGraphics | |
*/ | |
public void display(PGraphics pg) { | |
if (globalMatrix == null || localMatrix == null || activeMarker == | |
null) | |
return; | |
pg.hint(PApplet.ENABLE_DEPTH_TEST); | |
//pg.hint(PApplet.DISABLE_DEPTH_TEST); | |
pg.pushMatrix(); | |
pg.setMatrix(globalMatrix); | |
if (mirroredInput) { | |
pg.scale(-1, 1, 1); | |
PApplet.println("mirrored"); | |
} | |
setPerspective(globalMatrix, pg); | |
for (int i = 0; i < totalCount; i++) { | |
if (activeMarker[i]) { | |
pg.pushMatrix(); | |
if (localMatrix[i] != null) | |
pg.setMatrix(localMatrix[i]); // load Marker matrix | |
if (pg == parent.g) | |
displayEvent(i); | |
else | |
displayEvent(pg, i); | |
// displayScene(i); | |
pg.popMatrix(); | |
} | |
} | |
pg.popMatrix(); | |
pg.perspective(); | |
} | |
private void setPerspective(PMatrix3D i_projection, PGraphics pg) { | |
// Projection frustum | |
float far = i_projection.m23 / (i_projection.m22 + 1); | |
float near = i_projection.m23 / (i_projection.m22 - 1); | |
pg.frustum((i_projection.m02 - 1) * near / i_projection.m00, | |
(i_projection.m02 + 1) * near / i_projection.m00, | |
(i_projection.m12 - 1) * near / i_projection.m11, | |
(i_projection.m12 + 1) * near / i_projection.m11, near, far); | |
return; | |
} | |
private void debug(String message) { | |
if (!debug) | |
return; | |
PApplet.println(message); | |
} | |
} |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import java.io.File; | |
import java.io.FileNotFoundException; | |
import java.io.FileOutputStream; | |
import java.io.IOException; | |
import java.lang.reflect.InvocationTargetException; | |
import java.lang.reflect.Method; | |
import java.text.SimpleDateFormat; | |
import java.util.Collection; | |
import java.util.Date; | |
import java.util.List; | |
import java.util.Vector; | |
import javax.microedition.khronos.opengles.GL10; | |
import processing.core.PApplet; | |
import processing.core.PImage; | |
import android.content.pm.ApplicationInfo; | |
import android.content.pm.PackageManager; | |
import android.content.pm.PackageManager.NameNotFoundException; | |
import android.graphics.Bitmap; | |
import android.graphics.ImageFormat; | |
import android.graphics.SurfaceTexture; | |
import android.graphics.Bitmap.Config; | |
import android.hardware.Camera; | |
import android.hardware.Camera.AutoFocusCallback; | |
import android.hardware.Camera.CameraInfo; | |
import android.hardware.Camera.Parameters; | |
import android.hardware.Camera.PictureCallback; | |
import android.hardware.Camera.PreviewCallback; | |
import android.hardware.Camera.Size; | |
import android.media.MediaScannerConnection; | |
import android.media.MediaScannerConnection.OnScanCompletedListener; | |
import android.net.Uri; | |
import android.opengl.GLES20; | |
import android.os.Environment; | |
import android.view.Surface; | |
/** | |
* The Class KetaiCamera allows the processing sketches to access android | |
* cameras through an object modeled after the desktop/java processing Camera | |
* class. | |
* | |
*/ | |
public class KetaiCamera extends PImage { | |
/** Underlying Android camera; null until start() opens it. */
private Camera camera;
/** Scratch buffer for converted preview pixels. */
private int[] myPixels;
/** Optional sketch callbacks, resolved by reflection in determineObjectIntentions(). */
protected Method onPreviewEventMethod, onPreviewEventMethodPImage,
onSavePhotoEventMethod, onFaceDetectionEventMethod;
/** Preview geometry/frame rate and the id of the camera in use. */
private int frameWidth, frameHeight, cameraFPS, cameraID;
/** Still-photo dimensions; may differ from the preview size. */
private int photoWidth, photoHeight;
/** Camera state flags. */
public boolean isStarted, requestedStart, enableFlash,
isRGBPreviewSupported;
/** Full path the next photo will be written to. */
private String savePhotoPath = "";
private Vector<Method> listeners = new Vector<Method>();
/** Self-reference handed to inner callbacks. */
KetaiCamera self;
/** Directory name (under public Pictures) where photos are saved. */
String SAVE_DIR = "";
// Thread runner;
/** The available. */
boolean available = false;
// public boolean isDetectingFaces = false;
/** True if the device reports native face-detection support. */
boolean supportsFaceDetection = false;
/** Offscreen texture the preview is bound to (required to start the preview). */
SurfaceTexture mTexture;
public Object callbackdelegate;
public boolean requestedPortraitImage = false;
// private ketaiFaceDetectionListener facelistener;
/** | |
* Instantiates a new ketai camera. | |
* | |
* @param pParent | |
* reference to the main sketch(Activity) | |
* @param _width | |
* width of the camera image | |
* @param _height | |
* height of the camera image | |
* @param _framesPerSecond | |
* the frames per second | |
*/ | |
public KetaiCamera(PApplet pParent, int _width, int _height,
int _framesPerSecond) {
    super(_width, _height, PImage.ARGB);
    // Back this PImage with a Bitmap so preview pixels can be blitted natively.
    bitmap = Bitmap.createBitmap(pixels, width, height, Config.ARGB_8888);
    parent = pParent;
    frameWidth = _width;
    frameHeight = _height;
    // Photo size defaults to the preview size until setPhotoSize() is called.
    photoWidth = frameWidth;
    photoHeight = frameHeight;
    cameraFPS = _framesPerSecond;
    isStarted = false;
    requestedStart = false;
    myPixels = new int[_width * _height];
    self = this;
    isRGBPreviewSupported = false;
    enableFlash = false;
    cameraID = 0; // default to the first (usually back-facing) camera
    callbackdelegate = parent;
    // facelistener = new ketaiFaceDetectionListener(this);
    // Resolve the sketch's optional callbacks (onCameraPreviewEvent, etc.).
    determineObjectIntentions(pParent);
    // we'll store our photos in a folder named after our application!
    PackageManager pm = parent.getActivity().getApplicationContext().getPackageManager();
    ApplicationInfo ai;
    try {
        ai = pm.getApplicationInfo(parent.getActivity().getApplicationContext()
                .getPackageName(), 0);
    } catch (final NameNotFoundException e) {
        ai = null;
    }
    SAVE_DIR = (String) (ai != null ? pm.getApplicationLabel(ai)
            : "unknownApp");
    // Hook the sketch lifecycle so the camera is paused/released properly.
    parent.registerMethod("resume", this);
    parent.registerMethod("pause", this);
    parent.registerMethod("dispose", this);
    read(); // prime the PImage pixel buffer
}
private void determineObjectIntentions(Object o) { | |
try { | |
// the following uses reflection to see if the parent | |
// exposes the callback method. The first argument is the method | |
// name followed by what should match the method argument(s) | |
onPreviewEventMethod = o.getClass().getMethod( | |
"onCameraPreviewEvent"); | |
PApplet.println("Found onCameraPreviewEvent "); | |
} catch (NoSuchMethodException e) { | |
// no such method, or an error.. which is fine, just ignore | |
onPreviewEventMethod = null; | |
} | |
try { | |
onPreviewEventMethodPImage = o.getClass().getMethod( | |
"onCameraPreviewEvent", new Class[] { KetaiCamera.class }); | |
} catch (NoSuchMethodException e) { | |
// no such method, or an error.. which is fine, just ignore | |
onPreviewEventMethodPImage = null; | |
} | |
try { | |
onFaceDetectionEventMethod = o.getClass().getMethod( | |
"onFaceDetectionEvent", new Class[] { KetaiFace[].class }); | |
} catch (NoSuchMethodException e) { | |
// no such method, or an error.. which is fine, just ignore | |
onFaceDetectionEventMethod = null; | |
} | |
try { | |
onSavePhotoEventMethod = o.getClass().getMethod("onSavePhotoEvent", | |
new Class[] { String.class }); | |
} catch (NoSuchMethodException e) { | |
// no such method, or an error.. which is fine, just ignore | |
onSavePhotoEventMethod = null; | |
} | |
} | |
/** | |
* Manual settings - attempt to disable "auto" adjustments (like focus, | |
* white balance, etc). | |
*/ | |
public void manualSettings() { | |
if (camera == null) | |
return; | |
Parameters cameraParameters = camera.getParameters(); | |
// camera.cancelAutoFocus(); | |
if (cameraParameters.isAutoExposureLockSupported()) | |
cameraParameters.setAutoExposureLock(true); | |
if (cameraParameters.isAutoWhiteBalanceLockSupported()) | |
cameraParameters.setAutoWhiteBalanceLock(true); | |
else { | |
List<String> w = cameraParameters.getSupportedWhiteBalance(); | |
for (String s : w) { | |
if (s.equalsIgnoreCase(Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT)) { | |
cameraParameters.setWhiteBalance(s); | |
break; | |
} | |
} | |
} | |
List<String> fModes = cameraParameters.getSupportedFocusModes(); | |
for (String s : fModes) { | |
if (s.equalsIgnoreCase(Parameters.FOCUS_MODE_FIXED)) { | |
cameraParameters.setFocusMode(Parameters.FOCUS_MODE_FIXED); | |
} | |
} | |
try { | |
camera.setParameters(cameraParameters); | |
} catch (RuntimeException x) { | |
PApplet.println("Failed to set parameters to manual." | |
+ x.getMessage()); | |
} | |
// PApplet.println("KetaiCamera manualSettings: " | |
// + camera.getParameters().flatten()); | |
} | |
// public void startFaceDetection() { | |
// isDetectingFaces = true; | |
// if (camera != null && isStarted && supportsFaceDetection) { | |
// if (isDetectingFaces) { | |
// camera.setFaceDetectionListener(this); | |
// camera.startFaceDetection(); | |
// } | |
// } | |
// } | |
// public void stopFaceDetection() { | |
// isDetectingFaces = false; | |
// if (camera != null && isStarted && supportsFaceDetection) | |
// camera.stopFaceDetection(); | |
// } | |
/** | |
* Sets the zoom. | |
* | |
* @param _zoom | |
* the new zoom | |
*/ | |
public void setZoom(int _zoom) { | |
if (camera == null) | |
return; | |
Parameters cameraParameters = camera.getParameters(); | |
if (_zoom > cameraParameters.getMaxZoom()) | |
_zoom = cameraParameters.getMaxZoom(); | |
else if (_zoom < 0) | |
_zoom = 0; | |
cameraParameters.setZoom(_zoom); | |
camera.setParameters(cameraParameters); | |
} | |
/** | |
* Gets the zoom. | |
* | |
* @return the zoom | |
*/ | |
public int getZoom() { | |
if (camera == null) | |
return 0; | |
Parameters p = camera.getParameters(); | |
return (p.getZoom()); | |
} | |
/** | |
* Auto settings - set camera to use auto adjusting settings | |
*/ | |
public void autoSettings() { | |
if (camera == null) | |
return; | |
// PApplet.println("KetaiCamera: setting camera settings to auto..."); | |
Parameters cameraParameters = camera.getParameters(); | |
if (cameraParameters.isAutoExposureLockSupported()) | |
cameraParameters.setAutoExposureLock(false); | |
if (cameraParameters.isAutoWhiteBalanceLockSupported()) | |
cameraParameters.setAutoWhiteBalanceLock(false); | |
List<String> fModes = cameraParameters.getSupportedFocusModes(); | |
for (String s : fModes) { | |
// PApplet.println("FocusMode: " + s); | |
if (s.equalsIgnoreCase(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) | |
cameraParameters | |
.setFocusMode(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE); | |
} | |
camera.setParameters(cameraParameters); | |
camera.autoFocus(autofocusCB); | |
// PApplet.println("KetaiCamera autoSettings: " | |
// + camera.getParameters().flatten()); | |
} | |
/** | |
* Dump out camera settings into a single string. | |
* | |
* @return the string | |
*/ | |
public String dump() { | |
String result = ""; | |
if (camera == null) | |
return result; | |
Parameters p = camera.getParameters(); | |
result += "Zoom: " + p.getZoom() + "\n"; | |
result += "White Balance: " + p.getWhiteBalance() + "\n"; | |
if (p.isAutoWhiteBalanceLockSupported()) | |
result += "\t Lock supported, state: " | |
+ p.getAutoWhiteBalanceLock() + "\n"; | |
else | |
result += "\t Lock NOT supported\n"; | |
float[] f = new float[3]; | |
String fd = ""; | |
p.getFocusDistances(f); | |
for (int i = 0; i < f.length; i++) | |
fd += String.valueOf(f[i]) + " "; | |
result += "Focal Distances: " + fd + " \n"; | |
result += "Focal Depth: " + p.getFocalLength() + "\n"; | |
result += "Focus Mode: " + p.getFocusMode() + "\n"; | |
result += "Exposure: " + p.getExposureCompensation() + "\n"; | |
if (p.isAutoExposureLockSupported()) | |
result += "\t Lock supported, state: " + p.getAutoExposureLock() | |
+ "\n"; | |
else | |
result += "\t Lock NOT supported\n"; | |
result += "Native camera face detection support: " | |
+ supportsFaceDetection; | |
return result; | |
} | |
/** | |
* Sets the save directory for image/photo settings | |
* | |
* @param _dirname | |
* the new save directory | |
*/ | |
public void setSaveDirectory(String _dirname) {
    // Overrides the default (the application label) chosen in the constructor.
    SAVE_DIR = _dirname;
}
/** | |
* Gets the photo width which may be different from the camera preview width | |
* since photo quality can be better than preview/camera image. | |
* | |
* @return the photo width | |
*/ | |
public int getPhotoWidth() {
    return photoWidth; // still-capture width, set via setPhotoSize()
}
/** | |
* Gets the photo height which may be different from the camera preview | |
* width since photo quality can be better than preview/camera image. | |
* | |
* @return the photo height | |
*/ | |
public int getPhotoHeight() {
    return photoHeight; // still-capture height, set via setPhotoSize()
}
/** | |
* Sets the photo dimensions. Photo dimensions default to camera preview | |
* dimensions but can be set for higher quality. Typically camera preview | |
* dimensions should be smaller than photo dimensions. | |
* | |
* @param width | |
* the width | |
* @param height | |
* the height | |
*/ | |
public void setPhotoSize(int width, int height) {
    photoWidth = width;
    photoHeight = height;
    // Re-negotiate camera parameters so the new photo size takes effect.
    determineCameraParameters();
}
/** | |
* Enable flash. | |
*/ | |
public void enableFlash() {
    // Remember the request even before the camera is open; start() applies it.
    enableFlash = true;
    if (camera == null)
        return;
    Parameters cameraParameters = camera.getParameters();
    cameraParameters.setFlashMode(Parameters.FLASH_MODE_TORCH);
    // check if flash is supported before setting it
    try {
        camera.setParameters(cameraParameters);
    } catch (RuntimeException x) {
        // Deliberately ignored: device doesn't support flash - best effort.
    }
}
/** | |
* Disable flash. | |
*/ | |
public void disableFlash() {
    // Remember the request even before the camera is open; start() applies it.
    enableFlash = false;
    if (camera == null)
        return;
    Parameters cameraParameters = camera.getParameters();
    cameraParameters.setFlashMode(Parameters.FLASH_MODE_OFF);
    try {
        camera.setParameters(cameraParameters);
    } catch (RuntimeException x) {
        // Deliberately ignored: device doesn't support flash - best effort.
    }
}
/** | |
* Sets the camera id for devices that support multiple cameras. | |
* | |
* @param _id | |
* the new camera id | |
*/ | |
public void setCameraID(int _id) {
    // Ignore ids beyond the number of cameras the device reports.
    if (_id < Camera.getNumberOfCameras())
        cameraID = _id;
    // NOTE(review): this swaps width/height only for the literal id 1
    // (presumably assumed to be the front camera) rather than checking
    // CameraInfo.facing, and it swaps again on every repeated call with
    // id 1 - verify the intended behavior.
    if (cameraID == 1) {
        int _temp = width;
        width = height;
        height = _temp;
        resize(width, height);
        bitmap = Bitmap.createBitmap(pixels, width, height,
                Config.ARGB_8888);
    }
}
/** | |
* Gets the camera id. | |
* | |
* @return the camera id | |
*/ | |
public int getCameraID() {
    return cameraID; // id selected via setCameraID(); defaults to 0
}
/** | |
* Start the camera preview. Call this in order to start the camera preview | |
* updates. This will deliver pixels from the camera to the parent sketch. | |
* | |
* @return true, if successful | |
*/ | |
public boolean start() { | |
requestedStart = true; | |
if (isStarted) | |
return true; | |
try { | |
// PApplet.println("KetaiCamera: opening camera..."); | |
if (camera == null) | |
try { | |
camera = Camera.open(cameraID); | |
} catch (Exception x) { | |
// KetaiAlertDialog.popup( | |
// parent, | |
// "KetaiCamera", | |
// "Failed to connect to Camera.\n" | |
// + x.getMessage()); | |
PApplet.println("Failed to open camera for camera ID: " | |
+ cameraID + ":" + x.getMessage()); | |
return false; | |
} | |
Parameters cameraParameters = camera.getParameters(); | |
List<Integer> list = cameraParameters.getSupportedPreviewFormats(); | |
// PApplet.println("Supported preview modes..."); | |
for (Integer i : list) { | |
if (i == ImageFormat.RGB_565) { | |
// PApplet.println("RGB Image preview supported!!!!(try better resolutions/fps combos)"); | |
isRGBPreviewSupported = true; | |
} | |
PApplet.println("\t" + i); | |
} | |
if (isRGBPreviewSupported) | |
cameraParameters.setPreviewFormat(ImageFormat.RGB_565); | |
// else if (isNV21Supported) | |
// cameraParameters.setPreviewFormat(ImageFormat.NV21); | |
// else | |
// PApplet.println("Camera does not appear to provide data in a format we can convert. Sorry."); | |
PApplet.println("default imageformat:" | |
+ cameraParameters.getPreviewFormat()); | |
List<String> flashmodes = cameraParameters.getSupportedFlashModes(); | |
if (flashmodes != null && flashmodes.size() > 0) { | |
for (String s : flashmodes) | |
PApplet.println("supported flashmode: " + s); | |
if (enableFlash) | |
cameraParameters.setFlashMode(Parameters.FLASH_MODE_TORCH); | |
else | |
cameraParameters.setFlashMode(Parameters.FLASH_MODE_OFF); | |
} else | |
PApplet.println("No flash support."); | |
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo(); | |
android.hardware.Camera.getCameraInfo(cameraID, info); | |
int rotation = parent.getActivity().getWindowManager().getDefaultDisplay() | |
.getRotation(); | |
int degrees = 0; | |
switch (rotation) { | |
case Surface.ROTATION_0: | |
degrees = 0; | |
break; | |
case Surface.ROTATION_90: | |
degrees = 90; | |
break; | |
case Surface.ROTATION_180: | |
degrees = 180; | |
break; | |
case Surface.ROTATION_270: | |
degrees = 270; | |
break; | |
} | |
int cameraRotationOffset = 0; | |
switch (info.orientation) { | |
case Surface.ROTATION_0: | |
cameraRotationOffset = 0; | |
break; | |
case Surface.ROTATION_90: | |
cameraRotationOffset = 90; | |
break; | |
case Surface.ROTATION_180: | |
cameraRotationOffset = 180; | |
break; | |
case Surface.ROTATION_270: | |
cameraRotationOffset = 270; | |
break; | |
} | |
int result; | |
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { | |
requestedPortraitImage = true; | |
result = (cameraRotationOffset + degrees) % 360; | |
result = (360 - result) % 360; // compensate the mirror | |
} else { // back-facing | |
result = (cameraRotationOffset - degrees + 360) % 360; | |
} | |
camera.setDisplayOrientation(result); | |
PApplet.println("Rotation reported: " + degrees); | |
PApplet.println("camera: setting display orientation to: " + result | |
+ " degrees"); | |
camera.setDisplayOrientation(result); | |
camera.setParameters(cameraParameters); | |
camera.setPreviewCallback(previewcallback); | |
// set sizes | |
determineCameraParameters(); | |
try { | |
parent.getActivity().runOnUiThread(new Runnable() { | |
public void run() { | |
int[] textures = new int[1]; | |
// generate one texture pointer and bind it as an | |
// external texture so preview will start | |
GLES20.glGenTextures(1, textures, 0); | |
GLES20.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]); | |
int texture_id = textures[0]; | |
mTexture = new SurfaceTexture(texture_id); | |
try { | |
camera.setPreviewTexture(mTexture); | |
} catch (IOException iox) { | |
PApplet.println("Something bad happened trying set the texture when trying to open the preview: " | |
+ iox.getMessage()); | |
} | |
} | |
}); | |
camera.startPreview(); | |
} catch (NoClassDefFoundError x) { | |
camera.startPreview(); | |
PApplet.println("Something bad happened trying to open the preview: " | |
+ x.getMessage()); | |
} | |
isStarted = true; | |
// if (supportsFaceDetection && isDetectingFaces) { | |
// camera.setFaceDetectionListener(this); | |
// camera.startFaceDetection(); | |
// } | |
PApplet.println("Using preview format: " | |
+ camera.getParameters().getPreviewFormat()); | |
PApplet.println("Preview size: " + frameWidth + "x" + frameHeight | |
+ "," + cameraFPS); | |
PApplet.println("Photo size: " + photoWidth + "x" + photoHeight); | |
return true; | |
} catch (RuntimeException x) { | |
x.printStackTrace(); | |
if (camera != null) | |
camera.release(); | |
PApplet.println("Exception caught while trying to connect to camera service. Please check your sketch permissions or that another application is not using the camera."); | |
return false; | |
} | |
} | |
/** | |
* Checks if flash is enabled. | |
* | |
* @return true, if flash is enabled | |
*/ | |
public boolean isFlashEnabled() {
    return enableFlash; // last state requested via enableFlash()/disableFlash()
}
/** | |
* Saves photo to the file system using default settings ( | |
* | |
* @return true, if successful | |
*/ | |
public boolean savePhoto() {
    // Only attempt a capture when the camera is open and previewing.
    if (camera != null && isStarted()) {
        // Empty path makes savePhoto(String) generate a timestamped name.
        savePhotoPath = "";
        return savePhoto(savePhotoPath);
    }
    return false;
}
/** | |
* Save photo to the file system using the name provided. | |
* | |
* @param _filename | |
* the _filename | |
* @return true, if successful | |
*/ | |
public boolean savePhoto(String _filename) {
    String filename = "";
    // we have an absolute file pathname....
    if (_filename.startsWith(File.separator)) {
        savePhotoPath = _filename;
    } else {
        // construct the path using the filename specified...
        if (_filename.equalsIgnoreCase("")) {
            // No name given: generate a timestamped IMG_... name.
            String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss")
                    .format(new Date());
            filename = "IMG_" + timeStamp + ".jpg";
        } else
            filename = _filename;
        // Save under the public Pictures directory, in the app's folder.
        File mediaStorageDir = new File(
                Environment
                        .getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
                SAVE_DIR);
        // Create the storage directory if it does not exist
        if (!mediaStorageDir.exists()) {
            if (!mediaStorageDir.mkdirs()) {
                PApplet.println("failed to create directory to save photo: "
                        + mediaStorageDir.getAbsolutePath());
                return false;
            }
        }
        savePhotoPath = mediaStorageDir.getAbsolutePath() + File.separator
                + filename;
    }// end creating savePath...
    // Probe write permission by writing and deleting a 1-byte temp file
    // before asking the camera to capture; return false on any error.
    PApplet.println("Calculated photo path: " + savePhotoPath);
    try {
        FileOutputStream outStream = new FileOutputStream(savePhotoPath);
        outStream.write(1);
        outStream.close();
        File f = new File(savePhotoPath);
        if (!f.delete())
            PApplet.println("Failed to remove temp photoFile while testing permissions..oops");
    } catch (FileNotFoundException x) {
        PApplet.println("Failed to save photo to " + savePhotoPath + "\n"
                + x.getMessage());
        return false;
    } catch (IOException e) {
        PApplet.println("Failed to save photo to " + savePhotoPath + "\n"
                + e.getMessage());
        return false;
    }
    // Asynchronous capture: jpegCallback writes the actual image data.
    if (camera != null && isStarted())
        camera.takePicture(null, null, jpegCallback);
    return true;
}
/** | |
* Resume. | |
*/ | |
public void resume() { | |
if (camera == null) | |
return; | |
camera = Camera.open(cameraID); | |
if (!isStarted && requestedStart) | |
start(); | |
} | |
/** | |
* Read the pixels from the camera. | |
*/ | |
public synchronized void read() { | |
if (pixels.length != frameWidth * frameHeight) | |
pixels = new int[frameWidth * frameHeight]; | |
synchronized (pixels) { | |
// loadPixels(); | |
System.arraycopy(myPixels, 0, pixels, 0, frameWidth * frameHeight); | |
available = false; | |
updatePixels(); | |
} | |
} | |
/** | |
* Checks if the camera has been started. | |
* | |
* @return true, if is started | |
*/ | |
public boolean isStarted() { | |
return isStarted; | |
} | |
/** The last processed frame. Millis timestamp used to throttle sketch callbacks. */
int lastProcessedFrame = 0;
/**
 * The previewcallback. Receives each raw YUV preview frame from the camera
 * service, converts it to ARGB in myPixels, and — at most once every
 * 1000/cameraFPS ms — forwards the frame to the sketch's registered
 * onCameraPreviewEvent() handlers via reflection.
 */
PreviewCallback previewcallback = new PreviewCallback() {
    public void onPreviewFrame(byte[] data, Camera camera) {
        // ignore frames arriving before start() or after stop()
        if (camera == null || !isStarted)
            return;
        // (re)allocate the ARGB buffer if the preview size changed
        if (myPixels == null || myPixels.length != frameWidth * frameHeight)
            myPixels = new int[frameWidth * frameHeight];
        // issue using system.arraycopy between byte/int color data, go slow
        // but sure route
        // if (isRGBPreviewSupported)
        // {
        // System.arraycopy(myPixels, 0, data, 0, myPixels.length);
        // }else
        decodeYUV420SP(data);
        // if (myPixels == null)
        // return;
        // throttle: skip the sketch callbacks until 1000/cameraFPS ms passed
        if ((parent.millis() - lastProcessedFrame) < (1000 / cameraFPS))
            return;
        lastProcessedFrame = parent.millis();
        // no-arg sketch handler: onCameraPreviewEvent()
        if (onPreviewEventMethod != null && myPixels != null)
            try {
                onPreviewEventMethod.invoke(callbackdelegate);
            } catch (Exception e) {
                PApplet.println(" onCameraPreviewEvent() had an error:"
                        + e.getMessage());
                e.printStackTrace();
            }
        // PImage-arg sketch handler: onCameraPreviewEvent(KetaiCamera);
        // disabled permanently after the first failure
        if (onPreviewEventMethodPImage != null && myPixels != null) {
            try {
                onPreviewEventMethodPImage.invoke(callbackdelegate,
                        new Object[] { (PImage) self });
            } catch (Exception e) {
                PApplet.println("Disabling onCameraPreviewEvent(KetaiCamera) because of an error:"
                        + e.getMessage());
                e.printStackTrace();
                onPreviewEventMethodPImage = null;
            }
        }
        // additional registered listeners receive the same PImage; note
        // these are NOT disabled on failure, unlike the handler above
        for (Method m : listeners) {
            try {
                m.invoke(callbackdelegate, new Object[] { (PImage) self });
            } catch (Exception e) {
                PApplet.println("Disabling onCameraPreviewEvent(KetaiCamera) because of an error:"
                        + e.getMessage());
                e.printStackTrace();
            }
        }
        // if (!self.supportsFaceDetection && self.isDetectingFaces) {
        // PApplet.println("Finding faces in preview using CV");
        // kFace[] faces = FaceFinder.findFaces((PImage) self, 5);
        //
        // int numberOfFaces = faces.length;
        // if (numberOfFaces > 0)
        // try {
        // onFaceDetectionEventMethod.invoke(parent,
        // new Object[] { faces });
        // } catch (Exception e) {
        // PApplet.println("Exception trying to forward facedetection event (KetaiCamera):"
        // + e.getMessage());
        // }
        // }
    }
};
/** The autofocus cb: logs whether an autofocus attempt succeeded. */
private AutoFocusCallback autofocusCB = new AutoFocusCallback() {
    public void onAutoFocus(boolean result, Camera c) {
        PApplet.println("Autofocus result: " + result);
    }
};
/** The jpeg callback. */ | |
private PictureCallback jpegCallback = new PictureCallback() { | |
public void onPictureTaken(byte[] data, Camera camera) { | |
PApplet.println("pictureCallback entered..."); | |
if (camera == null) | |
return; | |
FileOutputStream outStream = null; | |
try { | |
PApplet.println("Saving image: " + savePhotoPath); | |
outStream = new FileOutputStream(savePhotoPath); | |
outStream.write(data); | |
outStream.close(); | |
// callback sketch with path of saved image | |
// ; | |
if (onSavePhotoEventMethod != null && myPixels != null | |
&& savePhotoPath != null) | |
try { | |
onSavePhotoEventMethod.invoke(parent, | |
new Object[] { (String) savePhotoPath }); | |
} catch (IllegalAccessException e) { | |
e.printStackTrace(); | |
} catch (InvocationTargetException e) { | |
e.printStackTrace(); | |
} | |
// restart preview | |
camera.startPreview(); | |
// try { | |
// SurfaceTexture st = new SurfaceTexture(0); | |
// camera.setPreviewTexture(st); | |
// camera.startPreview(); | |
// camera.setPreviewDisplay(null); | |
// } catch (NoClassDefFoundError x) { | |
// camera.startPreview(); | |
// } | |
} catch (FileNotFoundException e) { | |
e.printStackTrace(); | |
} catch (IOException e) { | |
e.printStackTrace(); | |
} catch (RuntimeException rtx) { | |
} finally { | |
} | |
} | |
}; | |
/** The my scanner callback: logs the content URI the media scanner assigned. */
private OnScanCompletedListener myScannerCallback = new OnScanCompletedListener() {
    public void onScanCompleted(String arg0, Uri arg1) {
        PApplet.println("Media Scanner returned: " + arg1.toString()
                + " => " + arg0);
    }
};
/** | |
* Adds the file to media library so that other applications can access it. | |
* | |
* @param _file | |
* the _file | |
*/ | |
public void addToMediaLibrary(String _file) { | |
// String[] paths = { mediaFile.getAbsolutePath() }; | |
String[] paths = { _file }; | |
MediaScannerConnection.scanFile(parent.getActivity().getApplicationContext(), paths, | |
null, myScannerCallback); | |
} | |
/** | |
* Pause the class as since the activity is being paused. | |
*/ | |
public void pause() { | |
if (camera != null && isStarted) { | |
isStarted = false; | |
camera.stopPreview(); | |
camera.setPreviewCallback(null); | |
camera.release(); | |
camera = null; | |
} | |
isStarted = false; | |
} | |
/** | |
* Stop the camera from receiving updates. | |
*/ | |
public void stop() { | |
PApplet.println("Stopping Camera..."); | |
requestedStart = false; | |
if (camera != null && isStarted) { | |
isStarted = false; | |
camera.stopPreview(); | |
camera.setPreviewCallback(null); | |
camera.release(); | |
camera = null; | |
} | |
} | |
/** | |
* Dispose. | |
*/ | |
public void dispose() { | |
stop(); | |
} | |
/**
 * Decode yuv420 sp.
 *
 * Converts an NV21 (YUV420 semi-planar) preview frame — a full-resolution
 * luma plane followed by a half-resolution interleaved V/U plane — into
 * opaque ARGB pixels in the internal myPixels buffer, using integer
 * fixed-point arithmetic (no floats).
 *
 * @param yuv420sp
 *            the yuv420sp raw frame bytes from the camera preview
 */
public void decodeYUV420SP(byte[] yuv420sp) {
    // here we're using our own internal PImage attributes
    final int frameSize = width * height;
    for (int j = 0, yp = 0; j < height; j++) {
        // uvp indexes the interleaved V/U plane; one V,U pair covers a
        // 2x2 block of luma samples (4:2:0 subsampling)
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0)
                y = 0;
            // refresh chroma on every even pixel; odd pixels reuse it
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }
            // fixed-point YUV->RGB; coefficients are scaled by 1024, so
            // r/g/b land in an 18-bit range (0..262143)
            int y1192 = 1192 * y;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);
            // clamp to the 18-bit range before shifting down to 8 bits
            if (r < 0)
                r = 0;
            else if (r > 262143)
                r = 262143;
            if (g < 0)
                g = 0;
            else if (g > 262143)
                g = 262143;
            if (b < 0)
                b = 0;
            else if (b > 262143)
                b = 262143;
            // use interal buffer instead of pixels for UX reasons;
            // shifts place each 8-bit channel into its ARGB slot
            myPixels[yp] = 0xff000000 | ((r << 6) & 0xff0000)
                    | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
}
/** | |
* Gets the number of cameras. | |
* | |
* @return the number of cameras | |
*/ | |
public int getNumberOfCameras() { | |
return Camera.getNumberOfCameras(); | |
} | |
/** | |
* List available cameras. | |
* | |
* @return the collection<? extends string> | |
*/ | |
public Collection<? extends String> list() { | |
Vector<String> list = new Vector<String>(); | |
String facing = ""; | |
int count = Camera.getNumberOfCameras(); | |
for (int i = 0; i < count; i++) { | |
Camera.CameraInfo info = new Camera.CameraInfo(); | |
Camera.getCameraInfo(i, info); | |
if (info.facing == CameraInfo.CAMERA_FACING_BACK) | |
facing = "backfacing"; | |
else | |
facing = "frontfacing"; | |
list.add("camera id [" + i + "] facing:" + facing); | |
PApplet.println("camera id[" + i + "] facing:" + facing); | |
} | |
return list; | |
} | |
/** | |
* Determine camera parameters based on requested parameters. Tries to get | |
* the closest resolution settings. | |
*/ | |
private void determineCameraParameters() { | |
if (camera == null) | |
return; | |
PApplet.println("Requested camera parameters as (w,h,fps):" | |
+ frameWidth + "," + frameHeight + "," + cameraFPS); | |
Parameters cameraParameters = camera.getParameters(); | |
// PApplet.println(cameraParameters.flatten()); | |
List<Size> supportedSizes = cameraParameters.getSupportedPreviewSizes(); | |
boolean foundSupportedSize = false; | |
Size nearestRequestedSize = null; | |
Size dim = supportedSizes.get(4); | |
for (Size s: supportedSizes) { | |
if (s.width < 800 && s.width > 320) { | |
dim = s; | |
break; | |
} | |
} | |
frameWidth = dim.width; | |
frameHeight = dim.height; | |
for (Size s : supportedSizes) { | |
PApplet.println("Checking supported preview size:" + s.width + "," | |
+ s.height); | |
if (nearestRequestedSize == null) | |
nearestRequestedSize = s; | |
if (!foundSupportedSize) { | |
if (s.width == frameWidth && s.height == frameHeight) { | |
PApplet.println("Found matching camera size"); | |
nearestRequestedSize = s; | |
foundSupportedSize = true; | |
} else { | |
int delta = (frameWidth * frameHeight) | |
- (nearestRequestedSize.height * nearestRequestedSize.width); | |
int current = (frameWidth * frameHeight) | |
- (s.height * s.width); | |
delta = Math.abs(delta); | |
current = Math.abs(current); | |
if (current < delta) | |
nearestRequestedSize = s; | |
} | |
} | |
} | |
if (nearestRequestedSize != null) { | |
frameWidth = nearestRequestedSize.width; | |
frameHeight = nearestRequestedSize.height; | |
} | |
cameraParameters.setPreviewSize(frameWidth, frameHeight); | |
supportedSizes = cameraParameters.getSupportedPictureSizes(); | |
foundSupportedSize = false; | |
nearestRequestedSize = null; | |
for (Size s : supportedSizes) { | |
if (!foundSupportedSize) { | |
if (s.width == photoWidth && s.height == photoHeight) { | |
nearestRequestedSize = s; | |
foundSupportedSize = true; | |
} else if (photoWidth <= s.width) { | |
nearestRequestedSize = s; | |
} | |
} | |
} | |
if (nearestRequestedSize != null) { | |
photoWidth = nearestRequestedSize.width; | |
photoHeight = nearestRequestedSize.height; | |
} | |
cameraParameters.setPictureSize(photoWidth, photoHeight); | |
List<Integer> supportedFPS = cameraParameters | |
.getSupportedPreviewFrameRates(); | |
int nearestFPS = 0; | |
for (int r : supportedFPS) { | |
PApplet.println("Supported preview FPS: " + r); | |
if (nearestFPS == 0) | |
nearestFPS = r; | |
if ((Math.abs(cameraFPS - r)) > (Math.abs(cameraFPS - nearestFPS))) { | |
nearestFPS = r; | |
} | |
} | |
PApplet.println("calculated preview FPS: " + nearestFPS); | |
cameraParameters.setPreviewFrameRate(nearestFPS); | |
// PApplet.println("Setting calculated parameters:" | |
// + cameraParameters.flatten()); | |
camera.setParameters(cameraParameters); | |
cameraParameters = camera.getParameters(); | |
frameHeight = cameraParameters.getPreviewSize().height; | |
frameWidth = cameraParameters.getPreviewSize().width; | |
// if what was requested is what we set then update | |
// otherwise we'll compensate here | |
if (cameraFPS == cameraParameters.getPreviewFrameRate()) | |
cameraFPS = cameraParameters.getPreviewFrameRate(); | |
PApplet.println("Calculated camera parameters as (w,h,fps):" | |
+ frameWidth + "," + frameHeight + "," + cameraFPS); | |
// PApplet.println(cameraParameters.flatten()); | |
if (cameraParameters.getMaxNumDetectedFaces() > 0) { | |
PApplet.println("Face detection supported!"); | |
supportsFaceDetection = true; | |
} | |
// update PImage | |
this.loadPixels(); | |
resize(frameWidth, frameHeight); | |
} | |
/**
 * On frame available callback, used by the camera service.
 *
 * @param arg0
 *            the arg0 (the SurfaceTexture backing the preview)
 */
public void onFrameAvailable(SurfaceTexture arg0) {
    // liveness tick only; pixel data arrives through previewcallback
    PApplet.print(".");
}
/**
 * Registers an object whose methods should receive camera callbacks
 * (replaces any previously registered delegate), then inspects it for
 * the supported handler signatures.
 */
public void register(Object o) {
    callbackdelegate = o;
    determineObjectIntentions(o);
}
// public void onFaceDetection(Face[] _faces, Camera _camera) { | |
// KetaiFace[] faces = new KetaiFace[_faces.length]; | |
// | |
// for (int i = 0; i < _faces.length; i++) { | |
// faces[i] = new KetaiFace(_faces[i], frameWidth, frameHeight); | |
// } | |
// if (onFaceDetectionEventMethod != null) { | |
// try { | |
// onFaceDetectionEventMethod.invoke(parent, | |
// new Object[] { faces }); | |
// } catch (Exception e) { | |
// PApplet.println("Disabling onFaceDetectionEventMethod(KetaiCamera) because of an error:" | |
// + e.getMessage()); | |
// e.printStackTrace(); | |
// onFaceDetectionEventMethod = null; | |
// } | |
// } | |
// } | |
} |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import processing.core.PApplet; | |
import processing.core.PVector; | |
import android.hardware.Camera.Face; | |
/** | |
* The Class KetaiFace wrap the internal android Face class. | |
*/ | |
public class KetaiFace extends Face { | |
/** The center. */ | |
public PVector leftEye, rightEye, mouth, center; | |
/** The height. */ | |
public int id, score, width, height; | |
/** | |
* Instantiates a new ketai face. | |
* | |
* @param f the face Object | |
* @param frameWidth the frame width to map the Face on | |
* @param frameHeight the frame height to map the Face on | |
*/ | |
public KetaiFace(Face f, int frameWidth, int frameHeight) { | |
leftEye = new PVector(PApplet.map(f.leftEye.x, -1000, 1000, 0, | |
frameWidth), PApplet.map(f.leftEye.y, -1000, 1000, 0, | |
frameHeight)); | |
rightEye = new PVector(PApplet.map(f.rightEye.x, -1000, 1000, 0, | |
frameWidth), PApplet.map(f.rightEye.y, -1000, 1000, 0, | |
frameHeight)); | |
mouth = new PVector(PApplet.map(f.mouth.x, -1000, 1000, 0, frameWidth), | |
PApplet.map(f.mouth.y, -1000, 1000, 0, frameHeight)); | |
id = f.id; | |
score = f.score; | |
center = new PVector(PApplet.map(f.rect.exactCenterX(), -1000, 1000, 0, | |
frameWidth), PApplet.map(f.rect.exactCenterY(), -1000, 1000, 0, | |
frameHeight)); | |
width = f.rect.width(); | |
height = f.rect.height(); | |
} | |
} |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import java.lang.reflect.Method; | |
import processing.core.*; | |
import processing.event.MouseEvent; | |
import java.util.ArrayList; | |
/** | |
Created by Massimo Avvisati <[email protected]> | |
GPL ver. 3 licensed. | |
**/ | |
public class UI { | |
public static final int RELEASE = MouseEvent.RELEASE; | |
public static final int PRESS = MouseEvent.PRESS; | |
public static final int CLICK = MouseEvent.CLICK; | |
public static final int DRAG = MouseEvent.DRAG; | |
ArrayList<Button> elements = new ArrayList<Button>(); | |
PApplet parent; | |
UI(PApplet parent) { | |
this.parent = parent; | |
parent.registerMethod("mouseEvent", this); | |
//parent.registerMethod("draw", this); | |
} | |
public void display() { | |
//parent.hint(parent.DISABLE_DEPTH_TEST); | |
for (Button element : elements) { | |
element.display(parent.g); | |
} | |
prune(); | |
} | |
private void prune() { | |
for (int i = elements.size() -1; i >= 0; i--) { | |
Button element = elements.get(i); | |
if (element.dead) | |
elements.remove(i); | |
} | |
} | |
Button add(String methodBaseName, String filename, float x, float y) { | |
return add( methodBaseName, x, y, 1, 1).setImage(filename, true); | |
} | |
Button add(String methodBaseName, String filename, float x, float y, float w, float h) { | |
return add( methodBaseName, x, y, w, h).setImage(filename, false); | |
} | |
Button add(String methodBaseName, float x, float y, float w, float h) { | |
Button element = new Button(parent, methodBaseName, (int) x, (int) y, (int) w, (int) h); | |
elements.add(element); | |
return element; | |
} | |
Button dragged = null; | |
public void mouseEvent(MouseEvent event) { | |
int x = event.getX(); | |
int y = event.getY(); | |
switch (event.getAction()) { | |
case MouseEvent.RELEASE: | |
case MouseEvent.PRESS: | |
case MouseEvent.CLICK: | |
dragged = null; | |
boolean eventCatched = false; | |
for (int i = elements.size () -1; i >= 0; i--) { | |
Button element = elements.get(i); | |
//(element.commandMethod != null) | |
if (!eventCatched && element.visible && element.isInside(x, y)) { | |
eventCatched = true; | |
if (dragged == null && event.getAction() == MouseEvent.PRESS) | |
dragged = element; | |
try { | |
element.commandMethod.invoke(parent, element, event.getAction(), x - element.x, y - element.y); | |
} | |
catch (Exception ex) { | |
} | |
} | |
} | |
break; | |
case MouseEvent.DRAG: | |
if (dragged != null && dragged.visible) | |
try { | |
dragged.commandMethod.invoke(parent, dragged, event.getAction(), x - dragged.x, y - dragged.y); | |
} | |
catch (Exception ex) { | |
} | |
break; | |
case MouseEvent.MOVE: | |
// umm... forgot | |
break; | |
} | |
} | |
public class Button { | |
int x, y, w, h; | |
Method commandMethod; | |
public int buttonColor = 255; | |
PShape buttonShape; | |
PImage buttonImage; | |
boolean dead = false; | |
protected Button(PApplet sketch, String method, int x, int y, int w, int h) { | |
this.x = x; | |
this.y = y; | |
this.w = w; | |
this.h = h; | |
setMethod(method, sketch); | |
} | |
public void destroy() { | |
dead = true; | |
} | |
public boolean isInside(int _x, int _y) { | |
return ( _x > x && _x < x + w && _y > y && _y < y + h); | |
} | |
private void setMethod(String method, PApplet sketch) { | |
try { | |
commandMethod = sketch.getClass().getMethod(method, //questo definisce il metodo necessario nello sketch | |
new Class[] { | |
Button.class, | |
int.class, | |
int.class, | |
int.class | |
} | |
); | |
} | |
catch (Exception ex) { | |
// no such method, or an error.. which is fine, just ignore | |
commandMethod = null; | |
PApplet.println(ex + "\nPlease implement a " + method + " method in your main sketch if you want to be informed"); | |
} | |
} | |
Button setImage(String filename) { | |
return setImage(filename, false); | |
} | |
Button setImage(String filename, boolean resize) { | |
if (filename.indexOf(".svg") > 0) { | |
buttonShape = parent.loadShape(filename); | |
if (resize) { | |
w = (int) buttonShape.width; | |
h = (int) buttonShape.height; | |
} | |
} else { | |
buttonImage = parent.loadImage(filename); | |
if (resize) { | |
w = (int) buttonImage.width; | |
h = (int) buttonImage.height; | |
} | |
} | |
return this; | |
} | |
private int alignment = PApplet.LEFT; | |
void setAlign(int alignment) { | |
if (this.alignment == alignment) | |
return; | |
if (this.alignment == PApplet.LEFT && alignment == PApplet.CENTER) | |
x -= w/ 2; | |
else if (this.alignment == PApplet.LEFT && alignment == PApplet.RIGHT) | |
x -= w; | |
else if (this.alignment == PApplet.CENTER && alignment == PApplet.LEFT) | |
x += w / 2; | |
else if (this.alignment == PApplet.CENTER && alignment == PApplet.RIGHT) | |
x -= w / 2; | |
else if (this.alignment == PApplet.RIGHT && alignment == PApplet.LEFT) | |
x += w; | |
else if (this.alignment == PApplet.RIGHT && alignment == PApplet.CENTER) | |
x += w / 2; | |
this.alignment = alignment; | |
} | |
boolean visible = true; | |
public void hide() { | |
visible = false; | |
} | |
public void show() { | |
visible = true; | |
} | |
void display(PGraphics pg) { | |
if (!visible) | |
return; | |
if (buttonImage != null) { | |
pg.image(buttonImage, x, y, w, h); | |
} else if (buttonShape == null) { | |
pg.noStroke(); | |
pg.fill(buttonColor); | |
pg.rect(x, y, w, h); | |
} else { | |
pg.shape(buttonShape, x, y, w, h); | |
} | |
} | |
} | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment