RICOH THETA plug-in sample code by KA-2 for a TensorFlow Lite object detection plug-in (MainActivity.java)
/**
* Copyright 2018 Ricoh Company, Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.theta360.extendedpreview;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.os.AsyncTask;
import android.util.Log;
import android.os.Bundle;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;
import android.view.KeyEvent;
import com.theta360.pluginlibrary.activity.PluginActivity;
import com.theta360.pluginlibrary.callback.KeyCallback;
import com.theta360.pluginlibrary.receiver.KeyReceiver;
import com.theta360.pluginapplication.task.TakePictureTask;
import com.theta360.pluginapplication.task.TakePictureTask.Callback;
import com.theta360.pluginapplication.task.GetLiveViewTask;
import com.theta360.pluginapplication.task.MjisTimeOutTask;
import com.theta360.pluginapplication.view.MJpegInputStream;
import com.theta360.pluginapplication.oled.Oled;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Color;
import android.graphics.RectF;
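/**
 * MainActivity for this object detection sample.
 * It streams the camera's live preview, rotates each equirectangular frame so the
 * last detected object faces the detection window, runs a TensorFlow Lite detector
 * on a centered 300x300 crop, serves the annotated frame through the built-in
 * WebServer, and shows the detected direction on the OLED display.
 */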
public class MainActivity extends PluginActivity {
private static final String TAG = "ExtendedPreview";
//Button Resource
private boolean onKeyDownModeButton = false;
private boolean onKeyLongPressWlan = false;
private boolean onKeyLongPressFn = false;
//Preview Resource
private int previewFormatNo;
GetLiveViewTask mGetLiveViewTask;
private byte[] latestLvFrame;
private byte[] latestFrame_Result;
//Preview Timeout Resource
private static final long FRAME_READ_TIMEOUT_MSEC = 1000;
MjisTimeOutTask mTimeOutTask;
MJpegInputStream mjis;
//WebServer Resource
private Context context;
private WebServer webServer;
//OLED Display Resource
Oled oledDisplay = null;
private boolean mFinished;
private TakePictureTask.Callback mTakePictureTaskCallback = new Callback() {
@Override
public void onTakePicture(String fileUrl) {
startPreview(mGetLiveViewTaskCallback, previewFormatNo);
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// Allow the plug-in library to close the plug-in automatically. If set to false, call close() yourself after your own end processing finishes.
setAutoClose(true);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
//init OLED
oledDisplay = new Oled(getApplicationContext());
oledDisplay.brightness(100);
oledDisplay.clear(oledDisplay.black);
oledDisplay.draw();
// Set a callback when a button operation event is acquired.
setKeyCallback(new KeyCallback() {
@Override
public void onKeyDown(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyReceiver.KEYCODE_CAMERA :
stopPreview();
new TakePictureTask(mTakePictureTaskCallback).execute();
break;
case KeyReceiver.KEYCODE_MEDIA_RECORD :
// Mark the key-down so that an onKeyUp left over from the plug-in startup operation is ignored.
onKeyDownModeButton = true;
break;
default:
break;
}
}
@Override
public void onKeyUp(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyReceiver.KEYCODE_WLAN_ON_OFF :
if (onKeyLongPressWlan) {
onKeyLongPressWlan=false;
} else {
// Reset the object detection direction to the front
lastDetectYaw = equiW/2; // Front
lastDetectPitch = equiH/2;
}
break;
case KeyReceiver.KEYCODE_MEDIA_RECORD :
if (onKeyDownModeButton) {
if (mGetLiveViewTask!=null) {
stopPreview();
} else {
startPreview(mGetLiveViewTaskCallback, previewFormatNo);
}
onKeyDownModeButton = false;
}
break;
case KeyEvent.KEYCODE_FUNCTION :
if (onKeyLongPressFn) {
onKeyLongPressFn=false;
} else {
// Reset the object detection direction to the back
lastDetectYaw = 0/*(equiW/2)*/; //Back
lastDetectPitch = equiH/2;
}
break;
default:
break;
}
}
@Override
public void onKeyLongPress(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyReceiver.KEYCODE_WLAN_ON_OFF:
onKeyLongPressWlan=true;
//NOP : KEYCODE_WLAN_ON_OFF
break;
case KeyEvent.KEYCODE_FUNCTION :
onKeyLongPressFn=true;
//NOP : KEYCODE_FUNCTION
break;
default:
break;
}
}
});
this.context = getApplicationContext();
this.webServer = new WebServer(this.context, mWebServerCallback);
try {
this.webServer.start();
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
protected void onResume() {
super.onResume();
if (isApConnected()) {
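// Nothing additional is done here when the access point is connected in this sample.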
}
//Start LivePreview
previewFormatNo = GetLiveViewTask.FORMAT_NO_1024_8FPS;
startPreview(mGetLiveViewTaskCallback, previewFormatNo);
//Start OLED thread
mFinished = false;
imageProcessingThread();
}
@Override
protected void onPause() {
// Do end processing
//close();
//Stop Web server
this.webServer.stop();
//Stop LivePreview
stopPreview();
//Stop OLED thread
mFinished = true;
super.onPause();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (this.webServer != null) {
this.webServer.stop();
}
}
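/**
 * Starts (or restarts) the live preview task.
 * If a task is already running it is stopped first, with a short wait so the
 * previous MJPEG stream can be released before the new one starts.
 */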
private void startPreview(GetLiveViewTask.Callback callback, int formatNo){
if (mGetLiveViewTask!=null) {
stopPreview();
try {
Thread.sleep(400);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
mGetLiveViewTask = new GetLiveViewTask(callback, formatNo);
mGetLiveViewTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
}
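/**
 * Stops the live preview task and cancels the frame-read timeout monitor.
 */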
private void stopPreview(){
// When the preview is stopped intentionally, also stop the timeout monitor.
if (mTimeOutTask!=null) {
mTimeOutTask.cancel(false);
mTimeOutTask=null;
}
if (mGetLiveViewTask!=null) {
mGetLiveViewTask.cancel(false);
mGetLiveViewTask = null;
}
}
/**
* GetLiveViewTask Callback.
*/
private GetLiveViewTask.Callback mGetLiveViewTaskCallback = new GetLiveViewTask.Callback() {
@Override
public void onGetResorce(MJpegInputStream inMjis) {
mjis = inMjis;
}
@Override
public void onLivePreviewFrame(byte[] previewByteArray) {
latestLvFrame = previewByteArray;
//Update timeout monitor
if (mTimeOutTask!=null) {
mTimeOutTask.cancel(false);
mTimeOutTask=null;
}
mTimeOutTask = new MjisTimeOutTask(mMjisTimeOutTaskCallback, FRAME_READ_TIMEOUT_MSEC);
mTimeOutTask.execute();
}
@Override
public void onCancelled(Boolean inTimeoutOccurred) {
mGetLiveViewTask = null;
latestLvFrame = null;
if (inTimeoutOccurred) {
startPreview(mGetLiveViewTaskCallback, previewFormatNo);
}
}
};
/**
* MjisTimeOutTask Callback.
*/
private MjisTimeOutTask.Callback mMjisTimeOutTaskCallback = new MjisTimeOutTask.Callback() {
@Override
public void onTimeoutExec(){
if (mjis!=null) {
try {
// Force an IOException in mjis.readMJpegFrame() inside GetLiveViewTask
mjis.close();
} catch (IOException e) {
Log.d(TAG, "[timeout] mjis.close() IOException");
e.printStackTrace();
}
mjis=null;
}
}
};
/**
* WebServer Callback.
*/
private WebServer.Callback mWebServerCallback = new WebServer.Callback() {
@Override
public void execStartPreview(int format) {
previewFormatNo = format;
startPreview(mGetLiveViewTaskCallback, format);
}
@Override
public void execStopPreview() {
stopPreview();
}
@Override
public boolean execGetPreviewStat() {
return (mGetLiveViewTask != null);
}
@Override
public byte[] getLatestFrame() {
//return latestLvFrame;
return latestFrame_Result;
}
};
//==============================================================
// Image processing Thread
//==============================================================
private static final String TF_OD_API_MODEL_FILE = "detect.tflite";
private static final String TF_OD_API_LABELS_FILE = "file:///android_asset/labelmap.txt";
private static final int TF_OD_API_INPUT_SIZE = 300;
private static final boolean TF_OD_API_IS_QUANTIZED = true;
//Object Detection dir
private int equiW = 0;
private int equiH = 0;
boolean detectFlag = false;
private int lastDetectYaw=512;
private int lastDetectPitch=256;
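/**
 * Background thread: takes the latest live preview frame, rotates it so the last
 * detected object faces the detection window, runs the TFLite detector on a
 * centered 300x300 crop, draws the results for the web UI, and updates the OLED.
 */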
public void imageProcessingThread() {
new Thread(new Runnable() {
@Override
public void run() {
int outFps=0;
long startTime = System.currentTimeMillis();
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_BACKGROUND);
///////////////////////////////////////////////////////////////////////
// TFLite Initial detector
///////////////////////////////////////////////////////////////////////
Classifier detector=null;
try {
Log.d(TAG, "### TFLite Initial detector ###");
detector = TFLiteObjectDetectionAPIModel.create(
getAssets(),
TF_OD_API_MODEL_FILE,
TF_OD_API_LABELS_FILE,
TF_OD_API_INPUT_SIZE,
TF_OD_API_IS_QUANTIZED);
} catch (final IOException e) {
e.printStackTrace();
Log.d(TAG, "IOException:" + e);
mFinished = true;
}
//set detection area offset
int offsetX=0;
int offsetY=0;
while (mFinished == false) {
detectFlag = false;
//set detection area offset
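// Each offset centers the 300x300 detection crop in the frame: offset = (dimension - 300) / 2.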
if ( (previewFormatNo==GetLiveViewTask.FORMAT_NO_640_8FPS) ||
(previewFormatNo==GetLiveViewTask.FORMAT_NO_640_30FPS) ) {
offsetX = 170;
offsetY = 10;
equiW = 640;
} else if ( (previewFormatNo==GetLiveViewTask.FORMAT_NO_1024_8FPS) ||
(previewFormatNo==GetLiveViewTask.FORMAT_NO_1024_30FPS) ) {
offsetX = 362;
offsetY = 106;
equiW = 1024;
} else if ( (previewFormatNo==GetLiveViewTask.FORMAT_NO_1920_8FPS) ) {
offsetX = 810;
offsetY = 330;
equiW = 1920;
} else {
offsetX = 170;
offsetY = 10;
equiW = 640;
}
equiH = equiW/2;
byte[] jpegFrame = latestLvFrame;
if ( jpegFrame != null ) {
//JPEG -> Bitmap
BitmapFactory.Options options = new BitmapFactory.Options();
options.inMutable = true;
Bitmap bitmap = BitmapFactory.decodeByteArray(jpegFrame, 0, jpegFrame.length, options);
//rotation yaw
bitmap = rotationYaw(lastDetectYaw, equiW, bitmap);
//crop detect area
Bitmap cropBitmap = Bitmap.createBitmap(bitmap, offsetX, offsetY, TF_OD_API_INPUT_SIZE, TF_OD_API_INPUT_SIZE, null, true);
//make result canvas
Canvas resultCanvas = new Canvas(bitmap);
Paint mPaint = new Paint();
mPaint.setStyle(Paint.Style.STROKE);
mPaint.setColor( Color.GREEN );
resultCanvas.drawRect(offsetX, offsetY, offsetX+TF_OD_API_INPUT_SIZE, offsetY+TF_OD_API_INPUT_SIZE, mPaint);
///////////////////////////////////////////////////////////////////////
// TFLite Object detection
///////////////////////////////////////////////////////////////////////
final List<Classifier.Recognition> results = detector.recognizeImage(cropBitmap);
Log.d(TAG, "### TFLite Object detection [result] ###");
for (final Classifier.Recognition result : results) {
drawDetectResult(result, resultCanvas, mPaint, offsetX, offsetY);
}
//set result image
ByteArrayOutputStream baos = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, baos);
latestFrame_Result = baos.toByteArray();
outFps++;
} else {
try {
Thread.sleep(33);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
// Display the detected direction on the OLED
double lastDetectYawDig = (lastDetectYaw-equiW/2)*360.0/equiW;
double lastDetectPitchDig = (equiH/2-lastDetectPitch)*180/equiH;
displayResult(lastDetectYawDig, lastDetectPitchDig, detectFlag);
long curTime = System.currentTimeMillis();
long diffTime = curTime - startTime;
if (diffTime >= 1000 ) {
Log.d(TAG, "[OLED]" + String.valueOf(outFps) + "[fps]" );
startTime = curTime;
outFps =0;
}
}
}
}).start();
}
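/**
 * Rotates the equirectangular frame around the yaw axis so that the column at
 * inLastDetectYaw ends up at the horizontal center of the image. The frame is
 * split at the wrap-around point and the two halves are redrawn in swapped order.
 */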
private Bitmap rotationYaw(int inLastDetectYaw, int equiW, Bitmap inBitmap) {
//Yaw axis rotation [Moving the detection frame]
Log.d(TAG, "### Yaw axis rotation START [result] ###");
//Yaw axis rotation [Image rotation]
Bitmap rotationBmp = Bitmap.createBitmap(equiW, (equiW/2), Bitmap.Config.ARGB_8888);
Canvas rotationCanvas = new Canvas(rotationBmp);
if ( (equiW/2) < inLastDetectYaw ) {
Log.d(TAG, "Case 1 [result]");
int leftWidth = (equiW/2) + ( equiW - inLastDetectYaw ) ;
Bitmap leftBmp = Bitmap.createBitmap(inBitmap, (inLastDetectYaw-(equiW/2)), 0, leftWidth, (equiW/2), null, true);
Bitmap rightBmp = Bitmap.createBitmap(inBitmap, 0, 0, (inLastDetectYaw-(equiW/2)), (equiW/2), null, true);
Paint mPaint = new Paint();
rotationCanvas.drawBitmap(leftBmp, 0, 0, mPaint);
rotationCanvas.drawBitmap(rightBmp, leftWidth, 0, mPaint);
} else if ( inLastDetectYaw<(equiW/2) ) {
Log.d(TAG, "Case 2 [result]");
Bitmap leftBmp = Bitmap.createBitmap(inBitmap, (inLastDetectYaw+(equiW/2)), 0, ((equiW/2)-inLastDetectYaw), (equiW/2), null, true);
Bitmap rightBmp = Bitmap.createBitmap(inBitmap, 0, 0, (inLastDetectYaw+(equiW/2)), (equiW/2), null, true);
Paint mPaint = new Paint();
rotationCanvas.drawBitmap(leftBmp, 0, 0, mPaint);
rotationCanvas.drawBitmap(rightBmp, ((equiW/2)-inLastDetectYaw), 0, mPaint);
} else {
Log.d(TAG, "Case 3 [result]");
Paint mPaint = new Paint();
rotationCanvas.drawBitmap(inBitmap, 0, 0, mPaint);
}
Log.d(TAG, "### Yaw axis rotation END [result] ###");
return rotationBmp;
}
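/**
 * Draws one detection result onto the full-frame canvas, offset by the crop position.
 * Results below 0.54 confidence are skipped; results between 0.54 and 0.56 are drawn
 * in dark gray; above 0.56 the box color depends on the label, and a "banana"
 * detection also updates the tracked direction.
 */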
private void drawDetectResult(Classifier.Recognition inResult, Canvas inResultCanvas, Paint inPaint, int inOffsetX, int inOffsetY){
double confidence = Double.valueOf(inResult.getConfidence());
if ( confidence >= 0.54 ) {
Log.d(TAG, "[result] Title:" + inResult.getTitle());
Log.d(TAG, "[result] Confidence:" + inResult.getConfidence());
Log.d(TAG, "[result] Location:" + inResult.getLocation());
// draw result
if (confidence >= 0.56) {
String title = inResult.getTitle();
if ( title.equals("apple")) {
inPaint.setColor( Color.RED );
} else if ( title.equals("banana") ) {
inPaint.setColor( Color.YELLOW );
detectFlag = true;
updateDetectInfo(inResult, inOffsetX, inOffsetY);
} else if ( title.equals("orange") ) {
inPaint.setColor(Color.CYAN );
} else {
inPaint.setColor( Color.BLUE );
}
} else {
inPaint.setColor( Color.DKGRAY );
}
RectF offsetRectF = new RectF(inResult.getLocation().left, inResult.getLocation().top, inResult.getLocation().right, inResult.getLocation().bottom);
offsetRectF.offset( (float) inOffsetX, (float) inOffsetY );
inResultCanvas.drawRect( offsetRectF, inPaint );
inResultCanvas.drawText(inResult.getTitle() + " : " + inResult.getConfidence(), offsetRectF.left, offsetRectF.top, inPaint);
}
}
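/**
 * Updates lastDetectYaw and lastDetectPitch from the center of the detected bounding
 * box. Yaw is accumulated relative to the frame center and wrapped into 0..equiW;
 * pitch is taken directly from the box center.
 */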
private void updateDetectInfo(Classifier.Recognition inResult, int inOffsetX, int inOffsetY){
int tmp = lastDetectYaw;
int curDetectYaw = (int)( inOffsetX + inResult.getLocation().left + ((inResult.getLocation().right-inResult.getLocation().left)/2) );
if ( curDetectYaw <= (equiW/2) ) {
lastDetectYaw -= ((equiW/2)-curDetectYaw);
} else {
lastDetectYaw += (curDetectYaw-(equiW/2));
}
if ( equiW < lastDetectYaw ) {
lastDetectYaw -= equiW ;
} else if (lastDetectYaw<0) {
lastDetectYaw = equiW + lastDetectYaw;
}
Log.d(TAG, "[result] lastDetectYaw=" + String.valueOf(lastDetectYaw) + ", befor=" +String.valueOf(tmp) );
int curDetectPitch = (int)( inOffsetY + inResult.getLocation().top + ((inResult.getLocation().bottom-inResult.getLocation().top)/2) );
lastDetectPitch = curDetectPitch;
}
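/**
 * Draws the detection direction on the OLED: a circle with an arrow pointing toward
 * the detected yaw, plus a status line and the yaw/pitch values in degrees.
 */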
private void displayResult(double detectYawDig, double detectPitchDig, boolean inDetectFlag) {
double lineLength = 10.0;
double lineEndDig = detectYawDig-90.0;
double lineEndX = lineLength * Math.cos( Math.toRadians( lineEndDig ) );
double lineEndY = lineLength * Math.sin( Math.toRadians( lineEndDig ) );
double arrowLength = 6.0;
double arrowEndX1 = arrowLength * Math.cos( Math.toRadians( lineEndDig+210.0 ) );
double arrowEndY1 = arrowLength * Math.sin( Math.toRadians( lineEndDig+210.0 ) );
double arrowEndX2 = arrowLength * Math.cos( Math.toRadians( lineEndDig-210.0 ) );
double arrowEndY2 = arrowLength * Math.sin( Math.toRadians( lineEndDig-210.0 ) );
int centerX = 15;
int centerY = 12;
oledDisplay.clear();
oledDisplay.circle(centerX, centerY, 11);
oledDisplay.line(centerX, centerY, (int)(centerX+lineEndX+0.5), (int)(centerY+lineEndY+0.5));
oledDisplay.line((int)(centerX+lineEndX+0.5), (int)(centerY+lineEndY+0.5), (int)(centerX+lineEndX+arrowEndX1+0.5), (int)(centerY+lineEndY+arrowEndY1+0.5) );
oledDisplay.line((int)(centerX+lineEndX+0.5), (int)(centerY+lineEndY+0.5), (int)(centerX+lineEndX+arrowEndX2+0.5), (int)(centerY+lineEndY+arrowEndY2+0.5) );
String line1Str = "";
if (mGetLiveViewTask!=null) {
if (inDetectFlag) {
line1Str = "** Lock-On! **";
} else {
line1Str = "- can't find -";
}
} else {
line1Str = "STOP Detection";
}
String line2Str = "Yaw : " + String.valueOf( (int)detectYawDig );
String line3Str = "Pitch : " + String.valueOf( (int)detectPitchDig );
int textLine1 = 0;
int textLine2 = 8;
int textLine3 = 16;
oledDisplay.setString(35, textLine1,line1Str);
oledDisplay.setString(35, textLine2,line2Str);
oledDisplay.setString(35, textLine3,line3Str);
oledDisplay.draw();
}
}