<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    xmlns:opencv="http://schemas.android.com/apk/res-auto"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:paddingBottom="@dimen/activity_vertical_margin"
    android:paddingLeft="@dimen/activity_horizontal_margin"
    android:paddingRight="@dimen/activity_horizontal_margin"
    android:paddingTop="@dimen/activity_vertical_margin"
    tools:context="com.example.wenfahu.simplecam.MainActivity">
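
    <!-- Full-screen OpenCV camera preview. Declared with
         android:visibility="gone" and switched to VISIBLE in
         MainActivity.onCreate(); opencv:show_fps overlays the frame rate. -->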
    <org.opencv.android.JavaCameraView
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:visibility="gone"
        android:id="@+id/HelloOpenCvView"
        opencv:show_fps="true"
        opencv:camera_id="any"
        android:layout_alignParentTop="true"
        android:layout_alignParentStart="true" />
    <LinearLayout
        android:orientation="vertical"
        android:layout_width="wrap_content"
        android:layout_height="match_parent"
        android:layout_gravity="center_vertical"
        android:layout_weight="0.02"
        android:weightSum="1">

        <TextView
            android:layout_width="84dp"
            android:layout_height="wrap_content"
            android:textAppearance="?android:attr/textAppearanceMedium"
            android:text="0 lines"
            android:textColor="#FF0000"
            android:id="@+id/laneLabel"
            android:layout_weight="0.06" />

        <TextView
            android:layout_width="84dp"
            android:layout_height="wrap_content"
            android:textAppearance="?android:attr/textAppearanceMedium"
            android:text="0 cars"
            android:textColor="#FF0000"
            android:id="@+id/carLabel"
            android:layout_weight="0.06" />

        <TextView
            android:layout_width="84dp"
            android:layout_height="wrap_content"
            android:textAppearance="?android:attr/textAppearanceMedium"
            android:text="0 people"
            android:textColor="#FF0000"
            android:id="@+id/peopleLabel"
            android:layout_weight="0.06" />

        <ImageView
            android:layout_width="match_parent"
            android:layout_height="wrap_content"
            android:id="@+id/imageView"
            android:layout_weight="0.28"
            android:src="@android:drawable/presence_online" />
    </LinearLayout>
</RelativeLayout>
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class com_example_wenfahu_simplecam_JniTools */

#ifndef _Included_com_example_wenfahu_simplecam_JniTools
#define _Included_com_example_wenfahu_simplecam_JniTools
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Class:     com_example_wenfahu_simplecam_JniTools
 * Method:    lanePlus
 * Signature: (J)[I
 */
JNIEXPORT jintArray JNICALL Java_com_example_wenfahu_simplecam_JniTools_lanePlus
  (JNIEnv *, jclass, jlong);

#ifdef __cplusplus
}
#endif
#endif
//
// Created by wenfahu on 16-8-31.
//
#include <jni.h>
//#include <CL/cl.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/objdetect.hpp>
//#include <opencv2/core/ocl.hpp>
#include <opencv2/opencv.hpp>
#include <android/log.h>
#include <vector>
#include <string>
#include <sstream>   // stringstream used in process_lane
#include <climits>   // INT_MAX / INT_MIN used in line_fitting
#include <chrono>

using namespace cv;
using namespace std;

#define COLS 480
#define MAX_KERNEL_LENGTH 5

#define LOG_TAG "[jni jobs]"
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
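
// 3x3 road-plane perspective matrix. The values are assumed to come from an
// offline camera calibration; process_lane() warps each frame with the inverse
// of M to get a bird's-eye view, and reverseMapping() applies M directly to
// project fitted lane points back into image coordinates.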
Mat M = (Mat_<float>(3, 3) << 319.06943, 157.91666,  -32377.916,
                              0,         114.68594,  22176.051,
                              0,         0.99755102, 117.95755);

vector<Scalar> COLORS = {Scalar(255, 255, 0), Scalar(0, 255, 255), Scalar(255, 0, 255)};

static CascadeClassifier *car_classifier = NULL;
static HOGDescriptor *people_classifier = NULL;
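
// Predicate used with cv::partition() to cluster Hough segments into lane
// candidates: two segments belong to the same cluster when their directions
// differ by less than roughly 6 degrees and their end points lie within 8
// pixels of each other along x.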
bool isEqual2(const Vec4i& l1, const Vec4i& l2){
    float length1 = sqrtf((l1[2] - l1[0])*(l1[2] - l1[0]) + (l1[3] - l1[1])*(l1[3] - l1[1]));
    float length2 = sqrtf((l2[2] - l2[0])*(l2[2] - l2[0]) + (l2[3] - l2[1])*(l2[3] - l2[1]));
    float product = (l1[2] - l1[0])*(l2[2] - l2[0]) + (l1[3] - l1[1])*(l2[3] - l2[1]);
    if (fabs(product / (length1 * length2)) < cos(CV_PI / 30))
        return false;

    int margin1 = abs(l1[0] - l2[2]);
    int margin2 = abs(l1[2] - l2[0]);
    float x_margin = min(margin1, margin2);
    if (x_margin > 8)
        return false;

    return true;
}
Point reverseMapping(Mat cor){
    Mat res = M * cor;
    float u = res.at<float>(0, 0);
    float v = res.at<float>(1, 0);
    float w = res.at<float>(2, 0);
    return Point(int(u / w), int(v / w));
}
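
// Fits a single straight line to all segments of one cluster (in the warped,
// bird's-eye plane) and projects its top and bottom end points back into the
// original image with reverseMapping(). The +100 x-offset undoes the
// colRange(100, 220) crop applied in process_lane() before Canny/Hough.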
Vec4i line_fitting(vector<Vec4i> lines, Mat canvas, int idx){
    vector<Vec2i> pts;
    // Vertical extent of the cluster, initialised so the first segment sets both bounds.
    int ymin = INT_MAX, ymax = INT_MIN;
    for(auto i = lines.begin(); i != lines.end(); i++){
        int x1 = (*i)[0] + 100;
        int y1 = (*i)[1];
        int x2 = (*i)[2] + 100;
        int y2 = (*i)[3];
        int _ymin = y1 < y2 ? y1 : y2;
        ymin = _ymin < ymin ? _ymin : ymin;
        int _ymax = y1 > y2 ? y1 : y2;
        ymax = _ymax > ymax ? _ymax : ymax;
        pts.push_back(Vec2i(x1, y1));
        pts.push_back(Vec2i(x2, y2));
    }
    Vec4f fitted;
    fitLine(pts, fitted, CV_DIST_L2, 0, 0.01, 0.01);
    float vx = fitted[0];
    float vy = fitted[1];
    float x0 = fitted[2];
    float y0 = fitted[3];
    // Intersect the fitted line with y = ymax and y = ymin to get its end points.
    int top = vx * (ymax - y0) / vy + x0;
    int bottom = vx * (ymin - y0) / vy + x0;
    Mat w_cor1 = (Mat_<float>(3, 1) << top, ymax, 1);
    Mat w_cor2 = (Mat_<float>(3, 1) << bottom, ymin, 1);
    Point pt1 = reverseMapping(w_cor1);
    Point pt2 = reverseMapping(w_cor2);
    // line(canvas, pt1, pt2, COLORS[idx], 3);
    return Vec4i(pt1.x, pt1.y, pt2.x, pt2.y);
}
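
// Lane detection on one camera frame: inverse-perspective warp with M, crop a
// 120-column ROI, Canny edges, probabilistic Hough transform, cluster the
// segments with isEqual2/cv::partition, fit one line per cluster and map it
// back to image coordinates. Returns a flat vector of x1,y1,x2,y2 per lane.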
vector<int> process_lane(const Mat& frame){
    auto t1 = std::chrono::high_resolution_clock::now();
    int rows = frame.rows;
    int cols = frame.cols;
    Mat plane;
    warpPerspective(frame, plane, M, Size(cols, rows), INTER_LINEAR | WARP_INVERSE_MAP);
    Mat contours, roi;
    roi = plane.colRange(100, 220);
    Canny(roi, contours, 100, 300);
    /*
    Mat view = plane;
    Mat gass, sharp;
    GaussianBlur(view, gass, Size(0,0), 3);
    addWeighted(view, 1.5, gass, -0.5, 0, sharp);
    Mat hsl;
    cvtColor(sharp, hsl, COLOR_BGR2HSV);
    Mat mask;
    inRange(hsl, Scalar(0, 100, 150), Scalar(20, 255, 255), mask);
    Mat res;
    bitwise_and(view, view, res, mask);
    Mat blurred = res;
    vector<Mat> hslChannels;
    split(blurred, hslChannels);
    Mat chan = hslChannels[2];
    Mat closing = chan;
    */
    vector<int> result;
    vector<Vec4i> lines;
    HoughLinesP(contours, lines, 5, 2*CV_PI/180, 70, 15, 10);
    if(!lines.empty()){
        vector<int> labels;
        int number_of_lines = cv::partition(lines, labels, isEqual2);
        vector<vector<Vec4i> > lines_group(number_of_lines);
        for(int i = 0; i != lines.size(); i++){
            lines_group[labels[i]].push_back(lines[i]);
        }
        int idx = 0;
        for(auto i = lines_group.begin(); i != lines_group.end(); i++){
            Vec4i line = line_fitting(*i, frame, idx);
            result.push_back(line[0]);
            result.push_back(line[1]);
            result.push_back(line[2]);
            result.push_back(line[3]);
            idx++;
        }
    }
    auto t2 = std::chrono::high_resolution_clock::now();
    stringstream ss;
    ss << std::chrono::duration_cast<std::chrono::milliseconds>(t2 - t1).count();
    string str = ss.str();
    LOGD("[lane] %s ms, %d lines", str.c_str(), ((int)result.size()) / 4);
    return result;
}
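
// Vehicle detection: convert to grayscale, downscale so the image is at most
// 400 rows tall, run the cascade classifier, then scale each hit back to the
// original resolution. Each detection occupies five floats in the result:
// tl.x, tl.y, br.x, br.y and a score placeholder (always 0).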
vector<float> process_cars(const Mat& frame, CascadeClassifier *car_classifier) {
    auto t1 = std::chrono::high_resolution_clock::now();
    Mat image;
    cv::cvtColor(frame, image, CV_RGBA2GRAY);
    int resize_rows = min(400, frame.rows);
    float scale = static_cast<float>(image.rows) / static_cast<float>(resize_rows);
    int resize_cols = static_cast<int>(image.cols / scale);
    resize(image, image, Size(resize_cols, resize_rows), 0, 0, INTER_LINEAR);
    vector<Rect> rects;
    car_classifier->detectMultiScale(image, rects, 1.1, 3, 0, Size(60, 60), Size(260, 260));
    /* Mat orig;
    image.copyTo(orig);
    for (vector<Rect>::iterator iter = rects.begin(); iter != rects.end(); iter++){
        rectangle(orig, *iter, Scalar(0, 0, 255));
    }
    imshow("win", orig); //*/
    vector<float> bboxes(rects.size() * 5);
    int i = 0;
    for (auto iter = rects.begin(); iter != rects.end(); iter++){
        bboxes[i++] = iter->tl().x * scale;
        bboxes[i++] = iter->tl().y * scale;
        bboxes[i++] = iter->br().x * scale;
        bboxes[i++] = iter->br().y * scale;
        bboxes[i++] = 0.0f;
    }
    auto t2 = std::chrono::high_resolution_clock::now();
    LOGD("[cars job] %lld ms, %d cars, scale = %f",
         (long long)std::chrono::duration_cast<std::chrono::milliseconds>(t2 - t1).count(),
         ((int)bboxes.size()) / 5, 1 / scale);
    return bboxes;
}
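
// Pedestrian detection with the same downscale/rescale scheme as process_cars,
// but using a HOG descriptor with OpenCV's default people-detector SVM. Note
// that in HOGDescriptor::detectMultiScale the 1.1 / Size(40, 30) / Size(260, 260)
// arguments fill the hit threshold, window stride and padding parameters.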
vector<float> process_people(const Mat& frame, HOGDescriptor *people_classifier) {
    auto t1 = std::chrono::high_resolution_clock::now();
    Mat image;
    cv::cvtColor(frame, image, CV_RGBA2GRAY);
    int resize_rows = min(400, frame.rows);
    float scale = static_cast<float>(image.rows) / static_cast<float>(resize_rows);
    int resize_cols = static_cast<int>(image.cols / scale);
    resize(image, image, Size(resize_cols, resize_rows), 0, 0, INTER_LINEAR);
    vector<Rect> rects;
    people_classifier->detectMultiScale(image, rects, 1.1, Size(40, 30), Size(260, 260));
    /* Mat orig;
    image.copyTo(orig);
    for (vector<Rect>::iterator iter = rects.begin(); iter != rects.end(); iter++){
        rectangle(orig, *iter, Scalar(0, 0, 255));
    }
    imshow("win", orig); //*/
    vector<float> bboxes(rects.size() * 5);
    int i = 0;
    for (auto iter = rects.begin(); iter != rects.end(); iter++){
        bboxes[i++] = iter->tl().x * scale;
        bboxes[i++] = iter->tl().y * scale;
        bboxes[i++] = iter->br().x * scale;
        bboxes[i++] = iter->br().y * scale;
        bboxes[i++] = 0.0f;
    }
    auto t2 = std::chrono::high_resolution_clock::now();
    LOGD("[people job] %lld ms, %d people, scale = %f",
         (long long)std::chrono::duration_cast<std::chrono::milliseconds>(t2 - t1).count(),
         ((int)bboxes.size()) / 5, 1 / scale);
    return bboxes;
}
#ifdef __cplusplus
extern "C" {
#endif

// Expands to the fully-qualified JNI symbol name expected for the native
// methods declared in com.example.wenfahu.simplecam.JniTools.
#define jni_funcname(name) Java_com_example_wenfahu_simplecam_JniTools_##name
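
// JNI entry points. The jlong arguments carry the native address of a cv::Mat,
// obtained on the Java side with Mat.getNativeObjAddr(); test() is only a
// sanity check that the library was loaded. The car and people detectors are
// created lazily on first use and kept for the lifetime of the process.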
JNIEXPORT jint JNICALL jni_funcname(test) (JNIEnv *env, jclass clazz) {
    return 123;
}

JNIEXPORT jintArray JNICALL jni_funcname(lanePlus) (JNIEnv *env, jclass clazz, jlong frameAddr) {
    const Mat& frame = *(const Mat*)frameAddr;
    LOGD("[lane] begin to process");
    vector<int> arr = process_lane(frame);
    jintArray ret_arr = env->NewIntArray(arr.size());
    env->SetIntArrayRegion(ret_arr, 0, arr.size(), arr.data());
    return ret_arr;
}

JNIEXPORT jfloatArray JNICALL jni_funcname(detCars) (JNIEnv *env, jclass clazz, jlong frameAddr) {
    const Mat& frame = *(const Mat*)frameAddr;
    LOGD("[cars job] begin to process");
    if (car_classifier == NULL) {
        car_classifier = new CascadeClassifier();
        if (!car_classifier->load("/sdcard/simplecam/car_model.xml")) {
            LOGE("[cars job] fail to load the model file: /sdcard/simplecam/car_model.xml");
        }
    }
    vector<float> arr = process_cars(frame, car_classifier);
    jfloatArray ret_arr = env->NewFloatArray(arr.size());
    env->SetFloatArrayRegion(ret_arr, 0, arr.size(), arr.data());
    return ret_arr;
}

JNIEXPORT jfloatArray JNICALL jni_funcname(detPeople) (JNIEnv *env, jclass clazz, jlong frameAddr) {
    const Mat& frame = *(const Mat*)frameAddr;
    LOGD("[people job] begin to process");
    if (people_classifier == NULL) {
        people_classifier = new HOGDescriptor();
        people_classifier->setSVMDetector(people_classifier->getDefaultPeopleDetector());
    }
    vector<float> arr = process_people(frame, people_classifier);
    jfloatArray ret_arr = env->NewFloatArray(arr.size());
    env->SetFloatArrayRegion(ret_arr, 0, arr.size(), arr.data());
    return ret_arr;
}

#ifdef __cplusplus
}
#endif
package com.example.wenfahu.simplecam;

import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;

import org.opencv.core.Mat;

/**
 * Created by wenfahu on 16-9-21.
 */
public abstract class CWorker implements Runnable {
    private Handler handler;
    private Context context;
    public Bundle data;
    public Mat input;
    public int lastError;
    public boolean running;

    public CWorker(Handler handler, Context context, Mat input) {
        this.handler = handler;
        this.context = context;
        this.data = null;
        this.running = false;
        this.input = input;
    }
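
    /*
     * Threading contract (as used by MainActivity): run() executes on a
     * background thread and calls work() with the input frame; the result
     * Bundle is then posted back through the Handler. postData() and
     * updateUI() are invoked later by the owner: postData() draws the cached
     * result onto a new camera frame, updateUI() refreshes the labels on the
     * UI thread.
     */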
    public void run() {
        this.running = true;
        this.data = new Bundle();
        this.lastError = 0;
        this.lastError = doRun(this.input);
    }

    public abstract int work(Mat input, Bundle data);

    public void updateUI(Bundle data) {}

    public void postData(Mat output, Bundle data) {}

    public int doRun(Mat input) {
        int error = -128;
        try {
            error = work(input, data);
            Log.d(this.getClass().getName(), "doRun: end");
        } catch (Exception e) {
            Log.e(this.getClass().getName(), String.valueOf(e.getMessage()));
            e.printStackTrace();
        }
        if (handler != null) {
            Message msg = new Message();
            msg.setData(data);
            handler.sendMessage(msg);
        }
        return error;
    }
}
package com.example.wenfahu.simplecam;

import android.util.Log;

/**
 * Created by wenfahu on 16-9-21.
 */
public class JniTools {
    static {
        System.loadLibrary("native");
        if (test() != 123) {
            Log.e("[load lib]", "fail to load native lib");
        }
    }
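
    // framePtr is the native address of a cv::Mat, obtained via Mat.getNativeObjAddr().
    // lanePlus returns x1,y1,x2,y2 per detected lane; detCars/detPeople return
    // five floats per detection (tl.x, tl.y, br.x, br.y, score placeholder).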
    public static native int test();
    public static native int[] lanePlus(long framePtr);
    public static native float[] detCars(long framePtr);
    public static native float[] detPeople(long framePtr);
}
package com.example.wenfahu.simplecam;

import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.SurfaceView;
import android.widget.ImageView;
import android.widget.TextView;

import org.opencv.android.CameraBridgeViewBase;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class MainActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {
    private CameraBridgeViewBase mOpenCvCameraView;
    private static final String TAG = "OCV-lane";
    private ImageView driving_status;
    public CWorker[] workers = new CWorker[3];
    public Bundle[] bundles = new Bundle[3];
    public int JOB_LANE = 0, JOB_PERSON = 1, JOB_CAR = 2;
    TextView laneLabel, carLabel, peopleLabel;
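
    // onCreate wires up the camera view and the labels, then builds one CWorker
    // per detection job (lane lines, cars, people). workers[] holds the job
    // objects and bundles[] caches the most recent result Bundle per job index;
    // the people worker is currently disabled via the comment toggle below.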
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.HelloOpenCvView);
        mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
        // mOpenCvCameraView.setMaxFrameSize(320, 240);
        mOpenCvCameraView.setCvCameraViewListener(this);
        driving_status = (ImageView) findViewById(R.id.imageView);
        driving_status.setImageResource(R.drawable.rgba);
        laneLabel = (TextView) findViewById(R.id.laneLabel);
        carLabel = (TextView) findViewById(R.id.carLabel);
        peopleLabel = (TextView) findViewById(R.id.peopleLabel);

        JobHandler handler;
        int index = -1;

        handler = new JobHandler(++index);
        workers[index] = new CWorker(handler, this.getApplicationContext(), null) {
            @Override
            public int work(Mat input, Bundle data) {
                int[] result = JniTools.lanePlus(input.getNativeObjAddr());
                data.putIntArray("lines", result);
                return 0;
            }
            @Override
            public void postData(Mat output, Bundle data) {
                int[] lines = data.getIntArray("lines");
                if (lines == null) { return; }
                for (int i = 0; i < lines.length; i += 4) {
                    Imgproc.line(output, new Point(lines[i], lines[i + 1]),
                            new Point(lines[i + 2], lines[i + 3]),
                            new Scalar(255, 0, 0), 3);
                }
            }
            @Override
            public void updateUI(Bundle data) {
                int[] lines = data.getIntArray("lines");
                if (lines != null) {
                    laneLabel.setText(String.valueOf(lines.length / 4) + " lines");
                }
            }
        };

        handler = new JobHandler(++index);
        workers[index] = new CWorker(handler, this.getApplicationContext(), null) {
            @Override
            public int work(Mat input, Bundle data) {
                float[] result = JniTools.detCars(input.getNativeObjAddr());
                data.putFloatArray("cars", result);
                return 0;
            }
            @Override
            public void postData(Mat output, Bundle data) {
                float[] lines = data.getFloatArray("cars");
                if (lines == null) { return; }
                for (int i = 0; i < lines.length; i += 5) {
                    Imgproc.rectangle(output, new Point(lines[i], lines[i + 1]),
                            new Point(lines[i + 2], lines[i + 3]),
                            new Scalar(0, 0, 255), 3);
                }
            }
            @Override
            public void updateUI(Bundle data) {
                float[] lines = data.getFloatArray("cars");
                if (lines != null) {
                    carLabel.setText(String.valueOf(lines.length / 5) + " cars");
                }
            }
        }; //*/

        handler = new JobHandler(++index);
        workers[index] = /**/ null; /*/ new CWorker(handler, this.getApplicationContext(), null) {
            @Override
            public int work(Mat input, Bundle data) {
                float[] result = JniTools.detPeople(input.getNativeObjAddr());
                data.putFloatArray("people", result);
                return 0;
            }
            @Override
            public void postData(Mat output, Bundle data) {
                float[] lines = data.getFloatArray("people");
                if (lines == null) { return; }
                for (int i = 0; i < lines.length; i += 5) {
                    Imgproc.rectangle(output, new Point(lines[i], lines[i + 1]),
                            new Point(lines[i + 2], lines[i + 3]),
                            new Scalar(255, 0, 255), 3);
                }
            }
            @Override
            public void updateUI(Bundle data) {
                float[] lines = data.getFloatArray("people");
                if (lines != null) {
                    peopleLabel.setText(String.valueOf(lines.length / 5) + " people");
                }
            }
        }; //*/
    }
    static {
        System.loadLibrary("opencv_java3");
    }

    @Override
    public void onResume() {
        super.onResume();
        mOpenCvCameraView.enableView();
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
    }

    public void onCameraViewStopped() {
    }
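
    // Runs on the camera thread for every preview frame. At most one idle
    // worker is handed a clone of the frame and restarted per frame; the most
    // recent result Bundle of each job (cached by JobHandler) is drawn onto
    // the current frame before it is returned for display.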
    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        Mat frame = inputFrame.rgba();
        // if (frame != null && !frame.empty()) return frame;
        boolean needRerun = false;
        for (CWorker worker : workers) {
            if (worker != null && !worker.running) {
                needRerun = true;
                break;
            }
        }
        if (needRerun) {
            Mat jobInput = frame.clone();
            for (CWorker worker : workers) {
                if (worker != null && !worker.running) {
                    worker.running = true;
                    worker.input = jobInput;
                    new Thread(worker).start();
                    break;
                }
            }
        }
        for (int i = 0; i < workers.length; i++) {
            if (workers[i] != null && bundles[i] != null) {
                workers[i].postData(frame, bundles[i]);
            }
        }
        return frame;
    }
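
    // Receives the result Bundle posted by a finished CWorker, caches it for
    // drawing in onCameraFrame, marks the worker idle and lets it update the UI.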
    public class JobHandler extends Handler {
        private final int index;

        public JobHandler(int index) {
            this.index = index;
        }

        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            Bundle data = msg.getData();
            bundles[index] = data;
            workers[index].running = false;
            workers[index].updateUI(data);
            Log.i("job handler", "recv: " + String.valueOf(index));
        }
    }
}